diff --git a/.claude/CLAUDE.md b/.claude/CLAUDE.md new file mode 100644 index 0000000..dba71e9 --- /dev/null +++ b/.claude/CLAUDE.md @@ -0,0 +1 @@ +@../AGENTS.md diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 41aef2e..256524a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,6 +1,6 @@ version: 2 updates: - - package-ecosystem: "cargo" + - package-ecosystem: "gomod" directory: "/" schedule: interval: "weekly" @@ -8,7 +8,7 @@ updates: time: "11:00" timezone: "America/New_York" groups: - cargo: + go-modules: patterns: - "*" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7d1a407..21801c3 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,51 +10,116 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.ref }} cancel-in-progress: true -env: - CARGO_TERM_COLOR: always +permissions: + contents: read jobs: - lint: - name: Lint + quality: + name: Code Quality runs-on: ubuntu-latest + steps: - - uses: actions/checkout@v6 - - uses: dtolnay/rust-toolchain@stable + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Set up Go + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 + with: + go-version-file: go.mod + cache: true + + - name: Install golangci-lint + uses: golangci/golangci-lint-action@1e7e51e771db61008b38414a730f564565cf7c20 # v9.2.0 with: - components: rustfmt, clippy - - uses: Swatinem/rust-cache@v2 - - run: cargo fmt --all -- --check - - run: cargo clippy -- -D warnings + version: v2.11.3 + args: --help + + - name: Quality - formatting + run: make fmt-check + + - name: Quality - go vet + run: make vet + + - name: Quality - linting + run: make lint + + - name: Quality - go tidy + run: make tidy-check - test: - name: Test + - name: Regressions - CLI surface changes + run: make check-surface + + - name: Regressions - AI skill drift + run: make check-skill-drift + + unit-tests: + name: Unit Tests 
runs-on: ubuntu-latest + steps: - - uses: actions/checkout@v6 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - - run: cargo test - - build: - name: Build / ${{ matrix.os }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - os: ubuntu-latest - artifact: vector - - os: macos-latest - artifact: vector - - os: windows-latest - artifact: vector.exe + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Set up Go + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 + with: + go-version-file: go.mod + cache: true + + - name: Tests - go unit tests + run: make test + + integration-tests: + name: Integration Tests + runs-on: ubuntu-latest + steps: - - uses: actions/checkout@v6 - - uses: dtolnay/rust-toolchain@stable - - uses: Swatinem/rust-cache@v2 - - run: cargo build --release - - uses: actions/upload-artifact@v7 + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Set up Go + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 + with: + go-version-file: go.mod + cache: true + + - name: Set up Node.js + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 with: - name: vector-${{ matrix.os }} - path: target/release/${{ matrix.artifact }} - retention-days: 3 + node-version: '24' + + - name: Install Prism + run: npm install -g @stoplight/prism-cli + + - name: Cache BATS + id: cache-bats + uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3 + with: + path: /usr/local/libexec/bats-core + key: bats-1.13.0 + + - name: Install BATS + if: steps.cache-bats.outputs.cache-hit != 'true' + run: | + git clone --depth 1 --branch v1.13.0 https://github.com/bats-core/bats-core.git /tmp/bats-core + sudo /tmp/bats-core/install.sh /usr/local + + - name: Tests - BATS e2e tests + env: + GH_TOKEN: ${{ github.token }} + run: make test-e2e + + security: + name: Security + runs-on: 
ubuntu-latest + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Set up Go + uses: actions/setup-go@4b73464bb391d4059bd26b0524d20df3927bd417 # v6.3.0 + with: + go-version-file: 'go.mod' + + - name: Run govulncheck + # @latest intentional — pinning delays scanning improvements and + # new Go version support for no meaningful reproducibility gain. + run: | + go install golang.org/x/vuln/cmd/govulncheck@latest + govulncheck -tags dev ./... diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6535cdf..cd69137 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,102 +6,33 @@ on: - 'v*' permissions: - actions: write contents: write -env: - CARGO_TERM_COLOR: always - jobs: - build: - name: Build / ${{ matrix.target }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - include: - - target: x86_64-unknown-linux-gnu - os: ubuntu-latest - archive: tar.gz - - target: aarch64-unknown-linux-gnu - os: ubuntu-latest - archive: tar.gz - - target: x86_64-apple-darwin - os: macos-latest - archive: tar.gz - - target: aarch64-apple-darwin - os: macos-latest - archive: tar.gz - - target: x86_64-pc-windows-msvc - os: windows-latest - archive: zip - - steps: - - uses: actions/checkout@v6 - - - name: Install Rust - uses: dtolnay/rust-toolchain@stable - with: - targets: ${{ matrix.target }} - - - name: Install cross-compilation tools - if: matrix.target == 'aarch64-unknown-linux-gnu' - run: | - sudo apt-get update - sudo apt-get install -y gcc-aarch64-linux-gnu - - - name: Build - run: cargo build --release --target ${{ matrix.target }} - env: - CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc - - - name: Package (Unix) - if: matrix.os != 'windows-latest' - run: | - staging="vector-${{ matrix.target }}" - mkdir -p "$staging/man/man1" - cp target/${{ matrix.target }}/release/vector "$staging/" - cp man/man1/vector.1 "$staging/man/man1/" - tar czvf 
"$staging.tar.gz" -C "$staging" . - rm -rf "$staging" - - - name: Package (Windows) - if: matrix.os == 'windows-latest' - run: | - cd target/${{ matrix.target }}/release - 7z a ../../../vector-${{ matrix.target }}.zip vector.exe - cd ../../.. - - - name: Upload artifact - uses: actions/upload-artifact@v7 - with: - name: vector-${{ matrix.target }} - path: vector-${{ matrix.target }}.${{ matrix.archive }} - release: name: Release - needs: build runs-on: ubuntu-latest steps: - uses: actions/checkout@v6 + with: + fetch-depth: 0 - - name: Download artifacts - uses: actions/download-artifact@v8 + - uses: actions/setup-go@v5 with: - path: artifacts - merge-multiple: true + go-version-file: go.mod - - name: Create release - uses: softprops/action-gh-release@v2 + - uses: goreleaser/goreleaser-action@v6 with: - files: artifacts/* - generate_release_notes: true + args: release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Update homebrew tap + - name: Update Homebrew formula + env: + GH_TOKEN: ${{ secrets.GH_TOKEN }} run: | gh workflow run bump-formula.yml \ + -f version="${GITHUB_REF_NAME}" \ -f formula=vector \ - -f version=${GITHUB_REF##refs/tags/} \ + -f type=go \ -R built-fast/homebrew-devtools - env: - GH_TOKEN: ${{ secrets.GH_TOKEN }} diff --git a/.gitignore b/.gitignore index 1cfda41..df6d1e2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,22 @@ -/target +# Go build artifacts +bin/ +dist/ + +# Go binary artifacts +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Downloaded specs +e2e/openapi.yaml + +# OS / Editor .DS_Store .envrc /.idea /.vscode + +# AI +/.claude/settings.local.json diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000..aa3582b --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,20 @@ +version: "2" + +run: + timeout: 3m + go: "1.26.1" + +linters: + # These are enabled by default in v2, listed explicitly for clarity. + # Note: gosimple is merged into staticcheck in golangci-lint v2. 
+ enable: + - govet + - errcheck + - staticcheck + - unused + - ineffassign + exclusions: + rules: + - path: _test\.go + linters: + - errcheck diff --git a/.goreleaser.yaml b/.goreleaser.yaml new file mode 100644 index 0000000..36201d5 --- /dev/null +++ b/.goreleaser.yaml @@ -0,0 +1,47 @@ +version: 2 + +project_name: vector + +builds: + - main: ./cmd/vector + binary: vector + env: + - CGO_ENABLED=0 + ldflags: + - -s -w + - -X github.com/built-fast/vector-cli/internal/version.Version={{.Version}} + - -X github.com/built-fast/vector-cli/internal/version.Commit={{.Commit}} + - -X github.com/built-fast/vector-cli/internal/version.Date={{.Date}} + goos: + - darwin + - linux + - windows + goarch: + - amd64 + - arm64 + ignore: + - goos: windows + goarch: arm64 + +archives: + - formats: + - tar.gz + format_overrides: + - goos: windows + formats: + - zip + name_template: "{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + files: + - src: man/man1/vector.1 + dst: man/man1/ + +checksum: + name_template: "checksums.txt" + algorithm: sha256 +changelog: + sort: asc + filters: + exclude: + - "^docs:" + - "^test:" + - "^ci:" diff --git a/.surface b/.surface new file mode 100644 index 0000000..d0ecff6 --- /dev/null +++ b/.surface @@ -0,0 +1,353 @@ +ARG vector account api-key delete 0 key-id +ARG vector account secret delete 0 id +ARG vector account secret show 0 id +ARG vector account secret update 0 id +ARG vector account ssh-key delete 0 key-id +ARG vector account ssh-key show 0 key-id +ARG vector archive import 0 site-id +ARG vector archive import 1 file +ARG vector backup download create 0 backup-id +ARG vector backup download status 0 backup-id +ARG vector backup download status 1 download-id +ARG vector backup show 0 id +ARG vector db export create 0 site-id +ARG vector db export status 0 site-id +ARG vector db export status 1 export-id +ARG vector db import-session create 0 site-id +ARG vector db import-session run 0 site-id +ARG vector db import-session run 1 
import-id +ARG vector db import-session status 0 site-id +ARG vector db import-session status 1 import-id +ARG vector deploy list 0 env-id +ARG vector deploy rollback 0 env-id +ARG vector deploy show 0 deploy-id +ARG vector deploy trigger 0 env-id +ARG vector env create 0 site-id +ARG vector env db promote 0 env-id +ARG vector env db promote-status 0 env-id +ARG vector env db promote-status 1 promote-id +ARG vector env delete 0 env-id +ARG vector env list 0 site-id +ARG vector env secret create 0 env-id +ARG vector env secret delete 0 secret-id +ARG vector env secret list 0 env-id +ARG vector env secret show 0 secret-id +ARG vector env secret update 0 secret-id +ARG vector env show 0 env-id +ARG vector env update 0 env-id +ARG vector restore create 0 backup-id +ARG vector restore show 0 id +ARG vector site clone 0 site-id +ARG vector site delete 0 site-id +ARG vector site logs 0 site-id +ARG vector site purge-cache 0 site-id +ARG vector site reset-db-password 0 site-id +ARG vector site reset-sftp-password 0 site-id +ARG vector site show 0 site-id +ARG vector site ssh-key add 0 site-id +ARG vector site ssh-key list 0 site-id +ARG vector site ssh-key remove 0 site-id +ARG vector site ssh-key remove 1 key-id +ARG vector site suspend 0 site-id +ARG vector site unsuspend 0 site-id +ARG vector site update 0 site-id +ARG vector site wp-reconfig 0 site-id +ARG vector ssl nudge 0 env-id +ARG vector ssl status 0 env-id +ARG vector waf allowed-referrer add 0 site-id +ARG vector waf allowed-referrer add 1 hostname +ARG vector waf allowed-referrer list 0 site-id +ARG vector waf allowed-referrer remove 0 site-id +ARG vector waf allowed-referrer remove 1 hostname +ARG vector waf blocked-ip add 0 site-id +ARG vector waf blocked-ip add 1 ip +ARG vector waf blocked-ip list 0 site-id +ARG vector waf blocked-ip remove 0 site-id +ARG vector waf blocked-ip remove 1 ip +ARG vector waf blocked-referrer add 0 site-id +ARG vector waf blocked-referrer add 1 hostname +ARG vector waf 
blocked-referrer list 0 site-id +ARG vector waf blocked-referrer remove 0 site-id +ARG vector waf blocked-referrer remove 1 hostname +ARG vector waf rate-limit create 0 site-id +ARG vector waf rate-limit delete 0 site-id +ARG vector waf rate-limit delete 1 rule-id +ARG vector waf rate-limit list 0 site-id +ARG vector waf rate-limit show 0 site-id +ARG vector waf rate-limit show 1 rule-id +ARG vector waf rate-limit update 0 site-id +ARG vector waf rate-limit update 1 rule-id +ARG vector webhook delete 0 id +ARG vector webhook show 0 id +ARG vector webhook update 0 id +CMD vector +CMD vector account +CMD vector account api-key +CMD vector account api-key create +CMD vector account api-key delete +CMD vector account api-key list +CMD vector account secret +CMD vector account secret create +CMD vector account secret delete +CMD vector account secret list +CMD vector account secret show +CMD vector account secret update +CMD vector account show +CMD vector account ssh-key +CMD vector account ssh-key create +CMD vector account ssh-key delete +CMD vector account ssh-key list +CMD vector account ssh-key show +CMD vector archive +CMD vector archive import +CMD vector auth +CMD vector auth login +CMD vector auth logout +CMD vector auth status +CMD vector backup +CMD vector backup create +CMD vector backup download +CMD vector backup download create +CMD vector backup download status +CMD vector backup list +CMD vector backup show +CMD vector db +CMD vector db export +CMD vector db export create +CMD vector db export status +CMD vector db import-session +CMD vector db import-session create +CMD vector db import-session run +CMD vector db import-session status +CMD vector deploy +CMD vector deploy list +CMD vector deploy rollback +CMD vector deploy show +CMD vector deploy trigger +CMD vector env +CMD vector env create +CMD vector env db +CMD vector env db promote +CMD vector env db promote-status +CMD vector env delete +CMD vector env list +CMD vector env secret +CMD vector 
env secret create +CMD vector env secret delete +CMD vector env secret list +CMD vector env secret show +CMD vector env secret update +CMD vector env show +CMD vector env update +CMD vector event +CMD vector event list +CMD vector mcp +CMD vector mcp setup +CMD vector php-versions +CMD vector restore +CMD vector restore create +CMD vector restore list +CMD vector restore show +CMD vector site +CMD vector site clone +CMD vector site create +CMD vector site delete +CMD vector site list +CMD vector site logs +CMD vector site purge-cache +CMD vector site reset-db-password +CMD vector site reset-sftp-password +CMD vector site show +CMD vector site ssh-key +CMD vector site ssh-key add +CMD vector site ssh-key list +CMD vector site ssh-key remove +CMD vector site suspend +CMD vector site unsuspend +CMD vector site update +CMD vector site wp-reconfig +CMD vector skill +CMD vector skill install +CMD vector skill uninstall +CMD vector ssl +CMD vector ssl nudge +CMD vector ssl status +CMD vector waf +CMD vector waf allowed-referrer +CMD vector waf allowed-referrer add +CMD vector waf allowed-referrer list +CMD vector waf allowed-referrer remove +CMD vector waf blocked-ip +CMD vector waf blocked-ip add +CMD vector waf blocked-ip list +CMD vector waf blocked-ip remove +CMD vector waf blocked-referrer +CMD vector waf blocked-referrer add +CMD vector waf blocked-referrer list +CMD vector waf blocked-referrer remove +CMD vector waf rate-limit +CMD vector waf rate-limit create +CMD vector waf rate-limit delete +CMD vector waf rate-limit list +CMD vector waf rate-limit show +CMD vector waf rate-limit update +CMD vector webhook +CMD vector webhook create +CMD vector webhook delete +CMD vector webhook list +CMD vector webhook show +CMD vector webhook update +FLAG vector --jq type=string +FLAG vector --json type=bool +FLAG vector --no-json type=bool +FLAG vector --token type=string +FLAG vector --version type=bool +FLAG vector account api-key create --abilities type=string +FLAG vector 
account api-key create --expires-at type=string +FLAG vector account api-key create --name type=string +FLAG vector account api-key list --page type=int +FLAG vector account api-key list --per-page type=int +FLAG vector account secret create --key type=string +FLAG vector account secret create --no-secret type=bool +FLAG vector account secret create --value type=string +FLAG vector account secret list --page type=int +FLAG vector account secret list --per-page type=int +FLAG vector account secret update --no-secret type=bool +FLAG vector account secret update --value type=string +FLAG vector account ssh-key create --name type=string +FLAG vector account ssh-key create --public-key type=string +FLAG vector account ssh-key list --page type=int +FLAG vector account ssh-key list --per-page type=int +FLAG vector archive import --disable-foreign-keys type=bool +FLAG vector archive import --drop-tables type=bool +FLAG vector archive import --search-replace-from type=string +FLAG vector archive import --search-replace-to type=string +FLAG vector backup create --description type=string +FLAG vector backup create --environment-id type=string +FLAG vector backup create --scope type=string +FLAG vector backup create --site-id type=string +FLAG vector backup list --environment-id type=string +FLAG vector backup list --page type=int +FLAG vector backup list --per-page type=int +FLAG vector backup list --site-id type=string +FLAG vector backup list --type type=string +FLAG vector db export create --format type=string +FLAG vector db import-session create --content-length type=int64 +FLAG vector db import-session create --disable-foreign-keys type=bool +FLAG vector db import-session create --drop-tables type=bool +FLAG vector db import-session create --filename type=string +FLAG vector db import-session create --search-replace-from type=string +FLAG vector db import-session create --search-replace-to type=string +FLAG vector deploy list --page type=int +FLAG vector deploy list 
--per-page type=int +FLAG vector deploy rollback --poll-interval type=duration +FLAG vector deploy rollback --target type=string +FLAG vector deploy rollback --timeout type=duration +FLAG vector deploy rollback --wait type=bool +FLAG vector deploy trigger --include-database type=bool +FLAG vector deploy trigger --include-uploads type=bool +FLAG vector deploy trigger --poll-interval type=duration +FLAG vector deploy trigger --timeout type=duration +FLAG vector deploy trigger --wait type=bool +FLAG vector env create --custom-domain type=string +FLAG vector env create --name type=string +FLAG vector env create --php-version type=string +FLAG vector env create --production type=bool +FLAG vector env create --tags type=string +FLAG vector env db promote --disable-foreign-keys type=bool +FLAG vector env db promote --drop-tables type=bool +FLAG vector env delete --force type=bool +FLAG vector env list --page type=int +FLAG vector env list --per-page type=int +FLAG vector env secret create --is-secret type=bool +FLAG vector env secret create --key type=string +FLAG vector env secret create --value type=string +FLAG vector env secret delete --force type=bool +FLAG vector env secret list --page type=int +FLAG vector env secret list --per-page type=int +FLAG vector env secret update --is-secret type=bool +FLAG vector env secret update --key type=string +FLAG vector env secret update --value type=string +FLAG vector env update --clear-custom-domain type=bool +FLAG vector env update --custom-domain type=string +FLAG vector env update --tags type=string +FLAG vector event list --event type=string +FLAG vector event list --from type=string +FLAG vector event list --page type=int +FLAG vector event list --per-page type=int +FLAG vector event list --to type=string +FLAG vector mcp setup --force type=bool +FLAG vector mcp setup --global type=bool +FLAG vector mcp setup --target type=string +FLAG vector restore create --disable-foreign-keys type=bool +FLAG vector restore create 
--drop-tables type=bool +FLAG vector restore create --poll-interval type=duration +FLAG vector restore create --search-replace-from type=string +FLAG vector restore create --search-replace-to type=string +FLAG vector restore create --timeout type=duration +FLAG vector restore create --wait type=bool +FLAG vector restore list --backup-id type=string +FLAG vector restore list --environment-id type=string +FLAG vector restore list --page type=int +FLAG vector restore list --per-page type=int +FLAG vector restore list --site-id type=string +FLAG vector restore list --type type=string +FLAG vector site clone --customer-id type=string +FLAG vector site clone --php-version type=string +FLAG vector site clone --tags type=string +FLAG vector site create --customer-id type=string +FLAG vector site create --php-version type=string +FLAG vector site create --poll-interval type=duration +FLAG vector site create --production-domain type=string +FLAG vector site create --staging-domain type=string +FLAG vector site create --tags type=string +FLAG vector site create --timeout type=duration +FLAG vector site create --wait type=bool +FLAG vector site create --wp-admin-email type=string +FLAG vector site create --wp-admin-user type=string +FLAG vector site create --wp-site-title type=string +FLAG vector site delete --force type=bool +FLAG vector site list --page type=int +FLAG vector site list --per-page type=int +FLAG vector site logs --cursor type=string +FLAG vector site logs --deployment-id type=string +FLAG vector site logs --end-time type=string +FLAG vector site logs --environment type=string +FLAG vector site logs --level type=string +FLAG vector site logs --limit type=int +FLAG vector site logs --start-time type=string +FLAG vector site purge-cache --cache-tag type=string +FLAG vector site purge-cache --url type=string +FLAG vector site ssh-key add --name type=string +FLAG vector site ssh-key add --public-key type=string +FLAG vector site ssh-key list --page type=int +FLAG 
vector site ssh-key list --per-page type=int +FLAG vector site update --customer-id type=string +FLAG vector site update --tags type=string +FLAG vector ssl nudge --retry type=bool +FLAG vector waf rate-limit create --block-time type=int +FLAG vector waf rate-limit create --description type=string +FLAG vector waf rate-limit create --name type=string +FLAG vector waf rate-limit create --operator type=string +FLAG vector waf rate-limit create --request-count type=int +FLAG vector waf rate-limit create --timeframe type=int +FLAG vector waf rate-limit create --transformations type=string +FLAG vector waf rate-limit create --value type=string +FLAG vector waf rate-limit create --variables type=string +FLAG vector waf rate-limit update --block-time type=int +FLAG vector waf rate-limit update --description type=string +FLAG vector waf rate-limit update --name type=string +FLAG vector waf rate-limit update --operator type=string +FLAG vector waf rate-limit update --request-count type=int +FLAG vector waf rate-limit update --timeframe type=int +FLAG vector waf rate-limit update --transformations type=string +FLAG vector waf rate-limit update --value type=string +FLAG vector waf rate-limit update --variables type=string +FLAG vector webhook create --events type=string +FLAG vector webhook create --type type=string +FLAG vector webhook create --url type=string +FLAG vector webhook list --page type=int +FLAG vector webhook list --per-page type=int +FLAG vector webhook update --enabled type=bool +FLAG vector webhook update --events type=string +FLAG vector webhook update --url type=string diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 0000000..9a8774a --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,177 @@ +@STYLE.md + +# Vector CLI Development Context + +## Development Loop + +Make changes, run `make check`, fix what it catches, repeat until green, then +push. `make check` runs fmt-check + vet + lint + test + test-e2e + surface + +skill-drift + tidy-check. 
Treat it as your inner-loop companion, not a final +hurdle. + +## Repository Structure + +``` +vector-cli/ +├── cmd/vector/ # Main entrypoint (main.go) +├── internal/ +│ ├── api/ # HTTP client and error handling +│ ├── appctx/ # Application context (App struct) +│ ├── cli/ # Root command wiring +│ ├── commands/ # Command implementations (one file per group) +│ ├── config/ # Configuration, keyring, paths +│ ├── output/ # Output formatting (Writer, Table, JSON, KeyValue) +│ ├── surface/ # CLI surface snapshot generator +│ └── version/ # Version info (injected via ldflags) +├── e2e/ # BATS end-to-end tests (Prism mock server) +├── man/ # Manpage generation +├── scripts/ # CI helper scripts (drift detection) +└── skills/ # Embedded agent skill documents + └── vector/ # SKILL.md — agent reference for vector-cli +``` + +## Vector Pro API Reference + +Base URL: `https://api.builtfast.com` + +All resource paths are under `/api/v1/vector/`. Key resources: + +- `/sites` — CRUD, suspend, unsuspend, clone, purge-cache, logs, wp-reconfig +- `/sites/{id}/environments` — environment management +- `/sites/{id}/environments/{id}/deployments` — deployments +- `/sites/{id}/backups` — backup management +- `/sites/{id}/backups/{id}/download` — backup download +- `/sites/{id}/restores` — restore management +- `/sites/{id}/waf/blocked-ips` — WAF blocked IPs +- `/sites/{id}/waf/blocked-referrers` — WAF blocked referrers +- `/sites/{id}/waf/allowed-referrers` — WAF allowed referrers +- `/sites/{id}/waf/rate-limits` — WAF rate limits +- `/sites/{id}/archives` — site archives +- `/sites/{id}/ssh-keys` — site SSH keys +- `/sites/{id}/db/export` — database export +- `/sites/{id}/db/import-sessions` — database import +- `/sites/{id}/events` — site events +- `/sites/{id}/environments/{id}/ssl` — SSL certificates +- `/sites/{id}/environments/{id}/secrets` — environment secrets +- `/sites/{id}/environments/{id}/db` — environment database info +- `/account` — account details +- `/account/ssh-keys` 
— account SSH keys +- `/account/api-keys` — API key management +- `/account/secrets` — account secrets +- `/webhooks` — webhook management +- `/php-versions` — available PHP versions +- `/auth/whoami` — authentication check +- `/mcp/config` — MCP server configuration + +## Testing + +`make check` is the local CI gate. Run it before pushing. + +```bash +make check # All checks (local CI gate) +make test # Go unit tests only +make lint # golangci-lint +make vet # go vet +make fmt # Format code (gofmt -s) +make fmt-check # Check formatting (fails if not formatted) +make tidy # go mod tidy +make tidy-check # Verify go.mod/go.sum are tidy (non-mutating) +make race-test # Tests with race detector +make test-e2e # BATS e2e tests (requires Prism) +make build # Build binary to ./bin/vector +make surface # Regenerate .surface snapshot +make check-surface # Verify .surface is up to date +make check-skill-drift # Verify SKILL.md matches .surface +make vuln # govulncheck for dependency vulnerabilities +make replace-check # Guard against replace directives in go.mod +make release-check # Full pre-flight: check + replace-check + vuln + race +``` + +When iterating on a specific area, use targeted targets for faster feedback, +then finish with `make check` before pushing. + +**E2E tests** use a [Prism](https://github.com/stoplightio/prism) mock server +that validates requests against `e2e/openapi.yaml`. The test helper (`e2e/test_helper.bash`) +starts Prism automatically. + +**Requirements**: Go 1.26+, [golangci-lint](https://golangci-lint.run), +[bats-core](https://github.com/bats-core/bats-core), Node.js/npx (for Prism), +[govulncheck](https://pkg.go.dev/golang.org/x/vuln/cmd/govulncheck) (for `make vuln`). + +## Surface Snapshot + +The `.surface` file at the repo root is a deterministic, sorted snapshot of +every command, argument, and flag in the CLI. It is generated by +`internal/surface/surface.go` and committed to version control. 
+ +**Purpose**: catch accidental command/flag changes in CI and provide a stable +reference for skill drift detection. + +**Format** — one line per entry, sorted lexicographically: + +``` +CMD vector site list +ARG vector site show 0 site-id +FLAG vector site list --page type=string +FLAG vector --json type=bool # persistent flag on root +``` + +**Regenerate** after adding/removing/renaming commands, flags, or arguments: + +```bash +make surface # regenerate .surface +# or: go test ./internal/cli/ -run TestSurface -update +``` + +**CI enforcement**: `make check` includes `check-surface`, which fails with a +diff if `.surface` is stale. Always run `make surface` after command/flag changes +and commit the updated file. + +## Skill System + +`skills/vector/SKILL.md` is an agent-facing reference document embedded into the +binary via `skills/embed.go`. It teaches AI agents how to use vector-cli: +authentication, commands, flags, workflows, error codes, and decision trees. + +### Commands + +- `vector skill` — prints SKILL.md to stdout (no auth required) +- `vector skill install` — installs SKILL.md to `~/.agents/skills/vector/` and + symlinks it into `~/.claude/skills/vector/`; writes a `.version` stamp +- `vector skill uninstall` — removes installed skill files and symlinks + +### Auto-refresh + +`RefreshSkillsIfVersionChanged()` runs at CLI startup (in `internal/cli/execute.go`). +When the installed `.version` stamp differs from the current CLI version, it +silently re-installs the skill files. Skipped for dev builds and when the skill +has never been installed. + +### Keeping SKILL.md in sync + +Every command and flag mentioned in SKILL.md must match an entry in `.surface`. +After command/flag changes: + +1. Run `make surface` to regenerate `.surface` +2. Update `skills/vector/SKILL.md` if commands, flags, or workflows changed +3. 
Run `make check` — the `check-skill-drift` target will catch any mismatches + +### Drift detection + +`scripts/check-skill-drift.sh` scans SKILL.md for `vector ` and `--` +references and verifies each one exists in `.surface`. Known/accepted mismatches +can be baselined in `.surface-skill-drift`. + +```bash +make check-skill-drift # run drift check standalone +``` + +## Workflow: Command or Flag Changes + +When you add, remove, or rename a command or flag: + +1. Make the code change in `internal/commands/` +2. `make surface` — regenerate `.surface` +3. Update `skills/vector/SKILL.md` if the change affects agent-visible behavior +4. Update `man/man1/vector.1` — the e2e manpage test enforces this +5. `make check` — validates fmt, vet, lint, tests, e2e, surface, skill drift, and tidy diff --git a/Brewfile b/Brewfile new file mode 100644 index 0000000..dffa6a1 --- /dev/null +++ b/Brewfile @@ -0,0 +1,5 @@ +brew "go" +brew "golangci-lint" +brew "bats-core" +brew "node" +brew "jq" diff --git a/Cargo.lock b/Cargo.lock deleted file mode 100644 index 832fa1a..0000000 --- a/Cargo.lock +++ /dev/null @@ -1,1802 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "anstream" -version = "0.6.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" - -[[package]] -name = "anstyle-parse" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" -dependencies = [ - "anstyle", - "once_cell_polyfill", - "windows-sys 0.61.2", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi", - "libc", - "winapi", -] - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - 
-[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" - -[[package]] -name = "bumpalo" -version = "3.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" - -[[package]] -name = "bytes" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" - -[[package]] -name = "cc" -version = "1.2.53" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" -dependencies = [ - "find-msvc-tools", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "clap" -version = "4.5.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.60" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" -dependencies = [ - "heck", - 
"proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "clap_lex" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" - -[[package]] -name = "colorchoice" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" - -[[package]] -name = "comfy-table" -version = "7.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" -dependencies = [ - "crossterm", - "unicode-segmentation", - "unicode-width", -] - -[[package]] -name = "crossterm" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" -dependencies = [ - "bitflags", - "crossterm_winapi", - "document-features", - "parking_lot", - "rustix", - "winapi", -] - -[[package]] -name = "crossterm_winapi" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" -dependencies = [ - "winapi", -] - -[[package]] -name = "dirs" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.61.2", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" 
-dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "document-features" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" -dependencies = [ - "litrs", -] - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-core", - "futures-io", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi", - "wasip2", - "wasm-bindgen", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http", - "http-body", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" - -[[package]] -name = "hyper" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" -dependencies = [ - "atomic-waker", - "bytes", - "futures-channel", - "futures-core", - "http", - "http-body", - "httparse", - "itoa", - "pin-project-lite", - "pin-utils", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http", - "hyper", - "hyper-util", - "rustls", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", - "webpki-roots", -] - -[[package]] -name = "hyper-util" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" -dependencies = [ - "base64", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "hyper", - "ipnet", - "libc", - "percent-encoding", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", -] - -[[package]] -name = "icu_collections" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" 
-version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "ipnet" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" - -[[package]] -name = "iri-string" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "js-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "libc" -version = "0.2.180" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" - -[[package]] -name = "libredox" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" -dependencies = [ - "bitflags", - "libc", -] - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.1" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "litrs" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "lru-slab" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - -[[package]] -name = "memchr" -version = "2.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "mime_guess" -version = "2.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" -dependencies = [ - "mime", - "unicase", -] - -[[package]] -name = "mio" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.61.2", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" - -[[package]] -name = "once_cell_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "parking_lot" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "potential_utf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "proc-macro2" -version = "1.0.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quinn" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" -dependencies = [ - "bytes", - "cfg_aliases", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash", - "rustls", - "socket2", - "thiserror", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-proto" -version = "0.11.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" -dependencies = [ - "bytes", - "getrandom 0.3.4", - "lru-slab", - "rand", - "ring", - "rustc-hash", - "rustls", - "rustls-pki-types", - "slab", - "thiserror", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2", - "tracing", - "windows-sys 0.60.2", -] - -[[package]] -name = "quote" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "rand" -version = 
"0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha", - "rand_core", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core", -] - -[[package]] -name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom 0.3.4", -] - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags", -] - -[[package]] -name = "redox_users" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" -dependencies = [ - "getrandom 0.2.17", - "libredox", - "thiserror", -] - -[[package]] -name = "reqwest" -version = "0.12.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" -dependencies = [ - "base64", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-rustls", - "hyper-util", - "js-sys", - "log", - "mime_guess", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tokio-rustls", - "tower", - "tower-http", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "webpki-roots", -] - -[[package]] -name = "ring" -version = 
"0.17.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" -dependencies = [ - "cc", - "cfg-if", - "getrandom 0.2.17", - "libc", - "untrusted", - "windows-sys 0.52.0", -] - -[[package]] -name = "rpassword" -version = "7.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d4c8b64f049c6721ec8ccec37ddfc3d641c4a7fca57e8f2a89de509c73df39" -dependencies = [ - "libc", - "rtoolbox", - "windows-sys 0.59.0", -] - -[[package]] -name = "rtoolbox" -version = "0.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7cc970b249fbe527d6e02e0a227762c9108b2f49d81094fe357ffc6d14d7f6f" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" - -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.61.2", -] - -[[package]] -name = "rustls" -version = "0.23.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" -dependencies = [ - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-pki-types" -version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" -dependencies = [ - "web-time", - "zeroize", -] - -[[package]] -name = "rustls-webpki" -version = "0.103.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "ryu" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - 
"form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "slab" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" - -[[package]] -name = "socket2" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "2.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -dependencies = [ - "futures-core", -] - 
-[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tinyvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" -dependencies = [ - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "windows-sys 0.61.2", -] - -[[package]] -name = "tokio-rustls" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" -dependencies = [ - "rustls", - 
"tokio", -] - -[[package]] -name = "tower" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper", - "tokio", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-http" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" -dependencies = [ - "bitflags", - "bytes", - "futures-util", - "http", - "http-body", - "iri-string", - "pin-project-lite", - "tower", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "pin-project-lite", - "tracing-core", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "unicase" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-width" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "vector" -version = "0.7.0" -dependencies = [ - "atty", - "clap", - "comfy-table", - "dirs", - "reqwest", - "rpassword", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name 
= "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.58" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" -dependencies = [ - "cfg-if", - "futures-util", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "web-sys" -version = "0.3.85" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-roots" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.1", - "windows_aarch64_msvc 0.53.1", - "windows_i686_gnu 0.53.1", - "windows_i686_gnullvm 0.53.1", - "windows_i686_msvc 0.53.1", - "windows_x86_64_gnu 0.53.1", - "windows_x86_64_gnullvm 0.53.1", - "windows_x86_64_msvc 0.53.1", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" 
-version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "zerocopy" 
-version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = 
"zmij" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea" diff --git a/Cargo.toml b/Cargo.toml deleted file mode 100644 index 616321f..0000000 --- a/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "vector" -version = "0.7.0" -edition = "2024" -description = "CLI for Vector Pro API" -license = "MIT" -repository = "https://github.com/built-fast/vector-cli" -keywords = ["cli", "vector", "builtfast", "wordpress"] -categories = ["command-line-utilities"] - -[dependencies] -clap = { version = "4", features = ["derive", "env"] } -reqwest = { version = "0.12", features = ["blocking", "json", "rustls-tls", "multipart"], default-features = false } -serde = { version = "1", features = ["derive"] } -serde_json = "1" -dirs = "6" -comfy-table = "7" -thiserror = "2" -rpassword = "7" -atty = "0.2" - -[profile.release] -lto = true -strip = true -opt-level = "z" diff --git a/Makefile b/Makefile index 6ec1640..89d0d78 100644 --- a/Makefile +++ b/Makefile @@ -1,43 +1,84 @@ -.DEFAULT_GOAL := help +VERSION ?= dev +COMMIT := $(shell git rev-parse --short HEAD) +DATE := $(shell date -u +%Y-%m-%dT%H:%M:%SZ) -##@ Development +VERSION_PKG := github.com/built-fast/vector-cli/internal/version +LDFLAGS := -s -w \ + -X $(VERSION_PKG).Version=$(VERSION) \ + -X $(VERSION_PKG).Commit=$(COMMIT) \ + -X $(VERSION_PKG).Date=$(DATE) -.PHONY: build -build: ## Build debug binary - cargo build +.PHONY: build test lint clean check test-e2e surface check-surface check-skill-drift \ + fmt fmt-check vet tidy tidy-check race-test vuln replace-check release-check -.PHONY: test -test: ## Run tests - cargo test +build: + CGO_ENABLED=0 go build -trimpath -ldflags "$(LDFLAGS)" -o bin/vector ./cmd/vector -.PHONY: check -check: ## Run cargo check - cargo check +test: + go test ./... 
-.PHONY: fmt -fmt: ## Format code with rustfmt - cargo fmt +lint: + golangci-lint run -.PHONY: clippy -clippy: ## Run clippy lints - cargo clippy -- -D warnings +vet: + go vet ./... -##@ Release +fmt: + gofmt -s -w . -.PHONY: release -release: ## Build optimized release binary - cargo build --release +fmt-check: + @test -z "$$(gofmt -s -l . | tee /dev/stderr)" || (echo "Code is not formatted. Run 'make fmt'" && exit 1) -##@ Maintenance +race-test: + go test -race -count=1 ./... -.PHONY: clean -clean: ## Remove build artifacts - cargo clean +tidy: + go mod tidy -##@ Info +tidy-check: + @set -e; cp go.mod go.mod.tidycheck; cp go.sum go.sum.tidycheck; \ + restore() { mv go.mod.tidycheck go.mod; mv go.sum.tidycheck go.sum; }; \ + if ! go mod tidy; then \ + restore; \ + echo "'go mod tidy' failed. Restored original go.mod/go.sum."; \ + exit 1; \ + fi; \ + if ! git diff --quiet -- go.mod go.sum; then \ + restore; \ + echo "go.mod/go.sum are not tidy. Run 'make tidy' and commit the result."; \ + exit 1; \ + fi; \ + rm -f go.mod.tidycheck go.sum.tidycheck -.PHONY: help -help: ## Show this help - @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} \ - /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } \ - /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 }' $(MAKEFILE_LIST) +vuln: + @echo "Running govulncheck..." + govulncheck ./... 
+ +replace-check: + @if grep -q '^[[:space:]]*replace[[:space:]]' go.mod; then \ + echo "ERROR: go.mod contains replace directives"; \ + grep '^[[:space:]]*replace[[:space:]]' go.mod; \ + echo ""; \ + echo "Remove replace directives before releasing."; \ + exit 1; \ + fi + @echo "Replace check passed (no local replace directives)" + +clean: + rm -rf bin/ + +test-e2e: + ./e2e/run.sh + +surface: + go test ./internal/cli/ -run TestSurface -update + +check-surface: + go test ./internal/cli/ -run TestSurface -v + +check-skill-drift: + ./scripts/check-skill-drift.sh + +check: fmt-check vet lint test test-e2e check-surface check-skill-drift tidy-check + +release-check: check replace-check vuln race-test diff --git a/README.md b/README.md index 6e91b4b..4097bd4 100644 --- a/README.md +++ b/README.md @@ -23,32 +23,66 @@ Download from [Releases](https://github.com/built-fast/vector-cli/releases): | Platform | Architecture | File | |----------|--------------|------| -| Linux | x86_64 | `vector-x86_64-unknown-linux-gnu.tar.gz` | -| Linux | ARM64 | `vector-aarch64-unknown-linux-gnu.tar.gz` | -| macOS | x86_64 (Intel) | `vector-x86_64-apple-darwin.tar.gz` | -| macOS | ARM64 (Apple Silicon) | `vector-aarch64-apple-darwin.tar.gz` | -| Windows | x86_64 | `vector-x86_64-pc-windows-msvc.zip` | +| Linux | x86_64 | `vector_VERSION_linux_amd64.tar.gz` | +| Linux | ARM64 | `vector_VERSION_linux_arm64.tar.gz` | +| macOS | x86_64 (Intel) | `vector_VERSION_darwin_amd64.tar.gz` | +| macOS | ARM64 (Apple Silicon) | `vector_VERSION_darwin_arm64.tar.gz` | +| Windows | x86_64 | `vector_VERSION_windows_amd64.zip` | ```bash # Example: Linux x86_64 -curl -LO https://github.com/built-fast/vector-cli/releases/latest/download/vector-x86_64-unknown-linux-gnu.tar.gz -tar xzf vector-x86_64-unknown-linux-gnu.tar.gz +curl -LO https://github.com/built-fast/vector-cli/releases/latest/download/vector_VERSION_linux_amd64.tar.gz +tar xzf vector_VERSION_linux_amd64.tar.gz sudo mv vector /usr/local/bin/ ``` 
-**macOS Gatekeeper:** If you get a security warning, run: +### From source + +Requires [Go](https://go.dev/) 1.26+. + ```bash -xattr -d com.apple.quarantine ./vector +go install github.com/built-fast/vector-cli/cmd/vector@latest ``` -### From source +Or build from a local clone: ```bash -cargo install --path . +make build +# Binary is at ./bin/vector ``` +### Shell Completions + +```bash +# Bash (add to ~/.bashrc) +eval "$(vector completion bash)" + +# Zsh (add to ~/.zshrc) +eval "$(vector completion zsh)" + +# Fish +vector completion fish | source +# To load on startup: +vector completion fish > ~/.config/fish/completions/vector.fish + +# PowerShell (add to $PROFILE) +vector completion powershell | Out-String | Invoke-Expression +``` + +Homebrew installs completions automatically. + ## Usage +### Global Flags + +```bash +vector --token YOUR_TOKEN # Use a specific API token for this invocation +vector --json # Force JSON output +vector --no-json # Force table output +vector --jq # Filter JSON output with a jq expression +vector --version # Print version +``` + ### Authentication ```bash @@ -281,18 +315,54 @@ vector site list --no-json # Force table vector site list | jq '.data' # Auto JSON when piped ``` +### JQ Filtering + +The `--jq` flag filters JSON output using a built-in jq processor (no external `jq` binary required). It automatically forces JSON output. 
+ +```bash +# Extract specific fields +vector site list --jq '.[].id' +vector site show 456 --jq '.dev_domain' + +# Filter with select +vector env list --site-id 123 --jq '[.[] | select(.status == "active")]' + +# Count items +vector webhook list --jq 'length' +``` + +#### Format Strings + +The `--jq` flag supports jq format strings for converting values: + +```bash +# CSV output +vector site list --jq '[.[] | [.id, .name]] | .[] | @csv' + +# TSV output +vector site list --jq '[.[] | [.id, .name]] | .[] | @tsv' + +# URL-encode a value +vector site show 456 --jq '.name | @uri' + +# Base64-encode a value +vector site show 456 --jq '.name | @base64' +``` + +Supported format strings: `@csv`, `@tsv`, `@html`, `@uri`, `@base64`. + ## Configuration Configuration is stored in `~/.config/vector/` (XDG-compliant): -- `credentials.json` - API token (0600 permissions) - `config.json` - Optional settings +- API token is stored in the system keyring (macOS Keychain, Windows Credential Manager, Linux Secret Service) ### Environment Variables | Variable | Description | |----------|-------------| -| `VECTOR_API_KEY` | API token (overrides stored credentials) | +| `VECTOR_API_KEY` | API token (overrides keyring) | | `VECTOR_API_URL` | API base URL (default: `https://api.builtfast.com`) | | `VECTOR_CONFIG_DIR` | Config directory (default: `~/.config/vector`) | @@ -309,13 +379,14 @@ Configuration is stored in `~/.config/vector/` (XDG-compliant): ## Development +Requires [Go](https://go.dev/) 1.26+ and [golangci-lint](https://golangci-lint.run/). 
+ ```bash -make build # Debug build -make release # Optimized release build -make test # Run tests -make check # Cargo check -make fmt # Format code -make clippy # Run lints +make build # Build binary to ./bin/vector +make test # Run unit tests +make lint # Run golangci-lint +make test-e2e # Run end-to-end tests +make check # Run lint + test + test-e2e make clean # Remove build artifacts ``` diff --git a/STYLE.md b/STYLE.md new file mode 100644 index 0000000..c677d4b --- /dev/null +++ b/STYLE.md @@ -0,0 +1,176 @@ +# Vector CLI Style Guide + +Conventions for contributors and agents working on vector-cli. + +## Command Constructors + +Exported `NewXxxCmd() *cobra.Command` for top-level command groups and nested +subgroups that are referenced from another file (e.g., `NewSiteSSHKeyCmd()` is +called from `site.go`). Unexported `newXxxCmd()` for leaf commands that live in +the same file. The rule: exported = referenced cross-file, unexported = same file +only. + +Group constructors build the command, call `cmd.AddCommand(...)` for each +subcommand, and return. They never set `RunE`. + +Leaf constructors build the command, set `RunE` (inline closure or factory +function), register local flags, and return. + +## RunE Body Sequence + +Leaf commands follow a canonical sequence. Not every step applies to every +command, but the order is fixed: + +1. `requireApp(cmd)` — extract App from context, verify auth token +2. Read flags / build request body +3. Confirmation prompt (destructive operations only) +4. API call (`app.Client.Get/Post/Put/Delete`) +5. `defer func() { _ = resp.Body.Close() }()` +6. `io.ReadAll(resp.Body)` +7. `parseResponseData(body)` or `parseResponseWithMeta(body)` +8. JSON branch — `if app.Output.Format() == output.JSON { return app.Output.JSON(...) }` +9. `json.Unmarshal` into `map[string]any` or `[]map[string]any` +10. Format and output (`app.Output.Table`, `app.Output.KeyValue`, `app.Output.Message`) +11. 
`return nil` + +For repetitive site actions, use the `siteActionRunE` and `sitePostActionRunE` +factory functions instead of duplicating the sequence. + +## Output + +Format detection follows `--json` > `--no-json` > TTY auto-detect (TTY → Table, +piped → JSON). The `--jq` flag forces JSON mode and is mutually exclusive with +`--no-json`. + +Writer methods for formatted output: + +- `app.Output.JSON(v)` — pretty-printed JSON (applies jq filter if set) +- `app.Output.Table(headers, rows)` — tabular output via tabwriter +- `app.Output.KeyValue(pairs)` — right-aligned key: value pairs (show commands) +- `app.Output.Pagination(page, lastPage, total)` — "Page X of Y (Z total)" +- `app.Output.Message(msg)` — plain text line + +Standalone `Print*` helpers (`PrintTable`, `PrintJSON`, `PrintKeyValue`, +`PrintPagination`, `PrintMessage`, `PrintError`) are used when no Writer is +available (e.g., `printLogEntries` writing to `cmd.OutOrStdout()`). + +## Error Handling + +Wrap errors with `fmt.Errorf("failed to : %w", err)`. Use a +consistent action phrase per command (e.g., "failed to list sites", +"failed to create site"). + +Exit codes are mapped from HTTP status in `api.exitCodeForStatus`: + +| HTTP Status | Exit Code | Meaning | +|-------------|-----------|---------| +| 401, 403 | 2 | Authentication/authorization | +| 422 | 3 | Validation error | +| 404 | 4 | Not found | +| 5xx | 5 | Server error | +| other | 1 | General error | + +Return `*api.APIError` directly for client-side validation failures (e.g., +missing required flag), setting `ExitCode` to match the equivalent HTTP status +category. + +## Flags + +All leaf command flags are **local** (`cmd.Flags()`). Persistent flags live only +on the root command: `--token`, `--json`, `--no-json`, `--jq`. + +Flag names use **kebab-case** (`--customer-id`, `--php-version`, `--cache-tag`). 
+ +Use `cmd.Flags().Changed("flag-name")` to distinguish "flag not passed" from +"flag passed with zero value" — required for optional PATCH/PUT fields that +should only be included in the request body when explicitly set. + +## Config Resolution + +Token precedence: `--token` flag > `VECTOR_API_KEY` env > OS keyring. + +Config directory: `VECTOR_CONFIG_DIR` env > `XDG_CONFIG_HOME/vector` > +`~/.config/vector` (Linux/macOS) or `%APPDATA%/vector` (Windows). + +Keyring disabled via `VECTOR_NO_KEYRING` env (any non-empty value). + +## Bare Command Groups + +Group commands (resource nouns like `site`, `env`, `waf`, `backup`) must **not** +set `RunE`. Bare invocation shows help automatically via Cobra. + +The root command is the only exception — it sets `RunE` to handle `--version`. + +## File Organization + +One file per command group in `internal/commands/`. Nested subgroups get their +own file named with underscores: `site_ssh_key.go`, `waf_blocked_ip.go`, +`env_secret.go`. + +Tests mirror source files: `site_test.go`, `waf_blocked_ip_test.go`. + +Shared helpers live in `helpers.go` (`requireApp`, pagination, parsing, +formatting, confirm). + +## API Base Paths + +Declare a package-level `const` at the top of each file: + +```go +const sitesBasePath = "/api/v1/vector/sites" +``` + +For nested resource paths, use helper functions: + +```go +func wafBlockedIPsPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/waf/blocked-ips" +} +``` + +## Import Ordering + +Three groups separated by blank lines, each alphabetically sorted: + +1. Standard library +2. Third-party modules +3. Project-internal (`github.com/built-fast/vector-cli/...`) + +`goimports` enforces this. + +## Declaration Ordering + +Within a command file, declarations follow this order: + +1. Package-level constants (base paths) +2. Package-level variables (if any) +3. Exported group constructor (`NewXxxCmd`) +4. Unexported leaf constructors (in the same order as `AddCommand` calls) +5. 
Private helper functions + +## Testing + +Same-package tests (`package commands`). Test files follow these patterns: + +**Command builder**: `buildXxxCmd(baseURL, token, format)` returns +`(*cobra.Command, *bytes.Buffer, *bytes.Buffer)` — root command wired with App +context, stdout buffer, stderr buffer. + +**Test server**: `newXxxTestServer(validToken)` returns `*httptest.Server` with +a `method + path` switch dispatching fixture responses. + +**Fixture variables**: Package-level `var xxxResponse = map[string]any{...}` for +each endpoint response. + +**Confirm override**: Replace `confirmReader` with `strings.NewReader(...)` and +restore via `t.Cleanup`. + +**Keyring**: Call `keyring.MockInit()` per test (not TestMain). Use +`t.Setenv("VECTOR_CONFIG_DIR", t.TempDir())` to isolate config. + +**Assertions**: `require` for preconditions and fatal checks, `assert` for +outcome verification. Prefer `require.NoError` / `require.Error` for error +checks that guard subsequent assertions. + +**E2E tests**: BATS scripts in `e2e/` using a Prism mock server against the +OpenAPI spec (`e2e/openapi.yaml`). 
diff --git a/cmd/vector/main.go b/cmd/vector/main.go new file mode 100644 index 0000000..a012186 --- /dev/null +++ b/cmd/vector/main.go @@ -0,0 +1,11 @@ +package main + +import ( + "os" + + "github.com/built-fast/vector-cli/internal/cli" +) + +func main() { + os.Exit(cli.Execute()) +} diff --git a/e2e/account.bats b/e2e/account.bats new file mode 100644 index 0000000..0488d9d --- /dev/null +++ b/e2e/account.bats @@ -0,0 +1,323 @@ +#!/usr/bin/env bats +# account.bats - E2E tests for vector account commands + +load test_helper + + +# --- account show --- + +@test "account show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account show + assert_success + is_valid_json +} + +@test "account show --no-json returns key-value output" { + create_credentials "test-token" + run vector account show --no-json + assert_success + assert_output_contains "Owner Name" + assert_output_contains "Account Name" + assert_output_contains "Total Sites" +} + + +# --- account ssh-key list --- + +@test "account ssh-key list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account ssh-key list + assert_success + is_valid_json +} + +@test "account ssh-key list --no-json returns table output" { + create_credentials "test-token" + run vector account ssh-key list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "NAME" + assert_output_contains "FINGERPRINT" +} + +@test "account ssh-key list --json returns valid JSON" { + create_credentials "test-token" + run vector account ssh-key list --json + assert_success + is_valid_json +} + + +# --- account ssh-key show --- + +@test "account ssh-key show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account ssh-key show 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "account ssh-key show --no-json returns key-value output" { + create_credentials "test-token" + run vector account 
ssh-key show 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Name" + assert_output_contains "Fingerprint" +} + + +# --- account ssh-key create --- + +@test "account ssh-key create succeeds" { + create_credentials "test-token" + run vector account ssh-key create \ + --name "deploy key" \ + --public-key "ssh-rsa AAAAB3NzaC1yc2EA user@host" + assert_success + is_valid_json +} + +@test "account ssh-key create --no-json returns key-value output" { + create_credentials "test-token" + run vector account ssh-key create \ + --name "deploy key" \ + --public-key "ssh-rsa AAAAB3NzaC1yc2EA user@host" \ + --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Name" +} + + +# --- account ssh-key delete --- + +@test "account ssh-key delete succeeds" { + create_credentials "test-token" + run vector account ssh-key delete 01JTEST00000000000000000AA + assert_success +} + + +# --- account api-key list --- + +@test "account api-key list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account api-key list + assert_success + is_valid_json +} + +@test "account api-key list --no-json returns table output" { + create_credentials "test-token" + run vector account api-key list --no-json + assert_success + assert_output_contains "NAME" + assert_output_contains "ABILITIES" +} + +@test "account api-key list --json returns valid JSON" { + create_credentials "test-token" + run vector account api-key list --json + assert_success + is_valid_json +} + + +# --- account api-key create --- + +@test "account api-key create succeeds" { + create_credentials "test-token" + run vector account api-key create --name "Test API Key" + assert_success + is_valid_json +} + +@test "account api-key create --no-json returns key-value output" { + create_credentials "test-token" + run vector account api-key create --name "Test API Key" --no-json + assert_success + assert_output_contains "Name" 
+ assert_output_contains "Token" +} + + +# --- account api-key delete --- + +@test "account api-key delete succeeds" { + create_credentials "test-token" + run vector account api-key delete 12345 + assert_success +} + + +# --- account secret list --- + +@test "account secret list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account secret list + assert_success + is_valid_json +} + +@test "account secret list --no-json returns table output" { + create_credentials "test-token" + run vector account secret list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "KEY" + assert_output_contains "SECRET" +} + +@test "account secret list --json returns valid JSON" { + create_credentials "test-token" + run vector account secret list --json + assert_success + is_valid_json +} + + +# --- account secret show --- + +@test "account secret show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector account secret show 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "account secret show --no-json returns key-value output" { + create_credentials "test-token" + run vector account secret show 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" + assert_output_contains "Secret" +} + + +# --- account secret create --- + +@test "account secret create succeeds" { + create_credentials "test-token" + run vector account secret create --key "MY_SECRET" --value "secret123" + assert_success + is_valid_json +} + +@test "account secret create --no-json returns key-value output" { + create_credentials "test-token" + run vector account secret create --key "MY_SECRET" --value "secret123" --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" +} + + +# --- account secret update --- + +@test "account secret update succeeds" { + create_credentials "test-token" + run vector account 
secret update 01JTEST00000000000000000AA --value "new-value" + assert_success + is_valid_json +} + +@test "account secret update --no-json returns key-value output" { + create_credentials "test-token" + run vector account secret update 01JTEST00000000000000000AA --value "new-value" --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" +} + + +# --- account secret delete --- + +@test "account secret delete succeeds" { + create_credentials "test-token" + run vector account secret delete 01JTEST00000000000000000AA + assert_success +} + + +# --- auth required --- + +@test "account show without auth fails with exit code 2" { + run vector account show + assert_failure + assert_exit_code 2 +} + +@test "account ssh-key list without auth fails with exit code 2" { + run vector account ssh-key list + assert_failure + assert_exit_code 2 +} + +@test "account ssh-key show without auth fails with exit code 2" { + run vector account ssh-key show 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "account ssh-key create without auth fails with exit code 2" { + run vector account ssh-key create --name "test" --public-key "ssh-rsa AAAA" + assert_failure + assert_exit_code 2 +} + +@test "account ssh-key delete without auth fails with exit code 2" { + run vector account ssh-key delete 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "account api-key list without auth fails with exit code 2" { + run vector account api-key list + assert_failure + assert_exit_code 2 +} + +@test "account api-key create without auth fails with exit code 2" { + run vector account api-key create --name "test" + assert_failure + assert_exit_code 2 +} + +@test "account api-key delete without auth fails with exit code 2" { + run vector account api-key delete 12345 + assert_failure + assert_exit_code 2 +} + +@test "account secret list without auth fails with exit code 2" { + run vector account secret list + assert_failure + 
assert_exit_code 2 +} + +@test "account secret show without auth fails with exit code 2" { + run vector account secret show 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "account secret create without auth fails with exit code 2" { + run vector account secret create --key "MY_SECRET" --value "secret123" + assert_failure + assert_exit_code 2 +} + +@test "account secret update without auth fails with exit code 2" { + run vector account secret update 01JTEST00000000000000000AA --value "new-value" + assert_failure + assert_exit_code 2 +} + +@test "account secret delete without auth fails with exit code 2" { + run vector account secret delete 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} diff --git a/e2e/archive.bats b/e2e/archive.bats new file mode 100644 index 0000000..d509d64 --- /dev/null +++ b/e2e/archive.bats @@ -0,0 +1,31 @@ +#!/usr/bin/env bats +# archive.bats - E2E tests for vector archive commands + +load test_helper + + +# --- archive import help --- + +@test "archive import --help shows usage" { + run vector archive import --help + assert_success + assert_output_contains "Import a site archive from a local file" +} + +@test "archive --help shows import subcommand" { + run vector archive --help + assert_success + assert_output_contains "import" +} + + +# --- auth required --- + +@test "archive import without auth fails with exit code 2" { + # Create a dummy file to pass argument validation + local tmpfile="$TEST_TEMP_DIR/test-archive.tar.gz" + echo "dummy" > "$tmpfile" + run vector archive import 01JTEST00000000000000SITE01 "$tmpfile" + assert_failure + assert_exit_code 2 +} diff --git a/e2e/auth.bats b/e2e/auth.bats new file mode 100644 index 0000000..ff77a93 --- /dev/null +++ b/e2e/auth.bats @@ -0,0 +1,86 @@ +#!/usr/bin/env bats +# auth.bats - E2E tests for vector auth commands + +load test_helper + + +# --- auth login --token --- + +@test "auth login --token succeeds with valid token" { + run vector auth 
login --token test-token-12345 + assert_success +} + +@test "auth login --token with --no-json shows success message" { + run vector auth login --token test-token --no-json + assert_success + assert_output_contains "Authenticated as" +} + +@test "auth login --token overwrites existing token" { + run vector auth login --token old-token + assert_success + + run vector auth login --token new-token-67890 + assert_success +} + + +# --- auth login without token (non-TTY) --- + +@test "auth login without token and without TTY fails" { + # In BATS, stdin is not a TTY. Provide empty input via /dev/null. + run vector auth login < /dev/null + assert_failure + assert_exit_code 2 + assert_output_contains "No API token provided" +} + + +# --- auth status --- + +@test "auth status with --token flag shows token source" { + run vector auth status --token some-token + assert_success + assert_output_contains "flag" +} + +@test "auth status without credentials fails with exit code 2" { + # No credentials — config dir is empty (except config.json) + run vector auth status + assert_failure + assert_exit_code 2 + assert_output_contains "Not logged in" +} + + +# --- auth logout --- + +@test "auth logout succeeds" { + run vector auth logout + assert_success + assert_output_contains "Logged out successfully" +} + +@test "auth logout without credentials succeeds (idempotent)" { + run vector auth logout + assert_success + assert_output_contains "Logged out successfully" +} + + +# --- VECTOR_API_KEY env var --- + +@test "VECTOR_API_KEY env var is used when no stored credentials exist" { + export VECTOR_API_KEY="env-token-abc" + run vector auth status + assert_success + assert_output_contains "env" +} + +@test "VECTOR_API_KEY env var is overridden by --token flag" { + export VECTOR_API_KEY="env-token" + run vector auth status --token flag-token + assert_success + assert_output_contains "flag" +} diff --git a/e2e/backup.bats b/e2e/backup.bats new file mode 100644 index 0000000..60cb729 --- 
/dev/null +++ b/e2e/backup.bats @@ -0,0 +1,150 @@ +#!/usr/bin/env bats +# backup.bats - E2E tests for vector backup commands + +load test_helper + + +# --- backup list --- + +@test "backup list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector backup list + assert_success + is_valid_json +} + +@test "backup list --no-json returns table output" { + create_credentials "test-token" + run vector backup list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" +} + +@test "backup list --json returns valid JSON" { + create_credentials "test-token" + run vector backup list --json + assert_success + is_valid_json +} + + +# --- backup show --- + +@test "backup show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector backup show 01JTEST000000000000BACKUP01 + assert_success + is_valid_json +} + +@test "backup show --no-json returns key-value output" { + create_credentials "test-token" + run vector backup show 01JTEST000000000000BACKUP01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- backup create --- + +@test "backup create with --site-id succeeds" { + create_credentials "test-token" + run vector backup create --site-id 01JTEST00000000000000SITE01 + assert_success +} + +@test "backup create with --environment-id succeeds" { + create_credentials "test-token" + run vector backup create --environment-id 01JTEST00000000000000000AA + assert_success +} + +@test "backup create --no-json returns text output" { + create_credentials "test-token" + run vector backup create --site-id 01JTEST00000000000000SITE01 --no-json + assert_success + assert_output_contains "Backup created" +} + +@test "backup create with --scope and --description succeeds" { + create_credentials "test-token" + run vector backup create --site-id 01JTEST00000000000000SITE01 --scope database --description "Test backup" + assert_success +} + +@test 
"backup create without --site-id or --environment-id fails" { + create_credentials "test-token" + run vector backup create + assert_failure +} + + +# --- backup download create --- + +@test "backup download create returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector backup download create 01JTEST000000000000BACKUP01 + assert_success + is_valid_json +} + +@test "backup download create --no-json returns key-value output" { + create_credentials "test-token" + run vector backup download create 01JTEST000000000000BACKUP01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- backup download status --- + +@test "backup download status returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector backup download status 01JTEST000000000000BACKUP01 01JTEST00000000000DOWNLOAD01 + assert_success + is_valid_json +} + +@test "backup download status --no-json returns key-value output" { + create_credentials "test-token" + run vector backup download status 01JTEST000000000000BACKUP01 01JTEST00000000000DOWNLOAD01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- auth required --- + +@test "backup list without auth fails with exit code 2" { + run vector backup list + assert_failure + assert_exit_code 2 +} + +@test "backup show without auth fails with exit code 2" { + run vector backup show 01JTEST000000000000BACKUP01 + assert_failure + assert_exit_code 2 +} + +@test "backup create without auth fails with exit code 2" { + run vector backup create --site-id 01JTEST00000000000000SITE01 + assert_failure + assert_exit_code 2 +} + +@test "backup download create without auth fails with exit code 2" { + run vector backup download create 01JTEST000000000000BACKUP01 + assert_failure + assert_exit_code 2 +} + +@test "backup download status without auth fails with exit code 2" { + run vector backup download status 
01JTEST000000000000BACKUP01 01JTEST00000000000DOWNLOAD01 + assert_failure + assert_exit_code 2 +} diff --git a/e2e/db.bats b/e2e/db.bats new file mode 100644 index 0000000..d8a0916 --- /dev/null +++ b/e2e/db.bats @@ -0,0 +1,137 @@ +#!/usr/bin/env bats +# db.bats - E2E tests for vector db commands + +load test_helper + + +# --- db export create --- + +@test "db export create succeeds" { + create_credentials "test-token" + run vector db export create 01JTEST00000000000000SITE01 + assert_success +} + +@test "db export create --no-json returns text output" { + create_credentials "test-token" + run vector db export create 01JTEST00000000000000SITE01 --no-json + assert_success + assert_output_contains "Export started" +} + +@test "db export create with --format succeeds" { + create_credentials "test-token" + run vector db export create 01JTEST00000000000000SITE01 --format sql + assert_success +} + + +# --- db export status --- + +@test "db export status returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector db export status 01JTEST00000000000000SITE01 01JTEST0000000000000EXPORT01 + assert_success + is_valid_json +} + +@test "db export status --no-json returns key-value output" { + create_credentials "test-token" + run vector db export status 01JTEST00000000000000SITE01 01JTEST0000000000000EXPORT01 --no-json + assert_success + assert_output_contains "Export ID" + assert_output_contains "Status" +} + + +# --- db import-session create --- + +@test "db import-session create returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector db import-session create 01JTEST00000000000000SITE01 + assert_success + is_valid_json +} + +@test "db import-session create --no-json returns key-value output" { + create_credentials "test-token" + run vector db import-session create 01JTEST00000000000000SITE01 --no-json + assert_success + assert_output_contains "Import ID" + assert_output_contains "Status" +} + +@test "db import-session 
create with flags succeeds" { + create_credentials "test-token" + run vector db import-session create 01JTEST00000000000000SITE01 --filename test.sql --drop-tables + assert_success +} + + +# --- db import-session run --- + +@test "db import-session run returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector db import-session run 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 + assert_success + is_valid_json +} + +@test "db import-session run --no-json returns key-value output" { + create_credentials "test-token" + run vector db import-session run 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 --no-json + assert_success + assert_output_contains "Import ID" + assert_output_contains "Status" +} + + +# --- db import-session status --- + +@test "db import-session status returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector db import-session status 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 + assert_success + is_valid_json +} + +@test "db import-session status --no-json returns key-value output" { + create_credentials "test-token" + run vector db import-session status 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 --no-json + assert_success + assert_output_contains "Import ID" + assert_output_contains "Status" +} + + +# --- auth required --- + +@test "db export create without auth fails with exit code 2" { + run vector db export create 01JTEST00000000000000SITE01 + assert_failure + assert_exit_code 2 +} + +@test "db export status without auth fails with exit code 2" { + run vector db export status 01JTEST00000000000000SITE01 01JTEST0000000000000EXPORT01 + assert_failure + assert_exit_code 2 +} + +@test "db import-session create without auth fails with exit code 2" { + run vector db import-session create 01JTEST00000000000000SITE01 + assert_failure + assert_exit_code 2 +} + +@test "db import-session run without auth fails with exit code 2" { + run vector db 
import-session run 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 + assert_failure + assert_exit_code 2 +} + +@test "db import-session status without auth fails with exit code 2" { + run vector db import-session status 01JTEST00000000000000SITE01 01JTEST000000000000IMPORT01 + assert_failure + assert_exit_code 2 +} diff --git a/e2e/deploy.bats b/e2e/deploy.bats new file mode 100644 index 0000000..5d6c5ff --- /dev/null +++ b/e2e/deploy.bats @@ -0,0 +1,124 @@ +#!/usr/bin/env bats +# deploy.bats - E2E tests for vector deploy commands + +load test_helper + + +# --- deploy list --- + +@test "deploy list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector deploy list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "deploy list --no-json returns table output" { + create_credentials "test-token" + run vector deploy list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" +} + +@test "deploy list --json returns valid JSON" { + create_credentials "test-token" + run vector deploy list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- deploy show --- + +@test "deploy show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector deploy show 01JTEST00000000000000DEP01 + assert_success + is_valid_json +} + +@test "deploy show --no-json returns key-value output" { + create_credentials "test-token" + run vector deploy show 01JTEST00000000000000DEP01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- deploy trigger --- + +@test "deploy trigger succeeds" { + create_credentials "test-token" + run vector deploy trigger 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "deploy trigger --no-json returns key-value output" { + create_credentials "test-token" + run vector deploy trigger 01JTEST00000000000000000AA --no-json + 
assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + +@test "deploy trigger with flags succeeds" { + create_credentials "test-token" + run vector deploy trigger 01JTEST00000000000000000AA --include-uploads --include-database=false + assert_success + is_valid_json +} + + +# --- deploy rollback --- + +@test "deploy rollback succeeds" { + create_credentials "test-token" + run vector deploy rollback 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "deploy rollback --no-json returns key-value output" { + create_credentials "test-token" + run vector deploy rollback 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + +@test "deploy rollback with --target succeeds" { + create_credentials "test-token" + run vector deploy rollback 01JTEST00000000000000000AA --target 01JTEST00000000000000DEP01 + assert_success + is_valid_json +} + + +# --- auth required --- + +@test "deploy list without auth fails with exit code 2" { + run vector deploy list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "deploy show without auth fails with exit code 2" { + run vector deploy show 01JTEST00000000000000DEP01 + assert_failure + assert_exit_code 2 +} + +@test "deploy trigger without auth fails with exit code 2" { + run vector deploy trigger 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "deploy rollback without auth fails with exit code 2" { + run vector deploy rollback 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} diff --git a/e2e/env.bats b/e2e/env.bats new file mode 100644 index 0000000..1c5b375 --- /dev/null +++ b/e2e/env.bats @@ -0,0 +1,279 @@ +#!/usr/bin/env bats +# env.bats - E2E tests for vector env commands (environments, secrets, db promote) + +load test_helper + + +# --- env list --- + +@test "env list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run 
vector env list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "env list --no-json returns table output" { + create_credentials "test-token" + run vector env list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" +} + +@test "env list --json returns valid JSON" { + create_credentials "test-token" + run vector env list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- env show --- + +@test "env show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector env show 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "env show --no-json returns key-value output" { + create_credentials "test-token" + run vector env show 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- env create --- + +@test "env create with required flags succeeds" { + create_credentials "test-token" + run vector env create 01JTEST00000000000000000AA --name staging --php-version 8.3 --custom-domain example.com + assert_success + is_valid_json +} + +@test "env create --no-json returns key-value output" { + create_credentials "test-token" + run vector env create 01JTEST00000000000000000AA --name staging --php-version 8.3 --custom-domain example.com --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + +@test "env create without --name fails with exit code 3" { + create_credentials "test-token" + run vector env create 01JTEST00000000000000000AA --php-version 8.3 + assert_failure + assert_exit_code 3 +} + +@test "env create without --php-version fails with exit code 3" { + create_credentials "test-token" + run vector env create 01JTEST00000000000000000AA --name staging + assert_failure + assert_exit_code 3 +} + + +# --- env update --- + +@test "env update with flags succeeds" { + create_credentials "test-token" 
+ run vector env update 01JTEST00000000000000000AA --tags "live,primary" + assert_success + is_valid_json +} + +@test "env update --no-json returns key-value output" { + create_credentials "test-token" + run vector env update 01JTEST00000000000000000AA --tags "live,primary" --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Name" +} + + +# --- env delete --- + +@test "env delete with --force succeeds" { + create_credentials "test-token" + run vector env delete 01JTEST00000000000000000AA --force + assert_success +} + +@test "env delete without --force aborts in non-TTY" { + create_credentials "test-token" + run vector env delete 01JTEST00000000000000000AA < /dev/null + assert_success + assert_output_contains "Aborted" +} + + +# --- env secret list --- + +@test "env secret list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector env secret list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "env secret list --no-json returns table output" { + create_credentials "test-token" + run vector env secret list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "KEY" +} + +@test "env secret list --json returns valid JSON" { + create_credentials "test-token" + run vector env secret list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- env secret show --- + +@test "env secret show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector env secret show 01JTEST00000000000000SEC01 + assert_success + is_valid_json +} + +@test "env secret show --no-json returns key-value output" { + create_credentials "test-token" + run vector env secret show 01JTEST00000000000000SEC01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" +} + + +# --- env secret create --- + +@test "env secret create with required flags succeeds" { + create_credentials 
"test-token" + run vector env secret create 01JTEST00000000000000000AA \ + --key MY_SECRET_KEY --value my-secret-value + assert_success + is_valid_json +} + +@test "env secret create --no-json returns key-value output" { + create_credentials "test-token" + run vector env secret create 01JTEST00000000000000000AA \ + --key MY_SECRET_KEY --value my-secret-value --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" +} + + +# --- env secret update --- + +@test "env secret update with flags succeeds" { + create_credentials "test-token" + run vector env secret update 01JTEST00000000000000SEC01 --value new-value + assert_success + is_valid_json +} + +@test "env secret update --no-json returns key-value output" { + create_credentials "test-token" + run vector env secret update 01JTEST00000000000000SEC01 --value new-value --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Key" +} + + +# --- env secret delete --- + +@test "env secret delete with --force succeeds" { + create_credentials "test-token" + run vector env secret delete 01JTEST00000000000000SEC01 --force + assert_success +} + +@test "env secret delete without --force aborts in non-TTY" { + create_credentials "test-token" + run vector env secret delete 01JTEST00000000000000SEC01 < /dev/null + assert_success + assert_output_contains "Aborted" +} + + +# --- env db promote --- + +@test "env db promote succeeds" { + create_credentials "test-token" + run vector env db promote 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "env db promote --no-json returns key-value output" { + create_credentials "test-token" + run vector env db promote 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- env db promote-status --- + +@test "env db promote-status succeeds" { + create_credentials "test-token" + run vector env db promote-status 01JTEST00000000000000000AA 
01JTEST0000000000000PROM01 + assert_success + is_valid_json +} + +@test "env db promote-status --no-json returns key-value output" { + create_credentials "test-token" + run vector env db promote-status 01JTEST00000000000000000AA 01JTEST0000000000000PROM01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- auth required --- + +@test "env list without auth fails with exit code 2" { + run vector env list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "env show without auth fails with exit code 2" { + run vector env show 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "env create without auth fails with exit code 2" { + run vector env create 01JTEST00000000000000000AA --name staging --php-version 8.3 + assert_failure + assert_exit_code 2 +} + +@test "env secret list without auth fails with exit code 2" { + run vector env secret list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "env db promote without auth fails with exit code 2" { + run vector env db promote 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} diff --git a/e2e/event.bats b/e2e/event.bats new file mode 100644 index 0000000..3ab453f --- /dev/null +++ b/e2e/event.bats @@ -0,0 +1,38 @@ +#!/usr/bin/env bats +# event.bats - E2E tests for vector event commands + +load test_helper + + +# --- event list --- + +@test "event list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector event list + assert_success + is_valid_json +} + +@test "event list --no-json returns table output" { + create_credentials "test-token" + run vector event list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "EVENT" +} + +@test "event list --json returns valid JSON" { + create_credentials "test-token" + run vector event list --json + assert_success + is_valid_json +} + + +# --- auth required --- + +@test "event 
list without auth fails with exit code 2" { + run vector event list + assert_failure + assert_exit_code 2 +} diff --git a/e2e/manpage.bats b/e2e/manpage.bats new file mode 100644 index 0000000..aa00321 --- /dev/null +++ b/e2e/manpage.bats @@ -0,0 +1,79 @@ +#!/usr/bin/env bats +# manpage.bats - Verify man page documents all CLI commands + +load test_helper + +MANPAGE="$(cd "$(dirname "$BATS_TEST_FILENAME")/.." && pwd)/man/man1/vector.1" + +# Extract subcommand names from "Available Commands:" in --help output +extract_subcommands() { + echo "$1" | sed -n '/^Available Commands:/,/^$/p' | \ + tail -n +2 | awk '{print $1}' | grep -v '^$' +} + +# Check if help output indicates a group command (has Available Commands) +is_group() { + echo "$1" | grep -q "^Available Commands:" +} + +# Discover all leaf command paths recursively +discover_commands() { + local prefix="$1" + local help_output + + if [ -z "$prefix" ]; then + help_output=$("$VECTOR_BINARY" --help 2>&1) + else + help_output=$("$VECTOR_BINARY" $prefix --help 2>&1) + fi + + if is_group "$help_output"; then + local subs + subs=$(extract_subcommands "$help_output") + for sub in $subs; do + case "$sub" in help|completion) continue ;; esac + if [ -z "$prefix" ]; then + discover_commands "$sub" + else + discover_commands "$prefix $sub" + fi + done + else + [ -n "$prefix" ] && echo "$prefix" + fi +} + +@test "man page exists" { + [ -f "$MANPAGE" ] +} + +@test "all CLI commands are documented in the man page" { + local commands + commands=$(discover_commands "") + + local manpage_text + manpage_text=$(cat "$MANPAGE") + + local missing="" + local count=0 + + while IFS= read -r cmd; do + [ -z "$cmd" ] && continue + count=$((count + 1)) + + # Normalize hyphens: troff uses \- for literal hyphens + local escaped + escaped=$(echo "$cmd" | sed 's/-/\\\\-/g') + + if ! echo "$manpage_text" | grep -qE "^\.B ${escaped}( |$)" && \ + ! 
echo "$manpage_text" | grep -qE "^\.SS ${escaped} "; then + missing="${missing} ${cmd}\n" + fi + done <<< "$commands" + + if [ -n "$missing" ]; then + echo "Commands missing from man page ($count total checked):" + printf "$missing" + return 1 + fi +} diff --git a/e2e/misc.bats b/e2e/misc.bats new file mode 100644 index 0000000..e19b166 --- /dev/null +++ b/e2e/misc.bats @@ -0,0 +1,74 @@ +#!/usr/bin/env bats +# misc.bats - E2E tests for vector php-versions and mcp commands + +load test_helper + + +# --- php-versions --- + +@test "php-versions returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector php-versions + assert_success + is_valid_json +} + +@test "php-versions --no-json returns table output" { + create_credentials "test-token" + run vector php-versions --no-json + assert_success + assert_output_contains "VERSION" +} + +@test "php-versions --json returns valid JSON" { + create_credentials "test-token" + run vector php-versions --json + assert_success + is_valid_json +} + +@test "php-versions without auth fails with exit code 2" { + run vector php-versions + assert_failure + assert_exit_code 2 +} + + +# --- mcp setup help --- + +@test "mcp setup --help shows usage" { + run vector mcp setup --help + assert_success + assert_output_contains "Configure the Vector MCP server" +} + +@test "mcp --help shows setup subcommand" { + run vector mcp --help + assert_success + assert_output_contains "setup" +} + +@test "mcp setup --help shows --target flag" { + run vector mcp setup --help + assert_success + assert_output_contains "--target" +} + +@test "mcp setup --help shows --force flag" { + run vector mcp setup --help + assert_success + assert_output_contains "--force" +} + +@test "mcp setup --help shows --global flag" { + run vector mcp setup --help + assert_success + assert_output_contains "--global" +} + +@test "mcp setup --global without --target code fails" { + create_credentials "test-token" + run vector mcp setup --global + 
assert_failure + assert_output_contains "only applies" +} diff --git a/e2e/output.bats b/e2e/output.bats new file mode 100644 index 0000000..2225338 --- /dev/null +++ b/e2e/output.bats @@ -0,0 +1,228 @@ +#!/usr/bin/env bats +# output.bats - E2E tests for output formatting, global flags, and error handling + +load test_helper + + +# --- --json flag forces JSON output --- + +@test "--json flag forces JSON output on site list" { + create_credentials "test-token" + run vector site list --json + assert_success + is_valid_json +} + +@test "--json flag forces JSON output on auth status" { + create_credentials "test-token" + run vector auth status --json + assert_success + is_valid_json +} + + +# --- --no-json flag forces table output --- + +@test "--no-json flag forces table output on site list" { + create_credentials "test-token" + run vector site list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" + # Table output should not be valid JSON + ! is_valid_json +} + +@test "--no-json flag forces table output on auth status" { + create_credentials "test-token" + run vector auth status --no-json + assert_success + assert_output_contains "Token source" + assert_output_contains "API URL" + ! 
is_valid_json +} + + +# --- Piped output (non-TTY) defaults to JSON --- + +@test "non-TTY output defaults to JSON for site list" { + create_credentials "test-token" + # BATS runs without a TTY, so default output should be JSON + run vector site list + assert_success + is_valid_json +} + +@test "non-TTY output defaults to JSON for account show" { + create_credentials "test-token" + run vector account show + assert_success + is_valid_json +} + + +# --- Error responses include structured error messages on stderr --- + +@test "auth error includes structured error message" { + # No credentials — should fail with auth error + run vector site list + assert_failure + assert_output_contains "Error:" + assert_output_contains "Authentication required" +} + +@test "validation error includes structured error message" { + create_credentials "test-token" + # site create without --customer-id triggers client-side validation + run vector site create + assert_failure + assert_output_contains "Error:" + assert_output_contains "--customer-id is required" +} + +@test "network error includes structured error message" { + # Point at a non-existent server + create_config "http://127.0.0.1:1" + create_credentials "test-token" + run vector auth login --token test-token + assert_failure + assert_output_contains "Error:" + assert_output_contains "Network error" +} + + +# --- --version prints version string --- + +@test "--version prints version string" { + run vector --version + assert_success + assert_output_contains "vector v" +} + +@test "--version output contains build info" { + run vector --version + assert_success + # Format: "vector v () built " + assert_output_contains "built" +} + + +# --- --help prints help text --- + +@test "--help prints help text" { + run vector --help + assert_success + assert_output_contains "Usage:" + assert_output_contains "vector" + assert_output_contains "Available Commands:" +} + +@test "--help shows global flags" { + run vector --help + assert_success + 
assert_output_contains "--json" + assert_output_contains "--no-json" + assert_output_contains "--token" + assert_output_contains "--version" +} + +@test "--help shows available command groups" { + run vector --help + assert_success + assert_output_contains "auth" + assert_output_contains "site" + assert_output_contains "env" + assert_output_contains "deploy" +} + +@test "subcommand --help shows command usage" { + run vector site --help + assert_success + assert_output_contains "Usage:" + assert_output_contains "site" +} + + +# --- Invalid commands show usage hint and exit non-zero --- + +@test "invalid command exits non-zero" { + run vector nonexistentcommand + assert_failure +} + +@test "invalid command shows error message" { + run vector nonexistentcommand + assert_failure + assert_output_contains "Error:" + assert_output_contains "unknown command" +} + +@test "invalid subcommand shows help text" { + # Cobra shows help for unknown subcommands of command groups (exit 0) + run vector site nonexistentsubcmd + assert_success + assert_output_contains "Usage:" + assert_output_contains "Available Commands:" +} + + +# --- Exit codes match expected values --- + +# Exit code 1 = generic/config errors +@test "exit code 1 for unknown command" { + run vector nonexistentcommand + assert_exit_code 1 +} + +# Exit code 2 = auth errors +@test "exit code 2 for unauthenticated request" { + # No credentials created + run vector site list + assert_exit_code 2 +} + +@test "exit code 2 for auth status without credentials" { + run vector auth status + assert_exit_code 2 +} + +# Exit code 3 = validation errors +@test "exit code 3 for missing required flag" { + create_credentials "test-token" + # site create requires --customer-id + run vector site create + assert_exit_code 3 +} + +@test "exit code 3 for conflicting flags" { + create_credentials "test-token" + # env update with both --custom-domain and --clear-custom-domain + run vector env update 01JTEST00000000000000000AA --custom-domain 
foo.com --clear-custom-domain + assert_exit_code 3 +} + +# Exit code 4 = not found (404 from API) +@test "exit code 4 for API 404 response" { + create_credentials "test-token" + # Point API URL at Prism with a prefix that won't match any spec route + # Prism returns 404 for unmatched routes + create_config "$PRISM_URL/nonexistent" + run vector site list + assert_exit_code 4 +} + +# Exit code 5 = server/network errors +@test "exit code 5 for network error on auth login" { + # Point at a port with nothing listening + create_config "http://127.0.0.1:1" + create_credentials "test-token" + run vector auth login --token test-token + assert_exit_code 5 +} + +@test "exit code 5 for network error on auth status" { + create_config "http://127.0.0.1:1" + create_credentials "test-token" + run vector auth status + assert_exit_code 5 +} diff --git a/e2e/restore.bats b/e2e/restore.bats new file mode 100644 index 0000000..14989c0 --- /dev/null +++ b/e2e/restore.bats @@ -0,0 +1,90 @@ +#!/usr/bin/env bats +# restore.bats - E2E tests for vector restore commands + +load test_helper + + +# --- restore list --- + +@test "restore list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector restore list + assert_success + is_valid_json +} + +@test "restore list --no-json returns table output" { + create_credentials "test-token" + run vector restore list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" +} + +@test "restore list --json returns valid JSON" { + create_credentials "test-token" + run vector restore list --json + assert_success + is_valid_json +} + + +# --- restore show --- + +@test "restore show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector restore show 01JTEST0000000000000RESTORE01 + assert_success + is_valid_json +} + +@test "restore show --no-json returns key-value output" { + create_credentials "test-token" + run vector restore show 01JTEST0000000000000RESTORE01 
--no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- restore create --- + +@test "restore create succeeds" { + create_credentials "test-token" + run vector restore create 01JTEST000000000000BACKUP01 + assert_success +} + +@test "restore create --no-json returns text output" { + create_credentials "test-token" + run vector restore create 01JTEST000000000000BACKUP01 --no-json + assert_success + assert_output_contains "Restore initiated" +} + +@test "restore create with flags succeeds" { + create_credentials "test-token" + run vector restore create 01JTEST000000000000BACKUP01 --drop-tables --disable-foreign-keys + assert_success +} + + +# --- auth required --- + +@test "restore list without auth fails with exit code 2" { + run vector restore list + assert_failure + assert_exit_code 2 +} + +@test "restore show without auth fails with exit code 2" { + run vector restore show 01JTEST0000000000000RESTORE01 + assert_failure + assert_exit_code 2 +} + +@test "restore create without auth fails with exit code 2" { + run vector restore create 01JTEST000000000000BACKUP01 + assert_failure + assert_exit_code 2 +} diff --git a/e2e/run.sh b/e2e/run.sh new file mode 100755 index 0000000..a886c28 --- /dev/null +++ b/e2e/run.sh @@ -0,0 +1,132 @@ +#!/usr/bin/env bash +# E2E test runner for vector-cli +# Builds the binary, starts Prism mock server, runs BATS tests, cleans up. + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)" +SPEC_FILE="$SCRIPT_DIR/openapi.yaml" +BINARY="$PROJECT_ROOT/bin/vector" + +# --- Dependency checks --- + +if ! command -v bats &>/dev/null; then + echo "Error: bats not found. Install with: brew install bats-core (macOS) or from https://github.com/bats-core/bats-core" >&2 + exit 1 +fi + +if ! command -v npx &>/dev/null; then + echo "Error: npx not found. Install Node.js to use Prism mock server." >&2 + exit 1 +fi + +if ! 
command -v jq &>/dev/null; then + echo "Error: jq not found. Install with: brew install jq (macOS) or apt-get install jq (Linux)" >&2 + exit 1 +fi + +if ! command -v gh &>/dev/null; then + echo "Error: gh not found. Install with: brew install gh (macOS) or from https://cli.github.com" >&2 + exit 1 +fi + +# --- Download OpenAPI spec if missing --- + +if [[ ! -f "$SPEC_FILE" ]]; then + echo "Downloading OpenAPI spec from GitHub..." + if ! gh api repos/built-fast/builtfast.dev/contents/api/openapi.yaml --header "Accept: application/vnd.github.raw+json" > "$SPEC_FILE"; then + rm -f "$SPEC_FILE" + echo "Error: failed to download OpenAPI spec from GitHub" >&2 + exit 1 + fi +fi + +# --- Build --- + +if [[ "${VECTOR_E2E_SKIP_BUILD:-}" != "1" ]]; then + echo "Building vector binary..." + make -C "$PROJECT_ROOT" build +else + if [[ ! -x "$BINARY" ]]; then + echo "Error: VECTOR_E2E_SKIP_BUILD=1 but no binary at $BINARY" >&2 + exit 1 + fi + echo "Using pre-built binary: $BINARY" +fi + +# --- Find available port --- + +find_available_port() { + if command -v python3 &>/dev/null; then + python3 -c 'import socket; s=socket.socket(); s.bind(("",0)); print(s.getsockname()[1]); s.close()' + elif command -v python &>/dev/null; then + python -c 'import socket; s=socket.socket(); s.bind(("",0)); print(s.getsockname()[1]); s.close()' + else + # Fallback: pick a random port in the dynamic range + echo $(( (RANDOM % 16383) + 49152 )) + fi +} + +PRISM_PORT=$(find_available_port) + +# --- Prism lifecycle --- + +PRISM_PID="" +PRISM_LOG="$(mktemp)" + +cleanup() { + if [[ -n "$PRISM_PID" ]]; then + kill "$PRISM_PID" 2>/dev/null || true + wait "$PRISM_PID" 2>/dev/null || true + PRISM_PID="" + fi + rm -f "$PRISM_LOG" +} + +trap cleanup EXIT INT TERM + +echo "Starting Prism mock server on port $PRISM_PORT..." +npx @stoplight/prism-cli mock "$SPEC_FILE" \ + --port "$PRISM_PORT" \ + --host 127.0.0.1 \ + --dynamic \ + >"$PRISM_LOG" 2>&1 & +PRISM_PID=$! 
+ +# Wait for Prism to be ready (up to 30 seconds) +TRIES=0 +MAX_TRIES=60 +while ! curl -so /dev/null -w '' "http://127.0.0.1:$PRISM_PORT/" 2>/dev/null; do + if ! kill -0 "$PRISM_PID" 2>/dev/null; then + echo "Error: Prism failed to start. Log:" >&2 + cat "$PRISM_LOG" >&2 + exit 1 + fi + TRIES=$((TRIES + 1)) + if [[ "$TRIES" -ge "$MAX_TRIES" ]]; then + echo "Error: Prism did not become ready within 30s. Log:" >&2 + cat "$PRISM_LOG" >&2 + exit 1 + fi + sleep 0.5 +done + +echo "Prism ready on http://127.0.0.1:$PRISM_PORT" + +# Export for test_helper.bash +export PRISM_URL="http://127.0.0.1:$PRISM_PORT" +export VECTOR_BINARY="$BINARY" + +# --- Run BATS --- + +BATS_FILES=("$SCRIPT_DIR"/*.bats) + +# If no .bats files exist, the glob returns the literal pattern +if [[ ${#BATS_FILES[@]} -eq 0 ]] || [[ "${BATS_FILES[0]}" == "$SCRIPT_DIR/*.bats" ]]; then + echo "No .bats test files found — exiting cleanly." + exit 0 +fi + +echo "Running BATS tests..." +bats "${BATS_FILES[@]}" diff --git a/e2e/site.bats b/e2e/site.bats new file mode 100644 index 0000000..a5e9715 --- /dev/null +++ b/e2e/site.bats @@ -0,0 +1,241 @@ +#!/usr/bin/env bats +# site.bats - E2E tests for vector site commands + +load test_helper + + +# --- site list --- + +@test "site list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector site list + assert_success + is_valid_json +} + +@test "site list --no-json returns table output with site data" { + create_credentials "test-token" + run vector site list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "STATUS" +} + +@test "site list --json returns valid JSON" { + create_credentials "test-token" + run vector site list --json + assert_success + is_valid_json +} + + +# --- site show --- + +@test "site show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector site show 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "site show --no-json 
returns key-value output" { + create_credentials "test-token" + run vector site show 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- site create --- + +@test "site create with required flags succeeds" { + create_credentials "test-token" + run vector site create --customer-id cust-123 --php-version 8.3 + assert_success + is_valid_json +} + +@test "site create --no-json returns key-value output" { + create_credentials "test-token" + run vector site create --customer-id cust-123 --php-version 8.3 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + +@test "site create without --customer-id fails with exit code 3" { + create_credentials "test-token" + run vector site create + assert_failure + assert_exit_code 3 +} + + +# --- site update --- + +@test "site update with flags succeeds" { + create_credentials "test-token" + run vector site update 01JTEST00000000000000000AA --customer-id new-cust + assert_success + is_valid_json +} + +@test "site update --no-json returns key-value output" { + create_credentials "test-token" + run vector site update 01JTEST00000000000000000AA --customer-id new-cust --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Customer ID" +} + + +# --- site delete --- + +@test "site delete with --force succeeds" { + create_credentials "test-token" + run vector site delete 01JTEST00000000000000000AA --force + assert_success +} + +@test "site delete without --force aborts in non-TTY" { + create_credentials "test-token" + run vector site delete 01JTEST00000000000000000AA < /dev/null + assert_success + assert_output_contains "Aborted" +} + + +# --- site clone --- + +@test "site clone succeeds" { + create_credentials "test-token" + run vector site clone 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "site clone --no-json returns key-value output" { + create_credentials 
"test-token" + run vector site clone 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Status" +} + + +# --- site suspend / unsuspend --- + +@test "site suspend succeeds" { + create_credentials "test-token" + run vector site suspend 01JTEST00000000000000000AA + assert_success +} + +@test "site unsuspend succeeds" { + create_credentials "test-token" + run vector site unsuspend 01JTEST00000000000000000AA + assert_success +} + + +# --- site reset-sftp-password / reset-db-password --- + +@test "site reset-sftp-password succeeds" { + create_credentials "test-token" + run vector site reset-sftp-password 01JTEST00000000000000000AA + assert_success +} + +@test "site reset-db-password succeeds" { + create_credentials "test-token" + run vector site reset-db-password 01JTEST00000000000000000AA + assert_success +} + + +# --- site purge-cache --- + +@test "site purge-cache succeeds" { + create_credentials "test-token" + run vector site purge-cache 01JTEST00000000000000000AA + assert_success +} + + +# --- site logs --- + +@test "site logs succeeds" { + create_credentials "test-token" + run vector site logs 01JTEST00000000000000000AA + assert_success +} + + +# --- site ssh-key --- + +@test "site ssh-key list succeeds" { + create_credentials "test-token" + run vector site ssh-key list 01JTEST00000000000000000AA + assert_success +} + +@test "site ssh-key list --json returns valid JSON" { + create_credentials "test-token" + run vector site ssh-key list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + +@test "site ssh-key add succeeds" { + create_credentials "test-token" + run vector site ssh-key add 01JTEST00000000000000000AA \ + --name "test-key" \ + --public-key "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAITestKeyData test@example.com" + assert_success +} + +@test "site ssh-key remove succeeds" { + create_credentials "test-token" + run vector site ssh-key remove 01JTEST00000000000000000AA 
01JTEST00000000000000KEY01 + assert_success +} + + +# --- auth required --- + +@test "site list without auth fails with exit code 2" { + run vector site list + assert_failure + assert_exit_code 2 +} + +@test "site show without auth fails with exit code 2" { + run vector site show 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "site create without auth fails with exit code 2" { + run vector site create --customer-id cust-123 + assert_failure + assert_exit_code 2 +} + + +# --- help --- + +@test "site --help renders correctly" { + run vector site --help + assert_success + assert_output_contains "Manage Vector sites" + assert_output_contains "list" + assert_output_contains "show" + assert_output_contains "create" + assert_output_contains "update" + assert_output_contains "delete" + assert_output_contains "clone" + assert_output_contains "suspend" + assert_output_contains "unsuspend" + assert_output_contains "ssh-key" +} diff --git a/e2e/ssl.bats b/e2e/ssl.bats new file mode 100644 index 0000000..8e7aba1 --- /dev/null +++ b/e2e/ssl.bats @@ -0,0 +1,68 @@ +#!/usr/bin/env bats +# ssl.bats - E2E tests for vector ssl commands + +load test_helper + + +# --- ssl status --- + +@test "ssl status returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector ssl status 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "ssl status --no-json returns key-value output" { + create_credentials "test-token" + run vector ssl status 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "Status" + assert_output_contains "Production" +} + +@test "ssl status --json returns valid JSON" { + create_credentials "test-token" + run vector ssl status 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- ssl nudge --- + +@test "ssl nudge returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector ssl nudge 01JTEST00000000000000000AA + 
assert_success + is_valid_json +} + +@test "ssl nudge --no-json returns key-value output" { + create_credentials "test-token" + run vector ssl nudge 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "Status" +} + +@test "ssl nudge with --retry succeeds" { + create_credentials "test-token" + run vector ssl nudge 01JTEST00000000000000000AA --retry + assert_success + is_valid_json +} + + +# --- auth required --- + +@test "ssl status without auth fails with exit code 2" { + run vector ssl status 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "ssl nudge without auth fails with exit code 2" { + run vector ssl nudge 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} diff --git a/e2e/test_helper.bash b/e2e/test_helper.bash new file mode 100644 index 0000000..9956967 --- /dev/null +++ b/e2e/test_helper.bash @@ -0,0 +1,119 @@ +#!/usr/bin/env bash +# test_helper.bash - Shared test utilities for vector-cli E2E tests + +# --- Setup / Teardown --- + +setup() { + # Create isolated temp config directory + TEST_TEMP_DIR="$(mktemp -d)" + TEST_CONFIG_DIR="$TEST_TEMP_DIR/config" + mkdir -p "$TEST_CONFIG_DIR" + + # Point vector at the temp config directory + export VECTOR_CONFIG_DIR="$TEST_CONFIG_DIR" + + # Set API URL to the Prism mock server + export VECTOR_API_URL="$PRISM_URL" + + # Ensure the binary is on PATH + export PATH="$(dirname "$VECTOR_BINARY"):$PATH" + + # Disable OS keyring — not available on CI (no D-Bus Secret Service on Linux) + export VECTOR_NO_KEYRING=1 + + # Clear env vars that could interfere + unset VECTOR_API_KEY + unset XDG_CONFIG_HOME + + # Write config.json pointing at Prism + create_config "$PRISM_URL" +} + +# create_credentials TOKEN +# Sets VECTOR_API_KEY so the CLI picks up the token via env var. +# Replaces the old file-based credential helper now that tokens +# are stored in the OS keyring (which is unavailable on CI). 
+create_credentials() { + export VECTOR_API_KEY="$1" +} + +teardown() { + if [[ -d "${TEST_TEMP_DIR:-}" ]]; then + rm -rf "$TEST_TEMP_DIR" + fi +} + +# --- Fixture helpers --- + +# create_config API_URL +# Writes config.json with the given API URL. +create_config() { + local api_url="${1:-$PRISM_URL}" + cat > "$TEST_CONFIG_DIR/config.json" </dev/null +} + +assert_json_value() { + local jq_path="$1" + local expected="$2" + local actual + actual=$(echo "$output" | jq -r "$jq_path") + + if [[ "$actual" != "$expected" ]]; then + echo "JSON path $jq_path: expected '$expected', got '$actual'" + echo "Full output: $output" + return 1 + fi +} diff --git a/e2e/waf.bats b/e2e/waf.bats new file mode 100644 index 0000000..e731b29 --- /dev/null +++ b/e2e/waf.bats @@ -0,0 +1,318 @@ +#!/usr/bin/env bats +# waf.bats - E2E tests for vector waf commands + +load test_helper + + +# --- waf rate-limit list --- + +@test "waf rate-limit list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector waf rate-limit list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "waf rate-limit list --no-json returns table output" { + create_credentials "test-token" + run vector waf rate-limit list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "NAME" +} + +@test "waf rate-limit list --json returns valid JSON" { + create_credentials "test-token" + run vector waf rate-limit list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- waf rate-limit show --- + +@test "waf rate-limit show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector waf rate-limit show 01JTEST00000000000000000AA 12345 + assert_success + is_valid_json +} + +@test "waf rate-limit show --no-json returns key-value output" { + create_credentials "test-token" + run vector waf rate-limit show 01JTEST00000000000000000AA 12345 --no-json + assert_success + 
assert_output_contains "ID" + assert_output_contains "Name" +} + + +# --- waf rate-limit create --- + +@test "waf rate-limit create succeeds" { + create_credentials "test-token" + run vector waf rate-limit create 01JTEST00000000000000000AA \ + --name "Test Rate Limit" \ + --request-count 100 \ + --timeframe 1 \ + --block-time 60 + assert_success + is_valid_json +} + +@test "waf rate-limit create --no-json returns key-value output" { + create_credentials "test-token" + run vector waf rate-limit create 01JTEST00000000000000000AA \ + --name "Test Rate Limit" \ + --request-count 100 \ + --timeframe 1 \ + --block-time 60 \ + --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Name" +} + + +# --- waf rate-limit update --- + +@test "waf rate-limit update succeeds" { + create_credentials "test-token" + run vector waf rate-limit update 01JTEST00000000000000000AA 12345 \ + --name "Updated Rate Limit" + assert_success + is_valid_json +} + +@test "waf rate-limit update --no-json returns key-value output" { + create_credentials "test-token" + run vector waf rate-limit update 01JTEST00000000000000000AA 12345 \ + --name "Updated Rate Limit" \ + --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "Name" +} + + +# --- waf rate-limit delete --- + +@test "waf rate-limit delete succeeds" { + create_credentials "test-token" + run vector waf rate-limit delete 01JTEST00000000000000000AA 12345 + assert_success +} + + +# --- waf blocked-ip list --- + +@test "waf blocked-ip list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector waf blocked-ip list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "waf blocked-ip list --no-json returns table output" { + create_credentials "test-token" + run vector waf blocked-ip list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "IP" +} + +@test "waf blocked-ip list --json returns valid JSON" { + 
create_credentials "test-token" + run vector waf blocked-ip list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- waf blocked-ip add --- + +@test "waf blocked-ip add succeeds" { + create_credentials "test-token" + run vector waf blocked-ip add 01JTEST00000000000000000AA 192.0.2.1 + assert_success +} + + +# --- waf blocked-ip remove --- + +@test "waf blocked-ip remove succeeds" { + create_credentials "test-token" + run vector waf blocked-ip remove 01JTEST00000000000000000AA 192.0.2.1 + assert_success +} + + +# --- waf blocked-referrer list --- + +@test "waf blocked-referrer list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector waf blocked-referrer list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "waf blocked-referrer list --no-json returns table output" { + create_credentials "test-token" + run vector waf blocked-referrer list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "HOSTNAME" +} + +@test "waf blocked-referrer list --json returns valid JSON" { + create_credentials "test-token" + run vector waf blocked-referrer list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- waf blocked-referrer add --- + +@test "waf blocked-referrer add succeeds" { + create_credentials "test-token" + run vector waf blocked-referrer add 01JTEST00000000000000000AA spam.example.com + assert_success +} + + +# --- waf blocked-referrer remove --- + +@test "waf blocked-referrer remove succeeds" { + create_credentials "test-token" + run vector waf blocked-referrer remove 01JTEST00000000000000000AA spam.example.com + assert_success +} + + +# --- waf allowed-referrer list --- + +@test "waf allowed-referrer list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector waf allowed-referrer list 01JTEST00000000000000000AA + assert_success + is_valid_json +} + +@test "waf allowed-referrer list --no-json returns 
table output" { + create_credentials "test-token" + run vector waf allowed-referrer list 01JTEST00000000000000000AA --no-json + assert_success + assert_output_contains "HOSTNAME" +} + +@test "waf allowed-referrer list --json returns valid JSON" { + create_credentials "test-token" + run vector waf allowed-referrer list 01JTEST00000000000000000AA --json + assert_success + is_valid_json +} + + +# --- waf allowed-referrer add --- + +@test "waf allowed-referrer add succeeds" { + create_credentials "test-token" + run vector waf allowed-referrer add 01JTEST00000000000000000AA example.com + assert_success +} + + +# --- waf allowed-referrer remove --- + +@test "waf allowed-referrer remove succeeds" { + create_credentials "test-token" + run vector waf allowed-referrer remove 01JTEST00000000000000000AA example.com + assert_success +} + + +# --- auth required --- + +@test "waf rate-limit list without auth fails with exit code 2" { + run vector waf rate-limit list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "waf rate-limit show without auth fails with exit code 2" { + run vector waf rate-limit show 01JTEST00000000000000000AA 12345 + assert_failure + assert_exit_code 2 +} + +@test "waf rate-limit create without auth fails with exit code 2" { + run vector waf rate-limit create 01JTEST00000000000000000AA \ + --name "Test" --request-count 100 --timeframe 1 --block-time 60 + assert_failure + assert_exit_code 2 +} + +@test "waf rate-limit update without auth fails with exit code 2" { + run vector waf rate-limit update 01JTEST00000000000000000AA 12345 --name "Test" + assert_failure + assert_exit_code 2 +} + +@test "waf rate-limit delete without auth fails with exit code 2" { + run vector waf rate-limit delete 01JTEST00000000000000000AA 12345 + assert_failure + assert_exit_code 2 +} + +@test "waf blocked-ip list without auth fails with exit code 2" { + run vector waf blocked-ip list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + 
+@test "waf blocked-ip add without auth fails with exit code 2" { + run vector waf blocked-ip add 01JTEST00000000000000000AA 192.0.2.1 + assert_failure + assert_exit_code 2 +} + +@test "waf blocked-ip remove without auth fails with exit code 2" { + run vector waf blocked-ip remove 01JTEST00000000000000000AA 192.0.2.1 + assert_failure + assert_exit_code 2 +} + +@test "waf blocked-referrer list without auth fails with exit code 2" { + run vector waf blocked-referrer list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "waf blocked-referrer add without auth fails with exit code 2" { + run vector waf blocked-referrer add 01JTEST00000000000000000AA spam.example.com + assert_failure + assert_exit_code 2 +} + +@test "waf blocked-referrer remove without auth fails with exit code 2" { + run vector waf blocked-referrer remove 01JTEST00000000000000000AA spam.example.com + assert_failure + assert_exit_code 2 +} + +@test "waf allowed-referrer list without auth fails with exit code 2" { + run vector waf allowed-referrer list 01JTEST00000000000000000AA + assert_failure + assert_exit_code 2 +} + +@test "waf allowed-referrer add without auth fails with exit code 2" { + run vector waf allowed-referrer add 01JTEST00000000000000000AA example.com + assert_failure + assert_exit_code 2 +} + +@test "waf allowed-referrer remove without auth fails with exit code 2" { + run vector waf allowed-referrer remove 01JTEST00000000000000000AA example.com + assert_failure + assert_exit_code 2 +} diff --git a/e2e/webhook.bats b/e2e/webhook.bats new file mode 100644 index 0000000..c421793 --- /dev/null +++ b/e2e/webhook.bats @@ -0,0 +1,143 @@ +#!/usr/bin/env bats +# webhook.bats - E2E tests for vector webhook commands + +load test_helper + + +# --- webhook list --- + +@test "webhook list returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector webhook list + assert_success + is_valid_json +} + +@test "webhook list --no-json returns table 
output" { + create_credentials "test-token" + run vector webhook list --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "URL" +} + +@test "webhook list --json returns valid JSON" { + create_credentials "test-token" + run vector webhook list --json + assert_success + is_valid_json +} + + +# --- webhook show --- + +@test "webhook show returns valid JSON (default non-TTY)" { + create_credentials "test-token" + run vector webhook show 01JTEST0000000000000WEBHOOK01 + assert_success + is_valid_json +} + +@test "webhook show --no-json returns key-value output" { + create_credentials "test-token" + run vector webhook show 01JTEST0000000000000WEBHOOK01 --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "URL" + assert_output_contains "Enabled" +} + + +# --- webhook create --- + +@test "webhook create succeeds" { + create_credentials "test-token" + run vector webhook create --url https://example.com/webhook --events site.created,deployment.completed + assert_success +} + +@test "webhook create --no-json returns key-value output" { + create_credentials "test-token" + run vector webhook create --url https://example.com/webhook --events site.created --no-json + assert_success + assert_output_contains "ID" + assert_output_contains "URL" +} + +@test "webhook create with --type slack succeeds" { + create_credentials "test-token" + run vector webhook create --url https://hooks.slack.com/services/test --events site.created --type slack + assert_success +} + + +# --- webhook update --- + +@test "webhook update succeeds" { + create_credentials "test-token" + run vector webhook update 01JTEST0000000000000WEBHOOK01 --url https://example.com/new-webhook + assert_success +} + +@test "webhook update --no-json returns key-value output" { + create_credentials "test-token" + run vector webhook update 01JTEST0000000000000WEBHOOK01 --url https://example.com/new-webhook --no-json + assert_success + assert_output_contains "ID" + 
assert_output_contains "URL" +} + +@test "webhook update with --enabled succeeds" { + create_credentials "test-token" + run vector webhook update 01JTEST0000000000000WEBHOOK01 --enabled=false + assert_success +} + + +# --- webhook delete --- + +@test "webhook delete succeeds" { + create_credentials "test-token" + run vector webhook delete 01JTEST0000000000000WEBHOOK01 + assert_success +} + +@test "webhook delete --no-json returns success message" { + create_credentials "test-token" + run vector webhook delete 01JTEST0000000000000WEBHOOK01 --no-json + assert_success + assert_output_contains "Webhook deleted successfully" +} + + +# --- auth required --- + +@test "webhook list without auth fails with exit code 2" { + run vector webhook list + assert_failure + assert_exit_code 2 +} + +@test "webhook show without auth fails with exit code 2" { + run vector webhook show 01JTEST0000000000000WEBHOOK01 + assert_failure + assert_exit_code 2 +} + +@test "webhook create without auth fails with exit code 2" { + run vector webhook create --url https://example.com/webhook --events site.created + assert_failure + assert_exit_code 2 +} + +@test "webhook update without auth fails with exit code 2" { + run vector webhook update 01JTEST0000000000000WEBHOOK01 --url https://example.com/new + assert_failure + assert_exit_code 2 +} + +@test "webhook delete without auth fails with exit code 2" { + run vector webhook delete 01JTEST0000000000000WEBHOOK01 + assert_failure + assert_exit_code 2 +} diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..58dddef --- /dev/null +++ b/go.mod @@ -0,0 +1,24 @@ +module github.com/built-fast/vector-cli + +go 1.26.1 + +require ( + github.com/itchyny/gojq v0.12.18 + github.com/spf13/cobra v1.10.2 + github.com/spf13/pflag v1.0.9 + github.com/stretchr/testify v1.11.1 + github.com/zalando/go-keyring v0.2.6 + golang.org/x/term v0.41.0 +) + +require ( + al.essio.dev/pkg/shellescape v1.5.1 // indirect + github.com/danieljoos/wincred v1.2.2 // indirect 
+ github.com/davecgh/go-spew v1.1.1 // indirect + github.com/godbus/dbus/v5 v5.1.0 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect + github.com/itchyny/timefmt-go v0.1.7 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + golang.org/x/sys v0.42.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..0c88c6c --- /dev/null +++ b/go.sum @@ -0,0 +1,39 @@ +al.essio.dev/pkg/shellescape v1.5.1 h1:86HrALUujYS/h+GtqoB26SBEdkWfmMI6FubjXlsXyho= +al.essio.dev/pkg/shellescape v1.5.1/go.mod h1:6sIqp7X2P6mThCQ7twERpZTuigpr6KbZWtls1U8I890= +github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g= +github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0= +github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= +github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 h1:El6M4kTTCOh6aBiKaUGG7oYTSPP8MxqL4YI3kZKwcP4= +github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510/go.mod h1:pupxD2MaaD3pAXIBCelhxNneeOaAeabZDe5s4K6zSpQ= +github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= +github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/itchyny/gojq v0.12.18 h1:gFGHyt/MLbG9n6dqnvlliiya2TaMMh6FFaR2b1H6Drc= +github.com/itchyny/gojq v0.12.18/go.mod h1:4hPoZ/3lN9fDL1D+aK7DY1f39XZpY9+1Xpjz8atrEkg= +github.com/itchyny/timefmt-go v0.1.7 h1:xyftit9Tbw+Dc/huSSPJaEmX1TVL8lw5vxjJLK4GMMA= +github.com/itchyny/timefmt-go v0.1.7/go.mod 
h1:5E46Q+zj7vbTgWY8o5YkMeYb4I6GeWLFnetPy5oBrAI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU= +github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4= +github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY= +github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY= +github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/zalando/go-keyring v0.2.6 h1:r7Yc3+H+Ux0+M72zacZoItR3UDxeWfKTcabvkI8ua9s= +github.com/zalando/go-keyring v0.2.6/go.mod h1:2TCrxYrbUNYfNS/Kgy/LSrkSQzZ5UPVH85RwfczwvcI= +go.yaml.in/yaml/v3 v3.0.4/go.mod h1:DhzuOOF2ATzADvBadXxruRBLzYTpT36CKvDb3+aBEFg= +golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo= +golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw= +golang.org/x/term v0.41.0 h1:QCgPso/Q3RTJx2Th4bDLqML4W6iJiaXFq2/ftQF13YU= +golang.org/x/term v0.41.0/go.mod h1:3pfBgksrReYfZ5lvYM0kSO0LIkAl4Yl2bXOkKP7Ec2A= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/api/api.go b/internal/api/api.go new 
file mode 100644 index 0000000..c72a3e2 --- /dev/null +++ b/internal/api/api.go @@ -0,0 +1,2 @@ +// Package api provides the HTTP client and error types. +package api diff --git a/internal/api/client.go b/internal/api/client.go new file mode 100644 index 0000000..16c8f35 --- /dev/null +++ b/internal/api/client.go @@ -0,0 +1,132 @@ +package api + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "net/http" + "net/url" + "os" + "strings" + + "github.com/built-fast/vector-cli/internal/version" +) + +// Client is an HTTP client for the Vector API. +type Client struct { + BaseURL string + Token string + UserAgent string + httpClient *http.Client +} + +// NewClient creates a new API client. If userAgent is empty, it defaults to +// "vector-cli/". +func NewClient(baseURL, token, userAgent string) *Client { + if userAgent == "" { + userAgent = "vector-cli/" + version.Version + } + return &Client{ + BaseURL: strings.TrimRight(baseURL, "/"), + Token: token, + UserAgent: userAgent, + httpClient: &http.Client{}, + } +} + +// Get performs a GET request to the given API path with optional query parameters. +func (c *Client) Get(ctx context.Context, path string, query url.Values) (*http.Response, error) { + reqURL := c.BaseURL + path + if len(query) > 0 { + reqURL += "?" + query.Encode() + } + req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqURL, nil) + if err != nil { + return nil, fmt.Errorf("creating GET request: %w", err) + } + return c.do(req) +} + +// Post performs a POST request with a JSON-encoded body. +func (c *Client) Post(ctx context.Context, path string, body any) (*http.Response, error) { + return c.jsonRequest(ctx, http.MethodPost, path, body) +} + +// Put performs a PUT request with a JSON-encoded body. +func (c *Client) Put(ctx context.Context, path string, body any) (*http.Response, error) { + return c.jsonRequest(ctx, http.MethodPut, path, body) +} + +// Delete performs a DELETE request to the given API path. 
+func (c *Client) Delete(ctx context.Context, path string) (*http.Response, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodDelete, c.BaseURL+path, nil) + if err != nil { + return nil, fmt.Errorf("creating DELETE request: %w", err) + } + return c.do(req) +} + +// PutFile uploads a file via PUT to the given URL (typically a presigned S3 URL). +// Unlike other methods, this sends the raw file content and does not add +// Authorization or Accept headers. +func (c *Client) PutFile(ctx context.Context, url string, file *os.File) (*http.Response, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file) + if err != nil { + return nil, fmt.Errorf("creating file upload request: %w", err) + } + req.Header.Set("User-Agent", c.UserAgent) + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, fmt.Errorf("executing file upload: %w", err) + } + if resp.StatusCode >= 300 || resp.StatusCode < 200 { + return nil, ParseErrorResponse(resp) + } + return resp, nil +} + +// jsonRequest is a helper that JSON-encodes a body and sends a request. +// When body is nil, the request is sent with no body and no Content-Type header. +func (c *Client) jsonRequest(ctx context.Context, method, path string, body any) (*http.Response, error) { + var buf bytes.Buffer + hasBody := body != nil + if hasBody { + if err := json.NewEncoder(&buf).Encode(body); err != nil { + return nil, fmt.Errorf("encoding request body: %w", err) + } + } + var req *http.Request + var err error + if hasBody { + req, err = http.NewRequestWithContext(ctx, method, c.BaseURL+path, &buf) + } else { + req, err = http.NewRequestWithContext(ctx, method, c.BaseURL+path, nil) + } + if err != nil { + return nil, fmt.Errorf("creating %s request: %w", method, err) + } + if hasBody { + req.Header.Set("Content-Type", "application/json") + } + return c.do(req) +} + +// do executes a request, adding standard headers and handling error responses. 
+func (c *Client) do(req *http.Request) (*http.Response, error) { + if c.Token != "" { + req.Header.Set("Authorization", "Bearer "+c.Token) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("User-Agent", c.UserAgent) + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, err + } + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, ParseErrorResponse(resp) + } + return resp, nil +} diff --git a/internal/api/client_test.go b/internal/api/client_test.go new file mode 100644 index 0000000..1057628 --- /dev/null +++ b/internal/api/client_test.go @@ -0,0 +1,276 @@ +package api + +import ( + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "net/url" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewClient_DefaultUserAgent(t *testing.T) { + c := NewClient("https://api.example.com", "tok", "") + assert.Contains(t, c.UserAgent, "vector-cli/") +} + +func TestNewClient_CustomUserAgent(t *testing.T) { + c := NewClient("https://api.example.com", "tok", "custom/1.0") + assert.Equal(t, "custom/1.0", c.UserAgent) +} + +func TestNewClient_TrimsTrailingSlash(t *testing.T) { + c := NewClient("https://api.example.com/", "tok", "") + assert.Equal(t, "https://api.example.com", c.BaseURL) +} + +func TestClient_HeaderInjection(t *testing.T) { + var gotHeaders http.Header + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotHeaders = r.Header.Clone() + w.WriteHeader(http.StatusOK) + })) + defer srv.Close() + + c := NewClient(srv.URL, "test-token", "vector-cli/test") + _, err := c.Get(context.Background(), "/test", nil) + require.NoError(t, err) + + assert.Equal(t, "Bearer test-token", gotHeaders.Get("Authorization")) + assert.Equal(t, "application/json", gotHeaders.Get("Accept")) + assert.Equal(t, "vector-cli/test", gotHeaders.Get("User-Agent")) +} + +func 
TestClient_NoAuthHeaderWithoutToken(t *testing.T) { + var gotHeaders http.Header + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotHeaders = r.Header.Clone() + w.WriteHeader(http.StatusOK) + })) + defer srv.Close() + + c := NewClient(srv.URL, "", "vector-cli/test") + _, err := c.Get(context.Background(), "/test", nil) + require.NoError(t, err) + + assert.Empty(t, gotHeaders.Get("Authorization")) +} + +func TestClient_Get(t *testing.T) { + var gotMethod, gotPath, gotQuery string + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + gotPath = r.URL.Path + gotQuery = r.URL.RawQuery + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"data":"ok"}`)) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + query := url.Values{"page": []string{"1"}, "limit": []string{"10"}} + resp, err := c.Get(context.Background(), "/api/v1/items", query) + require.NoError(t, err) + defer resp.Body.Close() + + assert.Equal(t, http.MethodGet, gotMethod) + assert.Equal(t, "/api/v1/items", gotPath) + assert.Contains(t, gotQuery, "page=1") + assert.Contains(t, gotQuery, "limit=10") + assert.Equal(t, http.StatusOK, resp.StatusCode) +} + +func TestClient_GetWithoutQuery(t *testing.T) { + var gotQuery string + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotQuery = r.URL.RawQuery + w.WriteHeader(http.StatusOK) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + resp, err := c.Get(context.Background(), "/test", nil) + require.NoError(t, err) + defer resp.Body.Close() + + assert.Empty(t, gotQuery) +} + +func TestClient_Post(t *testing.T) { + var gotMethod, gotContentType string + var gotBody map[string]any + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + gotContentType = r.Header.Get("Content-Type") + _ = json.NewDecoder(r.Body).Decode(&gotBody) 
+ w.WriteHeader(http.StatusCreated) + _, _ = w.Write([]byte(`{"data":{"id":1}}`)) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + body := map[string]string{"name": "test", "email": "test@example.com"} + resp, err := c.Post(context.Background(), "/api/v1/items", body) + require.NoError(t, err) + defer resp.Body.Close() + + assert.Equal(t, http.MethodPost, gotMethod) + assert.Equal(t, "application/json", gotContentType) + assert.Equal(t, "test", gotBody["name"]) + assert.Equal(t, "test@example.com", gotBody["email"]) + assert.Equal(t, http.StatusCreated, resp.StatusCode) +} + +func TestClient_Put(t *testing.T) { + var gotMethod string + var gotBody map[string]any + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + _ = json.NewDecoder(r.Body).Decode(&gotBody) + w.WriteHeader(http.StatusOK) + _, _ = w.Write([]byte(`{"data":{"id":1}}`)) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + body := map[string]string{"name": "updated"} + resp, err := c.Put(context.Background(), "/api/v1/items/1", body) + require.NoError(t, err) + defer resp.Body.Close() + + assert.Equal(t, http.MethodPut, gotMethod) + assert.Equal(t, "updated", gotBody["name"]) +} + +func TestClient_Delete(t *testing.T) { + var gotMethod, gotPath string + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + gotPath = r.URL.Path + w.WriteHeader(http.StatusNoContent) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + resp, err := c.Delete(context.Background(), "/api/v1/items/1") + require.NoError(t, err) + defer resp.Body.Close() + + assert.Equal(t, http.MethodDelete, gotMethod) + assert.Equal(t, "/api/v1/items/1", gotPath) + assert.Equal(t, http.StatusNoContent, resp.StatusCode) +} + +func TestClient_PutFile(t *testing.T) { + var gotMethod, gotUserAgent string + var gotBody []byte + srv := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + gotMethod = r.Method + gotUserAgent = r.Header.Get("User-Agent") + gotBody, _ = io.ReadAll(r.Body) + // PutFile should not add Authorization or Accept headers (presigned S3 URL). + assert.Empty(t, r.Header.Get("Authorization")) + assert.Empty(t, r.Header.Get("Accept")) + w.WriteHeader(http.StatusOK) + })) + defer srv.Close() + + tmpFile := filepath.Join(t.TempDir(), "upload.txt") + require.NoError(t, os.WriteFile(tmpFile, []byte("file-content"), 0644)) + + f, err := os.Open(tmpFile) + require.NoError(t, err) + defer f.Close() + + c := NewClient("https://api.example.com", "tok", "vector-cli/test") + // PutFile uses the full URL directly (presigned S3 URL), not BaseURL+path. + resp, err := c.PutFile(context.Background(), srv.URL+"/upload", f) + require.NoError(t, err) + defer resp.Body.Close() + + assert.Equal(t, http.MethodPut, gotMethod) + assert.Equal(t, "vector-cli/test", gotUserAgent) + assert.Equal(t, "file-content", string(gotBody)) +} + +func TestClient_ErrorResponse(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _, _ = w.Write([]byte(`{"data":{},"message":"Unauthenticated.","http_status":401}`)) + })) + defer srv.Close() + + c := NewClient(srv.URL, "bad-token", "") + _, err := c.Get(context.Background(), "/api/v1/ping", nil) + require.Error(t, err) + + apiErr, ok := err.(*APIError) + require.True(t, ok, "error should be *APIError") + assert.Equal(t, 401, apiErr.HTTPStatus) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Equal(t, "Unauthenticated.", apiErr.Message) +} + +func TestClient_ValidationErrorResponse(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnprocessableEntity) + _, _ = w.Write([]byte(`{"errors":{"name":["The 
name field is required."]}}`)) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + _, err := c.Post(context.Background(), "/api/v1/items", map[string]string{}) + require.Error(t, err) + + apiErr, ok := err.(*APIError) + require.True(t, ok, "error should be *APIError") + assert.Equal(t, 422, apiErr.HTTPStatus) + assert.Equal(t, 3, apiErr.ExitCode) + assert.Contains(t, apiErr.Error(), "name: The name field is required.") +} + +func TestClient_ServerErrorResponse(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusInternalServerError) + })) + defer srv.Close() + + c := NewClient(srv.URL, "tok", "") + _, err := c.Delete(context.Background(), "/api/v1/items/1") + require.Error(t, err) + + apiErr, ok := err.(*APIError) + require.True(t, ok, "error should be *APIError") + assert.Equal(t, 500, apiErr.HTTPStatus) + assert.Equal(t, 5, apiErr.ExitCode) +} + +func TestClient_PutFileErrorResponse(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusForbidden) + })) + defer srv.Close() + + tmpFile := filepath.Join(t.TempDir(), "upload.txt") + require.NoError(t, os.WriteFile(tmpFile, []byte("data"), 0644)) + + f, err := os.Open(tmpFile) + require.NoError(t, err) + defer f.Close() + + c := NewClient("https://api.example.com", "tok", "") + _, err = c.PutFile(context.Background(), srv.URL+"/upload", f) + require.Error(t, err) + + apiErr, ok := err.(*APIError) + require.True(t, ok, "error should be *APIError") + assert.Equal(t, 403, apiErr.HTTPStatus) +} diff --git a/internal/api/error.go b/internal/api/error.go new file mode 100644 index 0000000..3501d70 --- /dev/null +++ b/internal/api/error.go @@ -0,0 +1,115 @@ +package api + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "sort" + "strings" +) + +// APIError represents a structured error from the Vector API. 
type APIError struct {
	Code             string              `json:"code"`
	Message          string              `json:"message"`
	HTTPStatus       int                 `json:"http_status"`
	ExitCode         int                 `json:"exit_code"`
	ValidationErrors map[string][]string `json:"validation_errors,omitempty"`
}

// Error implements the error interface. When validation errors are present
// they take precedence over Message: every field's messages are flattened
// into a single "Validation failed: field: msg; ..." string, with fields
// emitted in sorted order so the output is deterministic.
func (e *APIError) Error() string {
	if len(e.ValidationErrors) == 0 {
		return e.Message
	}

	fields := make([]string, 0, len(e.ValidationErrors))
	for field := range e.ValidationErrors {
		fields = append(fields, field)
	}
	sort.Strings(fields)

	parts := make([]string, 0, len(fields))
	for _, field := range fields {
		for _, msg := range e.ValidationErrors[field] {
			parts = append(parts, fmt.Sprintf("%s: %s", field, msg))
		}
	}
	return "Validation failed: " + strings.Join(parts, "; ")
}

// exitCodeForStatus maps an HTTP status code to a CLI exit code:
// auth failures (401/403) -> 2, validation (422) -> 3, not found (404) -> 4,
// server errors (5xx) -> 5, anything else -> 1.
func exitCodeForStatus(status int) int {
	switch status {
	case 401, 403:
		return 2
	case 422:
		return 3
	case 404:
		return 4
	}
	if status >= 500 {
		return 5
	}
	return 1
}

// standardResponse represents the standard API response format:
// {"data": {}, "message": "...", "http_status": N}
type standardResponse struct {
	Message    string `json:"message"`
	HTTPStatus int    `json:"http_status"`
}

// validationResponse represents the validation error format:
// {"errors": {"field": ["msg"]}}
type validationResponse struct {
	Errors map[string][]string `json:"errors"`
}

// ParseErrorResponse reads an HTTP response body and parses it into an APIError.
// It handles both standard and validation API response formats, with a fallback
// for malformed JSON.
+func ParseErrorResponse(resp *http.Response) *APIError { + status := resp.StatusCode + exitCode := exitCodeForStatus(status) + + body, err := io.ReadAll(resp.Body) + if err != nil || len(body) == 0 { + return &APIError{ + Message: http.StatusText(status), + HTTPStatus: status, + ExitCode: exitCode, + } + } + + // Try validation response format first ({"errors": {"field": ["msg"]}}). + var valResp validationResponse + if json.Unmarshal(body, &valResp) == nil && len(valResp.Errors) > 0 { + apiErr := &APIError{ + HTTPStatus: status, + ExitCode: exitCode, + ValidationErrors: valResp.Errors, + } + // Set the message via Error() so it's populated. + apiErr.Message = apiErr.Error() + return apiErr + } + + // Try standard response format ({"data": {}, "message": "...", "http_status": N}). + var stdResp standardResponse + if json.Unmarshal(body, &stdResp) == nil && stdResp.Message != "" { + return &APIError{ + Message: stdResp.Message, + HTTPStatus: status, + ExitCode: exitCode, + } + } + + // Fallback for malformed JSON or unexpected format. 
+ return &APIError{ + Message: http.StatusText(status), + HTTPStatus: status, + ExitCode: exitCode, + } +} diff --git a/internal/api/error_test.go b/internal/api/error_test.go new file mode 100644 index 0000000..b31ff84 --- /dev/null +++ b/internal/api/error_test.go @@ -0,0 +1,167 @@ +package api + +import ( + "io" + "net/http" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAPIError_Error_SimpleMessage(t *testing.T) { + err := &APIError{Message: "Something went wrong"} + assert.Equal(t, "Something went wrong", err.Error()) +} + +func TestAPIError_Error_ValidationErrors(t *testing.T) { + err := &APIError{ + Message: "ignored when validation errors present", + ValidationErrors: map[string][]string{ + "your_customer_id": {"The partner customer id field is required."}, + }, + } + assert.Equal(t, "Validation failed: your_customer_id: The partner customer id field is required.", err.Error()) +} + +func TestAPIError_Error_MultipleValidationErrors(t *testing.T) { + err := &APIError{ + ValidationErrors: map[string][]string{ + "email": {"The email field is required.", "The email must be valid."}, + "name": {"The name field is required."}, + }, + } + result := err.Error() + assert.Contains(t, result, "Validation failed: ") + assert.Contains(t, result, "email: The email field is required.") + assert.Contains(t, result, "email: The email must be valid.") + assert.Contains(t, result, "name: The name field is required.") +} + +func TestAPIError_ImplementsErrorInterface(t *testing.T) { + var err error = &APIError{Message: "test"} + assert.NotNil(t, err) + assert.Equal(t, "test", err.Error()) +} + +func TestExitCodeForStatus(t *testing.T) { + tests := []struct { + status int + exitCode int + }{ + {401, 2}, + {403, 2}, + {404, 4}, + {422, 3}, + {500, 5}, + {502, 5}, + {503, 5}, + {400, 1}, + } + + for _, tt := range tests { + t.Run(http.StatusText(tt.status), func(t *testing.T) { + assert.Equal(t, tt.exitCode, 
exitCodeForStatus(tt.status)) + }) + } +} + +func newResponse(status int, body string) *http.Response { + return &http.Response{ + StatusCode: status, + Body: io.NopCloser(strings.NewReader(body)), + } +} + +func TestParseErrorResponse_StandardFormat(t *testing.T) { + resp := newResponse(401, `{"data": {}, "message": "Unauthenticated.", "http_status": 401}`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Unauthenticated.", apiErr.Message) + assert.Equal(t, 401, apiErr.HTTPStatus) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestParseErrorResponse_ValidationFormat(t *testing.T) { + body := `{"errors": {"your_customer_id": ["The partner customer id field is required."]}}` + resp := newResponse(422, body) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, 422, apiErr.HTTPStatus) + assert.Equal(t, 3, apiErr.ExitCode) + assert.Contains(t, apiErr.ValidationErrors, "your_customer_id") + assert.Equal(t, "Validation failed: your_customer_id: The partner customer id field is required.", apiErr.Error()) +} + +func TestParseErrorResponse_MalformedJSON(t *testing.T) { + resp := newResponse(500, `not json at all`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Internal Server Error", apiErr.Message) + assert.Equal(t, 500, apiErr.HTTPStatus) + assert.Equal(t, 5, apiErr.ExitCode) +} + +func TestParseErrorResponse_EmptyBody(t *testing.T) { + resp := newResponse(404, "") + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Not Found", apiErr.Message) + assert.Equal(t, 404, apiErr.HTTPStatus) + assert.Equal(t, 4, apiErr.ExitCode) +} + +func TestParseErrorResponse_403Forbidden(t *testing.T) { + resp := newResponse(403, `{"data": {}, "message": "Forbidden.", "http_status": 403}`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Forbidden.", apiErr.Message) + assert.Equal(t, 403, apiErr.HTTPStatus) + 
assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestParseErrorResponse_404NotFound(t *testing.T) { + resp := newResponse(404, `{"data": {}, "message": "Resource not found.", "http_status": 404}`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Resource not found.", apiErr.Message) + assert.Equal(t, 404, apiErr.HTTPStatus) + assert.Equal(t, 4, apiErr.ExitCode) +} + +func TestParseErrorResponse_5xxServerError(t *testing.T) { + resp := newResponse(503, `{"data": {}, "message": "Service unavailable.", "http_status": 503}`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Service unavailable.", apiErr.Message) + assert.Equal(t, 503, apiErr.HTTPStatus) + assert.Equal(t, 5, apiErr.ExitCode) +} + +func TestParseErrorResponse_EmptyJSONObject(t *testing.T) { + resp := newResponse(500, `{}`) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Equal(t, "Internal Server Error", apiErr.Message) + assert.Equal(t, 500, apiErr.HTTPStatus) + assert.Equal(t, 5, apiErr.ExitCode) +} + +func TestParseErrorResponse_MultipleValidationFields(t *testing.T) { + body := `{"errors": {"email": ["The email field is required."], "name": ["The name field is required."]}}` + resp := newResponse(422, body) + apiErr := ParseErrorResponse(resp) + + require.NotNil(t, apiErr) + assert.Len(t, apiErr.ValidationErrors, 2) + assert.Equal(t, 3, apiErr.ExitCode) +} diff --git a/internal/appctx/appctx.go b/internal/appctx/appctx.go new file mode 100644 index 0000000..0fe5c6c --- /dev/null +++ b/internal/appctx/appctx.go @@ -0,0 +1,41 @@ +// Package appctx provides the App struct, context helpers, and global flags. +package appctx + +import ( + "context" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +// contextKey is an unexported type for context keys to avoid collisions. 
+type contextKey struct{} + +// App holds shared application state accessible to all commands. +type App struct { + Config *config.Config + Client *api.Client + Output *output.Writer + TokenSource string // "flag", "env", "keyring", or "" +} + +// NewApp creates a new App with the given dependencies. +func NewApp(cfg *config.Config, client *api.Client, tokenSource string) *App { + return &App{ + Config: cfg, + Client: client, + TokenSource: tokenSource, + } +} + +// WithApp stores an App in the context. +func WithApp(ctx context.Context, app *App) context.Context { + return context.WithValue(ctx, contextKey{}, app) +} + +// FromContext retrieves the App from the context. Returns nil if not set. +func FromContext(ctx context.Context) *App { + app, _ := ctx.Value(contextKey{}).(*App) + return app +} diff --git a/internal/appctx/appctx_test.go b/internal/appctx/appctx_test.go new file mode 100644 index 0000000..a9dbdbe --- /dev/null +++ b/internal/appctx/appctx_test.go @@ -0,0 +1,49 @@ +package appctx_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" +) + +func TestNewApp(t *testing.T) { + cfg := &config.Config{ApiURL: "https://example.com"} + client := api.NewClient("https://example.com", "test-key", "") + + app := appctx.NewApp(cfg, client, "") + + require.NotNil(t, app) + assert.Equal(t, cfg, app.Config) + assert.Equal(t, client, app.Client) +} + +func TestContextRoundTrip(t *testing.T) { + cfg := &config.Config{ApiURL: "https://example.com"} + client := api.NewClient("https://example.com", "test-key", "") + app := appctx.NewApp(cfg, client, "") + + ctx := appctx.WithApp(context.Background(), app) + got := appctx.FromContext(ctx) + + require.NotNil(t, got) + assert.Equal(t, app, got) +} + +func TestFromContext_NotSet(t *testing.T) { + got 
:= appctx.FromContext(context.Background()) + assert.Nil(t, got) +} + +func TestFromContext_WrongType(t *testing.T) { + // Using a different key type should not collide + type otherKey struct{} + ctx := context.WithValue(context.Background(), otherKey{}, "not an app") + got := appctx.FromContext(ctx) + assert.Nil(t, got) +} diff --git a/internal/cli/execute.go b/internal/cli/execute.go new file mode 100644 index 0000000..bdf85ac --- /dev/null +++ b/internal/cli/execute.go @@ -0,0 +1,26 @@ +package cli + +import ( + "errors" + "fmt" + "os" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/commands" +) + +// Execute creates the root command and runs it. +// It returns 0 on success, or an appropriate exit code on error. +func Execute() int { + commands.RefreshSkillsIfVersionChanged() + cmd := NewRootCmd() + if err := cmd.Execute(); err != nil { + _, _ = fmt.Fprintf(os.Stderr, "Error: %s\n", err) + var apiErr *api.APIError + if errors.As(err, &apiErr) { + return apiErr.ExitCode + } + return 1 + } + return 0 +} diff --git a/internal/cli/root.go b/internal/cli/root.go new file mode 100644 index 0000000..460ec9d --- /dev/null +++ b/internal/cli/root.go @@ -0,0 +1,134 @@ +package cli + +import ( + "fmt" + "os" + + "github.com/itchyny/gojq" + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/commands" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" + "github.com/built-fast/vector-cli/internal/version" +) + +// NewRootCmd creates and returns the root cobra command. 
+func NewRootCmd() *cobra.Command { + var showVersion bool + + cmd := &cobra.Command{ + Use: "vector", + Short: "Vector CLI — manage your Vector hosting", + Long: "Vector CLI — manage your Vector hosting\n\nA command-line tool for managing sites, deployments, and configurations via the Vector Pro API by BuiltFast (builtfast.com).", + Example: ` # Force JSON output for any command + vector --json site list + + # Use a one-off token without logging in + vector --token mytoken123 site list + + # Filter JSON output with built-in jq + vector site list --jq '.[].id'`, + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + // 1. Load config (defaults if missing) + cfg, err := config.LoadConfig() + if err != nil { + return err + } + + // 2. Resolve token: --token flag > VECTOR_API_KEY env > keyring + var token, tokenSource string + token, _ = cmd.Flags().GetString("token") + if token != "" { + tokenSource = "flag" + } + if token == "" { + token = os.Getenv("VECTOR_API_KEY") + if token != "" { + tokenSource = "env" + } + } + if token == "" { + if t, err := config.Load(); err == nil && t != "" { + token = t + tokenSource = "keyring" + } + } + + // 3. Build API client + client := api.NewClient(cfg.ApiURL, token, "") + + // 4. Detect output format from --json/--no-json flags + jsonFlag, _ := cmd.Flags().GetBool("json") + noJsonFlag, _ := cmd.Flags().GetBool("no-json") + format := output.DetectFormat(jsonFlag, noJsonFlag) + + // 5. 
Handle --jq flag + jqExpr, _ := cmd.Flags().GetString("jq") + var writerOpts []output.WriterOption + + if jqExpr != "" { + if noJsonFlag { + return fmt.Errorf("--jq and --no-json cannot be used together") + } + + query, err := gojq.Parse(jqExpr) + if err != nil { + return fmt.Errorf("invalid jq expression: %w", err) + } + + code, err := gojq.Compile(query) + if err != nil { + return fmt.Errorf("failed to compile jq expression: %w", err) + } + + // jq implies JSON output + format = output.JSON + writerOpts = append(writerOpts, output.WithJQ(jqExpr, code)) + } + + // 6. Create App and store in context + app := appctx.NewApp(cfg, client, tokenSource) + app.Output = output.NewWriter(os.Stdout, format, writerOpts...) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + + return nil + }, + RunE: func(cmd *cobra.Command, args []string) error { + if showVersion { + _, _ = fmt.Fprintln(cmd.OutOrStdout(), version.FullVersion()) + return nil + } + return cmd.Help() + }, + SilenceUsage: true, + SilenceErrors: true, + } + + cmd.Flags().BoolVar(&showVersion, "version", false, "Print version information and exit") + cmd.PersistentFlags().String("token", "", "API token (overrides VECTOR_API_KEY and keyring)") + cmd.PersistentFlags().Bool("json", false, "Force JSON output") + cmd.PersistentFlags().Bool("no-json", false, "Force table output") + cmd.PersistentFlags().String("jq", "", `Filter JSON output with a jq expression (built-in, no external jq required)`) + + cmd.AddCommand(commands.NewAuthCmd()) + cmd.AddCommand(commands.NewSiteCmd()) + cmd.AddCommand(commands.NewEnvCmd()) + cmd.AddCommand(commands.NewDeployCmd()) + cmd.AddCommand(commands.NewSSLCmd()) + cmd.AddCommand(commands.NewPHPVersionsCmd()) + cmd.AddCommand(commands.NewEventCmd()) + cmd.AddCommand(commands.NewAccountCmd()) + cmd.AddCommand(commands.NewWebhookCmd()) + cmd.AddCommand(commands.NewBackupCmd()) + cmd.AddCommand(commands.NewRestoreCmd()) + cmd.AddCommand(commands.NewWafCmd()) + 
cmd.AddCommand(commands.NewDbCmd()) + cmd.AddCommand(commands.NewArchiveCmd()) + cmd.AddCommand(commands.NewMcpCmd()) + cmd.AddCommand(commands.NewSkillCmd()) + + return cmd +} diff --git a/internal/cli/root_test.go b/internal/cli/root_test.go new file mode 100644 index 0000000..cc8d9d7 --- /dev/null +++ b/internal/cli/root_test.go @@ -0,0 +1,496 @@ +package cli + +import ( + "bytes" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/zalando/go-keyring" + + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" + "github.com/built-fast/vector-cli/internal/version" +) + +func TestMain(m *testing.M) { + keyring.MockInit() + os.Exit(m.Run()) +} + +func TestNewRootCmd_Use(t *testing.T) { + cmd := NewRootCmd() + assert.Equal(t, "vector", cmd.Use) +} + +func TestNewRootCmd_VersionFlag(t *testing.T) { + keyring.MockInit() + origVersion, origCommit, origDate := version.Version, version.Commit, version.Date + t.Cleanup(func() { + version.Version = origVersion + version.Commit = origCommit + version.Date = origDate + }) + + version.Version = "1.2.3" + version.Commit = "abc1234" + version.Date = "2026-01-01" + + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--version"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "vector v1.2.3 (abc1234) built 2026-01-01", strings.TrimSpace(buf.String())) +} + +func TestNewRootCmd_FlagsRegistered(t *testing.T) { + cmd := NewRootCmd() + + tests := []struct { + name string + flag string + persistent bool + defValue string + }{ + {"version flag", "version", false, "false"}, + {"token flag", "token", true, ""}, + {"json flag", "json", true, "false"}, + {"no-json flag", "no-json", true, "false"}, + 
{"jq flag", "jq", true, ""}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var f = cmd.Flags().Lookup(tt.flag) + if tt.persistent { + f = cmd.PersistentFlags().Lookup(tt.flag) + } + require.NotNil(t, f, "--%s flag should be registered", tt.flag) + assert.Equal(t, tt.defValue, f.DefValue) + }) + } +} + +func TestNewRootCmd_NoArgsShowsHelp(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + + out := buf.String() + assert.Contains(t, out, "Usage:") + assert.Contains(t, out, "vector") + assert.Contains(t, out, "--json") + assert.Contains(t, out, "--no-json") + assert.Contains(t, out, "--version") + assert.Contains(t, out, "--token") +} + +func TestPersistentPreRunE_LoadsDefaultConfig(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "https://api.builtfast.com", captured.Config.ApiURL) +} + +func TestPersistentPreRunE_TokenFromFlag(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{"--token", "flag-token"}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "flag-token", captured.Client.Token) + assert.Equal(t, "flag", captured.TokenSource) +} + +func TestPersistentPreRunE_TokenFromEnv(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + 
t.Setenv("VECTOR_API_KEY", "env-token") + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "env-token", captured.Client.Token) + assert.Equal(t, "env", captured.TokenSource) +} + +func TestPersistentPreRunE_TokenFromKeyring(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + // Store token in keyring + require.NoError(t, config.Save("stored-token")) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "stored-token", captured.Client.Token) + assert.Equal(t, "keyring", captured.TokenSource) +} + +func TestPersistentPreRunE_TokenPrecedence(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_API_KEY", "env-token") + t.Setenv("VECTOR_NO_KEYRING", "") + + // Store token in keyring + require.NoError(t, config.Save("stored-token")) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + // --token flag takes precedence over env and stored credentials + cmd.SetArgs([]string{"--token", "flag-token"}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "flag-token", captured.Client.Token) + assert.Equal(t, "flag", captured.TokenSource) +} + +func TestPersistentPreRunE_NoTokenIsOK(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := 
NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Empty(t, captured.Client.Token) + assert.Empty(t, captured.TokenSource) +} + +func TestPersistentPreRunE_KeyringDisabledNoToken(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + // Commands that don't require auth still work without a token + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Empty(t, captured.Client.Token) + assert.Empty(t, captured.TokenSource) +} + +func TestPersistentPreRunE_KeyringDisabledFlagTokenWorks(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{"--token", "flag-token"}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, "flag-token", captured.Client.Token) + assert.Equal(t, "flag", captured.TokenSource) +} + +func TestPersistentPreRunE_KeyringDisabledEnvTokenWorks(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + t.Setenv("VECTOR_API_KEY", "env-token") + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + 
require.NotNil(t, captured) + assert.Equal(t, "env-token", captured.Client.Token) + assert.Equal(t, "env", captured.TokenSource) +} + +func TestPersistentPreRunE_DetectsOutputFormat(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + tests := []struct { + name string + args []string + expected output.Format + }{ + {"json flag", []string{"--json"}, output.JSON}, + {"no-json flag", []string{"--no-json"}, output.Table}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs(tt.args) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, tt.expected, captured.Output.Format()) + }) + } +} + +func TestPersistentPreRunE_InvalidConfigJSON(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + // Write invalid JSON to config file + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "config.json"), []byte("{invalid"), 0o644)) + + cmd := NewRootCmd() + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid JSON") +} + +func TestPersistentPreRunE_CustomAPIURL(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + // Write custom config + cfg := config.Config{ApiURL: "https://custom.api.com"} + data, err := json.MarshalIndent(cfg, "", " ") + require.NoError(t, err) + require.NoError(t, os.WriteFile(filepath.Join(tmpDir, "config.json"), data, 0o644)) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err = cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + 
assert.Equal(t, "https://custom.api.com", captured.Client.BaseURL) +} + +func TestPersistentPreRunE_HelpWorksWithoutCredentials(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, buf.String(), "Usage:") +} + +func TestPersistentPreRunE_VersionWorksWithoutCredentials(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + buf := new(bytes.Buffer) + cmd.SetOut(buf) + cmd.SetArgs([]string{"--version"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, buf.String(), "vector v") +} + +func TestPersistentPreRunE_JQCompilesWithoutError(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{"--jq", ".name"}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + require.NotNil(t, captured.Output) + assert.True(t, captured.Output.HasJQ()) +} + +func TestPersistentPreRunE_JQForcesJSON(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{"--jq", ".name"}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + assert.Equal(t, output.JSON, captured.Output.Format()) +} + +func TestPersistentPreRunE_JQAndNoJSONError(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + return nil + } + 
cmd.SetArgs([]string{"--jq", ".name", "--no-json"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Equal(t, "--jq and --no-json cannot be used together", err.Error()) +} + +func TestPersistentPreRunE_JQInvalidExpression(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + return nil + } + cmd.SetArgs([]string{"--jq", ".[["}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid jq expression") +} + +func TestPersistentPreRunE_JQIdentityFilter(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{"--jq", "."}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + require.NotNil(t, captured.Output) + assert.True(t, captured.Output.HasJQ()) + assert.Equal(t, output.JSON, captured.Output.Format()) +} + +func TestPersistentPreRunE_OutputSetWithoutJQ(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + var captured *appctx.App + cmd := NewRootCmd() + cmd.RunE = func(cmd *cobra.Command, args []string) error { + captured = appctx.FromContext(cmd.Context()) + return nil + } + cmd.SetArgs([]string{}) + + err := cmd.Execute() + require.NoError(t, err) + require.NotNil(t, captured) + require.NotNil(t, captured.Output) + assert.False(t, captured.Output.HasJQ()) +} diff --git a/internal/cli/surface_test.go b/internal/cli/surface_test.go new file mode 100644 index 0000000..1db1ece --- /dev/null +++ b/internal/cli/surface_test.go @@ -0,0 +1,42 @@ +package cli + +import ( + "flag" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + 
"github.com/built-fast/vector-cli/internal/surface" +) + +var update = flag.Bool("update", false, "update .surface file") + +func repoRoot(t *testing.T) string { + t.Helper() + _, file, _, ok := runtime.Caller(0) + require.True(t, ok, "failed to determine caller file") + return filepath.Join(filepath.Dir(file), "..", "..") +} + +func TestSurface(t *testing.T) { + root := NewRootCmd() + got := surface.Generate(root) + + surfacePath := filepath.Join(repoRoot(t), ".surface") + + if *update { + require.NoError(t, os.WriteFile(surfacePath, []byte(got), 0o644)) + t.Log("updated .surface file") + return + } + + want, err := os.ReadFile(surfacePath) + require.NoError(t, err, ".surface file not found — run: go test ./internal/cli/ -run TestSurface -update") + + assert.Equal(t, string(want), got, + "CLI surface has changed. If intentional, run:\n\n go test ./internal/cli/ -run TestSurface -update\n\nand commit the updated .surface file.") +} diff --git a/internal/commands/account.go b/internal/commands/account.go new file mode 100644 index 0000000..d6992ed --- /dev/null +++ b/internal/commands/account.go @@ -0,0 +1,88 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const accountBasePath = "/api/v1/vector/account" + +// NewAccountCmd creates the account command group. 
+func NewAccountCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "account", + Short: "Manage account", + Long: "View account details and manage account-level resources.", + } + + cmd.AddCommand(newAccountShowCmd()) + cmd.AddCommand(NewAccountSSHKeyCmd()) + cmd.AddCommand(NewAccountAPIKeyCmd()) + cmd.AddCommand(NewAccountSecretCmd()) + + return cmd +} + +func newAccountShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show", + Short: "Show account summary", + Long: "Display account details including owner information and resource usage.", + Example: ` # Show account summary + vector account show`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), accountBasePath, nil) + if err != nil { + return fmt.Errorf("failed to get account: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get account: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get account: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get account: %w", err) + } + + owner := getMap(item, "owner") + account := getMap(item, "account") + sites := getMap(item, "sites") + envs := getMap(item, "environments") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Owner Name", Value: getString(owner, "name")}, + {Key: "Owner Email", Value: getString(owner, "email")}, + {Key: "Account Name", Value: getString(account, "name")}, + {Key: "Company", Value: getString(account, "company")}, + {Key: "Total Sites", Value: fmt.Sprintf("%.0f", getFloat(sites, "total"))}, + {Key: "Active Sites", Value: fmt.Sprintf("%.0f", getFloat(getMap(sites, 
"by_status"), "active"))}, + {Key: "Total Environments", Value: fmt.Sprintf("%.0f", getFloat(envs, "total"))}, + {Key: "Active Environments", Value: fmt.Sprintf("%.0f", getFloat(getMap(envs, "by_status"), "active"))}, + }) + return nil + }, + } +} diff --git a/internal/commands/account_api_key.go b/internal/commands/account_api_key.go new file mode 100644 index 0000000..09cf1a0 --- /dev/null +++ b/internal/commands/account_api_key.go @@ -0,0 +1,238 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const apiKeysBasePath = "/api/v1/vector/api-keys" + +// NewAccountAPIKeyCmd creates the account api-key command group. +func NewAccountAPIKeyCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "api-key", + Short: "Manage account API keys", + Long: "Manage API keys for controlling programmatic access to your account.", + } + + cmd.AddCommand(newAccountAPIKeyListCmd()) + cmd.AddCommand(newAccountAPIKeyCreateCmd()) + cmd.AddCommand(newAccountAPIKeyDeleteCmd()) + + return cmd +} + +func newAccountAPIKeyListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List API keys", + Long: "Retrieve a paginated list of API keys for your account.", + Example: ` # List API keys + vector account api-key list`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + resp, err := app.Client.Get(cmd.Context(), apiKeysBasePath, query) + if err != nil { + return fmt.Errorf("failed to list API keys: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list API keys: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return 
fmt.Errorf("failed to list API keys: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list API keys: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list API keys: %w", err) + } + + headers := []string{"ID", "NAME", "ABILITIES", "LAST USED", "EXPIRES"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "name"), + formatAbilities(item), + formatString(getString(item, "last_used_at")), + formatString(getString(item, "expires_at")), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newAccountAPIKeyCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create", + Short: "Create an API key", + Long: "Create a new API key for programmatic access to your account.", + Example: ` # Create an API key + vector account api-key create --name "ci-deploy" + + # Create with abilities and expiration + vector account api-key create --name "readonly" --abilities "site:read,env:read" --expires-at 2025-12-31T23:59:59Z`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + name, _ := cmd.Flags().GetString("name") + + reqBody := map[string]any{ + "name": name, + } + + if cmd.Flags().Changed("abilities") { + abilitiesStr, _ := cmd.Flags().GetString("abilities") + abilities := strings.Split(abilitiesStr, ",") + reqBody["abilities"] = abilities + } + + if cmd.Flags().Changed("expires-at") { + expiresAt, _ := cmd.Flags().GetString("expires-at") + reqBody["expires_at"] = expiresAt + } + + resp, err := app.Client.Post(cmd.Context(), apiKeysBasePath, reqBody) + if err 
!= nil { + return fmt.Errorf("failed to create API key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create API key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create API key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create API key: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Token", Value: getString(item, "token")}, + {Key: "Abilities", Value: formatAbilities(item)}, + {Key: "Expires", Value: formatString(getString(item, "expires_at"))}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + w := cmd.OutOrStdout() + output.PrintMessage(w, "") + output.PrintMessage(w, "Save this token — it won't be shown again!") + return nil + }, + } + + cmd.Flags().String("name", "", "Name for the API key (required)") + cmd.Flags().String("abilities", "", "Comma-separated abilities (e.g., \"site:read,site:write\")") + cmd.Flags().String("expires-at", "", "ISO datetime for token expiration") + _ = cmd.MarkFlagRequired("name") + + return cmd +} + +func newAccountAPIKeyDeleteCmd() *cobra.Command { + return &cobra.Command{ + Use: "delete ", + Short: "Delete an API key", + Long: "Delete an API key. 
You cannot delete the token currently being used for authentication.", + Example: ` # Delete an API key + vector account api-key delete token-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), apiKeysBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete API key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete API key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete API key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + output.PrintMessage(cmd.OutOrStdout(), "API key deleted successfully.") + return nil + }, + } +} + +// formatAbilities joins the abilities array into a comma-separated string. 
+func formatAbilities(item map[string]any) string { + abilities := getSlice(item, "abilities") + if len(abilities) == 0 { + return "-" + } + parts := make([]string, 0, len(abilities)) + for _, a := range abilities { + if s, ok := a.(string); ok { + parts = append(parts, s) + } + } + if len(parts) == 0 { + return "-" + } + return strings.Join(parts, ", ") +} diff --git a/internal/commands/account_api_key_test.go b/internal/commands/account_api_key_test.go new file mode 100644 index 0000000..1d06d0e --- /dev/null +++ b/internal/commands/account_api_key_test.go @@ -0,0 +1,401 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var accountAPIKeyListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "token-001", + "name": "Production API Key", + "abilities": []string{"site:read", "site:write"}, + "last_used_at": "2025-01-15T12:00:00+00:00", + "expires_at": "2025-12-31T23:59:59+00:00", + "created_at": "2025-01-01T00:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 1, + }, + "message": "API keys retrieved successfully", + "http_status": 200, +} + +var accountAPIKeyCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "token-002", + "name": "New API Key", + "token": "1|abc123def456789", + "abilities": []string{"*"}, + "expires_at": nil, + "created_at": "2025-01-15T12:00:00+00:00", + }, + "message": "API key created successfully", + "http_status": 201, +} + +var accountAPIKeyDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "token-001", + "name": "Production API Key", + "abilities": []string{"site:read", "site:write"}, + "last_used_at": "2025-01-15T12:00:00+00:00", + "expires_at": nil, + "created_at": 
"2025-01-01T00:00:00+00:00", + }, + "message": "API key deleted successfully", + "http_status": 200, +} + +func newAccountAPIKeyTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/api-keys": + _ = json.NewEncoder(w).Encode(accountAPIKeyListResponse) + + case method == "POST" && path == "/api/v1/vector/api-keys": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountAPIKeyCreateResponse) + + case method == "DELETE" && path == "/api/v1/vector/api-keys/token-001": + _ = json.NewEncoder(w).Encode(accountAPIKeyDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- API Key List Tests --- + +func TestAccountAPIKeyListCmd_TableOutput(t *testing.T) { + ts := newAccountAPIKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "api-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "token-001") + assert.Contains(t, out, "Production API Key") + assert.Contains(t, out, "site:read, site:write") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-12-31T23:59:59+00:00") +} + +func TestAccountAPIKeyListCmd_JSONOutput(t *testing.T) { + ts := newAccountAPIKeyTestServer("valid-token") + defer 
ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "api-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 1) + assert.Equal(t, "token-001", result[0]["id"]) +} + +func TestAccountAPIKeyListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountAPIKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "api-key", "list", "--page", "2", "--per-page", "10"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "2", receivedPage) + assert.Equal(t, "10", receivedPerPage) +} + +func TestAccountAPIKeyListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountAPIKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "api-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/api-keys", receivedPath) +} + +func TestAccountAPIKeyListCmd_AuthError(t *testing.T) { + ts := newAccountAPIKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"account", "api-key", "list"}) + + err := 
cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestAccountAPIKeyListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildAccountCmdNoAuth(output.Table) + cmd.SetArgs([]string{"account", "api-key", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- API Key Create Tests --- + +func TestAccountAPIKeyCreateCmd_TableOutput(t *testing.T) { + ts := newAccountAPIKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "api-key", "create", "--name", "New API Key"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "token-002") + assert.Contains(t, out, "New API Key") + assert.Contains(t, out, "1|abc123def456789") + assert.Contains(t, out, "Save this token") +} + +func TestAccountAPIKeyCreateCmd_JSONOutput(t *testing.T) { + ts := newAccountAPIKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "api-key", "create", "--name", "New API Key"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "token-002", result["id"]) + assert.Equal(t, "1|abc123def456789", result["token"]) +} + +func TestAccountAPIKeyCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + 
		// (tail of TestAccountAPIKeyCreateCmd_HTTPPath — handler replies 201
		// with the canned create fixture after recording the request)
		w.WriteHeader(http.StatusCreated)
		_ = json.NewEncoder(w).Encode(accountAPIKeyCreateResponse)
	}))
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "create", "--name", "My Key"})

	err := cmd.Execute()
	require.NoError(t, err)

	// Create must POST to the collection endpoint with the flag value as "name".
	assert.Equal(t, "POST", receivedMethod)
	assert.Equal(t, "/api/v1/vector/api-keys", receivedPath)
	assert.Equal(t, "My Key", receivedBody["name"])
}

// --abilities is a comma-separated flag; it must arrive as a JSON array.
func TestAccountAPIKeyCreateCmd_WithAbilities(t *testing.T) {
	var receivedBody map[string]any
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, _ := io.ReadAll(r.Body)
		_ = json.Unmarshal(body, &receivedBody)
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusCreated)
		_ = json.NewEncoder(w).Encode(accountAPIKeyCreateResponse)
	}))
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "create",
		"--name", "My Key",
		"--abilities", "site:read,site:write",
	})

	err := cmd.Execute()
	require.NoError(t, err)

	abilities, ok := receivedBody["abilities"].([]any)
	require.True(t, ok)
	assert.Equal(t, []any{"site:read", "site:write"}, abilities)
}

// --expires-at must be forwarded verbatim as the "expires_at" field.
func TestAccountAPIKeyCreateCmd_WithExpiresAt(t *testing.T) {
	var receivedBody map[string]any
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, _ := io.ReadAll(r.Body)
		_ = json.Unmarshal(body, &receivedBody)
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusCreated)
		_ = json.NewEncoder(w).Encode(accountAPIKeyCreateResponse)
	}))
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "create",
		"--name", "My Key",
		"--expires-at", "2025-12-31T23:59:59+00:00",
	})

	err := cmd.Execute()
	require.NoError(t, err)

	assert.Equal(t, "2025-12-31T23:59:59+00:00", receivedBody["expires_at"])
}

// Omitting --name must fail cobra flag validation before any HTTP request.
func TestAccountAPIKeyCreateCmd_MissingRequiredFlags(t *testing.T) {
	ts := newAccountAPIKeyTestServer("valid-token")
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "create"})

	err := cmd.Execute()
	require.Error(t, err)
	assert.Contains(t, err.Error(), "required")
}

// --- API Key Delete Tests ---

func TestAccountAPIKeyDeleteCmd_TableOutput(t *testing.T) {
	ts := newAccountAPIKeyTestServer("valid-token")
	defer ts.Close()

	cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "delete", "token-001"})

	err := cmd.Execute()
	require.NoError(t, err)
	assert.Contains(t, stdout.String(), "API key deleted successfully")
}

func TestAccountAPIKeyDeleteCmd_JSONOutput(t *testing.T) {
	ts := newAccountAPIKeyTestServer("valid-token")
	defer ts.Close()

	cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON)
	cmd.SetArgs([]string{"account", "api-key", "delete", "token-001"})

	err := cmd.Execute()
	require.NoError(t, err)

	var result map[string]any
	require.NoError(t, json.Unmarshal(stdout.Bytes(), &result))
	assert.Equal(t, "token-001", result["id"])
}

// Delete must issue DELETE against the member endpoint for the given id.
func TestAccountAPIKeyDeleteCmd_HTTPPath(t *testing.T) {
	var receivedMethod, receivedPath string
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		receivedMethod = r.Method
		receivedPath = r.URL.Path
		w.Header().Set("Content-Type", "application/json")
		_ = json.NewEncoder(w).Encode(accountAPIKeyDeleteResponse)
	}))
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "delete", "token-001"})

	err := cmd.Execute()
	require.NoError(t, err)
	assert.Equal(t, "DELETE", receivedMethod)
	assert.Equal(t, "/api/v1/vector/api-keys/token-001", receivedPath)
}

// Missing positional id must be rejected by cobra.ExactArgs(1).
func TestAccountAPIKeyDeleteCmd_MissingArg(t *testing.T) {
	ts := newAccountAPIKeyTestServer("valid-token")
	defer ts.Close()

	cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table)
	cmd.SetArgs([]string{"account", "api-key", "delete"})

	err := cmd.Execute()
	require.Error(t, err)
	assert.Contains(t, err.Error(), "accepts 1 arg(s)")
}

// --- Help Tests ---

// Help output must advertise every subcommand plus the group description.
func TestAccountAPIKeyCmd_Help(t *testing.T) {
	cmd := NewAccountCmd()

	stdout := new(bytes.Buffer)
	cmd.SetOut(stdout)
	cmd.SetArgs([]string{"api-key", "--help"})

	err := cmd.Execute()
	require.NoError(t, err)

	out := stdout.String()
	assert.Contains(t, out, "list")
	assert.Contains(t, out, "create")
	assert.Contains(t, out, "delete")
	assert.Contains(t, out, "programmatic access")
}
diff --git a/internal/commands/account_secret.go b/internal/commands/account_secret.go
new file mode 100644
index 0000000..1b3ba21
--- /dev/null
+++ b/internal/commands/account_secret.go
@@ -0,0 +1,355 @@
package commands

import (
	"encoding/json"
	"fmt"
	"io"

	"github.com/spf13/cobra"

	"github.com/built-fast/vector-cli/internal/output"
)

const globalSecretsBasePath = "/api/v1/vector/global-secrets"

// NewAccountSecretCmd creates the account secret command group.
+func NewAccountSecretCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "secret", + Short: "Manage account secrets", + Long: "Manage account-level secrets and environment variables shared across sites.", + } + + cmd.AddCommand(newAccountSecretListCmd()) + cmd.AddCommand(newAccountSecretShowCmd()) + cmd.AddCommand(newAccountSecretCreateCmd()) + cmd.AddCommand(newAccountSecretUpdateCmd()) + cmd.AddCommand(newAccountSecretDeleteCmd()) + + return cmd +} + +func newAccountSecretListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List global secrets", + Long: "Retrieve a paginated list of account-level secrets and environment variables.", + Example: ` # List global secrets + vector account secret list`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + resp, err := app.Client.Get(cmd.Context(), globalSecretsBasePath, query) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + headers := []string{"ID", "KEY", "SECRET", "VALUE", "CREATED"} + var rows [][]string + for _, item := range items { + isSecret := getBool(item, "is_secret") + value := getString(item, "value") + if isSecret { + value = "-" + } + 
rows = append(rows, []string{ + getString(item, "id"), + getString(item, "key"), + formatBool(isSecret), + formatString(value), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newAccountSecretShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show a secret", + Long: "Display details of an account-level secret or environment variable.", + Example: ` # Show secret details + vector account secret show secret-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), globalSecretsBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to get secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get secret: %w", err) + } + + isSecret := getBool(item, "is_secret") + value := getString(item, "value") + if isSecret { + value = "-" + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(isSecret)}, + {Key: "Value", Value: formatString(value)}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + return nil + }, + } +} + +func newAccountSecretCreateCmd() *cobra.Command { + 
cmd := &cobra.Command{ + Use: "create", + Short: "Create a secret", + Long: "Create a new account-level secret or environment variable.", + Example: ` # Create a global secret + vector account secret create --key STRIPE_KEY --value sk_live_xxx + + # Create as a plain environment variable + vector account secret create --key APP_ENV --value production --no-secret`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + key, _ := cmd.Flags().GetString("key") + value, _ := cmd.Flags().GetString("value") + noSecret, _ := cmd.Flags().GetBool("no-secret") + + reqBody := map[string]any{ + "key": key, + "value": value, + "is_secret": !noSecret, + } + + resp, err := app.Client.Post(cmd.Context(), globalSecretsBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + isSecret := getBool(item, "is_secret") + displayValue := getString(item, "value") + if isSecret { + displayValue = "-" + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(isSecret)}, + {Key: "Value", Value: formatString(displayValue)}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + }, + } + + cmd.Flags().String("key", "", "Key name for the secret (required)") + cmd.Flags().String("value", "", "Value for the secret 
(required)") + cmd.Flags().Bool("no-secret", false, "Store as plain environment variable (not secret)") + _ = cmd.MarkFlagRequired("key") + _ = cmd.MarkFlagRequired("value") + + return cmd +} + +func newAccountSecretUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update a secret", + Long: "Update an account-level secret or environment variable.", + Example: ` # Update a secret value + vector account secret update secret-456 --value new-value`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("value") { + value, _ := cmd.Flags().GetString("value") + reqBody["value"] = value + } + + if cmd.Flags().Changed("no-secret") { + noSecret, _ := cmd.Flags().GetBool("no-secret") + reqBody["is_secret"] = !noSecret + } + + resp, err := app.Client.Put(cmd.Context(), globalSecretsBasePath+"/"+args[0], reqBody) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + isSecret := getBool(item, "is_secret") + displayValue := getString(item, "value") + if isSecret { + displayValue = "-" + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(isSecret)}, + {Key: "Value", Value: formatString(displayValue)}, + {Key: "Created", Value: getString(item, 
"created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().String("value", "", "New value for the secret") + cmd.Flags().Bool("no-secret", false, "Store as plain environment variable (not secret)") + + return cmd +} + +func newAccountSecretDeleteCmd() *cobra.Command { + return &cobra.Command{ + Use: "delete ", + Short: "Delete a secret", + Long: "Delete an account-level secret or environment variable.", + Example: ` # Delete a global secret + vector account secret delete secret-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), globalSecretsBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message("Secret deleted successfully.") + return nil + }, + } +} diff --git a/internal/commands/account_secret_test.go b/internal/commands/account_secret_test.go new file mode 100644 index 0000000..6a5e957 --- /dev/null +++ b/internal/commands/account_secret_test.go @@ -0,0 +1,628 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var accountSecretListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "secret-001", + "key": "DB_PASSWORD", + "value": "", + "is_secret": true, 
+ "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-01T00:00:00+00:00", + }, + { + "id": "secret-002", + "key": "APP_ENV", + "value": "production", + "is_secret": false, + "created_at": "2025-01-02T00:00:00+00:00", + "updated_at": "2025-01-02T00:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Global secrets retrieved successfully", + "http_status": 200, +} + +var accountSecretShowResponse = map[string]any{ + "data": map[string]any{ + "id": "secret-001", + "key": "DB_PASSWORD", + "value": "", + "is_secret": true, + "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-05T00:00:00+00:00", + }, + "message": "Global secret retrieved successfully", + "http_status": 200, +} + +var accountSecretCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "secret-003", + "key": "API_TOKEN", + "value": "", + "is_secret": true, + "created_at": "2025-01-15T00:00:00+00:00", + "updated_at": "2025-01-15T00:00:00+00:00", + }, + "message": "Global secret created successfully", + "http_status": 201, +} + +var accountSecretCreatePlainResponse = map[string]any{ + "data": map[string]any{ + "id": "secret-004", + "key": "APP_DEBUG", + "value": "true", + "is_secret": false, + "created_at": "2025-01-15T00:00:00+00:00", + "updated_at": "2025-01-15T00:00:00+00:00", + }, + "message": "Global secret created successfully", + "http_status": 201, +} + +var accountSecretUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": "secret-001", + "key": "DB_PASSWORD", + "value": "", + "is_secret": true, + "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-20T00:00:00+00:00", + }, + "message": "Global secret updated successfully", + "http_status": 200, +} + +var accountSecretDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "secret-001", + "key": "DB_PASSWORD", + "value": "", + "is_secret": true, + "created_at": "2025-01-01T00:00:00+00:00", + 
"updated_at": "2025-01-05T00:00:00+00:00", + }, + "message": "Global secret deleted successfully", + "http_status": 200, +} + +func newAccountSecretTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/global-secrets": + _ = json.NewEncoder(w).Encode(accountSecretListResponse) + + case method == "GET" && path == "/api/v1/vector/global-secrets/secret-001": + _ = json.NewEncoder(w).Encode(accountSecretShowResponse) + + case method == "POST" && path == "/api/v1/vector/global-secrets": + var reqBody map[string]any + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &reqBody) + isSecret, _ := reqBody["is_secret"].(bool) + if !isSecret { + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountSecretCreatePlainResponse) + } else { + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountSecretCreateResponse) + } + + case method == "PUT" && path == "/api/v1/vector/global-secrets/secret-001": + _ = json.NewEncoder(w).Encode(accountSecretUpdateResponse) + + case method == "DELETE" && path == "/api/v1/vector/global-secrets/secret-001": + _ = json.NewEncoder(w).Encode(accountSecretDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Secret List Tests --- + +func TestAccountSecretListCmd_TableOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + 
defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "secret-001") + assert.Contains(t, out, "DB_PASSWORD") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "secret-002") + assert.Contains(t, out, "APP_ENV") + assert.Contains(t, out, "No") + assert.Contains(t, out, "production") +} + +func TestAccountSecretListCmd_SecretValueHidden(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + // The secret item should show "-" for value, the plain one should show "production" + assert.Contains(t, out, "production") +} + +func TestAccountSecretListCmd_JSONOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "secret-001", result[0]["id"]) +} + +func TestAccountSecretListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "list", "--page", 
"3", "--per-page", "25"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "3", receivedPage) + assert.Equal(t, "25", receivedPerPage) +} + +func TestAccountSecretListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/global-secrets", receivedPath) +} + +func TestAccountSecretListCmd_AuthError(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestAccountSecretListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildAccountCmdNoAuth(output.Table) + cmd.SetArgs([]string{"account", "secret", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Secret Show Tests --- + +func TestAccountSecretShowCmd_TableOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "show", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "secret-001") + assert.Contains(t, out, 
"DB_PASSWORD") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "2025-01-05T00:00:00+00:00") +} + +func TestAccountSecretShowCmd_JSONOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "secret", "show", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "secret-001", result["id"]) + assert.Equal(t, "DB_PASSWORD", result["key"]) +} + +func TestAccountSecretShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "show", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/global-secrets/secret-001", receivedPath) +} + +func TestAccountSecretShowCmd_MissingArg(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Secret Create Tests --- + +func TestAccountSecretCreateCmd_TableOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "create", "--key", "API_TOKEN", "--value", 
"my-secret-value"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "secret-003") + assert.Contains(t, out, "API_TOKEN") + assert.Contains(t, out, "Yes") +} + +func TestAccountSecretCreateCmd_JSONOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "secret", "create", "--key", "API_TOKEN", "--value", "my-secret-value"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "secret-003", result["id"]) +} + +func TestAccountSecretCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountSecretCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "create", "--key", "API_TOKEN", "--value", "secret123"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/global-secrets", receivedPath) + assert.Equal(t, "API_TOKEN", receivedBody["key"]) + assert.Equal(t, "secret123", receivedBody["value"]) + assert.Equal(t, true, receivedBody["is_secret"]) +} + +func TestAccountSecretCreateCmd_NoSecretFlag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, 
&receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountSecretCreatePlainResponse) + })) + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "create", "--key", "APP_DEBUG", "--value", "true", "--no-secret"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, false, receivedBody["is_secret"]) + + out := stdout.String() + assert.Contains(t, out, "APP_DEBUG") + assert.Contains(t, out, "No") + assert.Contains(t, out, "true") +} + +func TestAccountSecretCreateCmd_MissingRequiredFlags(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +func TestAccountSecretCreateCmd_MissingValueFlag(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "create", "--key", "MY_KEY"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +// --- Secret Update Tests --- + +func TestAccountSecretUpdateCmd_TableOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "update", "secret-001", "--value", "new-password"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "secret-001") + assert.Contains(t, out, "DB_PASSWORD") + assert.Contains(t, out, "2025-01-20T00:00:00+00:00") +} + +func TestAccountSecretUpdateCmd_JSONOutput(t *testing.T) { + ts := 
newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "secret", "update", "secret-001", "--value", "new-password"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "secret-001", result["id"]) +} + +func TestAccountSecretUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "update", "secret-001", "--value", "new-password"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "PUT", receivedMethod) + assert.Equal(t, "/api/v1/vector/global-secrets/secret-001", receivedPath) + assert.Equal(t, "new-password", receivedBody["value"]) + // is_secret should not be sent when --no-secret is not provided + _, hasIsSecret := receivedBody["is_secret"] + assert.False(t, hasIsSecret) +} + +func TestAccountSecretUpdateCmd_NoSecretFlag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "update", 
"secret-001", "--no-secret"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, false, receivedBody["is_secret"]) +} + +func TestAccountSecretUpdateCmd_MissingArg(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "update"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Secret Delete Tests --- + +func TestAccountSecretDeleteCmd_TableOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "delete", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Secret deleted successfully") +} + +func TestAccountSecretDeleteCmd_JSONOutput(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "secret", "delete", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "secret-001", result["id"]) +} + +func TestAccountSecretDeleteCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSecretDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "delete", "secret-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, 
"DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/global-secrets/secret-001", receivedPath) +} + +func TestAccountSecretDeleteCmd_MissingArg(t *testing.T) { + ts := newAccountSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "secret", "delete"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Help Tests --- + +func TestAccountSecretCmd_Help(t *testing.T) { + cmd := NewAccountCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"secret", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "secrets and environment variables") +} diff --git a/internal/commands/account_ssh_key.go b/internal/commands/account_ssh_key.go new file mode 100644 index 0000000..7e00382 --- /dev/null +++ b/internal/commands/account_ssh_key.go @@ -0,0 +1,256 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const sshKeysBasePath = "/api/v1/vector/ssh-keys" + +// NewAccountSSHKeyCmd creates the account ssh-key command group. 
+func NewAccountSSHKeyCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "ssh-key", + Short: "Manage account SSH keys", + Long: "Manage account-level SSH keys for controlling SSH access.", + } + + cmd.AddCommand(newAccountSSHKeyListCmd()) + cmd.AddCommand(newAccountSSHKeyShowCmd()) + cmd.AddCommand(newAccountSSHKeyCreateCmd()) + cmd.AddCommand(newAccountSSHKeyDeleteCmd()) + + return cmd +} + +func newAccountSSHKeyListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List account SSH keys", + Long: "Retrieve a paginated list of account-level SSH keys.", + Example: ` # List account SSH keys + vector account ssh-key list`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + resp, err := app.Client.Get(cmd.Context(), sshKeysBasePath, query) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + headers := []string{"ID", "NAME", "FINGERPRINT", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "name"), + formatString(getString(item, "fingerprint")), + getString(item, "created_at"), + }) + } + + 
app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newAccountSSHKeyShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show SSH key details", + Long: "Retrieve details of a specific account-level SSH key.", + Example: ` # Show SSH key details + vector account ssh-key show key-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), sshKeysBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to show SSH key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to show SSH key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to show SSH key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to show SSH key: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Fingerprint", Value: formatString(getString(item, "fingerprint"))}, + {Key: "Public Key Preview", Value: formatString(getString(item, "public_key_preview"))}, + {Key: "Account Default", Value: formatBool(getBool(item, "is_account_default"))}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + }, + } +} + +func newAccountSSHKeyCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create", + Short: "Create an SSH key", + Long: "Create a new account-level SSH key.", + Example: ` # Create an SSH key + vector account ssh-key create --name 
"deploy-key" --public-key "ssh-ed25519 AAAA..."`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + name, _ := cmd.Flags().GetString("name") + publicKey, _ := cmd.Flags().GetString("public-key") + + reqBody := map[string]any{ + "name": name, + "public_key": publicKey, + } + + resp, err := app.Client.Post(cmd.Context(), sshKeysBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create SSH key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create SSH key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create SSH key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create SSH key: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Fingerprint", Value: formatString(getString(item, "fingerprint"))}, + {Key: "Public Key Preview", Value: formatString(getString(item, "public_key_preview"))}, + {Key: "Account Default", Value: formatBool(getBool(item, "is_account_default"))}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + }, + } + + cmd.Flags().String("name", "", "Friendly name for the SSH key (required)") + cmd.Flags().String("public-key", "", "SSH public key in OpenSSH format (required)") + _ = cmd.MarkFlagRequired("name") + _ = cmd.MarkFlagRequired("public-key") + + return cmd +} + +func newAccountSSHKeyDeleteCmd() *cobra.Command { + return &cobra.Command{ + Use: "delete ", + Short: "Delete an SSH key", + Long: "Delete an account-level SSH key.", + Example: ` # Delete an SSH key + vector account 
ssh-key delete key-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), sshKeysBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete SSH key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete SSH key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete SSH key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + output.PrintMessage(cmd.OutOrStdout(), "SSH key deleted successfully.") + return nil + }, + } +} diff --git a/internal/commands/account_ssh_key_test.go b/internal/commands/account_ssh_key_test.go new file mode 100644 index 0000000..ad63dcc --- /dev/null +++ b/internal/commands/account_ssh_key_test.go @@ -0,0 +1,451 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var accountSSHKeyListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "key-001", + "account_id": 1, + "name": "deploy key", + "fingerprint": "SHA256:abc123def456", + "public_key_preview": "ssh-rsa AAAAB3...user@host", + "is_account_default": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 1, + }, + "message": "SSH keys retrieved successfully", + "http_status": 200, +} + +var accountSSHKeyShowResponse = map[string]any{ + "data": map[string]any{ + "id": "key-001", + "account_id": 1, + "name": "deploy key", 
+ "fingerprint": "SHA256:abc123def456", + "public_key_preview": "ssh-rsa AAAAB3...user@host", + "is_account_default": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSH key retrieved successfully", + "http_status": 200, +} + +var accountSSHKeyCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "key-002", + "account_id": 1, + "name": "new key", + "fingerprint": "SHA256:xyz789", + "public_key_preview": "ssh-rsa BBBBB3...user@host", + "is_account_default": false, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSH key created successfully", + "http_status": 201, +} + +var accountSSHKeyDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "key-001", + "account_id": 1, + "name": "deploy key", + "fingerprint": "SHA256:abc123def456", + "public_key_preview": "ssh-rsa AAAAB3...user@host", + "is_account_default": true, + }, + "message": "SSH key deleted successfully", + "http_status": 200, +} + +func newAccountSSHKeyTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/ssh-keys": + _ = json.NewEncoder(w).Encode(accountSSHKeyListResponse) + + case method == "GET" && path == "/api/v1/vector/ssh-keys/key-001": + _ = json.NewEncoder(w).Encode(accountSSHKeyShowResponse) + + case method == "POST" && path == "/api/v1/vector/ssh-keys": + w.WriteHeader(http.StatusCreated) + _ = 
json.NewEncoder(w).Encode(accountSSHKeyCreateResponse) + + case method == "DELETE" && path == "/api/v1/vector/ssh-keys/key-001": + _ = json.NewEncoder(w).Encode(accountSSHKeyDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- SSH Key List Tests --- + +func TestAccountSSHKeyListCmd_TableOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "key-001") + assert.Contains(t, out, "deploy key") + assert.Contains(t, out, "SHA256:abc123def456") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") +} + +func TestAccountSSHKeyListCmd_JSONOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "ssh-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 1) + assert.Equal(t, "key-001", result[0]["id"]) +} + +func TestAccountSSHKeyListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSSHKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "list", "--page", "2", "--per-page", "10"}) + + err := cmd.Execute() + 
require.NoError(t, err) + assert.Equal(t, "2", receivedPage) + assert.Equal(t, "10", receivedPerPage) +} + +func TestAccountSSHKeyListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSSHKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/ssh-keys", receivedPath) +} + +func TestAccountSSHKeyListCmd_AuthError(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestAccountSSHKeyListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildAccountCmdNoAuth(output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- SSH Key Show Tests --- + +func TestAccountSSHKeyShowCmd_TableOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "show", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "key-001") + assert.Contains(t, out, "deploy key") + assert.Contains(t, out, "SHA256:abc123def456") + 
assert.Contains(t, out, "ssh-rsa AAAAB3...user@host") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") +} + +func TestAccountSSHKeyShowCmd_JSONOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "ssh-key", "show", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "key-001", result["id"]) + assert.Equal(t, "deploy key", result["name"]) +} + +func TestAccountSSHKeyShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSSHKeyShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "show", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/ssh-keys/key-001", receivedPath) +} + +func TestAccountSSHKeyShowCmd_MissingArg(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- SSH Key Create Tests --- + +func TestAccountSSHKeyCreateCmd_TableOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "create", + "--name", 
"new key", + "--public-key", "ssh-rsa BBBBB3...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "key-002") + assert.Contains(t, out, "new key") + assert.Contains(t, out, "SHA256:xyz789") +} + +func TestAccountSSHKeyCreateCmd_JSONOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "ssh-key", "create", + "--name", "new key", + "--public-key", "ssh-rsa BBBBB3...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "key-002", result["id"]) +} + +func TestAccountSSHKeyCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(accountSSHKeyCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "create", + "--name", "my key", + "--public-key", "ssh-rsa AAAAB3NzaC1yc2EA...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/ssh-keys", receivedPath) + assert.Equal(t, "my key", receivedBody["name"]) + assert.Equal(t, "ssh-rsa AAAAB3NzaC1yc2EA...", receivedBody["public_key"]) +} + +func TestAccountSSHKeyCreateCmd_MissingRequiredFlags(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) 
+ cmd.SetArgs([]string{"account", "ssh-key", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +// --- SSH Key Delete Tests --- + +func TestAccountSSHKeyDeleteCmd_TableOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "delete", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "SSH key deleted successfully") +} + +func TestAccountSSHKeyDeleteCmd_JSONOutput(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "ssh-key", "delete", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "key-001", result["id"]) +} + +func TestAccountSSHKeyDeleteCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountSSHKeyDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "delete", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/ssh-keys/key-001", receivedPath) +} + +func TestAccountSSHKeyDeleteCmd_MissingArg(t *testing.T) { + ts := newAccountSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "ssh-key", "delete"}) + + 
err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- SSH Key Help Tests --- + +func TestAccountSSHKeyCmd_Help(t *testing.T) { + cmd := NewAccountCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"ssh-key", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "account-level SSH keys") +} diff --git a/internal/commands/account_test.go b/internal/commands/account_test.go new file mode 100644 index 0000000..4278a81 --- /dev/null +++ b/internal/commands/account_test.go @@ -0,0 +1,272 @@ +package commands + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var accountShowResponse = map[string]any{ + "data": map[string]any{ + "owner": map[string]any{ + "name": "John Doe", + "email": "user@example.com", + }, + "account": map[string]any{ + "name": "My Account", + "company": "Acme Corp", + }, + "cluster": map[string]any{ + "alb_dns_name": "alb-abc123.us-west-2.elb.amazonaws.com", + "aurora_cluster_endpoint": "cluster.abc123.us-east-1.rds.amazonaws.com", + "ssh_nlb_dns": "nlb-abc123.us-west-2.elb.amazonaws.com", + }, + "domains": []any{"example.com", "example.org"}, + "sites": map[string]any{ + "total": float64(5), + "by_status": map[string]any{ + "pending": float64(0), + "activation_requested": float64(0), + "active": float64(3), + "suspension_requested": float64(0), + "suspended": float64(1), + "unsuspension_requested": float64(0), + 
"termination_requested": float64(0), + "terminated": float64(1), + "canceled": float64(0), + }, + }, + "environments": map[string]any{ + "total": float64(8), + "by_status": map[string]any{ + "pending": float64(1), + "provisioning": float64(0), + "active": float64(5), + "suspending": float64(0), + "suspended": float64(1), + "unsuspending": float64(0), + "terminating": float64(0), + "terminated": float64(1), + "failed": float64(0), + }, + }, + }, + "message": "Account summary retrieved successfully", + "http_status": 200, +} + +func newAccountTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + if r.Method == "GET" && r.URL.Path == "/api/v1/vector/account" { + _ = json.NewEncoder(w).Encode(accountShowResponse) + } else { + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildAccountCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + accountCmd := NewAccountCmd() + root.AddCommand(accountCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + 
root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildAccountCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + accountCmd := NewAccountCmd() + root.AddCommand(accountCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Account Show Tests --- + +func TestAccountShowCmd_TableOutput(t *testing.T) { + ts := newAccountTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "show"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "John Doe") + assert.Contains(t, out, "user@example.com") + assert.Contains(t, out, "My Account") + assert.Contains(t, out, "Acme Corp") + assert.Contains(t, out, "5") + assert.Contains(t, out, "3") + assert.Contains(t, out, "8") +} + +func TestAccountShowCmd_JSONOutput(t *testing.T) { + ts := newAccountTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"account", "show"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + owner := result["owner"].(map[string]any) + assert.Equal(t, "John Doe", owner["name"]) + assert.Equal(t, "user@example.com", owner["email"]) + account := result["account"].(map[string]any) + assert.Equal(t, "My Account", account["name"]) 
+} + +func TestAccountShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(accountShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "show"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/account", receivedPath) +} + +func TestAccountShowCmd_AuthError(t *testing.T) { + ts := newAccountTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildAccountCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"account", "show"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestAccountShowCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildAccountCmdNoAuth(output.Table) + cmd.SetArgs([]string{"account", "show"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Text Tests --- + +func TestAccountCmd_HelpText(t *testing.T) { + ts := newAccountTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "show") + assert.Contains(t, out, "account-level resources") +} + +func TestAccountShowCmd_HelpText(t *testing.T) { + ts := newAccountTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAccountCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"account", "show", "--help"}) + 
+ err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "account details") +} diff --git a/internal/commands/archive.go b/internal/commands/archive.go new file mode 100644 index 0000000..04fc6e2 --- /dev/null +++ b/internal/commands/archive.go @@ -0,0 +1,183 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewArchiveCmd creates the archive command group. +func NewArchiveCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "archive", + Short: "Manage site archives", + Long: "Manage site archives including importing archive files.", + } + + cmd.AddCommand(newArchiveImportCmd()) + + return cmd +} + +func newArchiveImportCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "import ", + Short: "Import a site archive from a local file", + Long: "Import a site archive from a local file. Creates an import session, uploads the file to a presigned URL, and triggers the import.", + Example: ` # Import an archive + vector archive import site-abc123 site-backup.tar.gz + + # Import with search and replace + vector archive import site-abc123 site.tar.gz --search-replace-from old.example.com --search-replace-to new.example.com`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + filePath := args[1] + + // Open and stat the file + file, err := os.Open(filePath) + if err != nil { + return fmt.Errorf("cannot open file: %w", err) + } + defer func() { _ = file.Close() }() + + fi, err := file.Stat() + if err != nil { + return fmt.Errorf("cannot read file info: %w", err) + } + + filename := filepath.Base(filePath) + fileSize := fi.Size() + w := cmd.ErrOrStderr() + + // Step 1: Create import session + _, _ = fmt.Fprintln(w, "Creating import session...") + + reqBody := map[string]any{ + 
"filename": filename, + "content_length": fileSize, + } + + options := map[string]any{} + + dropTables, _ := cmd.Flags().GetBool("drop-tables") + if dropTables { + options["drop_tables"] = true + } + + disableForeignKeys, _ := cmd.Flags().GetBool("disable-foreign-keys") + if disableForeignKeys { + options["disable_foreign_keys"] = true + } + + srFrom, _ := cmd.Flags().GetString("search-replace-from") + srTo, _ := cmd.Flags().GetString("search-replace-to") + if srFrom != "" && srTo != "" { + options["search_replace"] = map[string]string{ + "from": srFrom, + "to": srTo, + } + } + + if len(options) > 0 { + reqBody["options"] = options + } + + resp, err := app.Client.Post(cmd.Context(), importsPath(siteID), reqBody) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + + importID := getString(item, "id") + uploadURL := getString(item, "upload_url") + + if importID == "" || uploadURL == "" { + return fmt.Errorf("import session response missing upload URL or import ID") + } + + // Step 2: Upload file to presigned URL + sizeMB := float64(fileSize) / (1024 * 1024) + _, _ = fmt.Fprintf(w, "Uploading %s (%.1f MB)...\n", filename, sizeMB) + + uploadResp, err := app.Client.PutFile(cmd.Context(), uploadURL, file) + if err != nil { + return fmt.Errorf("failed to upload file: %w", err) + } + defer func() { _ = uploadResp.Body.Close() }() + + _, _ = fmt.Fprintln(w, "Upload complete.") + + // Step 3: Trigger import + _, _ = fmt.Fprintln(w, "Starting import...") + + runEndpoint := fmt.Sprintf("%s/%s/run", 
importsPath(siteID), importID) + runResp, err := app.Client.Post(cmd.Context(), runEndpoint, nil) + if err != nil { + return fmt.Errorf("failed to start import: %w", err) + } + defer func() { _ = runResp.Body.Close() }() + + runBody, err := io.ReadAll(runResp.Body) + if err != nil { + return fmt.Errorf("failed to start import: %w", err) + } + + runData, err := parseResponseData(runBody) + if err != nil { + return fmt.Errorf("failed to start import: %w", err) + } + + _, _ = fmt.Fprintln(w, "Import started.") + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(runData)) + } + + var runItem map[string]any + if err := json.Unmarshal(runData, &runItem); err != nil { + return fmt.Errorf("failed to parse import result: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Import ID", Value: getString(runItem, "id")}, + {Key: "Status", Value: getString(runItem, "status")}, + }) + + return nil + }, + } + + cmd.Flags().Bool("drop-tables", false, "Drop existing tables before import") + cmd.Flags().Bool("disable-foreign-keys", false, "Disable foreign key checks during import") + cmd.Flags().String("search-replace-from", "", "Value to search for (used with --search-replace-to)") + cmd.Flags().String("search-replace-to", "", "Replacement value (used with --search-replace-from)") + + return cmd +} diff --git a/internal/commands/archive_test.go b/internal/commands/archive_test.go new file mode 100644 index 0000000..372625f --- /dev/null +++ b/internal/commands/archive_test.go @@ -0,0 +1,448 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + 
"github.com/built-fast/vector-cli/internal/output" +) + +var archiveImportCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "imp-100", + "vector_site_id": "site-001", + "status": "pending", + "filename": "archive.tar.gz", + "content_length": float64(10485760), + "upload_url": "__UPLOAD_URL__", + "upload_expires_at": "2025-01-15T13:00:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Import session created successfully", + "http_status": 201, +} + +var archiveImportRunResponse = map[string]any{ + "data": map[string]any{ + "id": "imp-100", + "vector_site_id": "site-001", + "status": "importing", + "filename": "archive.tar.gz", + "created_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Archive import started", + "http_status": 202, +} + +func newArchiveImportTestServer(validToken string) *httptest.Server { + mux := http.NewServeMux() + + // Upload endpoint (presigned URL simulation — no auth required) + mux.HandleFunc("/upload/", func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPut { + w.WriteHeader(http.StatusMethodNotAllowed) + return + } + // Consume the body + _, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + }) + + // API endpoints + mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(archiveImportCreateResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports/imp-100/run": + 
w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(archiveImportRunResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + }) + + return httptest.NewServer(mux) +} + +func buildArchiveCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{Use: "vector"} + root.AddCommand(NewArchiveCmd()) + + root.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "") + app := appctx.NewApp(&config.Config{}, client, "test") + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + } + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildArchiveCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{Use: "vector"} + root.AddCommand(NewArchiveCmd()) + + root.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { + client := api.NewClient("", "", "") + app := appctx.NewApp(&config.Config{}, client, "") + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + } + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func createTempArchiveFile(t *testing.T) string { + t.Helper() + dir := t.TempDir() + path := filepath.Join(dir, "archive.tar.gz") + err := os.WriteFile(path, []byte("fake archive content for testing"), 0644) + require.NoError(t, err) + return path +} + +// --- Archive Import Tests --- + +func TestArchiveImportCmd_TableOutput(t *testing.T) { + ts := newArchiveImportTestServer("valid-token") + defer ts.Close() + + // Patch the upload URL in the create response to 
point to our test server + archiveImportCreateResponse["data"].(map[string]any)["upload_url"] = ts.URL + "/upload/imp-100" + defer func() { + archiveImportCreateResponse["data"].(map[string]any)["upload_url"] = "__UPLOAD_URL__" + }() + + tmpFile := createTempArchiveFile(t) + + cmd, stdout, stderr := buildArchiveCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "imp-100") + assert.Contains(t, out, "importing") + + errOut := stderr.String() + assert.Contains(t, errOut, "Creating import session...") + assert.Contains(t, errOut, "Uploading archive.tar.gz") + assert.Contains(t, errOut, "Upload complete.") + assert.Contains(t, errOut, "Starting import...") + assert.Contains(t, errOut, "Import started.") +} + +func TestArchiveImportCmd_JSONOutput(t *testing.T) { + ts := newArchiveImportTestServer("valid-token") + defer ts.Close() + + archiveImportCreateResponse["data"].(map[string]any)["upload_url"] = ts.URL + "/upload/imp-100" + defer func() { + archiveImportCreateResponse["data"].(map[string]any)["upload_url"] = "__UPLOAD_URL__" + }() + + tmpFile := createTempArchiveFile(t) + + cmd, stdout, _ := buildArchiveCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "imp-100", result["id"]) + assert.Equal(t, "importing", result["status"]) +} + +func TestArchiveImportCmd_HTTPPaths(t *testing.T) { + var createMethod, createPath string + var createBody map[string]any + var runMethod, runPath string + var uploadMethod, uploadPath string + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Path + method := r.Method + + switch { + case method == "PUT" && path 
== "/upload/imp-100": + uploadMethod = method + uploadPath = path + _, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports": + createMethod = method + createPath = path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &createBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + // Return create response with this server's upload URL + resp := map[string]any{ + "data": map[string]any{ + "id": "imp-100", + "status": "pending", + "upload_url": "http://" + r.Host + "/upload/imp-100", + }, + } + _ = json.NewEncoder(w).Encode(resp) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports/imp-100/run": + runMethod = method + runPath = path + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(archiveImportRunResponse) + + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer ts.Close() + + tmpFile := createTempArchiveFile(t) + + cmd, _, _ := buildArchiveCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", createMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/imports", createPath) + assert.Equal(t, "archive.tar.gz", createBody["filename"]) + assert.NotNil(t, createBody["content_length"]) + + assert.Equal(t, "PUT", uploadMethod) + assert.Equal(t, "/upload/imp-100", uploadPath) + + assert.Equal(t, "POST", runMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/imports/imp-100/run", runPath) +} + +func TestArchiveImportCmd_WithOptions(t *testing.T) { + var createBody map[string]any + + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + path := r.URL.Path + method := r.Method + + switch { + case method == "PUT": + _, _ = io.ReadAll(r.Body) + w.WriteHeader(http.StatusOK) 
+ + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports": + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &createBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + resp := map[string]any{ + "data": map[string]any{ + "id": "imp-100", + "status": "pending", + "upload_url": "http://" + r.Host + "/upload/imp-100", + }, + } + _ = json.NewEncoder(w).Encode(resp) + + case method == "POST": + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(archiveImportRunResponse) + + default: + w.WriteHeader(http.StatusNotFound) + } + })) + defer ts.Close() + + tmpFile := createTempArchiveFile(t) + + cmd, _, _ := buildArchiveCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{ + "archive", "import", "site-001", tmpFile, + "--drop-tables", + "--disable-foreign-keys", + "--search-replace-from", "example.org", + "--search-replace-to", "example.com", + }) + + err := cmd.Execute() + require.NoError(t, err) + + options, ok := createBody["options"].(map[string]any) + require.True(t, ok) + assert.Equal(t, true, options["drop_tables"]) + assert.Equal(t, true, options["disable_foreign_keys"]) + + sr, ok := options["search_replace"].(map[string]any) + require.True(t, ok) + assert.Equal(t, "example.org", sr["from"]) + assert.Equal(t, "example.com", sr["to"]) +} + +func TestArchiveImportCmd_FileNotFound(t *testing.T) { + ts := newArchiveImportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildArchiveCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", "/nonexistent/file.tar.gz"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "cannot open file") +} + +func TestArchiveImportCmd_MissingArgs(t *testing.T) { + ts := newArchiveImportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildArchiveCmd(ts.URL, "valid-token", output.Table) + 
cmd.SetArgs([]string{"archive", "import", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +func TestArchiveImportCmd_MissingUploadURL(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(map[string]any{ + "data": map[string]any{ + "id": "imp-100", + "status": "pending", + }, + }) + })) + defer ts.Close() + + tmpFile := createTempArchiveFile(t) + + cmd, _, _ := buildArchiveCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "import session response missing upload URL or import ID") +} + +func TestArchiveImportCmd_AuthError(t *testing.T) { + ts := newArchiveImportTestServer("valid-token") + defer ts.Close() + + tmpFile := createTempArchiveFile(t) + + cmd, _, _ := buildArchiveCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestArchiveImportCmd_NoAuth(t *testing.T) { + tmpFile := createTempArchiveFile(t) + + cmd, _, _ := buildArchiveCmdNoAuth(output.Table) + cmd.SetArgs([]string{"archive", "import", "site-001", tmpFile}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Tests --- + +func TestArchiveCmd_Help(t *testing.T) { + cmd := NewArchiveCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "import") + 
assert.Contains(t, out, "Manage site archives") +} + +func TestArchiveImportCmd_Help(t *testing.T) { + cmd := NewArchiveCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"import", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Import a site archive from a local file") + assert.Contains(t, out, "--drop-tables") + assert.Contains(t, out, "--disable-foreign-keys") + assert.Contains(t, out, "--search-replace-from") + assert.Contains(t, out, "--search-replace-to") +} diff --git a/internal/commands/auth.go b/internal/commands/auth.go new file mode 100644 index 0000000..860bbb7 --- /dev/null +++ b/internal/commands/auth.go @@ -0,0 +1,317 @@ +package commands + +import ( + "encoding/json" + "errors" + "fmt" + "io" + "os" + "strings" + + "github.com/spf13/cobra" + "github.com/zalando/go-keyring" + "golang.org/x/term" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +// whoamiResponse represents the parsed response from GET /api/v1/auth/whoami. +type whoamiResponse struct { + Data struct { + User struct { + ID int `json:"id"` + Name string `json:"name"` + Email string `json:"email"` + } `json:"user"` + Token struct { + Name string `json:"name"` + Abilities []string `json:"abilities"` + ExpiresAt *string `json:"expires_at"` + LastUsedAt *string `json:"last_used_at"` + } `json:"token"` + Account struct { + ID int `json:"id"` + Name string `json:"name"` + } `json:"account"` + } `json:"data"` + Message string `json:"message"` + HTTPStatus int `json:"http_status"` +} + +// stdinFd is the file descriptor used for reading terminal input. +// Override in tests to use a pipe instead. +var stdinFd = int(os.Stdin.Fd()) + +// stdinReader is the reader used for reading non-terminal (piped) input. +// Override in tests. 
+var stdinReader io.Reader = os.Stdin + +// NewAuthCmd creates the auth command group. +func NewAuthCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "auth", + Short: "Manage authentication", + Long: "Manage Vector API authentication including login, logout, and status.", + } + + cmd.AddCommand(newAuthLoginCmd()) + cmd.AddCommand(newAuthLogoutCmd()) + cmd.AddCommand(newAuthStatusCmd()) + + return cmd +} + +func newAuthLoginCmd() *cobra.Command { + return &cobra.Command{ + Use: "login", + Short: "Authenticate with the Vector API", + Long: "Validate an API token and store it in the system keyring.", + Example: ` # Log in interactively (prompts for token) + vector auth login + + # Log in with a token + vector auth login --token mytoken123`, + RunE: func(cmd *cobra.Command, args []string) error { + app := appctx.FromContext(cmd.Context()) + if app == nil { + return fmt.Errorf("app not initialized") + } + + token := app.Client.Token + + // If no token from flag/env/stored credentials, prompt interactively + if token == "" { + var err error + token, err = promptForToken(cmd.ErrOrStderr()) + if err != nil { + return err + } + } + + if token == "" { + return &api.APIError{ + Message: "No API token provided.", + HTTPStatus: 401, + ExitCode: 2, + } + } + + // Build a client with the provided token + client := api.NewClient(app.Client.BaseURL, token, app.Client.UserAgent) + + // Validate via GET /api/v1/auth/whoami + resp, err := client.Get(cmd.Context(), "/api/v1/auth/whoami", nil) + if err != nil { + var apiErr *api.APIError + if errors.As(err, &apiErr) { + if apiErr.HTTPStatus == 401 || apiErr.HTTPStatus == 403 { + return &api.APIError{ + Message: "Invalid API token.", + HTTPStatus: apiErr.HTTPStatus, + ExitCode: 2, + } + } + return apiErr + } + // Network error + return &api.APIError{ + Message: fmt.Sprintf("Network error: %s", err), + ExitCode: 5, + } + } + defer func() { _ = resp.Body.Close() }() + + // Read and parse the response body + body, err := 
io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("reading response: %w", err) + } + + var whoami whoamiResponse + if err := json.Unmarshal(body, &whoami); err != nil { + return fmt.Errorf("parsing response: %w", err) + } + + // Save token to system keyring + storedInKeyring := true + if err := config.Save(token); err != nil { + if errors.Is(err, config.ErrKeyringDisabled) { + storedInKeyring = false + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Warning: keyring is disabled. Token validated but not persisted. Use --token flag or VECTOR_API_KEY environment variable.") + } else { + return fmt.Errorf("saving token: %w", err) + } + } + + // Output + if app.Output.Format() == output.JSON { + var raw json.RawMessage = body + return app.Output.JSON(raw) + } + + suffix := "Token stored in system keyring." + if !storedInKeyring { + suffix = "Token not persisted (keyring disabled)." + } + output.PrintMessage(cmd.OutOrStdout(), fmt.Sprintf( + "Authenticated as %s (%s). %s", + whoami.Data.User.Email, whoami.Data.Account.Name, suffix, + )) + return nil + }, + } +} + +func newAuthLogoutCmd() *cobra.Command { + return &cobra.Command{ + Use: "logout", + Short: "Remove stored credentials", + Long: "Log out by deleting stored API credentials from the system keyring.", + Example: ` # Log out and clear stored credentials + vector auth logout`, + RunE: func(cmd *cobra.Command, args []string) error { + app := appctx.FromContext(cmd.Context()) + if app == nil { + return fmt.Errorf("app not initialized") + } + + if err := config.Delete(); err != nil { + if !errors.Is(err, config.ErrKeyringDisabled) && !errors.Is(err, keyring.ErrNotFound) { + return fmt.Errorf("clearing token: %w", err) + } + } + + msg := "Logged out successfully." 
+ if app.Output.Format() == output.JSON { + return app.Output.JSON(map[string]string{ + "message": msg, + }) + } + + output.PrintMessage(cmd.OutOrStdout(), msg) + return nil + }, + } +} + +func newAuthStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "status", + Short: "Show authentication status", + Long: "Check whether you are authenticated and display account details.", + Example: ` # Check authentication status + vector auth status`, + RunE: func(cmd *cobra.Command, args []string) error { + app := appctx.FromContext(cmd.Context()) + if app == nil { + return fmt.Errorf("app not initialized") + } + + // Not authenticated if no token + if app.Client.Token == "" { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Not logged in.") + return &api.APIError{ + Message: "Not logged in.", + ExitCode: 2, + } + } + + // Validate via GET /api/v1/auth/whoami + resp, err := app.Client.Get(cmd.Context(), "/api/v1/auth/whoami", nil) + if err != nil { + var apiErr *api.APIError + if errors.As(err, &apiErr) { + if apiErr.HTTPStatus == 401 || apiErr.HTTPStatus == 403 { + _, _ = fmt.Fprintln(cmd.ErrOrStderr(), "Not logged in.") + return &api.APIError{ + Message: "Not logged in.", + ExitCode: 2, + } + } + return apiErr + } + return &api.APIError{ + Message: fmt.Sprintf("Network error: %s", err), + ExitCode: 5, + } + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("reading response: %w", err) + } + + var whoami whoamiResponse + if err := json.Unmarshal(body, &whoami); err != nil { + return fmt.Errorf("parsing response: %w", err) + } + + configDir, _ := config.ConfigDir() + + expires := "Never" + if whoami.Data.Token.ExpiresAt != nil { + expires = *whoami.Data.Token.ExpiresAt + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(map[string]any{ + "authenticated": true, + "user": whoami.Data.User, + "token": whoami.Data.Token, + "account": whoami.Data.Account, + "token_source": 
app.TokenSource, + "config_dir": configDir, + "api_url": app.Config.ApiURL, + }) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "User", Value: fmt.Sprintf("%s (%s)", whoami.Data.User.Name, whoami.Data.User.Email)}, + {Key: "Account", Value: whoami.Data.Account.Name}, + {Key: "Token", Value: whoami.Data.Token.Name}, + {Key: "Abilities", Value: strings.Join(whoami.Data.Token.Abilities, ", ")}, + {Key: "Expires", Value: expires}, + {Key: "Token source", Value: app.TokenSource}, + {Key: "API URL", Value: app.Config.ApiURL}, + {Key: "Config directory", Value: configDir}, + }) + return nil + }, + } +} + +// promptForToken prompts the user for an API token on stderr. +// If stdin is a terminal, input is masked. Otherwise, it reads plain text. +func promptForToken(w io.Writer) (string, error) { + _, _ = fmt.Fprint(w, "Enter API token: ") + + if term.IsTerminal(stdinFd) { + tokenBytes, err := term.ReadPassword(stdinFd) + _, _ = fmt.Fprintln(w) // newline after masked input + if err != nil { + return "", fmt.Errorf("reading token: %w", err) + } + return string(tokenBytes), nil + } + + // Non-terminal: read a line from stdin + var token string + buf := make([]byte, 4096) + n, err := stdinReader.Read(buf) + if err != nil && err != io.EOF { + return "", fmt.Errorf("reading token: %w", err) + } + token = string(buf[:n]) + // Trim trailing newline + if len(token) > 0 && token[len(token)-1] == '\n' { + token = token[:len(token)-1] + } + if len(token) > 0 && token[len(token)-1] == '\r' { + token = token[:len(token)-1] + } + return token, nil +} diff --git a/internal/commands/auth_test.go b/internal/commands/auth_test.go new file mode 100644 index 0000000..e8754c9 --- /dev/null +++ b/internal/commands/auth_test.go @@ -0,0 +1,748 @@ +package commands + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + 
"github.com/zalando/go-keyring" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +// whoamiTestResponse is the standard response from GET /api/v1/auth/whoami. +var whoamiTestResponse = map[string]any{ + "data": map[string]any{ + "user": map[string]any{ + "id": 1, + "name": "John Doe", + "email": "john@example.com", + }, + "token": map[string]any{ + "name": "vector-cli", + "abilities": []string{"*"}, + "expires_at": nil, + "last_used_at": "2026-03-14T12:00:00.000000Z", + }, + "account": map[string]any{ + "id": 1, + "name": "Acme Inc", + }, + }, + "message": "Success", + "http_status": 200, +} + +// newTestServer creates an httptest server that responds to /api/v1/auth/whoami. +// validToken is the token that triggers a 200; anything else gets 401. +func newTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/api/v1/auth/whoami" { + w.WriteHeader(http.StatusNotFound) + return + } + + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(whoamiTestResponse) + })) +} + +// buildAuthLoginCmd creates a root + auth + login command wired with an App context. 
+func buildAuthLoginCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + } + + authCmd := NewAuthCmd() + root.AddCommand(authCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func TestAuthLoginCmd_ValidToken_TableOutput(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAuthLoginCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Authenticated as john@example.com (Acme Inc). 
Token stored in system keyring.", strings.TrimSpace(stdout.String())) + + // Verify token was saved + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "valid-token", token) +} + +func TestAuthLoginCmd_ValidToken_JSONOutput(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAuthLoginCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + data := result["data"].(map[string]any) + user := data["user"].(map[string]any) + assert.Equal(t, "john@example.com", user["email"]) + assert.Equal(t, "Acme Inc", data["account"].(map[string]any)["name"]) + assert.Equal(t, "Success", result["message"]) +} + +func TestAuthLoginCmd_InvalidToken(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, _, stderr := buildAuthLoginCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Equal(t, "Invalid API token.", apiErr.Message) + + // stderr should show the error (via the root command's silence + execute.go pattern) + _ = stderr // error is returned, not printed by login itself +} + +func TestAuthLoginCmd_NetworkError(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + // Use an unreachable URL to trigger a network error + cmd, _, _ := buildAuthLoginCmd("http://127.0.0.1:1", "some-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + 
require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 5, apiErr.ExitCode) +} + +func TestAuthLoginCmd_OverwritesExistingToken(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + // Pre-existing token + require.NoError(t, config.Save("old-token")) + + ts := newTestServer("new-token") + defer ts.Close() + + cmd, _, _ := buildAuthLoginCmd(ts.URL, "new-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.NoError(t, err) + + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "new-token", token) +} + +func TestAuthLoginCmd_KeyringDisabled(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, stdout, stderr := buildAuthLoginCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stderr.String(), "keyring is disabled") + assert.Contains(t, stdout.String(), "Authenticated as") + assert.Contains(t, stdout.String(), "Token not persisted (keyring disabled)") +} + +func TestAuthLoginCmd_TokenFromEnv(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("env-token") + defer ts.Close() + + // Token comes through the client (simulating env resolution in PersistentPreRunE) + cmd, stdout, _ := buildAuthLoginCmd(ts.URL, "env-token", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Authenticated as john@example.com (Acme Inc). 
Token stored in system keyring.", strings.TrimSpace(stdout.String())) + + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "env-token", token) +} + +func TestAuthLoginCmd_PipedInput(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("piped-token") + defer ts.Close() + + // Override stdinFd to a non-terminal fd and stdinReader to our pipe + origFd := stdinFd + origReader := stdinReader + t.Cleanup(func() { + stdinFd = origFd + stdinReader = origReader + }) + + // Use a pipe fd (not a terminal) + r, w, err := os.Pipe() + require.NoError(t, err) + defer r.Close() + + stdinFd = int(r.Fd()) + stdinReader = r + + // Write token to pipe + _, err = w.Write([]byte("piped-token\n")) + require.NoError(t, err) + w.Close() + + // No token in client — forces interactive prompt + cmd, stdout, stderr := buildAuthLoginCmd(ts.URL, "", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err = cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Authenticated as john@example.com (Acme Inc). 
Token stored in system keyring.", strings.TrimSpace(stdout.String())) + assert.Contains(t, stderr.String(), "Enter API token: ") + + token, loadErr := config.Load() + require.NoError(t, loadErr) + assert.Equal(t, "piped-token", token) +} + +func TestAuthLoginCmd_NoTokenProvided(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + // Override stdinFd/Reader to return empty input + origFd := stdinFd + origReader := stdinReader + t.Cleanup(func() { + stdinFd = origFd + stdinReader = origReader + }) + + r, w, err := os.Pipe() + require.NoError(t, err) + defer r.Close() + + stdinFd = int(r.Fd()) + stdinReader = r + w.Close() // EOF immediately + + cmd, _, _ := buildAuthLoginCmd("http://localhost:0", "", output.Table) + cmd.SetArgs([]string{"auth", "login"}) + + err = cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// Integration test: full flow with root command +func TestAuthLogin_Integration_ValidToken(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("integration-token") + defer ts.Close() + + // Write config with test server URL + cfg := &config.Config{ApiURL: ts.URL} + require.NoError(t, config.SaveConfig(cfg)) + + root, stdout := buildRootWithAuth() + root.SetArgs([]string{"--no-json", "auth", "login", "--token", "integration-token"}) + + err := root.Execute() + require.NoError(t, err) + assert.Equal(t, "Authenticated as john@example.com (Acme Inc). 
Token stored in system keyring.", strings.TrimSpace(stdout.String())) + + // Verify token stored in keyring + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "integration-token", token) +} + +func TestAuthLogin_Integration_InvalidToken(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("valid-token") + defer ts.Close() + + cfg := &config.Config{ApiURL: ts.URL} + require.NoError(t, config.SaveConfig(cfg)) + + root, _ := buildRootWithAuth() + root.SetArgs([]string{"auth", "login", "--token", "wrong-token"}) + + err := root.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Equal(t, "Invalid API token.", apiErr.Message) + + // Token should NOT be saved + _, err = config.Load() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +func TestAuthLogin_Integration_EnvToken(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_API_KEY", "env-integration-token") + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("env-integration-token") + defer ts.Close() + + cfg := &config.Config{ApiURL: ts.URL} + require.NoError(t, config.SaveConfig(cfg)) + + root, _ := buildRootWithAuth() + root.SetArgs([]string{"auth", "login"}) + + err := root.Execute() + require.NoError(t, err) + + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "env-integration-token", token) +} + +// --- Auth Logout Tests --- + +func buildAuthLogoutCmd(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + 
app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + } + + authCmd := NewAuthCmd() + root.AddCommand(authCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func TestAuthLogoutCmd_TableOutput(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + // Save token first + require.NoError(t, config.Save("some-token")) + + cmd, stdout, _ := buildAuthLogoutCmd(output.Table) + cmd.SetArgs([]string{"auth", "logout"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Logged out successfully.", strings.TrimSpace(stdout.String())) + + // Verify token was removed from keyring + _, err = config.Load() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +func TestAuthLogoutCmd_JSONOutput(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + require.NoError(t, config.Save("some-token")) + + cmd, stdout, _ := buildAuthLogoutCmd(output.JSON) + cmd.SetArgs([]string{"auth", "logout"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]string + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "Logged out successfully.", result["message"]) +} + +func TestAuthLogoutCmd_AlreadyLoggedOut(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "") + + // No token stored — should succeed silently + cmd, stdout, _ := buildAuthLogoutCmd(output.Table) + cmd.SetArgs([]string{"auth", "logout"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Logged out successfully.", strings.TrimSpace(stdout.String())) +} + +func TestAuthLogoutCmd_KeyringDisabled(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + + cmd, stdout, _ 
:= buildAuthLogoutCmd(output.Table) + cmd.SetArgs([]string{"auth", "logout"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "Logged out successfully.", strings.TrimSpace(stdout.String())) +} + +func TestAuthLogoutCmd_KeyringDisabled_JSONOutput(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + t.Setenv("VECTOR_NO_KEYRING", "1") + + cmd, stdout, _ := buildAuthLogoutCmd(output.JSON) + cmd.SetArgs([]string{"auth", "logout"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]string + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "Logged out successfully.", result["message"]) +} + +func TestAuthLogout_Integration_RemovesToken(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + // Save config and token + require.NoError(t, config.SaveConfig(&config.Config{ApiURL: "http://localhost"})) + require.NoError(t, config.Save("test-token")) + + // Verify token exists in keyring + token, err := config.Load() + require.NoError(t, err) + assert.Equal(t, "test-token", token) + + root, stdout := buildRootWithAuth() + root.SetArgs([]string{"--no-json", "auth", "logout"}) + + err = root.Execute() + require.NoError(t, err) + assert.Equal(t, "Logged out successfully.", strings.TrimSpace(stdout.String())) + + // Token should be gone from keyring + _, err = config.Load() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +// buildRootWithAuth creates a real root command (with PersistentPreRunE) + auth subcommand. 
+func buildRootWithAuth() (*cobra.Command, *bytes.Buffer) { + stdout := new(bytes.Buffer) + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + cfg, err := config.LoadConfig() + if err != nil { + return err + } + var token, tokenSource string + token, _ = cmd.Flags().GetString("token") + if token != "" { + tokenSource = "flag" + } + if token == "" { + token = os.Getenv("VECTOR_API_KEY") + if token != "" { + tokenSource = "env" + } + } + if token == "" { + if t, err := config.Load(); err == nil && t != "" { + token = t + tokenSource = "keyring" + } + } + client := api.NewClient(cfg.ApiURL, token, "") + jsonFlag, _ := cmd.Flags().GetBool("json") + noJsonFlag, _ := cmd.Flags().GetBool("no-json") + format := output.DetectFormat(jsonFlag, noJsonFlag) + app := appctx.NewApp(cfg, client, tokenSource) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.PersistentFlags().String("token", "", "API token") + root.PersistentFlags().Bool("json", false, "Force JSON output") + root.PersistentFlags().Bool("no-json", false, "Force table output") + + root.SetOut(stdout) + root.AddCommand(NewAuthCmd()) + return root, stdout +} + +// --- Auth Status Tests --- + +// buildAuthStatusCmd creates a root + auth + status command wired with an App context. 
+func buildAuthStatusCmd(baseURL, token, tokenSource string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + cfg := config.DefaultConfig() + cfg.ApiURL = baseURL + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + cfg, + client, + tokenSource, + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + } + + authCmd := NewAuthCmd() + root.AddCommand(authCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func TestAuthStatusCmd_Authenticated_TableOutput(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAuthStatusCmd(ts.URL, "valid-token", "keyring", output.Table) + cmd.SetArgs([]string{"auth", "status"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "John Doe (john@example.com)") + assert.Contains(t, out, "Acme Inc") + assert.Contains(t, out, "vector-cli") + assert.Contains(t, out, "keyring") + assert.Contains(t, out, ts.URL) +} + +func TestAuthStatusCmd_Authenticated_JSONOutput(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildAuthStatusCmd(ts.URL, "valid-token", "flag", output.JSON) + cmd.SetArgs([]string{"auth", "status"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, true, result["authenticated"]) + assert.Equal(t, "flag", result["token_source"]) + assert.Equal(t, ts.URL, result["api_url"]) + assert.NotEmpty(t, result["config_dir"]) + + user := result["user"].(map[string]any) + 
assert.Equal(t, "john@example.com", user["email"]) + assert.Equal(t, "John Doe", user["name"]) + + account := result["account"].(map[string]any) + assert.Equal(t, "Acme Inc", account["name"]) + + token := result["token"].(map[string]any) + assert.Equal(t, "vector-cli", token["name"]) +} + +func TestAuthStatusCmd_NotAuthenticated(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + cmd, _, stderr := buildAuthStatusCmd("http://localhost", "", "", output.Table) + cmd.SetArgs([]string{"auth", "status"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Contains(t, stderr.String(), "Not logged in.") +} + +func TestAuthStatusCmd_InvalidToken(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", t.TempDir()) + + ts := newTestServer("valid-token") + defer ts.Close() + + cmd, _, stderr := buildAuthStatusCmd(ts.URL, "bad-token", "keyring", output.Table) + cmd.SetArgs([]string{"auth", "status"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Contains(t, stderr.String(), "Not logged in.") +} + +// Integration test: login → status → logout → status +func TestAuthStatus_Integration_FullFlow(t *testing.T) { + keyring.MockInit() + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + t.Setenv("VECTOR_NO_KEYRING", "") + + ts := newTestServer("flow-token") + defer ts.Close() + + // Save config with test server URL + cfg := &config.Config{ApiURL: ts.URL} + require.NoError(t, config.SaveConfig(cfg)) + + // Step 1: Login + root, stdout := buildRootWithAuth() + root.SetArgs([]string{"--no-json", "auth", "login", "--token", "flow-token"}) + require.NoError(t, root.Execute()) + assert.Equal(t, "Authenticated as john@example.com (Acme Inc). 
Token stored in system keyring.", strings.TrimSpace(stdout.String())) + + // Step 2: Status shows authenticated + root2, stdout2 := buildRootWithAuth() + root2.SetArgs([]string{"--no-json", "auth", "status"}) + require.NoError(t, root2.Execute()) + + out := stdout2.String() + assert.Contains(t, out, "John Doe (john@example.com)") + assert.Contains(t, out, "Acme Inc") + assert.Contains(t, out, "keyring") + assert.Contains(t, out, ts.URL) + + // Step 3: Logout + root3, stdout3 := buildRootWithAuth() + root3.SetArgs([]string{"--no-json", "auth", "logout"}) + require.NoError(t, root3.Execute()) + assert.Equal(t, "Logged out successfully.", strings.TrimSpace(stdout3.String())) + + // Step 4: Status shows not authenticated + root4, _ := buildRootWithAuth() + stderr4 := new(bytes.Buffer) + root4.SetErr(stderr4) + root4.SetArgs([]string{"--no-json", "auth", "status"}) + err := root4.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) + assert.Contains(t, stderr4.String(), "Not logged in.") +} diff --git a/internal/commands/backup.go b/internal/commands/backup.go new file mode 100644 index 0000000..f97831f --- /dev/null +++ b/internal/commands/backup.go @@ -0,0 +1,294 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const backupsBasePath = "/api/v1/vector/backups" + +// NewBackupCmd creates the backup command group. +func NewBackupCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "backup", + Short: "Manage backups", + Long: "Manage backups to protect your site data.", + } + + cmd.AddCommand(newBackupListCmd()) + cmd.AddCommand(newBackupShowCmd()) + cmd.AddCommand(newBackupCreateCmd()) + cmd.AddCommand(NewBackupDownloadCmd()) + + return cmd +} + +// formatArchivableType formats the archivable_type for display. 
+// e.g., "vector_site" becomes "Site", "vector_environment" becomes "Environment". +func formatArchivableType(raw string) string { + raw = strings.TrimPrefix(raw, "vector_") + if raw == "" { + return "-" + } + // Capitalize first letter + return strings.ToUpper(raw[:1]) + raw[1:] +} + +func newBackupListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List backups", + Long: "Retrieve a paginated list of backups, optionally filtered by type, site, or environment.", + Example: ` # List all backups + vector backup list + + # List backups for a site + vector backup list --site-id site-abc123`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + if cmd.Flags().Changed("site-id") { + v, _ := cmd.Flags().GetString("site-id") + if v != "" { + query.Set("site_id", v) + } + } + if cmd.Flags().Changed("environment-id") { + v, _ := cmd.Flags().GetString("environment-id") + if v != "" { + query.Set("environment_id", v) + } + } + if cmd.Flags().Changed("type") { + v, _ := cmd.Flags().GetString("type") + if v != "" { + query.Set("type", v) + } + } + + resp, err := app.Client.Get(cmd.Context(), backupsBasePath, query) + if err != nil { + return fmt.Errorf("failed to list backups: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list backups: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list backups: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list backups: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return 
fmt.Errorf("failed to list backups: %w", err) + } + + headers := []string{"ID", "MODEL", "TYPE", "SCOPE", "STATUS", "DESCRIPTION", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + formatArchivableType(getString(item, "archivable_type")), + getString(item, "type"), + getString(item, "scope"), + getString(item, "status"), + formatString(getString(item, "description")), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta.LastPage > 1 { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + cmd.Flags().String("site-id", "", "Filter by site ID") + cmd.Flags().String("environment-id", "", "Filter by environment ID") + cmd.Flags().String("type", "", "Filter by backup type (site/environment)") + return cmd +} + +func newBackupShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show a backup", + Long: "Display details of a specific backup.", + Example: ` # Show backup details + vector backup show backup-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), backupsBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to get backup: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get backup: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get backup: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get backup: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: 
getString(item, "id")}, + {Key: "Model", Value: formatArchivableType(getString(item, "archivable_type"))}, + {Key: "Model ID", Value: getString(item, "archivable_id")}, + {Key: "Type", Value: getString(item, "type")}, + {Key: "Scope", Value: getString(item, "scope")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Description", Value: formatString(getString(item, "description"))}, + {Key: "File Snapshot ID", Value: formatString(getString(item, "file_snapshot_id"))}, + {Key: "Database Snapshot ID", Value: formatString(getString(item, "database_snapshot_id"))}, + {Key: "Started At", Value: formatString(getString(item, "started_at"))}, + {Key: "Completed At", Value: formatString(getString(item, "completed_at"))}, + {Key: "Created At", Value: getString(item, "created_at")}, + {Key: "Updated At", Value: getString(item, "updated_at")}, + }) + return nil + }, + } +} + +func newBackupCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create", + Short: "Create a backup", + Long: "Create a new backup for a site or environment.", + Example: ` # Create a full backup + vector backup create --site-id site-abc123 + + # Create a database-only backup + vector backup create --environment-id env-abc123 --scope database`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID, _ := cmd.Flags().GetString("site-id") + envID, _ := cmd.Flags().GetString("environment-id") + + if siteID == "" && envID == "" { + return fmt.Errorf("either --site-id or --environment-id is required") + } + + scope, _ := cmd.Flags().GetString("scope") + description, _ := cmd.Flags().GetString("description") + + reqBody := map[string]any{ + "type": "manual", + "scope": scope, + } + + if siteID != "" { + reqBody["site_id"] = siteID + } + if envID != "" { + reqBody["environment_id"] = envID + } + if description != "" { + reqBody["description"] = description + } + + resp, err := 
app.Client.Post(cmd.Context(), backupsBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create backup: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create backup: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create backup: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create backup: %w", err) + } + + w := cmd.OutOrStdout() + output.PrintMessage(w, fmt.Sprintf("Backup created: %s (%s)", getString(item, "id"), getString(item, "status"))) + output.PrintMessage(w, "") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Model", Value: formatArchivableType(getString(item, "archivable_type"))}, + {Key: "Model ID", Value: getString(item, "archivable_id")}, + {Key: "Type", Value: getString(item, "type")}, + {Key: "Scope", Value: getString(item, "scope")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Description", Value: formatString(getString(item, "description"))}, + {Key: "File Snapshot ID", Value: formatString(getString(item, "file_snapshot_id"))}, + {Key: "Database Snapshot ID", Value: formatString(getString(item, "database_snapshot_id"))}, + {Key: "Started At", Value: formatString(getString(item, "started_at"))}, + {Key: "Completed At", Value: formatString(getString(item, "completed_at"))}, + {Key: "Created At", Value: getString(item, "created_at")}, + {Key: "Updated At", Value: getString(item, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().String("site-id", "", "Site ID to back up") + cmd.Flags().String("environment-id", "", "Environment ID to back up") + cmd.Flags().String("scope", "full", "Backup scope (full/database/files)") + 
cmd.Flags().String("description", "", "Description for the backup") + + return cmd +} diff --git a/internal/commands/backup_download.go b/internal/commands/backup_download.go new file mode 100644 index 0000000..2fc9a97 --- /dev/null +++ b/internal/commands/backup_download.go @@ -0,0 +1,161 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewBackupDownloadCmd creates the backup download command group. +func NewBackupDownloadCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "download", + Short: "Manage backup downloads", + Long: "Create and check backup download requests to retrieve backup archives.", + } + + cmd.AddCommand(newBackupDownloadCreateCmd()) + cmd.AddCommand(newBackupDownloadStatusCmd()) + + return cmd +} + +func newBackupDownloadCreateCmd() *cobra.Command { + return &cobra.Command{ + Use: "create ", + Short: "Create a backup download", + Long: "Create a new download request for a backup. 
The download is created with a pending status and processed asynchronously.", + Example: ` # Request a backup download + vector backup download create backup-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + backupID := args[0] + endpoint := fmt.Sprintf("%s/%s/downloads", backupsBasePath, backupID) + + resp, err := app.Client.Post(cmd.Context(), endpoint, nil) + if err != nil { + return fmt.Errorf("failed to create backup download: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create backup download: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create backup download: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create backup download: %w", err) + } + + downloadID := getString(item, "id") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: downloadID}, + {Key: "Status", Value: getString(item, "status")}, + }) + + w := cmd.OutOrStdout() + output.PrintMessage(w, "") + output.PrintMessage(w, fmt.Sprintf("Check download status with: vector backup download status %s %s", backupID, downloadID)) + + return nil + }, + } +} + +func newBackupDownloadStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "status ", + Short: "Check backup download status", + Long: "Retrieve the status of a backup download. 
Includes a download URL when the download is completed.", + Example: ` # Check download status + vector backup download status backup-456 download-789`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + backupID := args[0] + downloadID := args[1] + endpoint := fmt.Sprintf("%s/%s/downloads/%s", backupsBasePath, backupID, downloadID) + + resp, err := app.Client.Get(cmd.Context(), endpoint, nil) + if err != nil { + return fmt.Errorf("failed to get backup download status: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get backup download status: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get backup download status: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get backup download status: %w", err) + } + + status := getString(item, "status") + + kvs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Status", Value: status}, + {Key: "Size", Value: formatFloat(getFloat(item, "size_bytes"))}, + {Key: "Duration", Value: formatFloat(getFloat(item, "duration_ms"))}, + {Key: "Error", Value: formatString(getString(item, "error_message"))}, + } + + if status == "completed" { + kvs = append(kvs, output.KeyValue{Key: "Download URL", Value: getString(item, "download_url")}) + kvs = append(kvs, output.KeyValue{Key: "Download Expires", Value: formatString(getString(item, "download_expires_at"))}) + } + + kvs = append(kvs, + output.KeyValue{Key: "Started At", Value: formatString(getString(item, "started_at"))}, + output.KeyValue{Key: "Completed At", Value: formatString(getString(item, "completed_at"))}, + output.KeyValue{Key: 
"Created At", Value: getString(item, "created_at")}, + ) + + app.Output.KeyValue(kvs) + return nil + }, + } +} + +// formatFloat formats a float64 for display, returning "-" for zero values. +func formatFloat(v float64) string { + if v == 0 { + return "-" + } + return fmt.Sprintf("%.0f", v) +} diff --git a/internal/commands/backup_download_test.go b/internal/commands/backup_download_test.go new file mode 100644 index 0000000..63edbbb --- /dev/null +++ b/internal/commands/backup_download_test.go @@ -0,0 +1,349 @@ +package commands + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var backupDownloadCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "dl-001", + "vector_backup_id": "bk-001", + "status": "pending", + "s3_key": nil, + "size_bytes": nil, + "duration_ms": nil, + "error_message": nil, + "download_url": nil, + "download_expires_at": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Backup download initiated", + "http_status": 202, +} + +var backupDownloadStatusCompletedResponse = map[string]any{ + "data": map[string]any{ + "id": "dl-001", + "vector_backup_id": "bk-001", + "status": "completed", + "s3_key": "backups/downloads/dl-001.tar.gz", + "size_bytes": float64(52428800), + "duration_ms": float64(12500), + "error_message": nil, + "download_url": "https://s3.amazonaws.com/bucket/backups/downloads/dl-001.tar.gz?presigned=abc", + "download_expires_at": "2025-01-15T18:00:00+00:00", + "started_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:00:12+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:12+00:00", + }, + "message": "Backup download retrieved 
successfully", + "http_status": 200, +} + +var backupDownloadStatusPendingResponse = map[string]any{ + "data": map[string]any{ + "id": "dl-001", + "vector_backup_id": "bk-001", + "status": "processing", + "s3_key": nil, + "size_bytes": nil, + "duration_ms": nil, + "error_message": nil, + "download_url": nil, + "download_expires_at": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Backup download retrieved successfully", + "http_status": 200, +} + +func newBackupDownloadTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/backups/bk-001/downloads": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupDownloadCreateResponse) + + case method == "GET" && path == "/api/v1/vector/backups/bk-001/downloads/dl-001": + _ = json.NewEncoder(w).Encode(backupDownloadStatusCompletedResponse) + + case method == "GET" && path == "/api/v1/vector/backups/bk-001/downloads/dl-pending": + _ = json.NewEncoder(w).Encode(backupDownloadStatusPendingResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Download Create Tests --- + +func TestBackupDownloadCreateCmd_TableOutput(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, 
"valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "create", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dl-001") + assert.Contains(t, out, "pending") + assert.Contains(t, out, "Check download status with: vector backup download status bk-001 dl-001") +} + +func TestBackupDownloadCreateCmd_JSONOutput(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"backup", "download", "create", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dl-001", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestBackupDownloadCreateCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupDownloadCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "create", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/backups/bk-001/downloads", receivedPath) +} + +func TestBackupDownloadCreateCmd_MissingArg(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +func 
TestBackupDownloadCreateCmd_AuthError(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "create", "bk-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestBackupDownloadCreateCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildBackupCmdNoAuth(output.Table) + cmd.SetArgs([]string{"backup", "download", "create", "bk-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Download Status Tests --- + +func TestBackupDownloadStatusCmd_CompletedOutput(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dl-001") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "52428800") + assert.Contains(t, out, "12500") + assert.Contains(t, out, "https://s3.amazonaws.com/bucket/backups/downloads/dl-001.tar.gz?presigned=abc") + assert.Contains(t, out, "2025-01-15T18:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:00:12+00:00") +} + +func TestBackupDownloadStatusCmd_PendingOutput(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-pending"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dl-001") + assert.Contains(t, out, 
"processing") + // Download URL should NOT be shown when not completed + assert.NotContains(t, out, "Download URL") + assert.NotContains(t, out, "Download Expires") +} + +func TestBackupDownloadStatusCmd_JSONOutput(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dl-001", result["id"]) + assert.Equal(t, "completed", result["status"]) + assert.Equal(t, float64(52428800), result["size_bytes"]) +} + +func TestBackupDownloadStatusCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(backupDownloadStatusCompletedResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/backups/bk-001/downloads/dl-001", receivedPath) +} + +func TestBackupDownloadStatusCmd_MissingArgs(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +func TestBackupDownloadStatusCmd_AuthError(t *testing.T) { + ts := newBackupDownloadTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := 
buildBackupCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestBackupDownloadStatusCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildBackupCmdNoAuth(output.Table) + cmd.SetArgs([]string{"backup", "download", "status", "bk-001", "dl-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Tests --- + +func TestBackupDownloadCmd_Help(t *testing.T) { + cmd := NewBackupDownloadCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "create") + assert.Contains(t, out, "status") + assert.Contains(t, out, "backup download") +} + +// --- formatFloat Tests --- + +func TestFormatFloat(t *testing.T) { + assert.Equal(t, "-", formatFloat(0)) + assert.Equal(t, "52428800", formatFloat(52428800)) + assert.Equal(t, "12500", formatFloat(12500)) +} diff --git a/internal/commands/backup_test.go b/internal/commands/backup_test.go new file mode 100644 index 0000000..ebfdf6f --- /dev/null +++ b/internal/commands/backup_test.go @@ -0,0 +1,547 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var backupListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "bk-001", + "archivable_type": "vector_site", + 
"archivable_id": "site-001", + "type": "manual", + "scope": "full", + "status": "completed", + "description": "Pre-deployment backup", + "file_snapshot_id": "abc123", + "database_snapshot_id": "def456", + "started_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:05:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + { + "id": "bk-002", + "archivable_type": "vector_environment", + "archivable_id": "env-001", + "type": "scheduled", + "scope": "database", + "status": "pending", + "description": nil, + "file_snapshot_id": nil, + "database_snapshot_id": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-16T12:00:00+00:00", + "updated_at": "2025-01-16T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Backups retrieved successfully", + "http_status": 200, +} + +var backupShowResponse = map[string]any{ + "data": map[string]any{ + "id": "bk-001", + "archivable_type": "vector_site", + "archivable_id": "site-001", + "type": "manual", + "scope": "full", + "status": "completed", + "description": "Pre-deployment backup", + "file_snapshot_id": "abc123", + "database_snapshot_id": "def456", + "started_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:05:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Backup retrieved successfully", + "http_status": 200, +} + +var backupCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "bk-003", + "archivable_type": "vector_site", + "archivable_id": "site-001", + "type": "manual", + "scope": "full", + "status": "pending", + "description": "Manual backup", + "file_snapshot_id": nil, + "database_snapshot_id": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-20T12:00:00+00:00", + "updated_at": "2025-01-20T12:00:00+00:00", + }, + "message": "Backup 
initiated successfully", + "http_status": 202, +} + +func newBackupTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/backups": + _ = json.NewEncoder(w).Encode(backupListResponse) + + case method == "GET" && path == "/api/v1/vector/backups/bk-001": + _ = json.NewEncoder(w).Encode(backupShowResponse) + + case method == "POST" && path == "/api/v1/vector/backups": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupCreateResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildBackupCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewBackupCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildBackupCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + 
stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewBackupCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Backup List Tests --- + +func TestBackupListCmd_TableOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "bk-001") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "manual") + assert.Contains(t, out, "full") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "Pre-deployment backup") + assert.Contains(t, out, "bk-002") + assert.Contains(t, out, "Environment") + assert.Contains(t, out, "scheduled") + assert.Contains(t, out, "pending") +} + +func TestBackupListCmd_JSONOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"backup", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "bk-001", result[0]["id"]) +} + +func TestBackupListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = 
r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(backupListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "list", "--page", "3", "--per-page", "25"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "3", receivedPage) + assert.Equal(t, "25", receivedPerPage) +} + +func TestBackupListCmd_FilterFlags(t *testing.T) { + var receivedSiteID, receivedEnvID, receivedType string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedSiteID = r.URL.Query().Get("site_id") + receivedEnvID = r.URL.Query().Get("environment_id") + receivedType = r.URL.Query().Get("type") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(backupListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "list", "--site-id", "site-001", "--environment-id", "env-001", "--type", "site"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "site-001", receivedSiteID) + assert.Equal(t, "env-001", receivedEnvID) + assert.Equal(t, "site", receivedType) +} + +func TestBackupListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(backupListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/backups", receivedPath) +} + +func TestBackupListCmd_AuthError(t *testing.T) { + ts := 
newBackupTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"backup", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestBackupListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildBackupCmdNoAuth(output.Table) + cmd.SetArgs([]string{"backup", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Backup Show Tests --- + +func TestBackupShowCmd_TableOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "show", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "bk-001") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "manual") + assert.Contains(t, out, "full") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "Pre-deployment backup") + assert.Contains(t, out, "abc123") + assert.Contains(t, out, "def456") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:05:00+00:00") +} + +func TestBackupShowCmd_JSONOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"backup", "show", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "bk-001", result["id"]) + assert.Equal(t, "vector_site", result["archivable_type"]) +} + +func TestBackupShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + 
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(backupShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "show", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/backups/bk-001", receivedPath) +} + +func TestBackupShowCmd_MissingArg(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Backup Create Tests --- + +func TestBackupCreateCmd_TableOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "create", "--site-id", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Backup created: bk-003 (pending)") + assert.Contains(t, out, "bk-003") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "manual") + assert.Contains(t, out, "full") + assert.Contains(t, out, "pending") +} + +func TestBackupCreateCmd_JSONOutput(t *testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildBackupCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"backup", "create", "--site-id", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "bk-003", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + 
+func TestBackupCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "create", "--site-id", "site-001", "--scope", "database", "--description", "My backup"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/backups", receivedPath) + assert.Equal(t, "site-001", receivedBody["site_id"]) + assert.Equal(t, "manual", receivedBody["type"]) + assert.Equal(t, "database", receivedBody["scope"]) + assert.Equal(t, "My backup", receivedBody["description"]) +} + +func TestBackupCreateCmd_EnvironmentID(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "create", "--environment-id", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "env-001", receivedBody["environment_id"]) + assert.Equal(t, "manual", receivedBody["type"]) + assert.Equal(t, "full", receivedBody["scope"]) + _, hasSiteID := receivedBody["site_id"] + assert.False(t, hasSiteID) +} + +func TestBackupCreateCmd_MissingSiteAndEnv(t 
*testing.T) { + ts := newBackupTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "either --site-id or --environment-id is required") +} + +func TestBackupCreateCmd_DefaultScope(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(backupCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildBackupCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"backup", "create", "--site-id", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "full", receivedBody["scope"]) +} + +// --- Help Tests --- + +func TestBackupCmd_Help(t *testing.T) { + cmd := NewBackupCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "backups") +} + +// --- formatArchivableType Tests --- + +func TestFormatArchivableType(t *testing.T) { + assert.Equal(t, "Site", formatArchivableType("vector_site")) + assert.Equal(t, "Environment", formatArchivableType("vector_environment")) + assert.Equal(t, "Site", formatArchivableType("site")) + assert.Equal(t, "-", formatArchivableType("")) +} diff --git a/internal/commands/commands.go b/internal/commands/commands.go new file mode 100644 index 0000000..bb53d1c --- /dev/null +++ b/internal/commands/commands.go @@ -0,0 +1,2 @@ +// Package commands contains CLI command implementations. 
+package commands diff --git a/internal/commands/db.go b/internal/commands/db.go new file mode 100644 index 0000000..f67d321 --- /dev/null +++ b/internal/commands/db.go @@ -0,0 +1,19 @@ +package commands + +import ( + "github.com/spf13/cobra" +) + +// NewDbCmd creates the db command group. +func NewDbCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "db", + Short: "Manage database operations", + Long: "Manage database operations including import sessions and exports.", + } + + cmd.AddCommand(NewDbImportSessionCmd()) + cmd.AddCommand(NewDbExportCmd()) + + return cmd +} diff --git a/internal/commands/db_export.go b/internal/commands/db_export.go new file mode 100644 index 0000000..d3596ab --- /dev/null +++ b/internal/commands/db_export.go @@ -0,0 +1,158 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewDbExportCmd creates the db export command group. +func NewDbExportCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "export", + Short: "Manage database exports", + Long: "Create and check database export requests to download SQL dumps of site databases.", + } + + cmd.AddCommand(newDbExportCreateCmd()) + cmd.AddCommand(newDbExportStatusCmd()) + + return cmd +} + +func newDbExportCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create ", + Short: "Create a database export", + Long: "Create a new database export for a site. 
The export is created with a pending status and processed asynchronously.", + Example: ` # Create a database export + vector db export create site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + endpoint := fmt.Sprintf("%s/%s/db/export", sitesBasePath, siteID) + + format, _ := cmd.Flags().GetString("format") + payload := map[string]any{ + "format": format, + } + + resp, err := app.Client.Post(cmd.Context(), endpoint, payload) + if err != nil { + return fmt.Errorf("failed to create database export: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create database export: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create database export: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create database export: %w", err) + } + + w := cmd.OutOrStdout() + exportID := getString(item, "id") + status := getString(item, "status") + + output.PrintMessage(w, fmt.Sprintf("Export started: %s (%s)", exportID, status)) + output.PrintMessage(w, fmt.Sprintf("Check status with: vector db export status %s %s", siteID, exportID)) + + return nil + }, + } + + cmd.Flags().String("format", "sql", "Export format") + + return cmd +} + +func newDbExportStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "status ", + Short: "Check database export status", + Long: "Retrieve the status of a database export. 
Includes a download URL when the export is completed.", + Example: ` # Check export status + vector db export status site-abc123 export-789`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + exportID := args[1] + endpoint := fmt.Sprintf("%s/%s/db/exports/%s", sitesBasePath, siteID, exportID) + + resp, err := app.Client.Get(cmd.Context(), endpoint, nil) + if err != nil { + return fmt.Errorf("failed to get database export status: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get database export status: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get database export status: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get database export status: %w", err) + } + + status := getString(item, "status") + + kvs := []output.KeyValue{ + {Key: "Export ID", Value: getString(item, "id")}, + {Key: "Status", Value: status}, + {Key: "Format", Value: formatString(getString(item, "format"))}, + {Key: "Size", Value: formatFloat(getFloat(item, "size_bytes"))}, + {Key: "Duration", Value: formatFloat(getFloat(item, "duration_ms"))}, + {Key: "Error", Value: formatString(getString(item, "error_message"))}, + } + + if status == "completed" { + kvs = append(kvs, output.KeyValue{Key: "Download URL", Value: getString(item, "download_url")}) + kvs = append(kvs, output.KeyValue{Key: "Download Expires", Value: formatString(getString(item, "download_expires_at"))}) + } + + kvs = append(kvs, + output.KeyValue{Key: "Created", Value: getString(item, "created_at")}, + output.KeyValue{Key: "Completed", Value: formatString(getString(item, 
"completed_at"))}, + ) + + app.Output.KeyValue(kvs) + return nil + }, + } +} diff --git a/internal/commands/db_export_test.go b/internal/commands/db_export_test.go new file mode 100644 index 0000000..506dd89 --- /dev/null +++ b/internal/commands/db_export_test.go @@ -0,0 +1,346 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var dbExportCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "exp-001", + "vector_site_id": "site-001", + "status": "pending", + "format": "sql", + "size_bytes": nil, + "duration_ms": nil, + "error_message": nil, + "download_url": nil, + "download_expires_at": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "completed_at": nil, + }, + "message": "Database export started", + "http_status": 202, +} + +var dbExportStatusCompletedResponse = map[string]any{ + "data": map[string]any{ + "id": "exp-001", + "vector_site_id": "site-001", + "status": "completed", + "format": "sql", + "size_bytes": float64(10485760), + "duration_ms": float64(5000), + "error_message": nil, + "download_url": "https://s3.amazonaws.com/bucket/exports/exp-001.sql.gz?presigned=abc", + "download_expires_at": "2025-01-15T18:00:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:00:05+00:00", + }, + "message": "Database export retrieved successfully", + "http_status": 200, +} + +var dbExportStatusPendingResponse = map[string]any{ + "data": map[string]any{ + "id": "exp-001", + "vector_site_id": "site-001", + "status": "processing", + "format": "sql", + "size_bytes": nil, + "duration_ms": nil, + "error_message": nil, + "download_url": nil, + "download_expires_at": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "completed_at": nil, + }, + "message": "Database export 
retrieved successfully", + "http_status": 200, +} + +func newDbExportTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/sites/site-001/db/export": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(dbExportCreateResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001/db/exports/exp-001": + _ = json.NewEncoder(w).Encode(dbExportStatusCompletedResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001/db/exports/exp-pending": + _ = json.NewEncoder(w).Encode(dbExportStatusPendingResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Export Create Tests --- + +func TestDbExportCreateCmd_TableOutput(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "create", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Export started: exp-001 (pending)") + assert.Contains(t, out, "Check status with: vector db export status site-001 exp-001") +} + +func TestDbExportCreateCmd_JSONOutput(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"db", 
"export", "create", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "exp-001", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestDbExportCreateCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(dbExportCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "create", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/db/export", receivedPath) + assert.Equal(t, "sql", receivedBody["format"]) +} + +func TestDbExportCreateCmd_WithFormat(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(dbExportCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "create", "site-001", "--format", "csv"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "csv", receivedBody["format"]) +} + +func TestDbExportCreateCmd_MissingArg(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", 
output.Table) + cmd.SetArgs([]string{"db", "export", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +func TestDbExportCreateCmd_AuthError(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"db", "export", "create", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDbExportCreateCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildDbCmdNoAuth(output.Table) + cmd.SetArgs([]string{"db", "export", "create", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Export Status Tests --- + +func TestDbExportStatusCmd_CompletedOutput(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "status", "site-001", "exp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "exp-001") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "sql") + assert.Contains(t, out, "10485760") + assert.Contains(t, out, "5000") + assert.Contains(t, out, "https://s3.amazonaws.com/bucket/exports/exp-001.sql.gz?presigned=abc") + assert.Contains(t, out, "2025-01-15T18:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:00:05+00:00") +} + +func TestDbExportStatusCmd_PendingOutput(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "status", "site-001", "exp-pending"}) + + err := 
cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "exp-001") + assert.Contains(t, out, "processing") + assert.NotContains(t, out, "Download URL") + assert.NotContains(t, out, "Download Expires") +} + +func TestDbExportStatusCmd_JSONOutput(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"db", "export", "status", "site-001", "exp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "exp-001", result["id"]) + assert.Equal(t, "completed", result["status"]) + assert.Equal(t, float64(10485760), result["size_bytes"]) +} + +func TestDbExportStatusCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(dbExportStatusCompletedResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "status", "site-001", "exp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/db/exports/exp-001", receivedPath) +} + +func TestDbExportStatusCmd_MissingArgs(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "export", "status", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +func TestDbExportStatusCmd_AuthError(t *testing.T) { + ts := newDbExportTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := 
buildDbCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"db", "export", "status", "site-001", "exp-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Tests --- + +func TestDbExportCmd_Help(t *testing.T) { + cmd := NewDbExportCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "create") + assert.Contains(t, out, "status") + assert.Contains(t, out, "database export") +} diff --git a/internal/commands/db_import_session.go b/internal/commands/db_import_session.go new file mode 100644 index 0000000..f5c8bbe --- /dev/null +++ b/internal/commands/db_import_session.go @@ -0,0 +1,254 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +func importsPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/imports" +} + +// NewDbImportSessionCmd creates the db import-session command group. +func NewDbImportSessionCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "import-session", + Short: "Manage database import sessions", + Long: "Manage database import sessions to import SQL dumps into your sites via a presigned upload URL.", + } + + cmd.AddCommand(newDbImportSessionCreateCmd()) + cmd.AddCommand(newDbImportSessionRunCmd()) + cmd.AddCommand(newDbImportSessionStatusCmd()) + + return cmd +} + +func newDbImportSessionCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create ", + Short: "Create a database import session", + Long: "Create a new database import session. 
Returns a presigned upload URL for uploading a SQL dump file.", + Example: ` # Create an import session + vector db import-session create site-abc123 + + # Create with options + vector db import-session create site-abc123 --filename dump.sql --drop-tables`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + + reqBody := map[string]any{ + "scope": "database", + } + + if cmd.Flags().Changed("filename") { + v, _ := cmd.Flags().GetString("filename") + if v != "" { + reqBody["filename"] = v + } + } + + if cmd.Flags().Changed("content-length") { + v, _ := cmd.Flags().GetInt64("content-length") + if v > 0 { + reqBody["content_length"] = v + } + } + + options := map[string]any{} + + dropTables, _ := cmd.Flags().GetBool("drop-tables") + if dropTables { + options["drop_tables"] = true + } + + disableForeignKeys, _ := cmd.Flags().GetBool("disable-foreign-keys") + if disableForeignKeys { + options["disable_foreign_keys"] = true + } + + srFrom, _ := cmd.Flags().GetString("search-replace-from") + srTo, _ := cmd.Flags().GetString("search-replace-to") + if srFrom != "" && srTo != "" { + options["search_replace"] = map[string]string{ + "from": srFrom, + "to": srTo, + } + } + + if len(options) > 0 { + reqBody["options"] = options + } + + resp, err := app.Client.Post(cmd.Context(), importsPath(siteID), reqBody) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create import session: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + 
return fmt.Errorf("failed to create import session: %w", err) + } + + importID := getString(item, "id") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Import ID", Value: importID}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Upload URL", Value: getString(item, "upload_url")}, + {Key: "Expires", Value: formatString(getString(item, "upload_expires_at"))}, + }) + + app.Output.Message("") + app.Output.Message(fmt.Sprintf("Upload your SQL file to the URL above, then run: vector db import-session run %s %s", siteID, importID)) + + return nil + }, + } + + cmd.Flags().String("filename", "", "Name of the SQL dump file") + cmd.Flags().Int64("content-length", 0, "File size in bytes") + cmd.Flags().Bool("drop-tables", false, "Drop existing tables before import") + cmd.Flags().Bool("disable-foreign-keys", false, "Disable foreign key checks during import") + cmd.Flags().String("search-replace-from", "", "Value to search for (used with --search-replace-to)") + cmd.Flags().String("search-replace-to", "", "Replacement value (used with --search-replace-from)") + + return cmd +} + +func newDbImportSessionRunCmd() *cobra.Command { + return &cobra.Command{ + Use: "run ", + Short: "Run a database import", + Long: "Execute a database import after the SQL dump has been uploaded to the presigned URL.", + Example: ` # Run a database import + vector db import-session run site-abc123 import-456`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + importID := args[1] + endpoint := fmt.Sprintf("%s/%s/run", importsPath(siteID), importID) + + resp, err := app.Client.Post(cmd.Context(), endpoint, nil) + if err != nil { + return fmt.Errorf("failed to run import: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to run import: %w", err) + } + + data, err := 
parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to run import: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to run import: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Import ID", Value: getString(item, "id")}, + {Key: "Status", Value: getString(item, "status")}, + }) + + return nil + }, + } +} + +func newDbImportSessionStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "status ", + Short: "Check database import status", + Long: "Retrieve the current status of a database import session.", + Example: ` # Check import status + vector db import-session status site-abc123 import-456`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + siteID := args[0] + importID := args[1] + endpoint := fmt.Sprintf("%s/%s", importsPath(siteID), importID) + + resp, err := app.Client.Get(cmd.Context(), endpoint, nil) + if err != nil { + return fmt.Errorf("failed to get import status: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get import status: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get import status: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get import status: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Import ID", Value: getString(item, "id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Filename", Value: formatString(getString(item, "filename"))}, + {Key: "Duration", Value: 
formatFloat(getFloat(item, "duration_ms"))}, + {Key: "Error", Value: formatString(getString(item, "error_message"))}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Completed", Value: formatString(getString(item, "completed_at"))}, + }) + + return nil + }, + } +} diff --git a/internal/commands/db_import_session_test.go b/internal/commands/db_import_session_test.go new file mode 100644 index 0000000..5c3030b --- /dev/null +++ b/internal/commands/db_import_session_test.go @@ -0,0 +1,489 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var importSessionCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "imp-001", + "vector_site_id": "site-001", + "status": "pending", + "scope": nil, + "filename": "backup.sql.gz", + "content_length": float64(52428800), + "duration_ms": nil, + "error_message": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "uploaded_at": nil, + "started_at": nil, + "completed_at": nil, + "upload_url": "https://s3.amazonaws.com/bucket/imports/imp-001.sql.gz?X-Amz-Expires=3600", + "upload_expires_at": "2025-01-15T13:00:00+00:00", + }, + "message": "Import session created successfully", + "http_status": 201, +} + +var importSessionRunResponse = map[string]any{ + "data": map[string]any{ + "id": "imp-001", + "vector_site_id": "site-001", + "status": "uploaded", + "filename": "backup.sql.gz", + "duration_ms": nil, + "error_message": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "uploaded_at": "2025-01-15T12:00:01+00:00", + "started_at": nil, + "completed_at": nil, + }, + "message": "Archive import started", + "http_status": 202, 
+} + +var importSessionStatusResponse = map[string]any{ + "data": map[string]any{ + "id": "imp-001", + "vector_site_id": "site-001", + "status": "completed", + "filename": "backup.sql.gz", + "duration_ms": float64(30000), + "error_message": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "uploaded_at": "2025-01-15T12:00:01+00:00", + "started_at": "2025-01-15T12:00:02+00:00", + "completed_at": "2025-01-15T12:00:32+00:00", + }, + "message": "Import retrieved successfully", + "http_status": 200, +} + +func newImportSessionTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(importSessionCreateResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/imports/imp-001/run": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(importSessionRunResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001/imports/imp-001": + _ = json.NewEncoder(w).Encode(importSessionStatusResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildDbCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{Use: "vector"} + root.AddCommand(NewDbCmd()) + + 
root.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "") + app := appctx.NewApp(&config.Config{}, client, "test") + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + } + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildDbCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{Use: "vector"} + root.AddCommand(NewDbCmd()) + + root.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { + client := api.NewClient("", "", "") + app := appctx.NewApp(&config.Config{}, client, "") + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + } + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Import Session Create Tests --- + +func TestDbImportSessionCreateCmd_TableOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "create", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "imp-001") + assert.Contains(t, out, "pending") + assert.Contains(t, out, "https://s3.amazonaws.com/bucket/imports/imp-001.sql.gz?X-Amz-Expires=3600") + assert.Contains(t, out, "2025-01-15T13:00:00+00:00") + assert.Contains(t, out, "Upload your SQL file to the URL above, then run: vector db import-session run site-001 imp-001") +} + +func TestDbImportSessionCreateCmd_JSONOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"db", "import-session", "create", "site-001"}) + + 
err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "imp-001", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestDbImportSessionCreateCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(importSessionCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "create", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/imports", receivedPath) + assert.Equal(t, "database", receivedBody["scope"]) +} + +func TestDbImportSessionCreateCmd_WithOptions(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(importSessionCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{ + "db", "import-session", "create", "site-001", + "--filename", "dump.sql.gz", + "--content-length", "12345", + "--drop-tables", + "--disable-foreign-keys", + "--search-replace-from", "example.org", + "--search-replace-to", "example.com", + }) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "database", receivedBody["scope"]) 
+ assert.Equal(t, "dump.sql.gz", receivedBody["filename"]) + assert.Equal(t, float64(12345), receivedBody["content_length"]) + + options, ok := receivedBody["options"].(map[string]any) + require.True(t, ok) + assert.Equal(t, true, options["drop_tables"]) + assert.Equal(t, true, options["disable_foreign_keys"]) + + sr, ok := options["search_replace"].(map[string]any) + require.True(t, ok) + assert.Equal(t, "example.org", sr["from"]) + assert.Equal(t, "example.com", sr["to"]) +} + +func TestDbImportSessionCreateCmd_MissingArg(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +func TestDbImportSessionCreateCmd_AuthError(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "create", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDbImportSessionCreateCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildDbCmdNoAuth(output.Table) + cmd.SetArgs([]string{"db", "import-session", "create", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Import Session Run Tests --- + +func TestDbImportSessionRunCmd_TableOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + 
assert.Contains(t, out, "imp-001") + assert.Contains(t, out, "uploaded") +} + +func TestDbImportSessionRunCmd_JSONOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "imp-001", result["id"]) + assert.Equal(t, "uploaded", result["status"]) +} + +func TestDbImportSessionRunCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(importSessionRunResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/imports/imp-001/run", receivedPath) +} + +func TestDbImportSessionRunCmd_MissingArgs(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +func TestDbImportSessionRunCmd_AuthError(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001", "imp-001"}) + + err := cmd.Execute() 
+ require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDbImportSessionRunCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildDbCmdNoAuth(output.Table) + cmd.SetArgs([]string{"db", "import-session", "run", "site-001", "imp-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Import Session Status Tests --- + +func TestDbImportSessionStatusCmd_TableOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "status", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "imp-001") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "backup.sql.gz") + assert.Contains(t, out, "30000") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:00:32+00:00") +} + +func TestDbImportSessionStatusCmd_JSONOutput(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDbCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"db", "import-session", "status", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "imp-001", result["id"]) + assert.Equal(t, "completed", result["status"]) + assert.Equal(t, float64(30000), result["duration_ms"]) +} + +func TestDbImportSessionStatusCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", 
"application/json") + _ = json.NewEncoder(w).Encode(importSessionStatusResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "status", "site-001", "imp-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/imports/imp-001", receivedPath) +} + +func TestDbImportSessionStatusCmd_MissingArgs(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "status", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +func TestDbImportSessionStatusCmd_AuthError(t *testing.T) { + ts := newImportSessionTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDbCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"db", "import-session", "status", "site-001", "imp-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Tests --- + +func TestDbImportSessionCmd_Help(t *testing.T) { + cmd := NewDbImportSessionCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "create") + assert.Contains(t, out, "run") + assert.Contains(t, out, "status") + assert.Contains(t, out, "Manage database import sessions") +} diff --git a/internal/commands/db_test.go b/internal/commands/db_test.go new file mode 100644 index 0000000..164086d --- /dev/null +++ b/internal/commands/db_test.go @@ -0,0 +1,25 @@ +package commands + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + 
+func TestDbCmd_HelpText(t *testing.T) { + cmd := NewDbCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "import-session") + assert.Contains(t, out, "export") + assert.Contains(t, out, "Manage database operations") +} diff --git a/internal/commands/deploy.go b/internal/commands/deploy.go new file mode 100644 index 0000000..5856728 --- /dev/null +++ b/internal/commands/deploy.go @@ -0,0 +1,430 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "time" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const deploysBasePath = "/api/v1/vector/deployments" + +// NewDeployCmd creates the deploy command group. +func NewDeployCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "deploy", + Short: "Manage deployments", + Long: "Manage Vector deployments including listing, viewing, triggering, and rolling back deployments.", + } + + cmd.AddCommand(newDeployListCmd()) + cmd.AddCommand(newDeployShowCmd()) + cmd.AddCommand(newDeployTriggerCmd()) + cmd.AddCommand(newDeployRollbackCmd()) + + return cmd +} + +func newDeployListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list ", + Short: "List deployments for an environment", + Long: "Retrieve a paginated list of deployments for an environment.", + Example: ` # List deployments for an environment + vector deploy list env-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + path := envsBasePath + "/" + args[0] + "/deployments" + resp, err := app.Client.Get(cmd.Context(), path, query) + if err != nil { + return fmt.Errorf("failed to list deployments: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := 
io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list deployments: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list deployments: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list deployments: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list deployments: %w", err) + } + + headers := []string{"ID", "STATUS", "ACTOR", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "status"), + getString(item, "actor"), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newDeployShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show deployment details", + Long: "Retrieve details of a specific deployment, including stdout and stderr output.", + Example: ` # Show deployment details + vector deploy show deploy-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), deploysBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to show deployment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to show deployment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to show deployment: %w", err) + } + + if app.Output.Format() == output.JSON { + return 
app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to show deployment: %w", err) + } + + w := cmd.OutOrStdout() + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Environment ID", Value: getString(item, "vector_environment_id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Actor", Value: getString(item, "actor")}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + + stdoutStr := getString(item, "stdout") + if stdoutStr != "" { + _, _ = fmt.Fprintln(w) + _, _ = fmt.Fprintln(w, "Stdout:") + _, _ = fmt.Fprintln(w, stdoutStr) + } + + stderrStr := getString(item, "stderr") + if stderrStr != "" { + _, _ = fmt.Fprintln(w) + _, _ = fmt.Fprintln(w, "Stderr:") + _, _ = fmt.Fprintln(w, stderrStr) + } + + return nil + }, + } +} + +func newDeployTriggerCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "trigger ", + Short: "Trigger a deployment", + Long: "Initiate a new deployment for an environment.", + Example: ` # Trigger a deployment + vector deploy trigger env-abc123 + + # Include uploads + vector deploy trigger env-abc123 --include-uploads + + # Trigger and wait for completion + vector deploy trigger env-abc123 --wait`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + waitEnabled, interval, timeout, err := getWaitConfig(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("include-uploads") { + v, _ := cmd.Flags().GetBool("include-uploads") + reqBody["include_uploads"] = v + } + if cmd.Flags().Changed("include-database") { + v, _ := cmd.Flags().GetBool("include-database") + reqBody["include_database"] = v + } + + path := envsBasePath + "/" + args[0] + "/deployments" + resp, err := 
app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to trigger deployment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to trigger deployment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to trigger deployment: %w", err) + } + + if !waitEnabled { + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to trigger deployment: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Environment ID", Value: getString(item, "vector_environment_id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Actor", Value: getString(item, "actor")}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + } + + var triggerItem map[string]any + if err := json.Unmarshal(data, &triggerItem); err != nil { + return fmt.Errorf("failed to trigger deployment: %w", err) + } + + deployID := getString(triggerItem, "id") + if deployID == "" { + return fmt.Errorf("failed to trigger deployment: response missing deployment ID") + } + + cfg := &waitConfig{ + ResourceID: deployID, + PollPath: deploysBasePath + "/" + deployID, + Interval: interval, + Timeout: timeout, + TerminalStatuses: map[string]bool{"deployed": true}, + FailedStatuses: map[string]bool{"failed": true, "cancelled": true}, + Noun: "Deployment", + FormatDisplay: deployFormatDisplay, + } + + result, err := waitForResource(cmd.Context(), app, cfg) + if err != nil { + return err + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(result.FinalData) + } + + var finalItem map[string]any + if err := json.Unmarshal(result.FinalData, &finalItem); err != nil { + return fmt.Errorf("failed to trigger 
deployment: %w", err) + } + + app.Output.Message(fmt.Sprintf("Deployment %s %s in %s", deployID, result.Status, result.Elapsed.Truncate(time.Second))) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(finalItem, "id")}, + {Key: "Environment ID", Value: getString(finalItem, "vector_environment_id")}, + {Key: "Status", Value: getString(finalItem, "status")}, + {Key: "Actor", Value: getString(finalItem, "actor")}, + {Key: "Created", Value: getString(finalItem, "created_at")}, + {Key: "Updated", Value: getString(finalItem, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().Bool("include-uploads", false, "Include wp-content/uploads in deployment") + cmd.Flags().Bool("include-database", true, "Include database in deployment") + addWaitFlags(cmd) + + return cmd +} + +// deployFormatDisplay formats deployment data for the alternate screen display. +func deployFormatDisplay(data map[string]any) []string { + return []string{ + fmt.Sprintf("%16s: %s", "ID", getString(data, "id")), + fmt.Sprintf("%16s: %s", "Environment ID", getString(data, "vector_environment_id")), + fmt.Sprintf("%16s: %s", "Status", getString(data, "status")), + fmt.Sprintf("%16s: %s", "Actor", getString(data, "actor")), + fmt.Sprintf("%16s: %s", "Created", getString(data, "created_at")), + fmt.Sprintf("%16s: %s", "Updated", getString(data, "updated_at")), + } +} + +func newDeployRollbackCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "rollback ", + Short: "Rollback a deployment", + Long: "Initiate a rollback for an environment. 
Rolls back to the last successful deployment unless a target is specified.", + Example: ` # Rollback to the last successful deployment + vector deploy rollback env-abc123 + + # Rollback to a specific deployment + vector deploy rollback env-abc123 --target deploy-789 + + # Rollback and wait for completion + vector deploy rollback env-abc123 --wait`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + waitEnabled, interval, timeout, err := getWaitConfig(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("target") { + v, _ := cmd.Flags().GetString("target") + reqBody["target_deployment_id"] = v + } + + path := envsBasePath + "/" + args[0] + "/rollback" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to rollback deployment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to rollback deployment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to rollback deployment: %w", err) + } + + if !waitEnabled { + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to rollback deployment: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Environment ID", Value: getString(item, "vector_environment_id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Actor", Value: getString(item, "actor")}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + } + + var rollbackItem map[string]any + if err := json.Unmarshal(data, &rollbackItem); err != nil { + return fmt.Errorf("failed to rollback 
deployment: %w", err) + } + + deployID := getString(rollbackItem, "id") + if deployID == "" { + return fmt.Errorf("failed to rollback deployment: response missing deployment ID") + } + + cfg := &waitConfig{ + ResourceID: deployID, + PollPath: deploysBasePath + "/" + deployID, + Interval: interval, + Timeout: timeout, + TerminalStatuses: map[string]bool{"deployed": true}, + FailedStatuses: map[string]bool{"failed": true, "cancelled": true}, + Noun: "Deployment", + FormatDisplay: deployFormatDisplay, + } + + result, err := waitForResource(cmd.Context(), app, cfg) + if err != nil { + return err + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(result.FinalData) + } + + var finalItem map[string]any + if err := json.Unmarshal(result.FinalData, &finalItem); err != nil { + return fmt.Errorf("failed to rollback deployment: %w", err) + } + + app.Output.Message(fmt.Sprintf("Deployment %s %s in %s", deployID, result.Status, result.Elapsed.Truncate(time.Second))) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(finalItem, "id")}, + {Key: "Environment ID", Value: getString(finalItem, "vector_environment_id")}, + {Key: "Status", Value: getString(finalItem, "status")}, + {Key: "Actor", Value: getString(finalItem, "actor")}, + {Key: "Created", Value: getString(finalItem, "created_at")}, + {Key: "Updated", Value: getString(finalItem, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().String("target", "", "Target deployment ID to rollback to") + addWaitFlags(cmd) + + return cmd +} diff --git a/internal/commands/deploy_test.go b/internal/commands/deploy_test.go new file mode 100644 index 0000000..b81114e --- /dev/null +++ b/internal/commands/deploy_test.go @@ -0,0 +1,925 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync/atomic" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + 
"github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var deployListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "dep-001", + "vector_environment_id": "env-001", + "status": "deployed", + "stdout": "Deployment successful", + "stderr": nil, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + { + "id": "dep-002", + "vector_environment_id": "env-001", + "status": "deployed", + "stdout": "Deployment successful", + "stderr": nil, + "actor": "admin@example.com", + "created_at": "2025-01-14T10:00:00+00:00", + "updated_at": "2025-01-14T10:03:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Deployments retrieved successfully", + "http_status": 200, +} + +var deployShowResponse = map[string]any{ + "data": map[string]any{ + "id": "dep-001", + "vector_environment_id": "env-001", + "status": "deployed", + "stdout": "Deploying files...\nDone.", + "stderr": nil, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Deployment retrieved successfully", + "http_status": 200, +} + +var deployShowWithStderrResponse = map[string]any{ + "data": map[string]any{ + "id": "dep-003", + "vector_environment_id": "env-001", + "status": "failed", + "stdout": "Deploying files...", + "stderr": "Error: permission denied", + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Deployment retrieved successfully", + "http_status": 200, +} + +var deployShowNoOutputResponse = map[string]any{ + "data": map[string]any{ + "id": "dep-004", + "vector_environment_id": "env-001", + "status": "pending", + "stdout": nil, + 
"stderr": nil, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Deployment retrieved successfully", + "http_status": 200, +} + +var deployTriggerResponse = map[string]any{ + "data": map[string]any{ + "id": "dep-005", + "vector_environment_id": "env-001", + "status": "pending", + "stdout": nil, + "stderr": nil, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Deployment initiated", + "http_status": 201, +} + +var deployRollbackResponse = map[string]any{ + "data": map[string]any{ + "id": "dep-006", + "vector_environment_id": "env-001", + "status": "pending", + "stdout": nil, + "stderr": nil, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Rollback initiated", + "http_status": 201, +} + +func newDeployTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/environments/env-001/deployments": + _ = json.NewEncoder(w).Encode(deployListResponse) + + case method == "GET" && path == "/api/v1/vector/deployments/dep-001": + _ = json.NewEncoder(w).Encode(deployShowResponse) + + case method == "GET" && path == "/api/v1/vector/deployments/dep-003": + _ = json.NewEncoder(w).Encode(deployShowWithStderrResponse) + + case method == "GET" && path == "/api/v1/vector/deployments/dep-004": 
+ _ = json.NewEncoder(w).Encode(deployShowNoOutputResponse) + + case method == "POST" && path == "/api/v1/vector/environments/env-001/deployments": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployTriggerResponse) + + case method == "POST" && path == "/api/v1/vector/environments/env-001/rollback": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployRollbackResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildDeployCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + deployCmd := NewDeployCmd() + root.AddCommand(deployCmd) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildDeployCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + deployCmd := NewDeployCmd() + root.AddCommand(deployCmd) + + root.SetOut(stdout) + root.SetErr(stderr) + + 
return root, stdout, stderr +} + +// --- Deploy List Tests --- + +func TestDeployListCmd_TableOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "list", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-001") + assert.Contains(t, out, "dep-002") + assert.Contains(t, out, "deployed") + assert.Contains(t, out, "user@example.com") + assert.Contains(t, out, "admin@example.com") +} + +func TestDeployListCmd_JSONOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "list", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "dep-001", result[0]["id"]) + assert.Equal(t, "dep-002", result[1]["id"]) +} + +func TestDeployListCmd_PaginationQueryParams(t *testing.T) { + var receivedPath, receivedQuery string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPath = r.URL.Path + receivedQuery = r.URL.RawQuery + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(deployListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "list", "env-001", "--page", "2", "--per-page", "10"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "/api/v1/vector/environments/env-001/deployments", receivedPath) + assert.Contains(t, receivedQuery, "page=2") + assert.Contains(t, receivedQuery, "per_page=10") +} + +func TestDeployListCmd_AuthError(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + 
cmd, _, _ := buildDeployCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"deploy", "list", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDeployListCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildDeployCmdNoAuth(output.Table) + cmd.SetArgs([]string{"deploy", "list", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDeployListCmd_MissingArgs(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Deploy Show Tests --- + +func TestDeployShowCmd_TableOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-001") + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "deployed") + assert.Contains(t, out, "user@example.com") + assert.Contains(t, out, "Stdout:") + assert.Contains(t, out, "Deploying files...") + assert.NotContains(t, out, "Stderr:") +} + +func TestDeployShowCmd_WithStderr(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-003"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-003") + assert.Contains(t, out, "failed") + assert.Contains(t, out, "Stdout:") + 
assert.Contains(t, out, "Deploying files...") + assert.Contains(t, out, "Stderr:") + assert.Contains(t, out, "Error: permission denied") +} + +func TestDeployShowCmd_NoOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-004"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-004") + assert.Contains(t, out, "pending") + assert.NotContains(t, out, "Stdout:") + assert.NotContains(t, out, "Stderr:") +} + +func TestDeployShowCmd_JSONOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "show", "dep-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dep-001", result["id"]) + assert.Equal(t, "deployed", result["status"]) +} + +func TestDeployShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(deployShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/deployments/dep-001", receivedPath) +} + +func TestDeployShowCmd_AuthError(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-001"}) + + err := 
cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Deploy Trigger Tests --- + +func TestDeployTriggerCmd_TableOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-005") + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "pending") +} + +func TestDeployTriggerCmd_JSONOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "trigger", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dep-005", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestDeployTriggerCmd_RequestBodyWithFlags(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployTriggerResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001", "--include-uploads", "--include-database=false"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/deployments", receivedPath) + 
assert.Equal(t, true, receivedBody["include_uploads"]) + assert.Equal(t, false, receivedBody["include_database"]) +} + +func TestDeployTriggerCmd_RequestBodyNoFlags(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployTriggerResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + // When no flags are set, the body should be empty (no include_uploads or include_database) + assert.Nil(t, receivedBody["include_uploads"]) + assert.Nil(t, receivedBody["include_database"]) +} + +func TestDeployTriggerCmd_AuthError(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDeployTriggerCmd_MissingArgs(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Deploy Rollback Tests --- + +func TestDeployRollbackCmd_TableOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + 
out := stdout.String() + assert.Contains(t, out, "dep-006") + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "pending") +} + +func TestDeployRollbackCmd_JSONOutput(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "rollback", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dep-006", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestDeployRollbackCmd_WithTargetFlag(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployRollbackResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001", "--target", "dep-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/rollback", receivedPath) + assert.Equal(t, "dep-001", receivedBody["target_deployment_id"]) +} + +func TestDeployRollbackCmd_WithoutTargetFlag(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = 
json.NewEncoder(w).Encode(deployRollbackResponse) + })) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/rollback", receivedPath) + assert.Nil(t, receivedBody["target_deployment_id"]) +} + +func TestDeployRollbackCmd_AuthError(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestDeployRollbackCmd_MissingArgs(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Help Text Tests --- + +func TestDeployCmd_HelpText(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "trigger") + assert.Contains(t, out, "rollback") +} + +func TestDeployTriggerCmd_HelpText(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, 
"--include-uploads") + assert.Contains(t, out, "--include-database") +} + +func TestDeployRollbackCmd_HelpText(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "--target") +} + +// --- Server Error Tests --- + +func TestDeployShowCmd_NotFound(t *testing.T) { + ts := newDeployTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "show", "dep-nonexistent"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to show deployment") +} + +// --- Deploy Trigger --wait Tests --- + +// newDeployWaitTestServer creates a test server that handles: +// - POST /environments/{id}/deployments -> returns deployTriggerResponse +// - POST /environments/{id}/rollback -> returns deployRollbackResponse +// - GET /deployments/{id} -> returns successive poll responses +func newDeployWaitTestServer(validToken string, pollResponses []countingResponse) *httptest.Server { + var pollCount atomic.Int64 + + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/environments/env-001/deployments": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployTriggerResponse) + + case method == "POST" && path == 
"/api/v1/vector/environments/env-001/rollback": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(deployRollbackResponse) + + case method == "GET" && (path == "/api/v1/vector/deployments/dep-005" || path == "/api/v1/vector/deployments/dep-006"): + idx := int(pollCount.Add(1)) - 1 + if idx >= len(pollResponses) { + idx = len(pollResponses) - 1 + } + resp := pollResponses[idx] + if resp.httpStatus != 0 { + w.WriteHeader(resp.httpStatus) + } + _ = json.NewEncoder(w).Encode(resp.body) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func makeDeployPollResponse(id, status string) countingResponse { + return countingResponse{ + httpStatus: http.StatusOK, + body: map[string]any{ + "data": map[string]any{ + "id": id, + "vector_environment_id": "env-001", + "status": status, + "actor": "user@example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Deployment retrieved successfully", + "http_status": 200, + }, + } +} + +func TestDeployTriggerCmd_WaitSuccess(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", []countingResponse{ + makeDeployPollResponse("dep-005", "pending"), + makeDeployPollResponse("dep-005", "deploying"), + makeDeployPollResponse("dep-005", "deployed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-005") + assert.Contains(t, out, "deployed") + assert.Contains(t, out, "Deployment dep-005 deployed in") +} + +func TestDeployTriggerCmd_WaitFailure(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", 
[]countingResponse{ + makeDeployPollResponse("dep-005", "pending"), + makeDeployPollResponse("dep-005", "failed"), + }) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "trigger", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "failed status") +} + +func TestDeployTriggerCmd_WaitJSON(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", []countingResponse{ + makeDeployPollResponse("dep-005", "pending"), + makeDeployPollResponse("dep-005", "deployed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "trigger", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dep-005", result["id"]) + assert.Equal(t, "deployed", result["status"]) +} + +// --- Deploy Rollback --wait Tests --- + +func TestDeployRollbackCmd_WaitSuccess(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", []countingResponse{ + makeDeployPollResponse("dep-006", "pending"), + makeDeployPollResponse("dep-006", "deployed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "dep-006") + assert.Contains(t, out, "deployed") + assert.Contains(t, out, "Deployment dep-006 deployed in") +} + +func TestDeployRollbackCmd_WaitFailure(t 
*testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", []countingResponse{ + makeDeployPollResponse("dep-006", "pending"), + makeDeployPollResponse("dep-006", "cancelled"), + }) + defer ts.Close() + + cmd, _, _ := buildDeployCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"deploy", "rollback", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "failed status") + assert.Contains(t, apiErr.Message, "cancelled") +} + +func TestDeployRollbackCmd_WaitJSON(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newDeployWaitTestServer("valid-token", []countingResponse{ + makeDeployPollResponse("dep-006", "pending"), + makeDeployPollResponse("dep-006", "deployed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildDeployCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"deploy", "rollback", "env-001", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "dep-006", result["id"]) + assert.Equal(t, "deployed", result["status"]) +} diff --git a/internal/commands/env.go b/internal/commands/env.go new file mode 100644 index 0000000..e62c746 --- /dev/null +++ b/internal/commands/env.go @@ -0,0 +1,451 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +const envsBasePath = "/api/v1/vector/environments" + +// NewEnvCmd creates the env command group. 
+func NewEnvCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "env", + Short: "Manage environments", + Long: "Manage Vector environments including creating, updating, deleting, and managing secrets and database promotes.", + } + + cmd.AddCommand(newEnvListCmd()) + cmd.AddCommand(newEnvShowCmd()) + cmd.AddCommand(newEnvCreateCmd()) + cmd.AddCommand(newEnvUpdateCmd()) + cmd.AddCommand(newEnvDeleteCmd()) + cmd.AddCommand(NewEnvSecretCmd()) + cmd.AddCommand(NewEnvDBCmd()) + + return cmd +} + +func newEnvListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list ", + Short: "List environments for a site", + Long: "Retrieve a paginated list of environments for a site.", + Example: ` # List environments for a site + vector env list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + query.Set("site", args[0]) + + resp, err := app.Client.Get(cmd.Context(), envsBasePath, query) + if err != nil { + return fmt.Errorf("failed to list environments: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list environments: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list environments: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list environments: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list environments: %w", err) + } + + headers := []string{"ID", "NAME", "PRODUCTION", "STATUS", "PHP", "PLATFORM DOMAIN", "CUSTOM DOMAIN"} + var rows [][]string + for _, item := range items { + 
rows = append(rows, []string{ + getString(item, "id"), + getString(item, "name"), + formatBool(getBool(item, "is_production")), + getString(item, "status"), + getString(item, "php_version"), + formatString(getString(item, "platform_domain")), + formatString(getString(item, "custom_domain")), + }) + } + + app.Output.Table(headers, rows) + if meta.LastPage > 1 { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newEnvShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show environment details", + Long: "Retrieve details of a specific environment.", + Example: ` # Show environment details + vector env show env-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), envsBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to show environment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to show environment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to show environment: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to show environment: %w", err) + } + + tags := tagsFromItem(item) + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Site ID", Value: getString(item, "vector_site_id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Production", Value: formatBool(getBool(item, "is_production"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "PHP Version", Value: getString(item, "php_version")}, + {Key: 
"Tags", Value: formatTags(tags)}, + {Key: "Platform Domain", Value: formatString(getString(item, "platform_domain"))}, + {Key: "Custom Domain", Value: formatString(getString(item, "custom_domain"))}, + {Key: "DNS Target", Value: formatString(getString(item, "dns_target"))}, + {Key: "Database Host", Value: formatString(getString(item, "database_host"))}, + {Key: "Database Name", Value: formatString(getString(item, "database_name"))}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + } + + cert := getMap(item, "custom_domain_certificate") + if cert != nil { + pairs = append(pairs, output.KeyValue{Key: "Certificate Status", Value: formatString(getString(cert, "status"))}) + } + + app.Output.KeyValue(pairs) + return nil + }, + } +} + +func newEnvCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create ", + Short: "Create an environment", + Long: "Create a new environment for a site.", + Example: ` # Create a staging environment + vector env create site-abc123 --name staging --php-version 8.2 + + # Create a production environment with a custom domain + vector env create site-abc123 --name production --php-version 8.2 --custom-domain example.com --production`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + name, _ := cmd.Flags().GetString("name") + if name == "" { + return &api.APIError{ + Message: "--name is required", + ExitCode: 3, + } + } + + phpVersion, _ := cmd.Flags().GetString("php-version") + if phpVersion == "" { + return &api.APIError{ + Message: "--php-version is required", + ExitCode: 3, + } + } + + reqBody := map[string]any{ + "name": name, + "php_version": phpVersion, + } + + if cmd.Flags().Changed("custom-domain") { + v, _ := cmd.Flags().GetString("custom-domain") + if v != "" { + reqBody["custom_domain"] = v + } else { + reqBody["custom_domain"] = nil + } + } + + if 
cmd.Flags().Changed("production") { + v, _ := cmd.Flags().GetBool("production") + reqBody["is_production"] = v + } + + if cmd.Flags().Changed("tags") { + tagsStr, _ := cmd.Flags().GetString("tags") + if tagsStr != "" { + reqBody["tags"] = strings.Split(tagsStr, ",") + } else { + reqBody["tags"] = []string{} + } + } + + path := sitesBasePath + "/" + args[0] + "/environments" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to create environment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create environment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create environment: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create environment: %w", err) + } + + tags := tagsFromItem(item) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Site ID", Value: getString(item, "vector_site_id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Production", Value: formatBool(getBool(item, "is_production"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "PHP Version", Value: getString(item, "php_version")}, + {Key: "Tags", Value: formatTags(tags)}, + {Key: "Platform Domain", Value: formatString(getString(item, "platform_domain"))}, + {Key: "Custom Domain", Value: formatString(getString(item, "custom_domain"))}, + }) + return nil + }, + } + + cmd.Flags().String("name", "", "Environment name (slug format, required)") + cmd.Flags().String("php-version", "", "PHP version (required)") + cmd.Flags().String("custom-domain", "", "Custom domain for the environment") + cmd.Flags().Bool("production", false, "Mark as production 
environment") + cmd.Flags().String("tags", "", "Comma-separated tags") + + return cmd +} + +func newEnvUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update an environment", + Long: "Update an environment's custom domain or tags. Domain changes trigger async infrastructure updates.", + Example: ` # Set a custom domain + vector env update env-abc123 --custom-domain example.com + + # Remove a custom domain + vector env update env-abc123 --clear-custom-domain`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + customDomainSet := cmd.Flags().Changed("custom-domain") + clearDomainSet := cmd.Flags().Changed("clear-custom-domain") + + if customDomainSet && clearDomainSet { + return &api.APIError{ + Message: "--custom-domain and --clear-custom-domain cannot be used together", + ExitCode: 3, + } + } + + reqBody := map[string]any{} + + if customDomainSet { + v, _ := cmd.Flags().GetString("custom-domain") + reqBody["custom_domain"] = v + } + if clearDomainSet { + reqBody["custom_domain"] = nil + } + + if cmd.Flags().Changed("tags") { + tagsStr, _ := cmd.Flags().GetString("tags") + if tagsStr != "" { + reqBody["tags"] = strings.Split(tagsStr, ",") + } else { + reqBody["tags"] = nil + } + } + + resp, err := app.Client.Put(cmd.Context(), envsBasePath+"/"+args[0], reqBody) + if err != nil { + return fmt.Errorf("failed to update environment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + isDomainChange := resp.StatusCode == http.StatusAccepted + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update environment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update environment: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := 
json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update environment: %w", err) + } + + tags := tagsFromItem(item) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Tags", Value: formatTags(tags)}, + {Key: "Custom Domain", Value: formatString(getString(item, "custom_domain"))}, + {Key: "DNS Target", Value: formatString(getString(item, "dns_target"))}, + }) + + if isDomainChange { + _, _ = fmt.Fprintln(cmd.OutOrStdout()) + output.PrintMessage(cmd.OutOrStdout(), "Domain change initiated. DNS records must be configured for the new domain.") + pdc := getMap(item, "pending_domain_change") + if pdc != nil { + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " Old Domain: %s\n", formatString(getString(pdc, "old_domain"))) + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " New Domain: %s\n", formatString(getString(pdc, "new_domain"))) + } + } + + return nil + }, + } + + cmd.Flags().String("custom-domain", "", "Set custom domain") + cmd.Flags().Bool("clear-custom-domain", false, "Remove custom domain") + cmd.Flags().String("tags", "", "Comma-separated tags (empty string clears tags)") + + return cmd +} + +func newEnvDeleteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "delete ", + Short: "Delete an environment", + Long: "Initiate deletion of an environment. 
This operation is irreversible.", + Example: ` # Delete an environment + vector env delete env-abc123 + + # Delete without confirmation + vector env delete env-abc123 --force`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + force, _ := cmd.Flags().GetBool("force") + if !force { + if !confirmAction(cmd, fmt.Sprintf("Are you sure you want to delete environment %s?", args[0])) { + output.PrintMessage(cmd.OutOrStdout(), "Aborted.") + return nil + } + } + + resp, err := app.Client.Delete(cmd.Context(), envsBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete environment: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete environment: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete environment: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to delete environment: %w", err) + } + + app.Output.Message(fmt.Sprintf("Environment %s deletion initiated.", getString(item, "id"))) + return nil + }, + } + + cmd.Flags().Bool("force", false, "Skip confirmation prompt") + + return cmd +} diff --git a/internal/commands/env_db.go b/internal/commands/env_db.go new file mode 100644 index 0000000..645890b --- /dev/null +++ b/internal/commands/env_db.go @@ -0,0 +1,168 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewEnvDBCmd creates the env db command group. 
+func NewEnvDBCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "db", + Short: "Manage environment database", + Long: "Manage database operations for an environment, including promotes.", + } + + cmd.AddCommand(newEnvDBPromoteCmd()) + cmd.AddCommand(newEnvDBPromoteStatusCmd()) + + return cmd +} + +func newEnvDBPromoteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "promote ", + Short: "Promote database", + Long: "Initiate a database promote for an environment. Copies the development database to the environment.", + Example: ` # Promote the dev database to an environment + vector env db promote env-abc123 + + # Promote and keep existing tables + vector env db promote env-abc123 --drop-tables=false`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("drop-tables") { + v, _ := cmd.Flags().GetBool("drop-tables") + reqBody["drop_tables"] = v + } + if cmd.Flags().Changed("disable-foreign-keys") { + v, _ := cmd.Flags().GetBool("disable-foreign-keys") + reqBody["disable_foreign_keys"] = v + } + + path := envsBasePath + "/" + args[0] + "/db/promote" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to promote database: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to promote database: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to promote database: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to promote database: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: 
"Environment ID", Value: getString(item, "vector_environment_id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + }, + } + + cmd.Flags().Bool("drop-tables", true, "Drop existing tables before promote") + cmd.Flags().Bool("disable-foreign-keys", true, "Disable foreign key checks during promote") + + return cmd +} + +func newEnvDBPromoteStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "promote-status ", + Short: "Check promote status", + Long: "Check the status of a database promote operation.", + Example: ` # Check promote status + vector env db promote-status env-abc123 promote-789`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + path := envsBasePath + "/" + args[0] + "/db/promotes/" + args[1] + resp, err := app.Client.Get(cmd.Context(), path, nil) + if err != nil { + return fmt.Errorf("failed to get promote status: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get promote status: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get promote status: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get promote status: %w", err) + } + + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Environment ID", Value: getString(item, "vector_environment_id")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Created", Value: getString(item, "created_at")}, + } + + startedAt := getString(item, "started_at") + if startedAt != "" { + pairs = append(pairs, output.KeyValue{Key: "Started", Value: startedAt}) + } + + 
completedAt := getString(item, "completed_at") + if completedAt != "" { + pairs = append(pairs, output.KeyValue{Key: "Completed", Value: completedAt}) + } + + durationMs := getFloat(item, "duration_ms") + if durationMs > 0 { + pairs = append(pairs, output.KeyValue{Key: "Duration", Value: fmt.Sprintf("%.0fms", durationMs)}) + } + + errorMsg := getString(item, "error_message") + if errorMsg != "" { + pairs = append(pairs, output.KeyValue{Key: "Error", Value: errorMsg}) + } + + app.Output.KeyValue(pairs) + return nil + }, + } +} diff --git a/internal/commands/env_db_test.go b/internal/commands/env_db_test.go new file mode 100644 index 0000000..7d4bb1a --- /dev/null +++ b/internal/commands/env_db_test.go @@ -0,0 +1,227 @@ +package commands + +import ( + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var dbPromoteResponse = map[string]any{ + "data": map[string]any{ + "id": "prm-001", + "vector_environment_id": "env-001", + "vector_db_export_id": nil, + "status": "pending", + "options": map[string]any{ + "drop_tables": true, + "disable_foreign_keys": true, + "search_replace": nil, + }, + "duration_ms": nil, + "error_message": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "started_at": nil, + "completed_at": nil, + }, + "message": "Database promote initiated", + "http_status": 202, +} + +var dbPromoteStatusResponse = map[string]any{ + "data": map[string]any{ + "id": "prm-001", + "vector_environment_id": "env-001", + "status": "completed", + "duration_ms": 1500, + "error_message": nil, + "created_at": "2025-01-15T12:00:00+00:00", + "started_at": "2025-01-15T12:00:01+00:00", + "completed_at": "2025-01-15T12:00:02+00:00", + }, + "message": "Database promote retrieved successfully", + "http_status": 200, +} + +func newDBTestServer(validToken string) 
*httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/environments/env-001/db/promote": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(dbPromoteResponse) + + case method == "GET" && path == "/api/v1/vector/environments/env-001/db/promotes/prm-001": + _ = json.NewEncoder(w).Encode(dbPromoteStatusResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- DB Promote Tests --- + +func TestEnvDBPromoteCmd_TableOutput(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "prm-001") + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "pending") +} + +func TestEnvDBPromoteCmd_JSONOutput(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "db", "promote", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "prm-001", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestEnvDBPromoteCmd_RequestBody(t 
*testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(dbPromoteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote", "env-001", "--drop-tables=false", "--disable-foreign-keys=false"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/db/promote", receivedPath) + assert.Equal(t, false, receivedBody["drop_tables"]) + assert.Equal(t, false, receivedBody["disable_foreign_keys"]) +} + +func TestEnvDBPromoteCmd_AuthError(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- DB Promote Status Tests --- + +func TestEnvDBPromoteStatusCmd_TableOutput(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote-status", "env-001", "prm-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "prm-001") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "1500ms") +} + +func TestEnvDBPromoteStatusCmd_JSONOutput(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := 
buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "db", "promote-status", "env-001", "prm-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "prm-001", result["id"]) + assert.Equal(t, "completed", result["status"]) +} + +func TestEnvDBPromoteStatusCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(dbPromoteStatusResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote-status", "env-001", "prm-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/db/promotes/prm-001", receivedPath) +} + +func TestEnvDBPromoteStatusCmd_MissingArgs(t *testing.T) { + ts := newDBTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "db", "promote-status", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} diff --git a/internal/commands/env_secret.go b/internal/commands/env_secret.go new file mode 100644 index 0000000..2c08846 --- /dev/null +++ b/internal/commands/env_secret.go @@ -0,0 +1,358 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const secretsBasePath = "/api/v1/vector/secrets" + +// NewEnvSecretCmd creates the env secret command group. 
+func NewEnvSecretCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "secret", + Short: "Manage environment secrets", + Long: "Manage secrets and environment variables for an environment.", + } + + cmd.AddCommand(newEnvSecretListCmd()) + cmd.AddCommand(newEnvSecretShowCmd()) + cmd.AddCommand(newEnvSecretCreateCmd()) + cmd.AddCommand(newEnvSecretUpdateCmd()) + cmd.AddCommand(newEnvSecretDeleteCmd()) + + return cmd +} + +func newEnvSecretListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list ", + Short: "List secrets for an environment", + Long: "Retrieve a paginated list of secrets and environment variables for an environment.", + Example: ` # List secrets for an environment + vector env secret list env-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + path := envsBasePath + "/" + args[0] + "/secrets" + resp, err := app.Client.Get(cmd.Context(), path, query) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list secrets: %w", err) + } + + headers := []string{"ID", "KEY", "SECRET", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "key"), + 
formatBool(getBool(item, "is_secret")), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil && meta.LastPage > 1 { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newEnvSecretShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show secret details", + Long: "Retrieve details of a specific secret or environment variable.", + Example: ` # Show secret details + vector env secret show secret-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), secretsBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to show secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to show secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to show secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to show secret: %w", err) + } + + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(getBool(item, "is_secret"))}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + } + + // Show value only for non-secret env vars + if !getBool(item, "is_secret") { + pairs = append(pairs, output.KeyValue{Key: "Value", Value: formatString(getString(item, "value"))}) + } + + app.Output.KeyValue(pairs) + return nil + }, + } +} + +func newEnvSecretCreateCmd() *cobra.Command { + cmd := 
&cobra.Command{ + Use: "create ", + Short: "Create a secret", + Long: "Create a new secret or environment variable for an environment.", + Example: ` # Create a secret + vector env secret create env-abc123 --key DB_PASSWORD --value s3cret + + # Create a plain environment variable + vector env secret create env-abc123 --key APP_ENV --value production --is-secret=false`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + key, _ := cmd.Flags().GetString("key") + value, _ := cmd.Flags().GetString("value") + + reqBody := map[string]any{ + "key": key, + "value": value, + } + + if cmd.Flags().Changed("is-secret") { + v, _ := cmd.Flags().GetBool("is-secret") + reqBody["is_secret"] = v + } + + path := envsBasePath + "/" + args[0] + "/secrets" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create secret: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(getBool(item, "is_secret"))}, + {Key: "Created", Value: getString(item, "created_at")}, + }) + return nil + }, + } + + cmd.Flags().String("key", "", "Secret key name (required)") + cmd.Flags().String("value", "", "Secret value (required)") + cmd.Flags().Bool("is-secret", true, "Whether the value is a secret (default: true)") 
+ _ = cmd.MarkFlagRequired("key") + _ = cmd.MarkFlagRequired("value") + + return cmd +} + +func newEnvSecretUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update a secret", + Long: "Update an existing secret or environment variable.", + Example: ` # Update a secret value + vector env secret update secret-456 --value new-value`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("key") { + v, _ := cmd.Flags().GetString("key") + reqBody["key"] = v + } + if cmd.Flags().Changed("value") { + v, _ := cmd.Flags().GetString("value") + reqBody["value"] = v + } + if cmd.Flags().Changed("is-secret") { + v, _ := cmd.Flags().GetBool("is-secret") + reqBody["is_secret"] = v + } + + resp, err := app.Client.Put(cmd.Context(), secretsBasePath+"/"+args[0], reqBody) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update secret: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Key", Value: getString(item, "key")}, + {Key: "Secret", Value: formatBool(getBool(item, "is_secret"))}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().String("key", "", "New secret key name") + cmd.Flags().String("value", "", "New secret value") + cmd.Flags().Bool("is-secret", false, "Whether the value 
is a secret") + + return cmd +} + +func newEnvSecretDeleteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "delete ", + Short: "Delete a secret", + Long: "Delete a secret or environment variable.", + Example: ` # Delete a secret + vector env secret delete secret-456 + + # Delete without confirmation + vector env secret delete secret-456 --force`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + force, _ := cmd.Flags().GetBool("force") + if !force { + if !confirmAction(cmd, fmt.Sprintf("Are you sure you want to delete secret %s?", args[0])) { + app.Output.Message("Aborted.") + return nil + } + } + + resp, err := app.Client.Delete(cmd.Context(), secretsBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete secret: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message("Secret deleted successfully.") + return nil + }, + } + + cmd.Flags().Bool("force", false, "Skip confirmation prompt") + + return cmd +} diff --git a/internal/commands/env_secret_test.go b/internal/commands/env_secret_test.go new file mode 100644 index 0000000..d7d7aeb --- /dev/null +++ b/internal/commands/env_secret_test.go @@ -0,0 +1,496 @@ +package commands + +import ( + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var secretListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": 
"sec-001", + "key": "API_KEY", + "is_secret": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + { + "id": "sec-002", + "key": "APP_DEBUG", + "is_secret": false, + "value": "true", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Environment secrets retrieved successfully", + "http_status": 200, +} + +var secretShowResponse = map[string]any{ + "data": map[string]any{ + "id": "sec-002", + "key": "APP_DEBUG", + "is_secret": false, + "value": "true", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Environment secret retrieved successfully", + "http_status": 200, +} + +var secretShowSecretResponse = map[string]any{ + "data": map[string]any{ + "id": "sec-001", + "key": "API_KEY", + "is_secret": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Environment secret retrieved successfully", + "http_status": 200, +} + +var secretCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "sec-003", + "key": "NEW_SECRET", + "is_secret": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Environment secret created successfully", + "http_status": 201, +} + +var secretUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": "sec-001", + "key": "UPDATED_KEY", + "is_secret": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-16T12:00:00+00:00", + }, + "message": "Environment secret updated successfully", + "http_status": 200, +} + +var secretDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "sec-001", + "key": "API_KEY", + "is_secret": true, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + 
"message": "Environment secret deleted successfully", + "http_status": 200, +} + +func newSecretTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/environments/env-001/secrets": + _ = json.NewEncoder(w).Encode(secretListResponse) + + case method == "GET" && path == "/api/v1/vector/secrets/sec-001": + _ = json.NewEncoder(w).Encode(secretShowSecretResponse) + + case method == "GET" && path == "/api/v1/vector/secrets/sec-002": + _ = json.NewEncoder(w).Encode(secretShowResponse) + + case method == "POST" && path == "/api/v1/vector/environments/env-001/secrets": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(secretCreateResponse) + + case method == "PUT" && path == "/api/v1/vector/secrets/sec-001": + _ = json.NewEncoder(w).Encode(secretUpdateResponse) + + case method == "DELETE" && path == "/api/v1/vector/secrets/sec-001": + _ = json.NewEncoder(w).Encode(secretDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Secret List Tests --- + +func TestEnvSecretListCmd_TableOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "list", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, 
"sec-001") + assert.Contains(t, out, "API_KEY") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "sec-002") + assert.Contains(t, out, "APP_DEBUG") +} + +func TestEnvSecretListCmd_JSONOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "secret", "list", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "sec-001", result[0]["id"]) +} + +func TestEnvSecretListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(secretListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "list", "env-001", "--page", "2", "--per-page", "5"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "2", receivedPage) + assert.Equal(t, "5", receivedPerPage) +} + +// --- Secret Show Tests --- + +func TestEnvSecretShowCmd_EnvVar(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "show", "sec-002"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "sec-002") + assert.Contains(t, out, "APP_DEBUG") + assert.Contains(t, out, "No") // is_secret = false + assert.Contains(t, out, "true") // value shown for non-secrets +} + +func TestEnvSecretShowCmd_Secret(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() 
+ + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "show", "sec-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "sec-001") + assert.Contains(t, out, "API_KEY") + assert.Contains(t, out, "Yes") // is_secret = true + assert.NotContains(t, out, "Value") // value not shown for secrets +} + +func TestEnvSecretShowCmd_JSONOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "secret", "show", "sec-002"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "sec-002", result["id"]) + assert.Equal(t, "APP_DEBUG", result["key"]) +} + +// --- Secret Create Tests --- + +func TestEnvSecretCreateCmd_TableOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "create", "env-001", "--key", "NEW_SECRET", "--value", "secret123"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "sec-003") + assert.Contains(t, out, "NEW_SECRET") +} + +func TestEnvSecretCreateCmd_JSONOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "secret", "create", "env-001", "--key", "NEW_SECRET", "--value", "secret123"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "sec-003", result["id"]) +} + +func TestEnvSecretCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedPath string + ts := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(secretCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "create", "env-001", + "--key", "MY_VAR", + "--value", "my_value", + "--is-secret=false", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "/api/v1/vector/environments/env-001/secrets", receivedPath) + assert.Equal(t, "MY_VAR", receivedBody["key"]) + assert.Equal(t, "my_value", receivedBody["value"]) + assert.Equal(t, false, receivedBody["is_secret"]) +} + +func TestEnvSecretCreateCmd_MissingKey(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "create", "env-001", "--value", "test"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "key") +} + +// --- Secret Update Tests --- + +func TestEnvSecretUpdateCmd_TableOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "update", "sec-001", "--key", "UPDATED_KEY"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "sec-001") + assert.Contains(t, out, "UPDATED_KEY") +} + +func TestEnvSecretUpdateCmd_JSONOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "secret", "update", "sec-001", "--key", "UPDATED_KEY"}) + + err := cmd.Execute() + require.NoError(t, 
err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "sec-001", result["id"]) +} + +func TestEnvSecretUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(secretUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "update", "sec-001", "--key", "NEW_KEY", "--value", "new_val"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "/api/v1/vector/secrets/sec-001", receivedPath) + assert.Equal(t, "NEW_KEY", receivedBody["key"]) + assert.Equal(t, "new_val", receivedBody["value"]) +} + +// --- Secret Delete Tests --- + +func TestEnvSecretDeleteCmd_WithForce(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "delete", "sec-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Secret deleted successfully") +} + +func TestEnvSecretDeleteCmd_JSONOutput(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "secret", "delete", "sec-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "sec-001", result["id"]) +} + +func TestEnvSecretDeleteCmd_ConfirmAbort(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + origReader := 
confirmReader + confirmReader = strings.NewReader("n\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "delete", "sec-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Aborted") +} + +func TestEnvSecretDeleteCmd_ConfirmYes(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + origReader := confirmReader + confirmReader = strings.NewReader("y\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "delete", "sec-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Secret deleted successfully") +} + +func TestEnvSecretDeleteCmd_HTTPMethod(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(secretDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "delete", "sec-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/secrets/sec-001", receivedPath) +} + +// --- Auth Error Tests --- + +func TestEnvSecretListCmd_AuthError(t *testing.T) { + ts := newSecretTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"env", "secret", "list", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestEnvSecretListCmd_NoAuth(t 
*testing.T) { + cmd, _, _ := buildEnvCmdNoAuth(output.Table) + cmd.SetArgs([]string{"env", "secret", "list", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} diff --git a/internal/commands/env_test.go b/internal/commands/env_test.go new file mode 100644 index 0000000..ad1e3fc --- /dev/null +++ b/internal/commands/env_test.go @@ -0,0 +1,792 @@ +package commands + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var envListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "env-001", + "vector_site_id": "site-001", + "name": "production", + "is_production": true, + "status": "active", + "php_version": "8.3", + "tags": []string{"live"}, + "platform_domain": "test--prod.vectorpages.com", + "custom_domain": "example.com", + "dns_target": "site-abc.b-cdn.net", + "database_host": "db.rds.amazonaws.com", + "database_name": "db_env001", + "custom_domain_certificate": map[string]any{ + "status": "issued", + "dns_validation_records": nil, + }, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 1, + }, + "message": "Environments retrieved successfully", + "http_status": 200, +} + +var envShowResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "vector_site_id": "site-001", + "name": "production", + "is_production": true, + "status": "active", + "php_version": "8.3", + "tags": []string{"live"}, + "platform_domain": 
"test--prod.vectorpages.com", + "custom_domain": "example.com", + "dns_target": "site-abc.b-cdn.net", + "database_host": "db.rds.amazonaws.com", + "database_name": "db_env001", + "custom_domain_certificate": map[string]any{ + "status": "issued", + "dns_validation_records": nil, + }, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Environment retrieved successfully", + "http_status": 200, +} + +var envCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "env-002", + "vector_site_id": "site-001", + "name": "staging", + "is_production": false, + "status": "pending", + "php_version": "8.3", + "tags": []string{}, + "platform_domain": "test--staging.vectorpages.com", + "custom_domain": "", + }, + "message": "Environment creation initiated", + "http_status": 201, +} + +var envUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "name": "production", + "status": "active", + "tags": []string{"updated"}, + "custom_domain": "new.example.com", + "dns_target": "site-abc.b-cdn.net", + }, + "message": "Environment updated successfully", + "http_status": 200, +} + +var envUpdateDomainChangeResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "name": "production", + "status": "active", + "tags": []string{"live"}, + "custom_domain": "new.example.com", + "dns_target": "site-abc.b-cdn.net", + "pending_domain_change": map[string]any{ + "id": "dc-001", + "status": "pending", + "old_domain": "old.example.com", + "new_domain": "new.example.com", + }, + }, + "message": "Environment update initiated, domain change in progress", + "http_status": 202, +} + +var envDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "status": "terminating", + }, + "message": "Environment deletion initiated", + "http_status": 202, +} + +func newEnvTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/environments": + _ = json.NewEncoder(w).Encode(envListResponse) + + case method == "GET" && path == "/api/v1/vector/environments/env-001": + _ = json.NewEncoder(w).Encode(envShowResponse) + + case method == "GET" && path == "/api/v1/vector/environments/nonexistent": + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "data": map[string]any{}, + "message": "Environment not found", + "http_status": 404, + }) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/environments": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(envCreateResponse) + + case method == "PUT" && path == "/api/v1/vector/environments/env-001": + // Check if domain change + body, _ := io.ReadAll(r.Body) + var reqBody map[string]any + _ = json.Unmarshal(body, &reqBody) + if _, hasDomain := reqBody["custom_domain"]; hasDomain { + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(envUpdateDomainChangeResponse) + } else { + _ = json.NewEncoder(w).Encode(envUpdateResponse) + } + + case method == "DELETE" && path == "/api/v1/vector/environments/env-001": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(envDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildEnvCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := 
new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + envCmd := NewEnvCmd() + root.AddCommand(envCmd) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildEnvCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + envCmd := NewEnvCmd() + root.AddCommand(envCmd) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Env List Tests --- + +func TestEnvListCmd_TableOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "production") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "active") + assert.Contains(t, out, "8.3") + assert.Contains(t, out, "test--prod.vectorpages.com") + assert.Contains(t, out, "example.com") +} + +func TestEnvListCmd_JSONOutput(t *testing.T) { + ts := 
newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 1) + assert.Equal(t, "env-001", result[0]["id"]) +} + +func TestEnvListCmd_SiteQueryParam(t *testing.T) { + var receivedQuery string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedQuery = r.URL.RawQuery + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(envListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, receivedQuery, "site=site-001") +} + +func TestEnvListCmd_AuthError(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestEnvListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildEnvCmdNoAuth(output.Table) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestEnvListCmd_MissingArg(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Env Show Tests --- + +func 
TestEnvShowCmd_TableOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "show", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "production") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "8.3") + assert.Contains(t, out, "example.com") + assert.Contains(t, out, "site-abc.b-cdn.net") + assert.Contains(t, out, "issued") +} + +func TestEnvShowCmd_JSONOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "show", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-001", result["id"]) + assert.Equal(t, "production", result["name"]) +} + +func TestEnvShowCmd_NotFound(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "show", "nonexistent"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 4, apiErr.ExitCode) +} + +// --- Env Create Tests --- + +func TestEnvCreateCmd_TableOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "create", "site-001", "--name", "staging", "--php-version", "8.3"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "env-002") + assert.Contains(t, out, "staging") + assert.Contains(t, out, "pending") +} + +func TestEnvCreateCmd_JSONOutput(t 
*testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "create", "site-001", "--name", "staging", "--php-version", "8.3"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-002", result["id"]) +} + +func TestEnvCreateCmd_PostsToSitePath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(envCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "create", "site-001", "--name", "staging", "--php-version", "8.3"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/environments", receivedPath) +} + +func TestEnvCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(envCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "create", "site-001", + "--name", "staging", + "--php-version", "8.3", + "--production", + "--tags", "test,staging", + "--custom-domain", "staging.example.com", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "staging", receivedBody["name"]) + assert.Equal(t, "8.3", 
receivedBody["php_version"]) + assert.Equal(t, true, receivedBody["is_production"]) + assert.Equal(t, "staging.example.com", receivedBody["custom_domain"]) + tags, ok := receivedBody["tags"].([]any) + require.True(t, ok) + assert.Equal(t, "test", tags[0]) + assert.Equal(t, "staging", tags[1]) +} + +func TestEnvCreateCmd_MissingName(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "create", "site-001", "--php-version", "8.3"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 3, apiErr.ExitCode) +} + +func TestEnvCreateCmd_MissingPHPVersion(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "create", "site-001", "--name", "staging"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 3, apiErr.ExitCode) +} + +// --- Env Update Tests --- + +func TestEnvUpdateCmd_TableOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "update", "env-001", "--tags", "updated"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "updated") +} + +func TestEnvUpdateCmd_JSONOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "update", "env-001", "--tags", "updated"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-001", result["id"]) 
+} + +func TestEnvUpdateCmd_DomainChange202(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "update", "env-001", "--custom-domain", "new.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Domain change initiated") + assert.Contains(t, out, "old.example.com") + assert.Contains(t, out, "new.example.com") +} + +func TestEnvUpdateCmd_ClearCustomDomain(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(envUpdateDomainChangeResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "update", "env-001", "--clear-custom-domain"}) + + err := cmd.Execute() + require.NoError(t, err) + + // custom_domain should be null (Go nil) + assert.Contains(t, receivedBody, "custom_domain") + assert.Nil(t, receivedBody["custom_domain"]) +} + +func TestEnvUpdateCmd_CustomDomainAndClearError(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "update", "env-001", "--custom-domain", "foo.com", "--clear-custom-domain"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 3, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "cannot be used together") +} + +func TestEnvUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := 
io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(envUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "update", "env-001", "--tags", "tag1,tag2"}) + + err := cmd.Execute() + require.NoError(t, err) + + tags, ok := receivedBody["tags"].([]any) + require.True(t, ok) + assert.Equal(t, "tag1", tags[0]) + assert.Equal(t, "tag2", tags[1]) +} + +// --- Env Delete Tests --- + +func TestEnvDeleteCmd_WithForce(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "delete", "env-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "deletion initiated") +} + +func TestEnvDeleteCmd_JSONOutput(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"env", "delete", "env-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-001", result["id"]) + assert.Equal(t, "terminating", result["status"]) +} + +func TestEnvDeleteCmd_ConfirmAbort(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + origReader := confirmReader + confirmReader = strings.NewReader("n\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "delete", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Aborted") +} + +func TestEnvDeleteCmd_ConfirmYes(t *testing.T) { + ts := newEnvTestServer("valid-token") + defer ts.Close() + + origReader := 
confirmReader + confirmReader = strings.NewReader("y\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "delete", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "deletion initiated") +} + +func TestEnvDeleteCmd_HTTPMethod(t *testing.T) { + var receivedMethod string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(envDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "delete", "env-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) +} + +// --- Help Text Tests --- + +func TestEnvCmd_Help(t *testing.T) { + cmd := NewEnvCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "secret") + assert.Contains(t, out, "db") +} + +func TestEnvSecretCmd_Help(t *testing.T) { + cmd := NewEnvCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"secret", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") +} + +func TestEnvDBCmd_Help(t *testing.T) { + cmd := 
NewEnvCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"db", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "promote") + assert.Contains(t, out, "promote-status") +} + +// --- Server Error Test --- + +func TestEnvListCmd_ServerError(t *testing.T) { + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusInternalServerError) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Internal server error", + "http_status": 500, + }) + })) + defer ts.Close() + + cmd, _, _ := buildEnvCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"env", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 5, apiErr.ExitCode) +} diff --git a/internal/commands/event.go b/internal/commands/event.go new file mode 100644 index 0000000..640f61a --- /dev/null +++ b/internal/commands/event.go @@ -0,0 +1,160 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const eventsBasePath = "/api/v1/vector/events" + +// NewEventCmd creates the event command group. 
+func NewEventCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "event", + Short: "Manage events", + Long: "View account event logs for auditing activity.", + } + + cmd.AddCommand(newEventListCmd()) + + return cmd +} + +func newEventListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List events", + Long: "Retrieve a paginated list of account event logs in reverse chronological order.", + Example: ` # List recent events + vector event list + + # Filter by date range + vector event list --from 2024-01-01T00:00:00Z --to 2024-01-31T23:59:59Z + + # Filter by event type + vector event list --event site.created`, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + // Add optional filter flags + if cmd.Flags().Changed("from") { + v, _ := cmd.Flags().GetString("from") + if v != "" { + query.Set("from", v) + } + } + if cmd.Flags().Changed("to") { + v, _ := cmd.Flags().GetString("to") + if v != "" { + query.Set("to", v) + } + } + if cmd.Flags().Changed("event") { + v, _ := cmd.Flags().GetString("event") + if v != "" { + query.Set("event", v) + } + } + + resp, err := app.Client.Get(cmd.Context(), eventsBasePath, query) + if err != nil { + return fmt.Errorf("failed to list events: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list events: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list events: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list events: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return 
fmt.Errorf("failed to list events: %w", err) + } + + headers := []string{"ID", "EVENT", "ACTOR", "RESOURCE", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "event"), + formatActor(item), + formatResource(item), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil && meta.LastPage > 1 { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + + addPaginationFlags(cmd) + cmd.Flags().String("from", "", "Filter events from this ISO 8601 timestamp") + cmd.Flags().String("to", "", "Filter events to this ISO 8601 timestamp") + cmd.Flags().String("event", "", "Filter by event type (comma-separated)") + + return cmd +} + +// formatActor formats the actor column: token name > IP > "-". +func formatActor(item map[string]any) string { + actor := getMap(item, "actor") + if actor == nil { + return "-" + } + + tokenName := getString(actor, "token_name") + if tokenName != "" { + return tokenName + } + + ip := getString(actor, "ip") + if ip != "" { + return ip + } + + return "-" +} + +// formatResource formats the resource column as model_type:model_id or just model_type. 
+func formatResource(item map[string]any) string { + modelType := getString(item, "model_type") + if modelType == "" { + return "-" + } + + modelID := getString(item, "model_id") + if modelID != "" { + return modelType + ":" + modelID + } + + return modelType +} diff --git a/internal/commands/event_test.go b/internal/commands/event_test.go new file mode 100644 index 0000000..c4116a4 --- /dev/null +++ b/internal/commands/event_test.go @@ -0,0 +1,370 @@ +package commands + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var eventListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "evt-001", + "event": "site.provisioning.completed", + "model_type": "VectorSite", + "model_id": "site-001", + "context": nil, + "actor": map[string]any{ + "id": 1, + "ip": "192.0.2.1", + "token_id": 1, + "token_name": "Production API Key", + }, + "occurred_at": "2025-01-15T12:00:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + }, + { + "id": "evt-002", + "event": "deployment.completed", + "model_type": "VectorEnvironment", + "model_id": "", + "context": nil, + "actor": map[string]any{ + "id": 2, + "ip": "10.0.0.1", + "token_id": nil, + "token_name": "", + }, + "occurred_at": "2025-01-15T13:00:00+00:00", + "created_at": "2025-01-15T13:00:00+00:00", + }, + { + "id": "evt-003", + "event": "site.deleted", + "model_type": "", + "model_id": "", + "context": nil, + "actor": nil, + "occurred_at": "2025-01-15T14:00:00+00:00", + "created_at": "2025-01-15T14:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 3, + }, + "message": "Event logs retrieved 
successfully", + "http_status": 200, +} + +func newEventTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + switch { + case r.Method == "GET" && r.URL.Path == "/api/v1/vector/events": + _ = json.NewEncoder(w).Encode(eventListResponse) + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildEventCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewEventCmd()) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildEventCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + 
cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewEventCmd()) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Event List Tests --- + +func TestEventListCmd_TableOutput(t *testing.T) { + ts := newEventTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEventCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"event", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + // First event: has token name + assert.Contains(t, out, "evt-001") + assert.Contains(t, out, "site.provisioning.completed") + assert.Contains(t, out, "Production API Key") + assert.Contains(t, out, "VectorSite:site-001") + + // Second event: falls back to IP + assert.Contains(t, out, "evt-002") + assert.Contains(t, out, "10.0.0.1") + assert.Contains(t, out, "VectorEnvironment") + + // Third event: no actor, no resource + assert.Contains(t, out, "evt-003") +} + +func TestEventListCmd_JSONOutput(t *testing.T) { + ts := newEventTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildEventCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"event", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 3) + assert.Equal(t, "evt-001", result[0]["id"]) +} + +func TestEventListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(eventListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEventCmd(ts.URL, "valid-token", output.Table) + 
cmd.SetArgs([]string{"event", "list", "--page", "2", "--per-page", "10"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "2", receivedPage) + assert.Equal(t, "10", receivedPerPage) +} + +func TestEventListCmd_FilterFlags(t *testing.T) { + var receivedFrom, receivedTo, receivedEvent string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedFrom = r.URL.Query().Get("from") + receivedTo = r.URL.Query().Get("to") + receivedEvent = r.URL.Query().Get("event") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(eventListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildEventCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"event", "list", + "--from", "2025-01-01T00:00:00+00:00", + "--to", "2025-01-31T23:59:59+00:00", + "--event", "site.provisioning.completed,deployment.completed", + }) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "2025-01-01T00:00:00+00:00", receivedFrom) + assert.Equal(t, "2025-01-31T23:59:59+00:00", receivedTo) + assert.Equal(t, "site.provisioning.completed,deployment.completed", receivedEvent) +} + +func TestEventListCmd_AuthError(t *testing.T) { + ts := newEventTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildEventCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"event", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestEventListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildEventCmdNoAuth(output.Table) + cmd.SetArgs([]string{"event", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Help Text Tests --- + +func TestEventCmd_Help(t *testing.T) { + cmd := NewEventCmd() + cmd.SetContext(context.Background()) + + stdout := 
new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "View account event logs") +} + +func TestEventListCmd_Help(t *testing.T) { + cmd := NewEventCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"list", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Retrieve a paginated list of account event logs") + assert.Contains(t, out, "--from") + assert.Contains(t, out, "--to") + assert.Contains(t, out, "--event") + assert.Contains(t, out, "--page") + assert.Contains(t, out, "--per-page") +} + +// --- Actor Formatting Tests --- + +func TestFormatActor_TokenName(t *testing.T) { + item := map[string]any{ + "actor": map[string]any{ + "token_name": "My Token", + "ip": "192.168.1.1", + }, + } + assert.Equal(t, "My Token", formatActor(item)) +} + +func TestFormatActor_FallbackToIP(t *testing.T) { + item := map[string]any{ + "actor": map[string]any{ + "token_name": "", + "ip": "192.168.1.1", + }, + } + assert.Equal(t, "192.168.1.1", formatActor(item)) +} + +func TestFormatActor_NilActor(t *testing.T) { + item := map[string]any{ + "actor": nil, + } + assert.Equal(t, "-", formatActor(item)) +} + +func TestFormatActor_NoActor(t *testing.T) { + item := map[string]any{} + assert.Equal(t, "-", formatActor(item)) +} + +// --- Resource Formatting Tests --- + +func TestFormatResource_WithModelID(t *testing.T) { + item := map[string]any{ + "model_type": "VectorSite", + "model_id": "site-001", + } + assert.Equal(t, "VectorSite:site-001", formatResource(item)) +} + +func TestFormatResource_WithoutModelID(t *testing.T) { + item := map[string]any{ + "model_type": "VectorEnvironment", + "model_id": "", + } + assert.Equal(t, "VectorEnvironment", formatResource(item)) +} + +func 
TestFormatResource_NoModelType(t *testing.T) { + item := map[string]any{ + "model_type": "", + "model_id": "", + } + assert.Equal(t, "-", formatResource(item)) +} diff --git a/internal/commands/helpers.go b/internal/commands/helpers.go new file mode 100644 index 0000000..ee6c3f2 --- /dev/null +++ b/internal/commands/helpers.go @@ -0,0 +1,219 @@ +package commands + +import ( + "bufio" + "encoding/json" + "fmt" + "io" + "net/url" + "os" + "strconv" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/output" +) + +// confirmReader is the reader used for confirmation prompts. Override in tests. +var confirmReader io.Reader = os.Stdin + +// PaginationMeta holds pagination metadata from API responses. +type PaginationMeta struct { + CurrentPage int `json:"current_page"` + LastPage int `json:"last_page"` + Total int `json:"total"` +} + +// requireApp extracts *appctx.App from the command context and returns an error +// if no API token is set. +func requireApp(cmd *cobra.Command) (*appctx.App, error) { + app := appctx.FromContext(cmd.Context()) + if app == nil { + return nil, fmt.Errorf("app not initialized") + } + if app.Client.Token == "" { + return nil, &api.APIError{ + Message: "Authentication required. Run 'vector auth login', use --token flag, or set VECTOR_API_KEY environment variable.", + ExitCode: 2, + } + } + return app, nil +} + +// addPaginationFlags adds --page and --per-page flags to a command. +func addPaginationFlags(cmd *cobra.Command) { + cmd.Flags().Int("page", 0, "Page number") + cmd.Flags().Int("per-page", 0, "Items per page") +} + +// getPagination reads --page and --per-page flag values from the command. 
+func getPagination(cmd *cobra.Command) (page, perPage int) { + page, _ = cmd.Flags().GetInt("page") + perPage, _ = cmd.Flags().GetInt("per-page") + return page, perPage +} + +// buildPaginationQuery creates url.Values with page and per_page parameters. +// Defaults to page=1 and per_page=15 when values are <= 0. +func buildPaginationQuery(page, perPage int) url.Values { + if page <= 0 { + page = 1 + } + if perPage <= 0 { + perPage = 15 + } + q := url.Values{} + q.Set("page", strconv.Itoa(page)) + q.Set("per_page", strconv.Itoa(perPage)) + return q +} + +// parseResponseData parses a JSON response with a "data" key and returns the +// raw JSON for the data value (works for both objects and arrays). +func parseResponseData(resp []byte) (json.RawMessage, error) { + var envelope struct { + Data json.RawMessage `json:"data"` + } + if err := json.Unmarshal(resp, &envelope); err != nil { + return nil, fmt.Errorf("parsing response: %w", err) + } + if len(envelope.Data) == 0 || string(envelope.Data) == "null" { + return nil, fmt.Errorf("response missing \"data\" key") + } + return envelope.Data, nil +} + +// parseResponseWithMeta parses a JSON response with "data" and "meta" keys, +// returning the raw data and pagination metadata. 
+func parseResponseWithMeta(resp []byte) (json.RawMessage, *PaginationMeta, error) { + var envelope struct { + Data json.RawMessage `json:"data"` + Meta struct { + CurrentPage int `json:"current_page"` + LastPage int `json:"last_page"` + Total int `json:"total"` + } `json:"meta"` + } + if err := json.Unmarshal(resp, &envelope); err != nil { + return nil, nil, fmt.Errorf("parsing response: %w", err) + } + if len(envelope.Data) == 0 || string(envelope.Data) == "null" { + return nil, nil, fmt.Errorf("response missing \"data\" key") + } + meta := &PaginationMeta{ + CurrentPage: envelope.Meta.CurrentPage, + LastPage: envelope.Meta.LastPage, + Total: envelope.Meta.Total, + } + return envelope.Data, meta, nil +} + +// formatBool returns "Yes" for true, "No" for false. +func formatBool(v bool) string { + if v { + return "Yes" + } + return "No" +} + +// formatTags joins a tag slice with ", " or returns "-" if empty. +func formatTags(tags []string) string { + if len(tags) == 0 { + return "-" + } + return strings.Join(tags, ", ") +} + +// formatString returns the string or "-" if empty. +func formatString(v string) string { + if v == "" { + return "-" + } + return v +} + +// getString safely gets a string value from a map, returning "" if missing or wrong type. +func getString(m map[string]any, key string) string { + v, ok := m[key] + if !ok || v == nil { + return "" + } + s, ok := v.(string) + if !ok { + return "" + } + return s +} + +// getFloat safely gets a float64 value from a map, returning 0 if missing or wrong type. +func getFloat(m map[string]any, key string) float64 { + v, ok := m[key] + if !ok || v == nil { + return 0 + } + f, ok := v.(float64) + if !ok { + return 0 + } + return f +} + +// getBool safely gets a bool value from a map, returning false if missing or wrong type. 
+func getBool(m map[string]any, key string) bool { + v, ok := m[key] + if !ok || v == nil { + return false + } + b, ok := v.(bool) + if !ok { + return false + } + return b +} + +// getSlice safely gets a []any value from a map, returning nil if missing or wrong type. +func getSlice(m map[string]any, key string) []any { + v, ok := m[key] + if !ok || v == nil { + return nil + } + s, ok := v.([]any) + if !ok { + return nil + } + return s +} + +// getMap safely gets a map[string]any value from a map, returning nil if missing or wrong type. +func getMap(m map[string]any, key string) map[string]any { + v, ok := m[key] + if !ok || v == nil { + return nil + } + m2, ok := v.(map[string]any) + if !ok { + return nil + } + return m2 +} + +// printPaginationIfNeeded prints a pagination line only when there are multiple pages. +func printPaginationIfNeeded(w io.Writer, meta *PaginationMeta) { + if meta != nil && meta.LastPage > 1 { + output.PrintPagination(w, meta.CurrentPage, meta.LastPage, meta.Total) + } +} + +// confirmAction prompts the user on stderr and returns true only for "y" or "yes" input. 
+func confirmAction(cmd *cobra.Command, prompt string) bool { + _, _ = fmt.Fprintf(cmd.ErrOrStderr(), "%s [y/N]: ", prompt) + scanner := bufio.NewScanner(confirmReader) + if !scanner.Scan() { + return false + } + answer := strings.TrimSpace(strings.ToLower(scanner.Text())) + return answer == "y" || answer == "yes" +} diff --git a/internal/commands/helpers_test.go b/internal/commands/helpers_test.go new file mode 100644 index 0000000..8e78694 --- /dev/null +++ b/internal/commands/helpers_test.go @@ -0,0 +1,406 @@ +package commands + +import ( + "bytes" + "context" + "encoding/json" + "strings" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" +) + +// newTestCmd creates a cobra.Command with a background context set. +func newTestCmd() *cobra.Command { + cmd := &cobra.Command{Use: "test"} + cmd.SetContext(context.Background()) + return cmd +} + +// --- requireApp --- + +func TestRequireApp_WithToken(t *testing.T) { + cmd := newTestCmd() + client := api.NewClient("http://localhost", "valid-token", "test") + app := appctx.NewApp(config.DefaultConfig(), client, "") + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + + got, err := requireApp(cmd) + require.NoError(t, err) + assert.Equal(t, app, got) +} + +func TestRequireApp_NoToken(t *testing.T) { + cmd := newTestCmd() + client := api.NewClient("http://localhost", "", "test") + app := appctx.NewApp(config.DefaultConfig(), client, "") + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + + _, err := requireApp(cmd) + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestRequireApp_NilContext(t *testing.T) { + cmd := newTestCmd() + + _, err := requireApp(cmd) + require.Error(t, err) + assert.Contains(t, 
err.Error(), "app not initialized") +} + +// --- addPaginationFlags / getPagination --- + +func TestPaginationFlags(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addPaginationFlags(cmd) + + cmd.SetArgs([]string{"--page", "3", "--per-page", "25"}) + require.NoError(t, cmd.Execute()) + + page, perPage := getPagination(cmd) + assert.Equal(t, 3, page) + assert.Equal(t, 25, perPage) +} + +func TestPaginationFlags_Defaults(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addPaginationFlags(cmd) + + require.NoError(t, cmd.Execute()) + + page, perPage := getPagination(cmd) + assert.Equal(t, 0, page) + assert.Equal(t, 0, perPage) +} + +// --- buildPaginationQuery --- + +func TestBuildPaginationQuery(t *testing.T) { + tests := []struct { + name string + page, perPage int + wantPage string + wantPerPage string + }{ + {"explicit values", 2, 30, "2", "30"}, + {"defaults for zero", 0, 0, "1", "15"}, + {"defaults for negative", -1, -5, "1", "15"}, + {"page default only", 0, 10, "1", "10"}, + {"perPage default only", 5, 0, "5", "15"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + q := buildPaginationQuery(tt.page, tt.perPage) + assert.Equal(t, tt.wantPage, q.Get("page")) + assert.Equal(t, tt.wantPerPage, q.Get("per_page")) + }) + } +} + +// --- parseResponseData --- + +func TestParseResponseData(t *testing.T) { + tests := []struct { + name string + input string + want string + wantErr bool + }{ + { + name: "object data", + input: `{"data": {"id": 1, "name": "test"}}`, + want: `{"id": 1, "name": "test"}`, + }, + { + name: "array data", + input: `{"data": [{"id": 1}, {"id": 2}]}`, + want: `[{"id": 1}, {"id": 2}]`, + }, + { + name: "missing data key", + input: `{"message": "ok"}`, + wantErr: true, + }, + { + name: "invalid json", + input: `not json`, + wantErr: true, + }, + { + name: "null data", + input: `{"data": null}`, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := 
parseResponseData([]byte(tt.input)) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + // Compare as compact JSON + assert.JSONEq(t, tt.want, string(got)) + }) + } +} + +// --- parseResponseWithMeta --- + +func TestParseResponseWithMeta(t *testing.T) { + tests := []struct { + name string + input string + wantData string + wantMeta *PaginationMeta + wantErr bool + }{ + { + name: "full response", + input: `{"data": [{"id": 1}], "meta": {"current_page": 2, "last_page": 5, "total": 50}}`, + wantData: `[{"id": 1}]`, + wantMeta: &PaginationMeta{CurrentPage: 2, LastPage: 5, Total: 50}, + }, + { + name: "no meta", + input: `{"data": [{"id": 1}]}`, + wantData: `[{"id": 1}]`, + wantMeta: &PaginationMeta{}, + }, + { + name: "missing data", + input: `{"meta": {"current_page": 1}}`, + wantErr: true, + }, + { + name: "invalid json", + input: `{broken`, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + data, meta, err := parseResponseWithMeta([]byte(tt.input)) + if tt.wantErr { + require.Error(t, err) + return + } + require.NoError(t, err) + assert.JSONEq(t, tt.wantData, string(data)) + assert.Equal(t, tt.wantMeta, meta) + }) + } +} + +// --- formatBool --- + +func TestFormatBool(t *testing.T) { + assert.Equal(t, "Yes", formatBool(true)) + assert.Equal(t, "No", formatBool(false)) +} + +// --- formatTags --- + +func TestFormatTags(t *testing.T) { + tests := []struct { + name string + tags []string + want string + }{ + {"multiple tags", []string{"go", "cli", "api"}, "go, cli, api"}, + {"single tag", []string{"go"}, "go"}, + {"empty slice", []string{}, "-"}, + {"nil slice", nil, "-"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, formatTags(tt.tags)) + }) + } +} + +// --- formatString --- + +func TestFormatString(t *testing.T) { + tests := []struct { + name string + input string + want string + }{ + {"non-empty", "hello", "hello"}, + {"empty", "", 
"-"}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.want, formatString(tt.input)) + }) + } +} + +// --- safe map getters --- + +func TestGetString(t *testing.T) { + m := map[string]any{"name": "test", "count": 42, "nil": nil} + + assert.Equal(t, "test", getString(m, "name")) + assert.Equal(t, "", getString(m, "missing")) + assert.Equal(t, "", getString(m, "count")) + assert.Equal(t, "", getString(m, "nil")) +} + +func TestGetFloat(t *testing.T) { + m := map[string]any{"count": 42.5, "name": "test", "nil": nil} + + assert.Equal(t, 42.5, getFloat(m, "count")) + assert.Equal(t, 0.0, getFloat(m, "missing")) + assert.Equal(t, 0.0, getFloat(m, "name")) + assert.Equal(t, 0.0, getFloat(m, "nil")) +} + +func TestGetBool(t *testing.T) { + m := map[string]any{"active": true, "name": "test", "nil": nil} + + assert.Equal(t, true, getBool(m, "active")) + assert.Equal(t, false, getBool(m, "missing")) + assert.Equal(t, false, getBool(m, "name")) + assert.Equal(t, false, getBool(m, "nil")) +} + +func TestGetSlice(t *testing.T) { + m := map[string]any{ + "tags": []any{"a", "b"}, + "name": "test", + "nil": nil, + } + + assert.Equal(t, []any{"a", "b"}, getSlice(m, "tags")) + assert.Nil(t, getSlice(m, "missing")) + assert.Nil(t, getSlice(m, "name")) + assert.Nil(t, getSlice(m, "nil")) +} + +func TestGetMap(t *testing.T) { + inner := map[string]any{"key": "val"} + m := map[string]any{ + "nested": inner, + "name": "test", + "nil": nil, + } + + assert.Equal(t, inner, getMap(m, "nested")) + assert.Nil(t, getMap(m, "missing")) + assert.Nil(t, getMap(m, "name")) + assert.Nil(t, getMap(m, "nil")) +} + +// --- printPaginationIfNeeded --- + +func TestPrintPaginationIfNeeded(t *testing.T) { + tests := []struct { + name string + meta *PaginationMeta + want string + }{ + { + name: "multiple pages", + meta: &PaginationMeta{CurrentPage: 1, LastPage: 3, Total: 45}, + want: "Page 1 of 3 (45 total)\n", + }, + { + name: "single page", + meta: 
&PaginationMeta{CurrentPage: 1, LastPage: 1, Total: 5}, + want: "", + }, + { + name: "nil meta", + meta: nil, + want: "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var buf bytes.Buffer + printPaginationIfNeeded(&buf, tt.meta) + assert.Equal(t, tt.want, buf.String()) + }) + } +} + +// --- confirmAction --- + +func TestConfirmAction(t *testing.T) { + tests := []struct { + name string + input string + want bool + }{ + {"yes lowercase", "yes\n", true}, + {"y lowercase", "y\n", true}, + {"YES uppercase", "YES\n", true}, + {"Y uppercase", "Y\n", true}, + {"no", "no\n", false}, + {"empty", "\n", false}, + {"random text", "maybe\n", false}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + origReader := confirmReader + t.Cleanup(func() { confirmReader = origReader }) + + confirmReader = strings.NewReader(tt.input) + + cmd := &cobra.Command{Use: "test"} + stderr := new(bytes.Buffer) + cmd.SetErr(stderr) + + got := confirmAction(cmd, "Delete this?") + assert.Equal(t, tt.want, got) + assert.Contains(t, stderr.String(), "Delete this? 
[y/N]: ") + }) + } +} + +func TestConfirmAction_EOF(t *testing.T) { + origReader := confirmReader + t.Cleanup(func() { confirmReader = origReader }) + + confirmReader = strings.NewReader("") + + cmd := &cobra.Command{Use: "test"} + stderr := new(bytes.Buffer) + cmd.SetErr(stderr) + + assert.False(t, confirmAction(cmd, "Delete?")) +} + +// --- integration: parseResponseData with JSON unmarshal --- + +func TestParseResponseData_UnmarshalResult(t *testing.T) { + input := `{"data": {"id": 42, "name": "site1", "active": true}}` + data, err := parseResponseData([]byte(input)) + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(data, &result)) + assert.Equal(t, 42.0, result["id"]) + assert.Equal(t, "site1", result["name"]) + assert.Equal(t, true, result["active"]) +} diff --git a/internal/commands/mcp.go b/internal/commands/mcp.go new file mode 100644 index 0000000..03b94a8 --- /dev/null +++ b/internal/commands/mcp.go @@ -0,0 +1,223 @@ +package commands + +import ( + "encoding/json" + "errors" + "fmt" + "os" + "path/filepath" + "runtime" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/api" +) + +const mcpServerURL = "https://api.builtfast.com/mcp/vector" + +// claudeDesktopConfigPathFn is the function used to resolve the desktop config path. +// Override in tests for deterministic paths. +var claudeDesktopConfigPathFn = claudeDesktopConfigPath + +// NewMcpCmd creates the mcp command group. 
+func NewMcpCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "mcp", + Short: "MCP server configuration", + Long: "Configure the Vector MCP server for use with Claude Desktop or Claude Code.", + } + + cmd.AddCommand(newMcpSetupCmd()) + + return cmd +} + +func newMcpSetupCmd() *cobra.Command { + var target string + var global bool + var force bool + + cmd := &cobra.Command{ + Use: "setup", + Short: "Configure Vector MCP server", + Long: "Configure the Vector MCP server in Claude Desktop or Claude Code for AI-assisted site management.", + Example: ` # Set up for Claude Desktop + vector mcp setup + + # Set up for Claude Code + vector mcp setup --target code + + # Overwrite existing configuration + vector mcp setup --force`, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + token := app.Client.Token + + // Validate --global only with --target code + if global && target != "code" { + return fmt.Errorf("--global flag only applies when --target is \"code\"") + } + + // Determine config path + configPath, err := mcpConfigPath(target, global) + if err != nil { + return err + } + + // Build the MCP server entry + serverEntry := buildMcpServerEntry(target, token) + + // Read existing config or start fresh + configData, err := readJSONFile(configPath) + if err != nil { + return err + } + + // Get or create mcpServers + mcpServers, _ := configData["mcpServers"].(map[string]any) + if mcpServers == nil { + mcpServers = map[string]any{} + } + + // Check if already configured + action := "added" + if _, exists := mcpServers["vector"]; exists { + if !force { + return &api.APIError{ + Message: "Vector MCP server already configured. 
Use --force to overwrite.", + ExitCode: 1, + } + } + action = "updated" + } + + // Set the vector entry + mcpServers["vector"] = serverEntry + configData["mcpServers"] = mcpServers + + // Create parent directories + dir := filepath.Dir(configPath) + if err := os.MkdirAll(dir, 0o755); err != nil { + return fmt.Errorf("failed to create directory %s: %w", dir, err) + } + + // Write config + data, err := json.MarshalIndent(configData, "", " ") + if err != nil { + return fmt.Errorf("failed to marshal config: %w", err) + } + data = append(data, '\n') + + if err := os.WriteFile(configPath, data, 0o644); err != nil { + return fmt.Errorf("failed to write config: %w", err) + } + + // Success messages + w := cmd.OutOrStdout() + targetLabel := "Claude Desktop" + if target == "code" { + targetLabel = "Claude Code" + } + _, _ = fmt.Fprintf(w, "Vector MCP server %s in %s config.\n", action, targetLabel) + _, _ = fmt.Fprintf(w, "Config written to: %s\n", configPath) + + // Restart message: omitted for Code project-level config + if target == "desktop" || global { + _, _ = fmt.Fprintf(w, "Restart %s to apply changes.\n", targetLabel) + } + + return nil + }, + } + + cmd.Flags().StringVar(&target, "target", "desktop", "Target application: \"desktop\" or \"code\"") + cmd.Flags().BoolVar(&global, "global", false, "Write to global config (~/.claude.json) instead of project-level .mcp.json (only for --target code)") + cmd.Flags().BoolVar(&force, "force", false, "Overwrite existing Vector MCP configuration") + + return cmd +} + +// mcpConfigPath returns the config file path for the given target. 
+func mcpConfigPath(target string, global bool) (string, error) { + switch target { + case "desktop": + return claudeDesktopConfigPathFn() + case "code": + if global { + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("unable to determine home directory: %w", err) + } + return filepath.Join(home, ".claude.json"), nil + } + return ".mcp.json", nil + default: + return "", fmt.Errorf("invalid target %q: must be \"desktop\" or \"code\"", target) + } +} + +// claudeDesktopConfigPath returns the platform-specific Claude Desktop config path. +func claudeDesktopConfigPath() (string, error) { + switch runtime.GOOS { + case "darwin": + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("unable to determine home directory: %w", err) + } + return filepath.Join(home, "Library", "Application Support", "Claude", "claude_desktop_config.json"), nil + case "linux": + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("unable to determine home directory: %w", err) + } + return filepath.Join(home, ".config", "Claude", "claude_desktop_config.json"), nil + case "windows": + appData := os.Getenv("APPDATA") + if appData == "" { + return "", fmt.Errorf("%%APPDATA%% is not set") + } + return filepath.Join(appData, "Claude", "claude_desktop_config.json"), nil + default: + return "", fmt.Errorf("unsupported platform: %s", runtime.GOOS) + } +} + +// buildMcpServerEntry returns the MCP server config for the given target. +func buildMcpServerEntry(target, token string) map[string]any { + entry := map[string]any{ + "command": "npx", + "args": []string{ + "-y", + "mcp-remote", + mcpServerURL, + "--header", + fmt.Sprintf("Authorization: Bearer %s", token), + }, + } + if target == "code" { + entry["type"] = "stdio" + } + return entry +} + +// readJSONFile reads a JSON file into a map. Returns an empty map if the file doesn't exist. 
+func readJSONFile(path string) (map[string]any, error) { + data, err := os.ReadFile(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return map[string]any{}, nil + } + return nil, fmt.Errorf("failed to read %s: %w", path, err) + } + + var result map[string]any + if err := json.Unmarshal(data, &result); err != nil { + return nil, fmt.Errorf("failed to parse %s: %w", path, err) + } + + return result, nil +} diff --git a/internal/commands/mcp_test.go b/internal/commands/mcp_test.go new file mode 100644 index 0000000..a8f7c71 --- /dev/null +++ b/internal/commands/mcp_test.go @@ -0,0 +1,416 @@ +package commands + +import ( + "bytes" + "encoding/json" + "os" + "path/filepath" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +func buildMcpCmd(token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + mcpCmd := NewMcpCmd() + root.AddCommand(mcpCmd) + + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildMcpCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + 
config.DefaultConfig(), + client, + "", + ) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + mcpCmd := NewMcpCmd() + root.AddCommand(mcpCmd) + + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Auth Tests --- + +func TestMcpSetupCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildMcpCmdNoAuth(output.Table) + cmd.SetArgs([]string{"mcp", "setup"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Desktop Target Tests --- + +func TestMcpSetupCmd_DesktopNewConfig(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, "Claude", "claude_desktop_config.json") + + // Patch claudeDesktopConfigPath for test + origFn := claudeDesktopConfigPathFn + claudeDesktopConfigPathFn = func() (string, error) { return configPath, nil } + defer func() { claudeDesktopConfigPathFn = origFn }() + + cmd, stdout, _ := buildMcpCmd("test-token-123", output.Table) + cmd.SetArgs([]string{"mcp", "setup"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Vector MCP server added in Claude Desktop config.") + assert.Contains(t, out, "Config written to: "+configPath) + assert.Contains(t, out, "Restart Claude Desktop to apply changes.") + + // Verify file contents + data, err := os.ReadFile(configPath) + require.NoError(t, err) + + var cfg map[string]any + require.NoError(t, json.Unmarshal(data, &cfg)) + + mcpServers := cfg["mcpServers"].(map[string]any) + vector := mcpServers["vector"].(map[string]any) + assert.Equal(t, "npx", vector["command"]) + assert.Nil(t, vector["type"]) // Desktop should NOT have type field + + args := vector["args"].([]any) + assert.Equal(t, "-y", args[0]) + assert.Equal(t, "mcp-remote", args[1]) + assert.Equal(t, 
"https://api.builtfast.com/mcp/vector", args[2]) + assert.Equal(t, "--header", args[3]) + assert.Equal(t, "Authorization: Bearer test-token-123", args[4]) +} + +func TestMcpSetupCmd_DesktopPreservesExisting(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, "Claude", "claude_desktop_config.json") + + // Create existing config with another MCP server + require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755)) + existing := map[string]any{ + "mcpServers": map[string]any{ + "other-server": map[string]any{ + "command": "other", + "args": []string{"arg1"}, + }, + }, + "otherSetting": "preserved", + } + data, _ := json.MarshalIndent(existing, "", " ") + require.NoError(t, os.WriteFile(configPath, data, 0o644)) + + origFn := claudeDesktopConfigPathFn + claudeDesktopConfigPathFn = func() (string, error) { return configPath, nil } + defer func() { claudeDesktopConfigPathFn = origFn }() + + cmd, _, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup"}) + + err := cmd.Execute() + require.NoError(t, err) + + // Verify both servers exist and other settings preserved + fileData, err := os.ReadFile(configPath) + require.NoError(t, err) + + var cfg map[string]any + require.NoError(t, json.Unmarshal(fileData, &cfg)) + + assert.Equal(t, "preserved", cfg["otherSetting"]) + mcpServers := cfg["mcpServers"].(map[string]any) + assert.Contains(t, mcpServers, "other-server") + assert.Contains(t, mcpServers, "vector") +} + +func TestMcpSetupCmd_DesktopAlreadyConfigured(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, "Claude", "claude_desktop_config.json") + + require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755)) + existing := map[string]any{ + "mcpServers": map[string]any{ + "vector": map[string]any{ + "command": "npx", + "args": []string{"old-args"}, + }, + }, + } + data, _ := json.MarshalIndent(existing, "", " ") + require.NoError(t, os.WriteFile(configPath, data, 0o644)) + + origFn := 
claudeDesktopConfigPathFn + claudeDesktopConfigPathFn = func() (string, error) { return configPath, nil } + defer func() { claudeDesktopConfigPathFn = origFn }() + + cmd, _, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "Vector MCP server already configured. Use --force to overwrite") +} + +func TestMcpSetupCmd_DesktopForceOverwrite(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, "Claude", "claude_desktop_config.json") + + require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755)) + existing := map[string]any{ + "mcpServers": map[string]any{ + "vector": map[string]any{ + "command": "npx", + "args": []string{"old-args"}, + }, + }, + } + data, _ := json.MarshalIndent(existing, "", " ") + require.NoError(t, os.WriteFile(configPath, data, 0o644)) + + origFn := claudeDesktopConfigPathFn + claudeDesktopConfigPathFn = func() (string, error) { return configPath, nil } + defer func() { claudeDesktopConfigPathFn = origFn }() + + cmd, stdout, _ := buildMcpCmd("new-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Vector MCP server updated in Claude Desktop config.") + + // Verify updated config + fileData, err := os.ReadFile(configPath) + require.NoError(t, err) + + var cfg map[string]any + require.NoError(t, json.Unmarshal(fileData, &cfg)) + + mcpServers := cfg["mcpServers"].(map[string]any) + vector := mcpServers["vector"].(map[string]any) + args := vector["args"].([]any) + assert.Equal(t, "Authorization: Bearer new-token", args[4]) +} + +// --- Code Target Tests --- + +func TestMcpSetupCmd_CodeProjectLevel(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, ".mcp.json") + + // Change to temp dir so .mcp.json is created there + origDir, _ := os.Getwd() + 
require.NoError(t, os.Chdir(tmpDir)) + defer func() { _ = os.Chdir(origDir) }() + + cmd, stdout, _ := buildMcpCmd("test-token-456", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "code"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Vector MCP server added in Claude Code config.") + assert.Contains(t, out, "Config written to: .mcp.json") + // Should NOT contain restart message for project-level + assert.NotContains(t, out, "Restart") + + // Verify file contents + data, err := os.ReadFile(configPath) + require.NoError(t, err) + + var cfg map[string]any + require.NoError(t, json.Unmarshal(data, &cfg)) + + mcpServers := cfg["mcpServers"].(map[string]any) + vector := mcpServers["vector"].(map[string]any) + assert.Equal(t, "stdio", vector["type"]) + assert.Equal(t, "npx", vector["command"]) + + args := vector["args"].([]any) + assert.Equal(t, "Authorization: Bearer test-token-456", args[4]) +} + +func TestMcpSetupCmd_CodeGlobal(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, ".claude.json") + + // Override HOME for the test + origHome := os.Getenv("HOME") + require.NoError(t, os.Setenv("HOME", tmpDir)) + defer func() { _ = os.Setenv("HOME", origHome) }() + + cmd, stdout, _ := buildMcpCmd("test-token-789", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "code", "--global"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Vector MCP server added in Claude Code config.") + assert.Contains(t, out, configPath) + assert.Contains(t, out, "Restart Claude Code to apply changes.") + + // Verify file contents + data, err := os.ReadFile(configPath) + require.NoError(t, err) + + var cfg map[string]any + require.NoError(t, json.Unmarshal(data, &cfg)) + + mcpServers := cfg["mcpServers"].(map[string]any) + vector := mcpServers["vector"].(map[string]any) + assert.Equal(t, "stdio", vector["type"]) +} + +func 
TestMcpSetupCmd_CodeAlreadyConfigured(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, ".mcp.json") + + origDir, _ := os.Getwd() + require.NoError(t, os.Chdir(tmpDir)) + defer func() { _ = os.Chdir(origDir) }() + + existing := map[string]any{ + "mcpServers": map[string]any{ + "vector": map[string]any{ + "type": "stdio", + "command": "npx", + "args": []string{"old-args"}, + }, + }, + } + data, _ := json.MarshalIndent(existing, "", " ") + require.NoError(t, os.WriteFile(configPath, data, 0o644)) + + cmd, _, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "code"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "Vector MCP server already configured. Use --force to overwrite") +} + +func TestMcpSetupCmd_CodeForceOverwrite(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, ".mcp.json") + + origDir, _ := os.Getwd() + require.NoError(t, os.Chdir(tmpDir)) + defer func() { _ = os.Chdir(origDir) }() + + existing := map[string]any{ + "mcpServers": map[string]any{ + "vector": map[string]any{ + "type": "stdio", + "command": "npx", + "args": []string{"old-args"}, + }, + }, + } + data, _ := json.MarshalIndent(existing, "", " ") + require.NoError(t, os.WriteFile(configPath, data, 0o644)) + + cmd, stdout, _ := buildMcpCmd("new-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "code", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Vector MCP server updated in Claude Code config.") +} + +// --- Flag Validation Tests --- + +func TestMcpSetupCmd_GlobalWithDesktopErrors(t *testing.T) { + cmd, _, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "desktop", "--global"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "--global flag only applies when --target is \"code\"") +} + 
+func TestMcpSetupCmd_InvalidTarget(t *testing.T) { + cmd, _, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--target", "invalid"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid target") +} + +// --- Help Text Tests --- + +func TestMcpCmd_HelpText(t *testing.T) { + cmd, stdout, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "setup") +} + +func TestMcpSetupCmd_HelpText(t *testing.T) { + cmd, stdout, _ := buildMcpCmd("test-token", output.Table) + cmd.SetArgs([]string{"mcp", "setup", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "--target") + assert.Contains(t, out, "--global") + assert.Contains(t, out, "--force") +} diff --git a/internal/commands/php_version.go b/internal/commands/php_version.go new file mode 100644 index 0000000..844d927 --- /dev/null +++ b/internal/commands/php_version.go @@ -0,0 +1,64 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const phpVersionsPath = "/api/v1/vector/php-versions" + +// NewPHPVersionsCmd creates the php-versions command. 
+func NewPHPVersionsCmd() *cobra.Command { + return &cobra.Command{ + Use: "php-versions", + Short: "List available PHP versions", + Long: "Retrieve a list of all available PHP versions for Vector environments.", + Example: ` # List available PHP versions + vector php-versions`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), phpVersionsPath, nil) + if err != nil { + return fmt.Errorf("failed to list PHP versions: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list PHP versions: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list PHP versions: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list PHP versions: %w", err) + } + + rows := make([][]string, 0, len(items)) + for _, item := range items { + rows = append(rows, []string{getString(item, "value")}) + } + + app.Output.Table([]string{"VERSION"}, rows) + return nil + }, + } +} diff --git a/internal/commands/php_version_test.go b/internal/commands/php_version_test.go new file mode 100644 index 0000000..cd34dd4 --- /dev/null +++ b/internal/commands/php_version_test.go @@ -0,0 +1,213 @@ +package commands + +import ( + "bytes" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var phpVersionsResponse = map[string]any{ 
+ "data": []map[string]any{ + {"value": "7.4", "label": "PHP 7.4"}, + {"value": "8.0", "label": "PHP 8.0"}, + {"value": "8.1", "label": "PHP 8.1"}, + {"value": "8.2", "label": "PHP 8.2"}, + {"value": "8.3", "label": "PHP 8.3"}, + }, + "http_status": 200, +} + +func newPHPVersionsTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + if r.Method == "GET" && r.URL.Path == "/api/v1/vector/php-versions" { + _ = json.NewEncoder(w).Encode(phpVersionsResponse) + } else { + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildPHPVersionsCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewPHPVersionsCmd()) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildPHPVersionsCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args 
[]string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewPHPVersionsCmd()) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- PHP Versions Tests --- + +func TestPHPVersionsCmd_TableOutput(t *testing.T) { + ts := newPHPVersionsTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildPHPVersionsCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"php-versions"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "VERSION") + assert.Contains(t, out, "7.4") + assert.Contains(t, out, "8.0") + assert.Contains(t, out, "8.1") + assert.Contains(t, out, "8.2") + assert.Contains(t, out, "8.3") +} + +func TestPHPVersionsCmd_JSONOutput(t *testing.T) { + ts := newPHPVersionsTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildPHPVersionsCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"php-versions"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 5) + assert.Equal(t, "7.4", result[0]["value"]) + assert.Equal(t, "8.3", result[4]["value"]) +} + +func TestPHPVersionsCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(phpVersionsResponse) + })) + defer ts.Close() + + cmd, _, _ := buildPHPVersionsCmd(ts.URL, "valid-token", output.Table) + 
cmd.SetArgs([]string{"php-versions"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/php-versions", receivedPath) +} + +func TestPHPVersionsCmd_AuthError(t *testing.T) { + ts := newPHPVersionsTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildPHPVersionsCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"php-versions"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestPHPVersionsCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildPHPVersionsCmdNoAuth(output.Table) + cmd.SetArgs([]string{"php-versions"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestPHPVersionsCmd_HelpText(t *testing.T) { + ts := newPHPVersionsTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildPHPVersionsCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"php-versions", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "php-versions") + assert.Contains(t, out, "available PHP versions") +} diff --git a/internal/commands/restore.go b/internal/commands/restore.go new file mode 100644 index 0000000..44d7ec4 --- /dev/null +++ b/internal/commands/restore.go @@ -0,0 +1,357 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "time" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const restoresBasePath = "/api/v1/vector/restores" + +// NewRestoreCmd creates the restore command group. 
+func NewRestoreCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "restore", + Short: "Manage restores", + Long: "Manage restores to recover site data from backups.", + } + + cmd.AddCommand(newRestoreListCmd()) + cmd.AddCommand(newRestoreShowCmd()) + cmd.AddCommand(newRestoreCreateCmd()) + + return cmd +} + +func newRestoreListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List restores", + Long: "Retrieve a paginated list of restores, optionally filtered by type, site, environment, or backup.", + Example: ` # List all restores + vector restore list + + # Filter by site + vector restore list --site-id site-abc123`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + if cmd.Flags().Changed("site-id") { + v, _ := cmd.Flags().GetString("site-id") + if v != "" { + query.Set("site_id", v) + } + } + if cmd.Flags().Changed("environment-id") { + v, _ := cmd.Flags().GetString("environment-id") + if v != "" { + query.Set("environment_id", v) + } + } + if cmd.Flags().Changed("type") { + v, _ := cmd.Flags().GetString("type") + if v != "" { + query.Set("type", v) + } + } + if cmd.Flags().Changed("backup-id") { + v, _ := cmd.Flags().GetString("backup-id") + if v != "" { + query.Set("backup_id", v) + } + } + + resp, err := app.Client.Get(cmd.Context(), restoresBasePath, query) + if err != nil { + return fmt.Errorf("failed to list restores: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list restores: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list restores: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + 
if err != nil { + return fmt.Errorf("failed to list restores: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list restores: %w", err) + } + + headers := []string{"ID", "MODEL", "BACKUP ID", "SCOPE", "STATUS", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + formatArchivableType(getString(item, "archivable_type")), + getString(item, "vector_backup_id"), + getString(item, "scope"), + getString(item, "status"), + getString(item, "created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + cmd.Flags().String("site-id", "", "Filter by site ID") + cmd.Flags().String("environment-id", "", "Filter by environment ID") + cmd.Flags().String("type", "", "Filter by type (site/environment)") + cmd.Flags().String("backup-id", "", "Filter by backup ID") + return cmd +} + +func newRestoreShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show a restore", + Long: "Display details of a specific restore.", + Example: ` # Show restore details + vector restore show restore-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), restoresBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to get restore: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get restore: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get restore: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item 
map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get restore: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Model", Value: formatArchivableType(getString(item, "archivable_type"))}, + {Key: "Model ID", Value: getString(item, "archivable_id")}, + {Key: "Backup ID", Value: getString(item, "vector_backup_id")}, + {Key: "Scope", Value: getString(item, "scope")}, + {Key: "Trigger", Value: getString(item, "trigger")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Error Message", Value: formatString(getString(item, "error_message"))}, + {Key: "Duration", Value: formatFloat(getFloat(item, "duration_ms"))}, + {Key: "Started At", Value: formatString(getString(item, "started_at"))}, + {Key: "Completed At", Value: formatString(getString(item, "completed_at"))}, + {Key: "Created At", Value: getString(item, "created_at")}, + {Key: "Updated At", Value: getString(item, "updated_at")}, + }) + return nil + }, + } +} + +func newRestoreCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create ", + Short: "Create a restore", + Long: "Create a new restore from a backup.", + Example: ` # Restore from a backup + vector restore create backup-456 + + # Restore database only with search-replace + vector restore create backup-456 --scope database --search-replace-from old.example.com --search-replace-to new.example.com + + # Restore and wait for completion + vector restore create backup-456 --wait`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + waitEnabled, interval, timeout, err := getWaitConfig(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{ + "backup_id": args[0], + } + + dropTables, _ := cmd.Flags().GetBool("drop-tables") + if dropTables { + reqBody["drop_tables"] = true + } + + disableForeignKeys, _ := 
cmd.Flags().GetBool("disable-foreign-keys") + if disableForeignKeys { + reqBody["disable_foreign_keys"] = true + } + + srFrom, _ := cmd.Flags().GetString("search-replace-from") + srTo, _ := cmd.Flags().GetString("search-replace-to") + if srFrom != "" && srTo != "" { + reqBody["search_replace"] = []map[string]string{ + {"from": srFrom, "to": srTo}, + } + } + + resp, err := app.Client.Post(cmd.Context(), restoresBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + + if !waitEnabled { + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + + output.PrintMessage(cmd.OutOrStdout(), fmt.Sprintf("Restore initiated. 
Use 'vector restore show %s' to check progress.", getString(item, "id"))) + output.PrintMessage(cmd.OutOrStdout(), "") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Model", Value: formatArchivableType(getString(item, "archivable_type"))}, + {Key: "Model ID", Value: getString(item, "archivable_id")}, + {Key: "Backup ID", Value: getString(item, "vector_backup_id")}, + {Key: "Scope", Value: getString(item, "scope")}, + {Key: "Trigger", Value: getString(item, "trigger")}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Error Message", Value: formatString(getString(item, "error_message"))}, + {Key: "Duration", Value: formatFloat(getFloat(item, "duration_ms"))}, + {Key: "Started At", Value: formatString(getString(item, "started_at"))}, + {Key: "Completed At", Value: formatString(getString(item, "completed_at"))}, + {Key: "Created At", Value: getString(item, "created_at")}, + {Key: "Updated At", Value: getString(item, "updated_at")}, + }) + return nil + } + + var createItem map[string]any + if err := json.Unmarshal(data, &createItem); err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + + restoreID := getString(createItem, "id") + if restoreID == "" { + return fmt.Errorf("failed to create restore: response missing restore ID") + } + + cfg := &waitConfig{ + ResourceID: restoreID, + PollPath: restoresBasePath + "/" + restoreID, + Interval: interval, + Timeout: timeout, + TerminalStatuses: map[string]bool{"completed": true}, + FailedStatuses: map[string]bool{"failed": true}, + Noun: "Restore", + FormatDisplay: restoreFormatDisplay, + } + + result, err := waitForResource(cmd.Context(), app, cfg) + if err != nil { + return err + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(result.FinalData) + } + + var finalItem map[string]any + if err := json.Unmarshal(result.FinalData, &finalItem); err != nil { + return fmt.Errorf("failed to create restore: %w", err) + } + + 
app.Output.Message(fmt.Sprintf("Restore %s %s in %s", restoreID, result.Status, result.Elapsed.Truncate(time.Second))) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(finalItem, "id")}, + {Key: "Model", Value: formatArchivableType(getString(finalItem, "archivable_type"))}, + {Key: "Backup ID", Value: getString(finalItem, "vector_backup_id")}, + {Key: "Scope", Value: getString(finalItem, "scope")}, + {Key: "Status", Value: getString(finalItem, "status")}, + {Key: "Duration", Value: formatFloat(getFloat(finalItem, "duration_ms"))}, + {Key: "Started At", Value: formatString(getString(finalItem, "started_at"))}, + {Key: "Completed At", Value: formatString(getString(finalItem, "completed_at"))}, + }) + return nil + }, + } + + cmd.Flags().Bool("drop-tables", false, "Drop existing tables before restore") + cmd.Flags().Bool("disable-foreign-keys", false, "Disable foreign key checks during restore") + cmd.Flags().String("search-replace-from", "", "URL to search for (used with --search-replace-to)") + cmd.Flags().String("search-replace-to", "", "URL to replace with (used with --search-replace-from)") + addWaitFlags(cmd) + + return cmd +} + +// restoreFormatDisplay formats restore data for the alternate screen display. 
+func restoreFormatDisplay(data map[string]any) []string { + return []string{ + fmt.Sprintf("%16s: %s", "ID", getString(data, "id")), + fmt.Sprintf("%16s: %s", "Model", formatArchivableType(getString(data, "archivable_type"))), + fmt.Sprintf("%16s: %s", "Backup ID", getString(data, "vector_backup_id")), + fmt.Sprintf("%16s: %s", "Scope", getString(data, "scope")), + fmt.Sprintf("%16s: %s", "Status", getString(data, "status")), + fmt.Sprintf("%16s: %s", "Duration", formatFloat(getFloat(data, "duration_ms"))), + fmt.Sprintf("%16s: %s", "Started", formatString(getString(data, "started_at"))), + fmt.Sprintf("%16s: %s", "Completed", formatString(getString(data, "completed_at"))), + } +} diff --git a/internal/commands/restore_test.go b/internal/commands/restore_test.go new file mode 100644 index 0000000..0b2e2e3 --- /dev/null +++ b/internal/commands/restore_test.go @@ -0,0 +1,673 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "sync/atomic" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var restoreListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "rst-001", + "archivable_type": "vector_site", + "archivable_id": "site-001", + "scope": "full", + "trigger": "manual", + "status": "completed", + "vector_backup_id": "bk-001", + "search_replace": nil, + "drop_tables": false, + "disable_foreign_keys": false, + "error_message": nil, + "duration_ms": float64(45200), + "started_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:05:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + { + "id": "rst-002", + "archivable_type": "vector_environment", + 
"archivable_id": "env-001", + "scope": "database", + "trigger": "manual", + "status": "pending", + "vector_backup_id": "bk-002", + "search_replace": nil, + "drop_tables": false, + "disable_foreign_keys": false, + "error_message": nil, + "duration_ms": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-16T12:00:00+00:00", + "updated_at": "2025-01-16T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Restores retrieved successfully", + "http_status": 200, +} + +var restoreShowResponse = map[string]any{ + "data": map[string]any{ + "id": "rst-001", + "archivable_type": "vector_site", + "archivable_id": "site-001", + "scope": "full", + "trigger": "manual", + "status": "completed", + "vector_backup_id": "bk-001", + "search_replace": []map[string]any{ + {"from": "example.org", "to": "example.com"}, + }, + "drop_tables": false, + "disable_foreign_keys": false, + "error_message": nil, + "duration_ms": float64(45200), + "started_at": "2025-01-15T12:00:00+00:00", + "completed_at": "2025-01-15T12:05:00+00:00", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Restore retrieved successfully", + "http_status": 200, +} + +var restoreCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "rst-003", + "archivable_type": "vector_site", + "archivable_id": "site-001", + "scope": "full", + "trigger": "manual", + "status": "pending", + "vector_backup_id": "bk-005", + "search_replace": nil, + "drop_tables": false, + "disable_foreign_keys": false, + "error_message": nil, + "duration_ms": nil, + "started_at": nil, + "completed_at": nil, + "created_at": "2025-01-20T12:00:00+00:00", + "updated_at": "2025-01-20T12:00:00+00:00", + }, + "message": "Restore initiated successfully", + "http_status": 202, +} + +func newRestoreTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/restores": + _ = json.NewEncoder(w).Encode(restoreListResponse) + + case method == "GET" && path == "/api/v1/vector/restores/rst-001": + _ = json.NewEncoder(w).Encode(restoreShowResponse) + + case method == "POST" && path == "/api/v1/vector/restores": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(restoreCreateResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildRestoreCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewRestoreCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildRestoreCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := 
api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewRestoreCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Restore List Tests --- + +func TestRestoreListCmd_TableOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "rst-001") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "bk-001") + assert.Contains(t, out, "full") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "rst-002") + assert.Contains(t, out, "Environment") + assert.Contains(t, out, "bk-002") + assert.Contains(t, out, "pending") +} + +func TestRestoreListCmd_JSONOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"restore", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "rst-001", result[0]["id"]) +} + +func TestRestoreListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(restoreListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, 
"valid-token", output.Table) + cmd.SetArgs([]string{"restore", "list", "--page", "3", "--per-page", "25"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "3", receivedPage) + assert.Equal(t, "25", receivedPerPage) +} + +func TestRestoreListCmd_FilterFlags(t *testing.T) { + var receivedSiteID, receivedEnvID, receivedType, receivedBackupID string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedSiteID = r.URL.Query().Get("site_id") + receivedEnvID = r.URL.Query().Get("environment_id") + receivedType = r.URL.Query().Get("type") + receivedBackupID = r.URL.Query().Get("backup_id") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(restoreListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "list", "--site-id", "site-001", "--environment-id", "env-001", "--type", "site", "--backup-id", "bk-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "site-001", receivedSiteID) + assert.Equal(t, "env-001", receivedEnvID) + assert.Equal(t, "site", receivedType) + assert.Equal(t, "bk-001", receivedBackupID) +} + +func TestRestoreListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(restoreListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/restores", receivedPath) +} + +func TestRestoreListCmd_AuthError(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, _, _ 
:= buildRestoreCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"restore", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestRestoreListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildRestoreCmdNoAuth(output.Table) + cmd.SetArgs([]string{"restore", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Restore Show Tests --- + +func TestRestoreShowCmd_TableOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "show", "rst-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "rst-001") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "bk-001") + assert.Contains(t, out, "full") + assert.Contains(t, out, "manual") + assert.Contains(t, out, "completed") + assert.Contains(t, out, "45200") + assert.Contains(t, out, "2025-01-15T12:00:00+00:00") + assert.Contains(t, out, "2025-01-15T12:05:00+00:00") +} + +func TestRestoreShowCmd_JSONOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"restore", "show", "rst-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "rst-001", result["id"]) + assert.Equal(t, "vector_site", result["archivable_type"]) +} + +func TestRestoreShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + 
receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(restoreShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "show", "rst-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/restores/rst-001", receivedPath) +} + +func TestRestoreShowCmd_MissingArg(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Restore Create Tests --- + +func TestRestoreCreateCmd_TableOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create", "bk-005"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Restore initiated. 
Use 'vector restore show rst-003' to check progress.") + assert.Contains(t, out, "rst-003") + assert.Contains(t, out, "Site") + assert.Contains(t, out, "bk-005") + assert.Contains(t, out, "pending") +} + +func TestRestoreCreateCmd_JSONOutput(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"restore", "create", "bk-005"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "rst-003", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestRestoreCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(restoreCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create", "bk-005"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/restores", receivedPath) + assert.Equal(t, "bk-005", receivedBody["backup_id"]) +} + +func TestRestoreCreateCmd_WithFlags(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(restoreCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := 
buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create", "bk-005", "--drop-tables", "--disable-foreign-keys", "--search-replace-from", "example.org", "--search-replace-to", "example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "bk-005", receivedBody["backup_id"]) + assert.Equal(t, true, receivedBody["drop_tables"]) + assert.Equal(t, true, receivedBody["disable_foreign_keys"]) + + sr, ok := receivedBody["search_replace"].([]any) + require.True(t, ok) + require.Len(t, sr, 1) + pair, ok := sr[0].(map[string]any) + require.True(t, ok) + assert.Equal(t, "example.org", pair["from"]) + assert.Equal(t, "example.com", pair["to"]) +} + +func TestRestoreCreateCmd_MissingArg(t *testing.T) { + ts := newRestoreTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Help Tests --- + +func TestRestoreCmd_Help(t *testing.T) { + cmd := NewRestoreCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "restores") +} + +// --- Restore Create --wait Tests --- + +// newRestoreWaitTestServer creates a test server that handles: +// - POST /restores -> returns restoreCreateResponse +// - GET /restores/{id} -> returns successive poll responses +func newRestoreWaitTestServer(validToken string, pollResponses []countingResponse) *httptest.Server { + var pollCount atomic.Int64 + + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + 
w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/restores": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(restoreCreateResponse) + + case method == "GET" && path == "/api/v1/vector/restores/rst-003": + idx := int(pollCount.Add(1)) - 1 + if idx >= len(pollResponses) { + idx = len(pollResponses) - 1 + } + resp := pollResponses[idx] + if resp.httpStatus != 0 { + w.WriteHeader(resp.httpStatus) + } + _ = json.NewEncoder(w).Encode(resp.body) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func makeRestorePollResponse(id, status string) countingResponse { + return countingResponse{ + httpStatus: http.StatusOK, + body: map[string]any{ + "data": map[string]any{ + "id": id, + "archivable_type": "vector_site", + "archivable_id": "site-001", + "scope": "full", + "status": status, + "vector_backup_id": "bk-005", + "duration_ms": float64(45200), + "started_at": "2025-01-20T12:00:00+00:00", + "completed_at": "2025-01-20T12:01:00+00:00", + }, + "message": "Restore retrieved successfully", + "http_status": 200, + }, + } +} + +func TestRestoreCreateCmd_WaitSuccess(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newRestoreWaitTestServer("valid-token", []countingResponse{ + makeRestorePollResponse("rst-003", "pending"), + makeRestorePollResponse("rst-003", "running"), + makeRestorePollResponse("rst-003", "completed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create", "bk-005", "--wait", "--poll-interval", "1s", "--timeout", 
"30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + // Should NOT contain the "Restore initiated" message when --wait is used + assert.NotContains(t, out, "Restore initiated") + // Should contain the summary line and final state + assert.Contains(t, out, "Restore rst-003 completed in") + assert.Contains(t, out, "rst-003") + assert.Contains(t, out, "completed") +} + +func TestRestoreCreateCmd_WaitFailure(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newRestoreWaitTestServer("valid-token", []countingResponse{ + makeRestorePollResponse("rst-003", "pending"), + makeRestorePollResponse("rst-003", "failed"), + }) + defer ts.Close() + + cmd, _, _ := buildRestoreCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"restore", "create", "bk-005", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "failed status") +} + +func TestRestoreCreateCmd_WaitJSON(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newRestoreWaitTestServer("valid-token", []countingResponse{ + makeRestorePollResponse("rst-003", "pending"), + makeRestorePollResponse("rst-003", "completed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildRestoreCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"restore", "create", "bk-005", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "rst-003", result["id"]) + assert.Equal(t, "completed", result["status"]) +} diff --git a/internal/commands/site.go b/internal/commands/site.go new file mode 100644 index 0000000..707cffb --- /dev/null +++ b/internal/commands/site.go @@ -0,0 +1,1132 @@ +package commands + +import ( + "encoding/json" + 
"errors" + "fmt" + "io" + "net/http" + "strings" + "time" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +const sitesBasePath = "/api/v1/vector/sites" + +// NewSiteCmd creates the site command group. +func NewSiteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "site", + Short: "Manage sites", + Long: "Manage Vector sites including creating, updating, deleting, and performing actions like suspend, clone, and cache purge.", + } + + cmd.AddCommand(newSiteListCmd()) + cmd.AddCommand(newSiteShowCmd()) + cmd.AddCommand(newSiteCreateCmd()) + cmd.AddCommand(newSiteUpdateCmd()) + cmd.AddCommand(newSiteDeleteCmd()) + cmd.AddCommand(newSiteCloneCmd()) + cmd.AddCommand(newSiteSuspendCmd()) + cmd.AddCommand(newSiteUnsuspendCmd()) + cmd.AddCommand(newSiteResetSFTPPasswordCmd()) + cmd.AddCommand(newSiteResetDBPasswordCmd()) + cmd.AddCommand(newSitePurgeCacheCmd()) + cmd.AddCommand(newSiteLogsCmd()) + cmd.AddCommand(newSiteWPReconfigCmd()) + cmd.AddCommand(NewSiteSSHKeyCmd()) + + return cmd +} + +func newSiteListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List all sites", + Long: "Retrieve a paginated list of all sites for the authenticated account.", + Example: ` # List all sites + vector site list + + # List with pagination + vector site list --page 2 --per-page 50`, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + resp, err := app.Client.Get(cmd.Context(), sitesBasePath, query) + if err != nil { + return fmt.Errorf("failed to list sites: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list sites: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if 
err != nil { + return fmt.Errorf("failed to list sites: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list sites: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list sites: %w", err) + } + + headers := []string{"ID", "CUSTOMER ID", "STATUS", "DEV DOMAIN", "TAGS"} + var rows [][]string + for _, item := range items { + tags := tagsFromItem(item) + rows = append(rows, []string{ + getString(item, "id"), + formatString(getString(item, "your_customer_id")), + getString(item, "status"), + formatString(getString(item, "dev_domain")), + formatTags(tags), + }) + } + + app.Output.Table(headers, rows) + if meta != nil && meta.LastPage > 1 { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newSiteShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show site details", + Long: "Retrieve details of a specific site including its environments.", + Example: ` # Show site details + vector site show site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), sitesBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to show site: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to show site: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to show site: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return 
fmt.Errorf("failed to show site: %w", err) + } + + tags := tagsFromItem(item) + + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Customer ID", Value: formatString(getString(item, "your_customer_id"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Tags", Value: formatTags(tags)}, + {Key: "Dev Domain", Value: formatString(getString(item, "dev_domain"))}, + {Key: "Dev DB Host", Value: formatString(getString(item, "dev_db_host"))}, + {Key: "Dev DB Name", Value: formatString(getString(item, "dev_db_name"))}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + } + app.Output.KeyValue(pairs) + + // Print environments table if present + envs := getSlice(item, "environments") + if len(envs) > 0 { + _, _ = fmt.Fprintln(cmd.OutOrStdout()) + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Environments:") + headers := []string{"ID", "NAME", "PRODUCTION", "STATUS", "PHP", "PLATFORM DOMAIN", "CUSTOM DOMAIN"} + var rows [][]string + for _, e := range envs { + env, ok := e.(map[string]any) + if !ok { + continue + } + rows = append(rows, []string{ + getString(env, "id"), + getString(env, "name"), + formatBool(getBool(env, "is_production")), + getString(env, "status"), + getString(env, "php_version"), + formatString(getString(env, "platform_domain")), + formatString(getString(env, "custom_domain")), + }) + } + app.Output.Table(headers, rows) + } + + return nil + }, + } +} + +func newSiteCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create", + Short: "Create a new site", + Long: "Create a new site with a development container. 
Returns credentials that are only shown once.", + Example: ` # Create a site + vector site create --customer-id cust-001 --php-version 8.2 + + # Create with WordPress auto-install + vector site create --customer-id cust-001 --php-version 8.2 --wp-admin-email admin@example.com + + # Create and wait for site to become active + vector site create --customer-id cust-001 --wait`, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + waitEnabled, interval, timeout, err := getWaitConfig(cmd) + if err != nil { + return err + } + + customerID, _ := cmd.Flags().GetString("customer-id") + if customerID == "" { + return &api.APIError{ + Message: "--customer-id is required", + ExitCode: 3, + } + } + + reqBody := map[string]any{ + "your_customer_id": customerID, + } + + phpVersion, _ := cmd.Flags().GetString("php-version") + if phpVersion != "" { + reqBody["dev_php_version"] = phpVersion + } + + if cmd.Flags().Changed("tags") { + tagsStr, _ := cmd.Flags().GetString("tags") + if tagsStr != "" { + reqBody["tags"] = strings.Split(tagsStr, ",") + } else { + reqBody["tags"] = []string{} + } + } + + if cmd.Flags().Changed("production-domain") { + v, _ := cmd.Flags().GetString("production-domain") + reqBody["production_domain"] = v + } + if cmd.Flags().Changed("staging-domain") { + v, _ := cmd.Flags().GetString("staging-domain") + reqBody["staging_domain"] = v + } + if cmd.Flags().Changed("wp-admin-email") { + v, _ := cmd.Flags().GetString("wp-admin-email") + reqBody["wp_admin_email"] = v + } + if cmd.Flags().Changed("wp-admin-user") { + v, _ := cmd.Flags().GetString("wp-admin-user") + reqBody["wp_admin_user"] = v + } + if cmd.Flags().Changed("wp-site-title") { + v, _ := cmd.Flags().GetString("wp-site-title") + reqBody["wp_site_title"] = v + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + defer func() { _ = 
resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + + if !waitEnabled { + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + + app.Output.KeyValue(siteCreateCredentialPairs(item)) + return nil + } + + var createItem map[string]any + if err := json.Unmarshal(data, &createItem); err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + + siteID := getString(createItem, "id") + if siteID == "" { + return fmt.Errorf("failed to create site: response missing site ID") + } + + // TTY/table mode: print one-time credentials before entering alt screen + if app.Output.Format() != output.JSON { + app.Output.KeyValue(siteCreateCredentialPairs(createItem)) + _, _ = fmt.Fprintln(cmd.OutOrStdout()) + } + + cfg := &waitConfig{ + ResourceID: siteID, + PollPath: sitesBasePath + "/" + siteID, + Interval: interval, + Timeout: timeout, + TerminalStatuses: map[string]bool{"active": true}, + FailedStatuses: map[string]bool{"failed": true}, + Noun: "Site", + FormatDisplay: siteFormatDisplay, + } + + result, err := waitForResource(cmd.Context(), app, cfg) + if err != nil { + return err + } + + if app.Output.Format() == output.JSON { + merged := siteCreateMergeCredentials(createItem, result.FinalData) + return app.Output.JSON(merged) + } + + var finalItem map[string]any + if err := json.Unmarshal(result.FinalData, &finalItem); err != nil { + return fmt.Errorf("failed to create site: %w", err) + } + + app.Output.Message(fmt.Sprintf("Site %s %s in %s", siteID, result.Status, result.Elapsed.Truncate(time.Second))) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(finalItem, 
"id")}, + {Key: "Customer ID", Value: formatString(getString(finalItem, "your_customer_id"))}, + {Key: "Status", Value: getString(finalItem, "status")}, + {Key: "Dev Domain", Value: formatString(getString(finalItem, "dev_domain"))}, + }) + return nil + }, + } + + cmd.Flags().String("customer-id", "", "Your internal customer identifier (required)") + cmd.Flags().String("php-version", "", "PHP version for development container") + cmd.Flags().String("tags", "", "Comma-separated tags") + cmd.Flags().String("production-domain", "", "Custom domain for production environment") + cmd.Flags().String("staging-domain", "", "Custom domain for staging environment") + cmd.Flags().String("wp-admin-email", "", "WordPress admin email for auto-install") + cmd.Flags().String("wp-admin-user", "", "WordPress admin username") + cmd.Flags().String("wp-site-title", "", "WordPress site title") + addWaitFlags(cmd) + + return cmd +} + +// siteFormatDisplay formats site data for the alternate screen display. +func siteFormatDisplay(data map[string]any) []string { + return []string{ + fmt.Sprintf("%16s: %s", "ID", getString(data, "id")), + fmt.Sprintf("%16s: %s", "Customer ID", formatString(getString(data, "your_customer_id"))), + fmt.Sprintf("%16s: %s", "Status", getString(data, "status")), + fmt.Sprintf("%16s: %s", "Dev Domain", formatString(getString(data, "dev_domain"))), + } +} + +// siteCreateCredentialPairs builds the key-value pairs for the site create response, +// including one-time SFTP, DB, and WP admin credentials. 
+func siteCreateCredentialPairs(item map[string]any) []output.KeyValue { + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Customer ID", Value: formatString(getString(item, "your_customer_id"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Dev Domain", Value: formatString(getString(item, "dev_domain"))}, + {Key: "Dev DB Host", Value: formatString(getString(item, "dev_db_host"))}, + {Key: "Dev DB Name", Value: formatString(getString(item, "dev_db_name"))}, + } + + sftp := getMap(item, "dev_sftp") + if sftp != nil { + pairs = append(pairs, + output.KeyValue{Key: "SFTP Host", Value: getString(sftp, "hostname")}, + output.KeyValue{Key: "SFTP Port", Value: fmt.Sprintf("%.0f", getFloat(sftp, "port"))}, + output.KeyValue{Key: "SFTP User", Value: getString(sftp, "username")}, + output.KeyValue{Key: "SFTP Password", Value: getString(sftp, "password")}, + ) + } + + dbUser := getString(item, "dev_db_username") + dbPass := getString(item, "dev_db_password") + if dbUser != "" { + pairs = append(pairs, output.KeyValue{Key: "DB Username", Value: dbUser}) + } + if dbPass != "" { + pairs = append(pairs, output.KeyValue{Key: "DB Password", Value: dbPass}) + } + + wp := getMap(item, "wp_admin") + if wp != nil { + pairs = append(pairs, + output.KeyValue{Key: "WP Admin User", Value: getString(wp, "user")}, + output.KeyValue{Key: "WP Admin Email", Value: getString(wp, "email")}, + output.KeyValue{Key: "WP Admin Password", Value: getString(wp, "password")}, + output.KeyValue{Key: "WP Site Title", Value: getString(wp, "site_title")}, + ) + } + + return pairs +} + +// siteCreateMergeCredentials merges one-time credential fields from the initial +// create response into the final polled data for JSON output. 
+func siteCreateMergeCredentials(createItem map[string]any, finalData json.RawMessage) json.RawMessage { + var finalItem map[string]any + if err := json.Unmarshal(finalData, &finalItem); err != nil { + return finalData + } + + // Merge one-time credential fields that are not present in the polled response + credentialKeys := []string{"dev_sftp", "dev_db_username", "dev_db_password", "wp_admin"} + for _, key := range credentialKeys { + if val, ok := createItem[key]; ok && val != nil { + if _, exists := finalItem[key]; !exists || finalItem[key] == nil { + finalItem[key] = val + } + } + } + + merged, err := json.Marshal(finalItem) + if err != nil { + return finalData + } + return merged +} + +func newSiteUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update a site", + Long: "Update a site's metadata such as customer ID and tags.", + Example: ` # Update tags + vector site update site-abc123 --tags production,primary`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("customer-id") { + v, _ := cmd.Flags().GetString("customer-id") + reqBody["your_customer_id"] = v + } + if cmd.Flags().Changed("tags") { + tagsStr, _ := cmd.Flags().GetString("tags") + if tagsStr != "" { + reqBody["tags"] = strings.Split(tagsStr, ",") + } else { + reqBody["tags"] = nil + } + } + + resp, err := app.Client.Put(cmd.Context(), sitesBasePath+"/"+args[0], reqBody) + if err != nil { + return fmt.Errorf("failed to update site: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update site: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update site: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + 
var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update site: %w", err) + } + + tags := tagsFromItem(item) + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Customer ID", Value: formatString(getString(item, "your_customer_id"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Tags", Value: formatTags(tags)}, + {Key: "Dev Domain", Value: formatString(getString(item, "dev_domain"))}, + }) + return nil + }, + } + + cmd.Flags().String("customer-id", "", "Your internal customer identifier") + cmd.Flags().String("tags", "", "Comma-separated tags (empty string clears tags)") + + return cmd +} + +func newSiteDeleteCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "delete ", + Short: "Delete a site", + Long: "Initiate deletion of a site. All environments must be terminated first. This operation is irreversible.", + Example: ` # Delete a site (prompts for confirmation) + vector site delete site-abc123 + + # Delete without confirmation + vector site delete site-abc123 --force`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + force, _ := cmd.Flags().GetBool("force") + if !force { + if !confirmAction(cmd, fmt.Sprintf("Are you sure you want to delete site %s?", args[0])) { + app.Output.Message("Aborted.") + return nil + } + } + + resp, err := app.Client.Delete(cmd.Context(), sitesBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete site: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete site: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete site: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item 
map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to delete site: %w", err) + } + + app.Output.Message(fmt.Sprintf("Site %s deletion initiated.", getString(item, "id"))) + return nil + }, + } + + cmd.Flags().Bool("force", false, "Skip confirmation prompt") + + return cmd +} + +func newSiteCloneCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "clone ", + Short: "Clone a site", + Long: "Create a new site by cloning an existing site's development container including files and database.", + Example: ` # Clone a site + vector site clone site-abc123 + + # Clone with a different customer ID + vector site clone site-abc123 --customer-id cust-002`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("customer-id") { + v, _ := cmd.Flags().GetString("customer-id") + reqBody["your_customer_id"] = v + } + if cmd.Flags().Changed("php-version") { + v, _ := cmd.Flags().GetString("php-version") + reqBody["dev_php_version"] = v + } + if cmd.Flags().Changed("tags") { + tagsStr, _ := cmd.Flags().GetString("tags") + if tagsStr != "" { + reqBody["tags"] = strings.Split(tagsStr, ",") + } else { + reqBody["tags"] = []string{} + } + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath+"/"+args[0]+"/clone", reqBody) + if err != nil { + return fmt.Errorf("failed to clone site: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to clone site: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to clone site: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to clone site: 
%w", err) + } + + pairs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Customer ID", Value: formatString(getString(item, "your_customer_id"))}, + {Key: "Status", Value: getString(item, "status")}, + {Key: "Dev Domain", Value: formatString(getString(item, "dev_domain"))}, + } + + dbUser := getString(item, "dev_db_username") + dbPass := getString(item, "dev_db_password") + if dbUser != "" { + pairs = append(pairs, output.KeyValue{Key: "DB Username", Value: dbUser}) + } + if dbPass != "" { + pairs = append(pairs, output.KeyValue{Key: "DB Password", Value: dbPass}) + } + + app.Output.KeyValue(pairs) + return nil + }, + } + + cmd.Flags().String("customer-id", "", "Customer identifier for cloned site") + cmd.Flags().String("php-version", "", "PHP version for cloned site") + cmd.Flags().String("tags", "", "Comma-separated tags for cloned site") + + return cmd +} + +func newSiteSuspendCmd() *cobra.Command { + return &cobra.Command{ + Use: "suspend ", + Short: "Suspend a site", + Long: "Suspend a site's development container. The site must be active.", + Example: ` # Suspend a site + vector site suspend site-abc123`, + Args: cobra.ExactArgs(1), + RunE: siteActionRunE("suspend", "PUT"), + } +} + +func newSiteUnsuspendCmd() *cobra.Command { + return &cobra.Command{ + Use: "unsuspend ", + Short: "Unsuspend a site", + Long: "Resume a previously suspended site's development container.", + Example: ` # Unsuspend a site + vector site unsuspend site-abc123`, + Args: cobra.ExactArgs(1), + RunE: siteActionRunE("unsuspend", "PUT"), + } +} + +func newSiteResetSFTPPasswordCmd() *cobra.Command { + return &cobra.Command{ + Use: "reset-sftp-password ", + Short: "Reset SFTP password", + Long: "Generate a new SFTP password for the site's development container. 
The new password is only shown once.", + Example: ` # Reset SFTP password + vector site reset-sftp-password site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath+"/"+args[0]+"/sftp/reset-password", nil) + if err != nil { + return fmt.Errorf("failed to reset SFTP password: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to reset SFTP password: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to reset SFTP password: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to reset SFTP password: %w", err) + } + + sftp := getMap(item, "dev_sftp") + if sftp == nil { + app.Output.Message("SFTP password reset successfully.") + return nil + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Hostname", Value: getString(sftp, "hostname")}, + {Key: "Port", Value: fmt.Sprintf("%.0f", getFloat(sftp, "port"))}, + {Key: "Username", Value: getString(sftp, "username")}, + {Key: "Password", Value: getString(sftp, "password")}, + }) + return nil + }, + } +} + +func newSiteResetDBPasswordCmd() *cobra.Command { + return &cobra.Command{ + Use: "reset-db-password ", + Short: "Reset database password", + Long: "Generate a new database password for the site's development container. 
The new password is only shown once.", + Example: ` # Reset database password + vector site reset-db-password site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath+"/"+args[0]+"/db/reset-password", nil) + if err != nil { + return fmt.Errorf("failed to reset database password: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to reset database password: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to reset database password: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to reset database password: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "DB Username", Value: getString(item, "dev_db_username")}, + {Key: "DB Password", Value: getString(item, "dev_db_password")}, + }) + return nil + }, + } +} + +func newSitePurgeCacheCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "purge-cache ", + Short: "Purge CDN cache", + Long: "Purge the CDN cache for a site. 
Can purge the entire cache, by cache tag, or a specific URL.", + Example: ` # Purge entire cache + vector site purge-cache site-abc123 + + # Purge a specific URL + vector site purge-cache site-abc123 --url https://example.com/page`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + if cmd.Flags().Changed("cache-tag") { + v, _ := cmd.Flags().GetString("cache-tag") + reqBody["cache_tag"] = v + } + if cmd.Flags().Changed("url") { + v, _ := cmd.Flags().GetString("url") + reqBody["url"] = v + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath+"/"+args[0]+"/purge-cache", reqBody) + if err != nil { + return fmt.Errorf("failed to purge cache: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to purge cache: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to purge cache: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + // Extract message from response + var envelope struct { + Message string `json:"message"` + } + if err := json.Unmarshal(body, &envelope); err == nil && envelope.Message != "" { + app.Output.Message(envelope.Message) + } else { + app.Output.Message("Cache purged successfully.") + } + return nil + }, + } + + cmd.Flags().String("cache-tag", "", "Purge only content with this cache tag") + cmd.Flags().String("url", "", "Purge a specific URL") + + return cmd +} + +func newSiteLogsCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "logs ", + Short: "View site logs", + Long: "Retrieve logs for a site. 
Logs are returned in reverse chronological order.", + Example: ` # View recent logs + vector site logs site-abc123 + + # View error logs with a limit + vector site logs site-abc123 --level error --limit 100`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + query := buildLogsQuery(cmd) + + resp, err := app.Client.Get(cmd.Context(), sitesBasePath+"/"+args[0]+"/logs", query) + if err != nil { + return fmt.Errorf("failed to get logs: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get logs: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get logs: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var logData map[string]any + if err := json.Unmarshal(data, &logData); err != nil { + return fmt.Errorf("failed to get logs: %w", err) + } + + printLogEntries(cmd.OutOrStdout(), logData) + return nil + }, + } + + cmd.Flags().String("start-time", "", "Start time (RFC3339 or relative, e.g., now-1h)") + cmd.Flags().String("end-time", "", "End time (RFC3339 or relative)") + cmd.Flags().Int("limit", 0, "Maximum number of log entries (1-1000)") + cmd.Flags().String("environment", "", "Filter by environment name") + cmd.Flags().String("deployment-id", "", "Filter by deployment ID") + cmd.Flags().String("level", "", "Filter by log level (error, warning, info)") + cmd.Flags().String("cursor", "", "Pagination cursor from previous response") + + return cmd +} + +func newSiteWPReconfigCmd() *cobra.Command { + return &cobra.Command{ + Use: "wp-reconfig ", + Short: "Regenerate wp-config.php", + Long: "Regenerate the wp-config.php file for the site's development container.", + Example: ` # Regenerate wp-config.php + vector site wp-reconfig site-abc123`, + Args: 
cobra.ExactArgs(1), + RunE: sitePostActionRunE("wp/reconfig", "wp-config regenerated"), + } +} + +// siteActionRunE returns a RunE function for simple site action endpoints (suspend/unsuspend). +func siteActionRunE(action, method string) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + path := sitesBasePath + "/" + args[0] + "/" + action + + var ( + resp *http.Response + reqErr error + ) + switch method { + case "PUT": + resp, reqErr = app.Client.Put(cmd.Context(), path, nil) + default: + resp, reqErr = app.Client.Post(cmd.Context(), path, nil) + } + if reqErr != nil { + return fmt.Errorf("failed to %s site: %w", action, reqErr) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to %s site: %w", action, err) + } + + data, parseErr := parseResponseData(body) + if parseErr != nil { + return fmt.Errorf("failed to %s site: %w", action, parseErr) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to %s site: %w", action, err) + } + + app.Output.Message(fmt.Sprintf("Site %s %s initiated.", getString(item, "id"), action)) + return nil + } +} + +// sitePostActionRunE returns a RunE function for simple POST site action endpoints. 
+func sitePostActionRunE(subPath, successMsg string) func(*cobra.Command, []string) error { + return func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Post(cmd.Context(), sitesBasePath+"/"+args[0]+"/"+subPath, nil) + if err != nil { + var apiErr *api.APIError + if errors.As(err, &apiErr) { + return apiErr + } + return fmt.Errorf("failed to %s: %w", successMsg, err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("reading response: %w", err) + } + + data, parseErr := parseResponseData(body) + if parseErr != nil { + return fmt.Errorf("parsing response: %w", parseErr) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + // Extract message from full response + var envelope struct { + Message string `json:"message"` + } + if err := json.Unmarshal(body, &envelope); err == nil && envelope.Message != "" { + app.Output.Message(envelope.Message) + } else { + app.Output.Message("Operation completed successfully.") + } + return nil + } +} + +// buildLogsQuery builds query parameters for the logs endpoint. +func buildLogsQuery(cmd *cobra.Command) map[string][]string { + q := make(map[string][]string) + + flagMap := map[string]string{ + "start-time": "start_time", + "end-time": "end_time", + "environment": "environment", + "deployment-id": "deployment_id", + "level": "level", + "cursor": "cursor", + } + + for flag, param := range flagMap { + if cmd.Flags().Changed(flag) { + v, _ := cmd.Flags().GetString(flag) + if v != "" { + q[param] = []string{v} + } + } + } + + if cmd.Flags().Changed("limit") { + v, _ := cmd.Flags().GetInt("limit") + if v > 0 { + q["limit"] = []string{fmt.Sprintf("%d", v)} + } + } + + return q +} + +// printLogEntries prints log entries from the logs API response. 
+func printLogEntries(w io.Writer, logData map[string]any) { + logs := getMap(logData, "logs") + if logs == nil { + output.PrintMessage(w, "No logs found.") + return + } + + tables := getSlice(logs, "tables") + if len(tables) == 0 { + output.PrintMessage(w, "No logs found.") + return + } + + for _, t := range tables { + table, ok := t.(map[string]any) + if !ok { + continue + } + + columns := getSlice(table, "columns") + rows := getSlice(table, "rows") + + if len(columns) == 0 || len(rows) == 0 { + continue + } + + // Build header names + var headers []string + for _, c := range columns { + col, ok := c.(map[string]any) + if !ok { + continue + } + headers = append(headers, strings.ToUpper(getString(col, "name"))) + } + + // Build row data + var tableRows [][]string + for _, r := range rows { + row, ok := r.([]any) + if !ok { + continue + } + var cells []string + for _, cell := range row { + cells = append(cells, fmt.Sprintf("%v", cell)) + } + tableRows = append(tableRows, cells) + } + + output.PrintTable(w, headers, tableRows) + } + + // Show cursor info + cursor := getString(logData, "cursor") + hasMore := getBool(logData, "has_more") + if hasMore && cursor != "" { + _, _ = fmt.Fprintf(w, "\nMore logs available. Use --cursor %s to continue.\n", cursor) + } +} + +// tagsFromItem extracts tags as []string from a map item. +func tagsFromItem(item map[string]any) []string { + rawTags := getSlice(item, "tags") + var tags []string + for _, t := range rawTags { + if s, ok := t.(string); ok { + tags = append(tags, s) + } + } + return tags +} diff --git a/internal/commands/site_ssh_key.go b/internal/commands/site_ssh_key.go new file mode 100644 index 0000000..3a495fb --- /dev/null +++ b/internal/commands/site_ssh_key.go @@ -0,0 +1,203 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewSiteSSHKeyCmd creates the site ssh-key command group. 
+func NewSiteSSHKeyCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "ssh-key", + Short: "Manage site SSH keys", + Long: "Manage SSH keys for a site's development container.", + } + + cmd.AddCommand(newSSHKeyListCmd()) + cmd.AddCommand(newSSHKeyAddCmd()) + cmd.AddCommand(newSSHKeyRemoveCmd()) + + return cmd +} + +func newSSHKeyListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list ", + Short: "List SSH keys", + Long: "Retrieve all SSH keys installed on a site's development container.", + Example: ` # List SSH keys for a site + vector site ssh-key list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + path := sitesBasePath + "/" + args[0] + "/ssh-keys" + resp, err := app.Client.Get(cmd.Context(), path, query) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list SSH keys: %w", err) + } + + headers := []string{"ID", "NAME", "FINGERPRINT", "DEFAULT", "CREATED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "name"), + formatString(getString(item, "fingerprint")), + formatBool(getBool(item, "is_account_default")), + getString(item, 
"created_at"), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newSSHKeyAddCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "add ", + Short: "Add an SSH key", + Long: "Add a new SSH key to a site's development container.", + Example: ` # Add an SSH key to a site + vector site ssh-key add site-abc123 --name "deploy-key" --public-key "ssh-ed25519 AAAA..."`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + name, _ := cmd.Flags().GetString("name") + publicKey, _ := cmd.Flags().GetString("public-key") + + reqBody := map[string]any{ + "name": name, + "public_key": publicKey, + } + + path := sitesBasePath + "/" + args[0] + "/ssh-keys" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to add SSH key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to add SSH key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to add SSH key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to add SSH key: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Fingerprint", Value: formatString(getString(item, "fingerprint"))}, + {Key: "Default", Value: formatBool(getBool(item, "is_account_default"))}, + }) + return nil + }, + } + + cmd.Flags().String("name", "", "Friendly name for the SSH key (required)") + cmd.Flags().String("public-key", "", "SSH 
public key in OpenSSH format (required)") + _ = cmd.MarkFlagRequired("name") + _ = cmd.MarkFlagRequired("public-key") + + return cmd +} + +func newSSHKeyRemoveCmd() *cobra.Command { + return &cobra.Command{ + Use: "remove <site> <key>", + Short: "Remove an SSH key", + Long: "Remove an SSH key from a site's development container.", + Example: ` # Remove an SSH key from a site + vector site ssh-key remove site-abc123 key-456`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + path := sitesBasePath + "/" + args[0] + "/ssh-keys/" + args[1] + resp, err := app.Client.Delete(cmd.Context(), path) + if err != nil { + return fmt.Errorf("failed to remove SSH key: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to remove SSH key: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to remove SSH key: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message("SSH key removed successfully.") + return nil + }, + } +} diff --git a/internal/commands/site_ssh_key_test.go b/internal/commands/site_ssh_key_test.go new file mode 100644 index 0000000..6ce934b --- /dev/null +++ b/internal/commands/site_ssh_key_test.go @@ -0,0 +1,386 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var sshKeyListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "key-001", + "account_id": 1, + "vector_site_id": "site-001", + "name": "developer key", + "fingerprint": "SHA256:abc123def456", + "public_key_preview": "ssh-rsa 
AAAAB3...user@host", + "is_account_default": false, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 1, + }, + "message": "SSH keys retrieved successfully", + "http_status": 200, +} + +var sshKeyAddResponse = map[string]any{ + "data": map[string]any{ + "id": "key-002", + "account_id": 1, + "vector_site_id": "site-001", + "name": "new key", + "fingerprint": "SHA256:xyz789", + "public_key_preview": "ssh-rsa BBBBB3...user@host", + "is_account_default": false, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSH key added to site successfully", + "http_status": 201, +} + +var sshKeyRemoveResponse = map[string]any{ + "data": map[string]any{ + "id": "key-001", + "account_id": 1, + "vector_site_id": "site-001", + "name": "developer key", + "fingerprint": "SHA256:abc123def456", + "public_key_preview": "ssh-rsa AAAAB3...user@host", + "is_account_default": false, + }, + "message": "SSH key removed from site successfully", + "http_status": 200, +} + +func newSSHKeyTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites/site-001/ssh-keys": + _ = json.NewEncoder(w).Encode(sshKeyListResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/ssh-keys": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(sshKeyAddResponse) + + 
case method == "DELETE" && path == "/api/v1/vector/sites/site-001/ssh-keys/key-001": + _ = json.NewEncoder(w).Encode(sshKeyRemoveResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001/ssh-keys/nonexistent": + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "SSH key not found on this site", + "http_status": 404, + }) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- SSH Key List Tests --- + +func TestSSHKeyListCmd_TableOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "key-001") + assert.Contains(t, out, "developer key") + assert.Contains(t, out, "SHA256:abc123def456") + assert.Contains(t, out, "No") +} + +func TestSSHKeyListCmd_JSONOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 1) + assert.Equal(t, "key-001", result[0]["id"]) +} + +func TestSSHKeyListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sshKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, 
"valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001", "--page", "3", "--per-page", "25"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "3", receivedPage) + assert.Equal(t, "25", receivedPerPage) +} + +func TestSSHKeyListCmd_AuthError(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSHKeyListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildSiteCmdNoAuth(output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSHKeyListCmd_HTTPPath(t *testing.T) { + var receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sshKeyListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "/api/v1/vector/sites/site-001/ssh-keys", receivedPath) +} + +// --- SSH Key Add Tests --- + +func TestSSHKeyAddCmd_TableOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "add", "site-001", + "--name", "new key", + "--public-key", "ssh-rsa BBBBB3...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, 
out, "key-002") + assert.Contains(t, out, "new key") + assert.Contains(t, out, "SHA256:xyz789") +} + +func TestSSHKeyAddCmd_JSONOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "ssh-key", "add", "site-001", + "--name", "new key", + "--public-key", "ssh-rsa BBBBB3...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "key-002", result["id"]) +} + +func TestSSHKeyAddCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(sshKeyAddResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "add", "site-001", + "--name", "my key", + "--public-key", "ssh-rsa AAAAB3NzaC1yc2EA...", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/ssh-keys", receivedPath) + assert.Equal(t, "my key", receivedBody["name"]) + assert.Equal(t, "ssh-rsa AAAAB3NzaC1yc2EA...", receivedBody["public_key"]) +} + +func TestSSHKeyAddCmd_MissingRequiredFlags(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "add", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +// --- SSH Key Remove 
Tests --- + +func TestSSHKeyRemoveCmd_TableOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "remove", "site-001", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "SSH key removed successfully") +} + +func TestSSHKeyRemoveCmd_JSONOutput(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "ssh-key", "remove", "site-001", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "key-001", result["id"]) +} + +func TestSSHKeyRemoveCmd_NotFound(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "remove", "site-001", "nonexistent"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 4, apiErr.ExitCode) +} + +func TestSSHKeyRemoveCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sshKeyRemoveResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "remove", "site-001", "key-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/ssh-keys/key-001", receivedPath) +} + +func 
TestSSHKeyRemoveCmd_MissingArgs(t *testing.T) { + ts := newSSHKeyTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "ssh-key", "remove", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- SSH Key Help Tests --- + +func TestSSHKeyCmd_Help(t *testing.T) { + cmd := NewSiteCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"ssh-key", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "add") + assert.Contains(t, out, "remove") +} diff --git a/internal/commands/site_test.go b/internal/commands/site_test.go new file mode 100644 index 0000000..5c9e66e --- /dev/null +++ b/internal/commands/site_test.go @@ -0,0 +1,1420 @@ +package commands + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "strings" + "sync/atomic" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +// siteListResponse is the standard response for GET /api/v1/vector/sites. 
+var siteListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "site-001", + "your_customer_id": "cust_123", + "status": "active", + "dev_domain": "dev.test.vectorpages.com", + "tags": []string{"wordpress", "production"}, + "dev_db_host": "db.test.rds.amazonaws.com", + "dev_db_name": "db_site001", + "environments": []map[string]any{ + { + "id": "env-001", + "name": "production", + "is_production": true, + "status": "active", + "php_version": "8.3", + "platform_domain": "test--prod.vectorpages.com", + "custom_domain": "example.com", + "custom_domain_certificate_status": "issued", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 1, + }, + "message": "Sites retrieved successfully", + "http_status": 200, +} + +// siteShowResponse is the standard response for GET /api/v1/vector/sites/{site}. 
+var siteShowResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "your_customer_id": "cust_123", + "status": "active", + "tags": []string{"wordpress"}, + "dev_domain": "dev.test.vectorpages.com", + "dev_db_host": "db.test.rds.amazonaws.com", + "dev_db_name": "db_site001", + "environments": []map[string]any{ + { + "id": "env-001", + "name": "production", + "is_production": true, + "status": "active", + "php_version": "8.3", + "platform_domain": "test--prod.vectorpages.com", + "custom_domain": "example.com", + }, + }, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Site retrieved successfully", + "http_status": 200, +} + +var siteCreateResponse = map[string]any{ + "data": map[string]any{ + "id": "site-002", + "your_customer_id": "cust_456", + "status": "pending", + "dev_domain": "dev.new.vectorpages.com", + "dev_db_host": "db.new.rds.amazonaws.com", + "dev_db_name": "db_site002", + "dev_sftp": map[string]any{ + "hostname": "ssh.vectorpages.com", + "port": 22, + "username": "new-site", + "password": "sftp-pass-123", + }, + "dev_db_username": "db_site002", + "dev_db_password": "db-pass-456", + "wp_admin": map[string]any{ + "user": "admin", + "email": "admin@example.com", + "password": "wp-pass-789", + "site_title": "My Blog", + }, + "environments": []map[string]any{}, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "Vector site creation initiated", + "http_status": 201, +} + +var siteDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "your_customer_id": "cust_123", + "status": "terminating", + "environments": []any{}, + }, + "message": "Vector site deletion initiated", + "http_status": 202, +} + +var siteSuspendResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "status": "suspended", + }, + "message": "Vector site suspension initiated", + "http_status": 200, +} + +var 
siteUnsuspendResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "status": "active", + }, + "message": "Vector site unsuspension initiated", + "http_status": 200, +} + +var siteCloneResponse = map[string]any{ + "data": map[string]any{ + "id": "site-003", + "your_customer_id": "cust_123", + "status": "pending", + "dev_domain": "dev.clone.vectorpages.com", + "dev_db_username": "db_site003", + "dev_db_password": "clone-pass-123", + "environments": []any{}, + }, + "message": "Vector site clone initiated", + "http_status": 201, +} + +var siteResetSFTPResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "dev_sftp": map[string]any{ + "hostname": "ssh.vectorpages.com", + "port": 22, + "username": "test-site", + "password": "new-sftp-pass", + }, + }, + "message": "SFTP password reset successfully.", + "http_status": 200, +} + +var siteResetDBResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "dev_db_username": "db_site001", + "dev_db_password": "new-db-pass", + }, + "message": "Database password reset successfully.", + "http_status": 200, +} + +var sitePurgeCacheResponse = map[string]any{ + "data": map[string]any{}, + "message": "Cache purged successfully", + "http_status": 200, +} + +var siteLogsResponse = map[string]any{ + "data": map[string]any{ + "logs": map[string]any{ + "tables": []map[string]any{ + { + "name": "0", + "columns": []map[string]any{ + {"name": "_time", "type": "datetime"}, + {"name": "message", "type": "string"}, + {"name": "level", "type": "string"}, + }, + "rows": [][]string{ + {"2025-01-15T12:00:00+00:00", "Request completed", "info"}, + }, + }, + }, + }, + "cursor": "abc123", + "has_more": true, + }, + "message": "Logs retrieved successfully", + "http_status": 200, +} + +var siteWPReconfigResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "status": "active", + }, + "message": "WordPress configuration regenerated successfully", + "http_status": 200, +} + +var 
siteUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": "site-001", + "your_customer_id": "cust_999", + "status": "active", + "tags": []string{"updated"}, + "dev_domain": "dev.test.vectorpages.com", + }, + "message": "Vector site updated successfully", + "http_status": 200, +} + +// newSiteTestServer creates an httptest server that handles site endpoints. +func newSiteTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + // Check auth + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites": + _ = json.NewEncoder(w).Encode(siteListResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001": + _ = json.NewEncoder(w).Encode(siteShowResponse) + + case method == "GET" && path == "/api/v1/vector/sites/nonexistent": + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "data": map[string]any{}, + "message": "Site not found", + "http_status": 404, + }) + + case method == "POST" && path == "/api/v1/vector/sites": + // Validate request body + body, _ := io.ReadAll(r.Body) + var reqBody map[string]any + _ = json.Unmarshal(body, &reqBody) + if reqBody["your_customer_id"] == nil || reqBody["your_customer_id"] == "" { + w.WriteHeader(http.StatusUnprocessableEntity) + _ = json.NewEncoder(w).Encode(map[string]any{ + "errors": map[string][]string{ + "your_customer_id": {"The partner customer id field is required."}, + }, + "message": "Validation failed", + "http_status": 422, + }) + return + } + w.WriteHeader(http.StatusCreated) + _ 
= json.NewEncoder(w).Encode(siteCreateResponse) + + case method == "PUT" && path == "/api/v1/vector/sites/site-001": + _ = json.NewEncoder(w).Encode(siteUpdateResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001": + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(siteDeleteResponse) + + case method == "PUT" && path == "/api/v1/vector/sites/site-001/suspend": + _ = json.NewEncoder(w).Encode(siteSuspendResponse) + + case method == "PUT" && path == "/api/v1/vector/sites/site-001/unsuspend": + _ = json.NewEncoder(w).Encode(siteUnsuspendResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/clone": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(siteCloneResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/sftp/reset-password": + _ = json.NewEncoder(w).Encode(siteResetSFTPResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/db/reset-password": + _ = json.NewEncoder(w).Encode(siteResetDBResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/purge-cache": + _ = json.NewEncoder(w).Encode(sitePurgeCacheResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001/logs": + _ = json.NewEncoder(w).Encode(siteLogsResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/wp/reconfig": + _ = json.NewEncoder(w).Encode(siteWPReconfigResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// buildSiteCmd creates a root + site command wired with an App context. 
+func buildSiteCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + siteCmd := NewSiteCmd() + root.AddCommand(siteCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// buildSiteCmdNoAuth creates a root + site command with no auth token. +func buildSiteCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + siteCmd := NewSiteCmd() + root.AddCommand(siteCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Site List Tests --- + +func TestSiteListCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "cust_123") + assert.Contains(t, out, "active") + assert.Contains(t, out, "dev.test.vectorpages.com") + 
assert.Contains(t, out, "wordpress, production") +} + +func TestSiteListCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 1) + assert.Equal(t, "site-001", result[0]["id"]) +} + +func TestSiteListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "list", "--page", "2", "--per-page", "10"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "2", receivedPage) + assert.Equal(t, "10", receivedPerPage) +} + +func TestSiteListCmd_AuthError(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"site", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSiteListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildSiteCmdNoAuth(output.Table) + cmd.SetArgs([]string{"site", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Site Show Tests --- + +func TestSiteShowCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := 
buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "show", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "cust_123") + assert.Contains(t, out, "active") + assert.Contains(t, out, "wordpress") + // Should contain environments table + assert.Contains(t, out, "Environments:") + assert.Contains(t, out, "env-001") + assert.Contains(t, out, "production") + assert.Contains(t, out, "example.com") +} + +func TestSiteShowCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "show", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-001", result["id"]) + assert.Equal(t, "cust_123", result["your_customer_id"]) +} + +func TestSiteShowCmd_NotFound(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "show", "nonexistent"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 4, apiErr.ExitCode) +} + +func TestSiteShowCmd_MissingArg(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Site Create Tests --- + +func TestSiteCreateCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "create", 
"--customer-id", "cust_456"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "site-002") + assert.Contains(t, out, "pending") + assert.Contains(t, out, "sftp-pass-123") + assert.Contains(t, out, "db-pass-456") + assert.Contains(t, out, "wp-pass-789") +} + +func TestSiteCreateCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "create", "--customer-id", "cust_456"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-002", result["id"]) + assert.Equal(t, "pending", result["status"]) +} + +func TestSiteCreateCmd_MissingCustomerID(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "create"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 3, apiErr.ExitCode) +} + +func TestSiteCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(siteCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "create", + "--customer-id", "cust_789", + "--php-version", "8.3", + "--tags", "wordpress,staging", + "--wp-admin-email", "admin@test.com", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "cust_789", receivedBody["your_customer_id"]) + assert.Equal(t, "8.3", 
receivedBody["dev_php_version"]) + assert.Equal(t, "admin@test.com", receivedBody["wp_admin_email"]) + tags, ok := receivedBody["tags"].([]any) + require.True(t, ok) + assert.Equal(t, "wordpress", tags[0]) + assert.Equal(t, "staging", tags[1]) +} + +// --- Site Update Tests --- + +func TestSiteUpdateCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "update", "site-001", "--customer-id", "cust_999"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "site-001") + assert.Contains(t, out, "cust_999") +} + +func TestSiteUpdateCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "update", "site-001", "--customer-id", "cust_999"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-001", result["id"]) +} + +func TestSiteUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "update", "site-001", "--customer-id", "new_id", "--tags", "tag1,tag2"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "new_id", receivedBody["your_customer_id"]) + tags, ok := receivedBody["tags"].([]any) + require.True(t, ok) + assert.Equal(t, "tag1", tags[0]) + assert.Equal(t, "tag2", tags[1]) +} + +// --- Site Delete Tests 
--- + +func TestSiteDeleteCmd_WithForce(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "delete", "site-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "deletion initiated") +} + +func TestSiteDeleteCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "delete", "site-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-001", result["id"]) + assert.Equal(t, "terminating", result["status"]) +} + +func TestSiteDeleteCmd_ConfirmAbort(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + // Override confirmReader to return "n" + origReader := confirmReader + confirmReader = strings.NewReader("n\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "delete", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Aborted") +} + +func TestSiteDeleteCmd_ConfirmYes(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + origReader := confirmReader + confirmReader = strings.NewReader("y\n") + t.Cleanup(func() { confirmReader = origReader }) + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "delete", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "deletion initiated") +} + +func TestSiteDeleteCmd_HTTPMethod(t *testing.T) { + var receivedMethod string + ts := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusAccepted) + _ = json.NewEncoder(w).Encode(siteDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "delete", "site-001", "--force"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) +} + +// --- Site Clone Tests --- + +func TestSiteCloneCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "clone", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "site-003") + assert.Contains(t, out, "pending") + assert.Contains(t, out, "clone-pass-123") +} + +func TestSiteCloneCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "clone", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-003", result["id"]) +} + +func TestSiteCloneCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(siteCloneResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "clone", "site-001", "--customer-id", "new_cust", "--php-version", 
"8.4"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "/api/v1/vector/sites/site-001/clone", receivedPath) + assert.Equal(t, "new_cust", receivedBody["your_customer_id"]) + assert.Equal(t, "8.4", receivedBody["dev_php_version"]) +} + +// --- Site Suspend/Unsuspend Tests --- + +func TestSiteSuspendCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "suspend", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "suspend initiated") +} + +func TestSiteSuspendCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "suspend", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "suspended", result["status"]) +} + +func TestSiteSuspendCmd_HTTPMethod(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteSuspendResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "suspend", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "PUT", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/suspend", receivedPath) +} + +func TestSiteUnsuspendCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "unsuspend", "site-001"}) 
+ + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "unsuspend initiated") +} + +func TestSiteUnsuspendCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "unsuspend", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "active", result["status"]) +} + +// --- Site Reset SFTP Password Tests --- + +func TestSiteResetSFTPPasswordCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "reset-sftp-password", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "ssh.vectorpages.com") + assert.Contains(t, out, "test-site") + assert.Contains(t, out, "new-sftp-pass") +} + +func TestSiteResetSFTPPasswordCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "reset-sftp-password", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + sftp := result["dev_sftp"].(map[string]any) + assert.Equal(t, "new-sftp-pass", sftp["password"]) +} + +func TestSiteResetSFTPPasswordCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteResetSFTPResponse) + })) + defer ts.Close() + + cmd, _, _ := 
buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "reset-sftp-password", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/sftp/reset-password", receivedPath) +} + +// --- Site Reset DB Password Tests --- + +func TestSiteResetDBPasswordCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "reset-db-password", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "db_site001") + assert.Contains(t, out, "new-db-pass") +} + +func TestSiteResetDBPasswordCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "reset-db-password", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "new-db-pass", result["dev_db_password"]) +} + +// --- Site Purge Cache Tests --- + +func TestSitePurgeCacheCmd_FullPurge(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "purge-cache", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Cache purged successfully") +} + +func TestSitePurgeCacheCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "purge-cache", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, 
json.Unmarshal(stdout.Bytes(), &result)) + // data is empty object for full purge + assert.NotNil(t, result) +} + +func TestSitePurgeCacheCmd_WithTag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sitePurgeCacheResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "purge-cache", "site-001", "--cache-tag", "images"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "images", receivedBody["cache_tag"]) +} + +func TestSitePurgeCacheCmd_WithURL(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sitePurgeCacheResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "purge-cache", "site-001", "--url", "https://example.com/style.css"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "https://example.com/style.css", receivedBody["url"]) +} + +// --- Site Logs Tests --- + +func TestSiteLogsCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "logs", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "Request completed") + assert.Contains(t, out, "info") + assert.Contains(t, out, "--cursor abc123") +} + +func TestSiteLogsCmd_JSONOutput(t *testing.T) { + ts := 
newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "logs", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "abc123", result["cursor"]) + assert.Equal(t, true, result["has_more"]) +} + +func TestSiteLogsCmd_QueryParams(t *testing.T) { + var receivedQuery string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedQuery = r.URL.RawQuery + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteLogsResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "logs", "site-001", + "--start-time", "now-24h", + "--level", "error", + "--limit", "500", + "--environment", "production", + }) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, receivedQuery, "start_time=now-24h") + assert.Contains(t, receivedQuery, "level=error") + assert.Contains(t, receivedQuery, "limit=500") + assert.Contains(t, receivedQuery, "environment=production") +} + +// --- Site WP Reconfig Tests --- + +func TestSiteWPReconfigCmd_TableOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "wp-reconfig", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "WordPress configuration regenerated successfully") +} + +func TestSiteWPReconfigCmd_JSONOutput(t *testing.T) { + ts := newSiteTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "wp-reconfig", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result 
map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "site-001", result["id"]) +} + +func TestSiteWPReconfigCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(siteWPReconfigResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "wp-reconfig", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/wp/reconfig", receivedPath) +} + +// --- Help Text Tests --- + +func TestSiteCmd_Help(t *testing.T) { + cmd := NewSiteCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "clone") + assert.Contains(t, out, "suspend") + assert.Contains(t, out, "unsuspend") + assert.Contains(t, out, "ssh-key") + assert.Contains(t, out, "purge-cache") + assert.Contains(t, out, "logs") +} + +func TestSiteCreateCmd_Help(t *testing.T) { + cmd := NewSiteCmd() + cmd.SetContext(context.Background()) + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"create", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "--customer-id") + assert.Contains(t, out, "--php-version") + assert.Contains(t, out, "--tags") +} + +// --- Server Error Tests --- + +func TestSiteListCmd_ServerError(t *testing.T) { + ts 
:= httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusInternalServerError) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Internal server error", + "http_status": 500, + }) + })) + defer ts.Close() + + cmd, _, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 5, apiErr.ExitCode) +} + +// --- Site Create --wait Tests --- + +// siteActivePollResponse is the polled response for a site that has become active. +// It does NOT contain one-time credentials (those are only in the initial POST response). +var siteActivePollResponse = map[string]any{ + "data": map[string]any{ + "id": "site-002", + "your_customer_id": "cust_456", + "status": "active", + "dev_domain": "dev.new.vectorpages.com", + "dev_db_host": "db.new.rds.amazonaws.com", + "dev_db_name": "db_site002", + "environments": []map[string]any{ + { + "id": "env-002", + "name": "production", + "is_production": true, + "status": "active", + "php_version": "8.3", + "platform_domain": "new--prod.vectorpages.com", + }, + }, + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:05:00+00:00", + }, + "message": "Site retrieved successfully", + "http_status": 200, +} + +// newSiteWaitTestServer creates a test server that handles: +// - POST /sites -> returns siteCreateResponse (with credentials) +// - GET /sites/{id} -> returns successive poll responses (without credentials) +func newSiteWaitTestServer(validToken string, pollResponses []countingResponse) *httptest.Server { + var pollCount atomic.Int64 + + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", 
"application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + path := r.URL.Path + method := r.Method + + switch { + case method == "POST" && path == "/api/v1/vector/sites": + body, _ := io.ReadAll(r.Body) + var reqBody map[string]any + _ = json.Unmarshal(body, &reqBody) + if reqBody["your_customer_id"] == nil || reqBody["your_customer_id"] == "" { + w.WriteHeader(http.StatusUnprocessableEntity) + _ = json.NewEncoder(w).Encode(map[string]any{ + "errors": map[string][]string{ + "your_customer_id": {"The partner customer id field is required."}, + }, + "message": "Validation failed", + "http_status": 422, + }) + return + } + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(siteCreateResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-002": + idx := int(pollCount.Add(1)) - 1 + if idx >= len(pollResponses) { + idx = len(pollResponses) - 1 + } + resp := pollResponses[idx] + if resp.httpStatus != 0 { + w.WriteHeader(resp.httpStatus) + } + _ = json.NewEncoder(w).Encode(resp.body) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func makeSitePollResponse(id, status string) countingResponse { + return countingResponse{ + httpStatus: http.StatusOK, + body: map[string]any{ + "data": map[string]any{ + "id": id, + "your_customer_id": "cust_456", + "status": status, + "dev_domain": "dev.new.vectorpages.com", + }, + "message": "Site retrieved successfully", + "http_status": 200, + }, + } +} + +func TestSiteCreateCmd_WaitSuccess(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newSiteWaitTestServer("valid-token", []countingResponse{ + makeSitePollResponse("site-002", "pending"), + makeSitePollResponse("site-002", "provisioning"), + { + 
httpStatus: http.StatusOK, + body: siteActivePollResponse, + }, + }) + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "create", "--customer-id", "cust_456", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + + // Credentials should be printed before polling (from the initial POST response) + assert.Contains(t, out, "sftp-pass-123") + assert.Contains(t, out, "db-pass-456") + assert.Contains(t, out, "wp-pass-789") + + // Final state should be shown after polling completes + assert.Contains(t, out, "Site site-002 active in") + assert.Contains(t, out, "active") +} + +func TestSiteCreateCmd_WaitFailure(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newSiteWaitTestServer("valid-token", []countingResponse{ + makeSitePollResponse("site-002", "pending"), + makeSitePollResponse("site-002", "failed"), + }) + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"site", "create", "--customer-id", "cust_456", "--wait", "--poll-interval", "1s", "--timeout", "30s"}) + + err := cmd.Execute() + require.Error(t, err) + + // Even on failure, credentials should have been printed + out := stdout.String() + assert.Contains(t, out, "sftp-pass-123") + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "failed status") +} + +func TestSiteCreateCmd_WaitJSON(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newSiteWaitTestServer("valid-token", []countingResponse{ + makeSitePollResponse("site-002", "pending"), + { + httpStatus: http.StatusOK, + body: siteActivePollResponse, + }, + }) + defer ts.Close() + + cmd, stdout, _ := buildSiteCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"site", "create", "--customer-id", "cust_456", "--wait", "--poll-interval", "1s", 
"--timeout", "30s"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + + // Final status should be active + assert.Equal(t, "site-002", result["id"]) + assert.Equal(t, "active", result["status"]) + + // One-time credentials from the initial POST should be merged into the final JSON + sftp, ok := result["dev_sftp"].(map[string]any) + require.True(t, ok, "dev_sftp should be merged into final JSON") + assert.Equal(t, "sftp-pass-123", sftp["password"]) + + assert.Equal(t, "db_site002", result["dev_db_username"]) + assert.Equal(t, "db-pass-456", result["dev_db_password"]) + + wp, ok := result["wp_admin"].(map[string]any) + require.True(t, ok, "wp_admin should be merged into final JSON") + assert.Equal(t, "wp-pass-789", wp["password"]) +} diff --git a/internal/commands/skill.go b/internal/commands/skill.go new file mode 100644 index 0000000..c8f2c1a --- /dev/null +++ b/internal/commands/skill.go @@ -0,0 +1,253 @@ +package commands + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/version" + "github.com/built-fast/vector-cli/skills" +) + +// skillInstallDir is the base directory for installed skills. Override in tests. +var skillInstallDir = "" + +// claudeSkillsDir is the Claude Code skills directory. Override in tests. +var claudeSkillsDir = "" + +// symlinkFunc is the function used to create symlinks. Override in tests. +var symlinkFunc = os.Symlink + +// defaultSkillInstallDir returns ~/.agents/skills. +func defaultSkillInstallDir() (string, error) { + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to determine home directory: %w", err) + } + return filepath.Join(home, ".agents", "skills"), nil +} + +// defaultClaudeSkillsDir returns ~/.claude/skills. 
+func defaultClaudeSkillsDir() (string, error) { + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("failed to determine home directory: %w", err) + } + return filepath.Join(home, ".claude", "skills"), nil +} + +// getSkillInstallDir returns the skill install directory, using the override if set. +func getSkillInstallDir() (string, error) { + if skillInstallDir != "" { + return skillInstallDir, nil + } + return defaultSkillInstallDir() +} + +// getClaudeSkillsDir returns the Claude skills directory, using the override if set. +func getClaudeSkillsDir() (string, error) { + if claudeSkillsDir != "" { + return claudeSkillsDir, nil + } + return defaultClaudeSkillsDir() +} + +// NewSkillCmd creates the skill command group. +func NewSkillCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "skill", + Short: "Agent skill document", + Long: "View or manage the embedded SKILL.md agent reference document.", + RunE: func(cmd *cobra.Command, args []string) error { + content, err := skills.Content.ReadFile("vector/SKILL.md") + if err != nil { + return fmt.Errorf("failed to read embedded skill: %w", err) + } + _, err = cmd.OutOrStdout().Write(content) + return err + }, + } + + cmd.AddCommand(newSkillInstallCmd()) + cmd.AddCommand(newSkillUninstallCmd()) + + return cmd +} + +// newSkillInstallCmd creates the skill install leaf command. +func newSkillInstallCmd() *cobra.Command { + return &cobra.Command{ + Use: "install", + Short: "Install skill document for AI agents", + Long: "Install the SKILL.md agent reference to ~/.agents/skills/vector/ and link it into ~/.claude/skills/vector/ for automatic discovery by Claude Code.", + RunE: func(cmd *cobra.Command, args []string) error { + return runSkillInstall(cmd) + }, + } +} + +// installSkillFiles installs the skill document and version stamp to the given directory. +// Returns the path to the installed SKILL.md. 
+func installSkillFiles(installDir string) (string, error) { + vectorDir := filepath.Join(installDir, "vector") + if err := os.MkdirAll(vectorDir, 0o755); err != nil { + return "", fmt.Errorf("failed to create skill directory: %w", err) + } + + content, err := skills.Content.ReadFile("vector/SKILL.md") + if err != nil { + return "", fmt.Errorf("failed to read embedded skill: %w", err) + } + + skillPath := filepath.Join(vectorDir, "SKILL.md") + if err := os.WriteFile(skillPath, content, 0o644); err != nil { + return "", fmt.Errorf("failed to write skill file: %w", err) + } + + versionPath := filepath.Join(vectorDir, ".version") + if err := os.WriteFile(versionPath, []byte(version.Version), 0o644); err != nil { + return "", fmt.Errorf("failed to write version stamp: %w", err) + } + + return skillPath, nil +} + +// linkClaudeSkill creates a symlink (or copies as fallback) from the Claude skills +// directory to the installed skill file. +func linkClaudeSkill(claudeDir, installedPath string) error { + claudeVectorDir := filepath.Join(claudeDir, "vector") + if err := os.MkdirAll(claudeVectorDir, 0o755); err != nil { + return fmt.Errorf("failed to create Claude skills directory: %w", err) + } + + linkPath := filepath.Join(claudeVectorDir, "SKILL.md") + + // Remove existing file/symlink for idempotency. + _ = os.Remove(linkPath) + + // Try symlink first. + if err := symlinkFunc(installedPath, linkPath); err != nil { + // Fallback to copy. + content, readErr := os.ReadFile(installedPath) + if readErr != nil { + return fmt.Errorf("failed to read installed skill for copy: %w", readErr) + } + if writeErr := os.WriteFile(linkPath, content, 0o644); writeErr != nil { + return fmt.Errorf("failed to copy skill file: %w", writeErr) + } + } + + return nil +} + +// newSkillUninstallCmd creates the skill uninstall leaf command. 
+func newSkillUninstallCmd() *cobra.Command { + return &cobra.Command{ + Use: "uninstall", + Short: "Uninstall skill document", + Long: "Remove the installed SKILL.md agent reference from ~/.agents/skills/vector/ and ~/.claude/skills/vector/.", + RunE: func(cmd *cobra.Command, args []string) error { + return runSkillUninstall(cmd) + }, + } +} + +// runSkillUninstall removes the installed skill files and Claude symlink/copy. +func runSkillUninstall(cmd *cobra.Command) error { + installDir, err := getSkillInstallDir() + if err != nil { + return err + } + + claudeDir, err := getClaudeSkillsDir() + if err != nil { + return err + } + + // Remove ~/.agents/skills/vector/ + vectorInstallDir := filepath.Join(installDir, "vector") + if err := os.RemoveAll(vectorInstallDir); err != nil { + return fmt.Errorf("failed to remove skill directory: %w", err) + } + + // Remove ~/.claude/skills/vector/ + claudeVectorDir := filepath.Join(claudeDir, "vector") + if err := os.RemoveAll(claudeVectorDir); err != nil { + return fmt.Errorf("failed to remove Claude skill directory: %w", err) + } + + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Skill uninstalled successfully.") + + return nil +} + +// RefreshSkillsIfVersionChanged silently re-installs skill files when the +// installed version stamp differs from the current CLI version. It does nothing +// if the skill has never been installed, if the version matches, or if this is +// a dev build. Best-effort: no output on success, no error output on failure. +func RefreshSkillsIfVersionChanged() { + // Skip dev builds. + if version.Version == "dev" || version.Version == "" { + return + } + + installDir, err := getSkillInstallDir() + if err != nil { + return + } + + versionPath := filepath.Join(installDir, "vector", ".version") + stamp, err := os.ReadFile(versionPath) + if err != nil { + // Sentinel missing — skill has never been installed. + return + } + + if string(stamp) == version.Version { + // Version matches — nothing to do. 
+ return + } + + // Version mismatch — re-install silently. + installedPath, err := installSkillFiles(installDir) + if err != nil { + return + } + + claudeDir, err := getClaudeSkillsDir() + if err != nil { + return + } + + _ = linkClaudeSkill(claudeDir, installedPath) +} + +// runSkillInstall performs the full install sequence. +func runSkillInstall(cmd *cobra.Command) error { + installDir, err := getSkillInstallDir() + if err != nil { + return err + } + + installedPath, err := installSkillFiles(installDir) + if err != nil { + return err + } + + claudeDir, err := getClaudeSkillsDir() + if err != nil { + return err + } + + if err := linkClaudeSkill(claudeDir, installedPath); err != nil { + return err + } + + _, _ = fmt.Fprintln(cmd.OutOrStdout(), "Skill installed successfully.") + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " Installed to: %s\n", filepath.Join(installDir, "vector", "SKILL.md")) + _, _ = fmt.Fprintf(cmd.OutOrStdout(), " Linked from: %s\n", filepath.Join(claudeDir, "vector", "SKILL.md")) + + return nil +} diff --git a/internal/commands/skill_test.go b/internal/commands/skill_test.go new file mode 100644 index 0000000..f4ec675 --- /dev/null +++ b/internal/commands/skill_test.go @@ -0,0 +1,312 @@ +package commands + +import ( + "bytes" + "errors" + "os" + "path/filepath" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/version" + "github.com/built-fast/vector-cli/skills" +) + +func buildSkillCmd() (*cobra.Command, *bytes.Buffer) { + root := &cobra.Command{ + Use: "vector", + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewSkillCmd()) + + stdout := new(bytes.Buffer) + root.SetOut(stdout) + + return root, stdout +} + +func setSkillTestDirs(t *testing.T) (installDir, claudeDir string) { + t.Helper() + installDir = filepath.Join(t.TempDir(), "agents", "skills") + claudeDir = filepath.Join(t.TempDir(), "claude", "skills") + + 
oldInstall := skillInstallDir + oldClaude := claudeSkillsDir + skillInstallDir = installDir + claudeSkillsDir = claudeDir + t.Cleanup(func() { + skillInstallDir = oldInstall + claudeSkillsDir = oldClaude + }) + return installDir, claudeDir +} + +func TestSkillPrintsEmbeddedContent(t *testing.T) { + cmd, stdout := buildSkillCmd() + cmd.SetArgs([]string{"skill"}) + + err := cmd.Execute() + require.NoError(t, err) + + expected, err := skills.Content.ReadFile("vector/SKILL.md") + require.NoError(t, err) + + assert.Equal(t, string(expected), stdout.String()) +} + +func TestSkillInstallCreatesFile(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + cmd, stdout := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + + err := cmd.Execute() + require.NoError(t, err) + + // Verify SKILL.md was installed. + installed, err := os.ReadFile(filepath.Join(installDir, "vector", "SKILL.md")) + require.NoError(t, err) + + expected, err := skills.Content.ReadFile("vector/SKILL.md") + require.NoError(t, err) + assert.Equal(t, string(expected), string(installed)) + + // Verify Claude skills link/copy exists and is readable. + linked, err := os.ReadFile(filepath.Join(claudeDir, "vector", "SKILL.md")) + require.NoError(t, err) + assert.Equal(t, string(expected), string(linked)) + + // Verify output message. + assert.Contains(t, stdout.String(), "Skill installed successfully.") +} + +func TestSkillInstallIdempotent(t *testing.T) { + installDir, _ := setSkillTestDirs(t) + + cmd1, _ := buildSkillCmd() + cmd1.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd1.Execute()) + + cmd2, stdout := buildSkillCmd() + cmd2.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd2.Execute()) + + // File still exists and is correct after second run. 
+ installed, err := os.ReadFile(filepath.Join(installDir, "vector", "SKILL.md")) + require.NoError(t, err) + + expected, err := skills.Content.ReadFile("vector/SKILL.md") + require.NoError(t, err) + assert.Equal(t, string(expected), string(installed)) + assert.Contains(t, stdout.String(), "Skill installed successfully.") +} + +func TestSkillInstallSymlink(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + cmd, _ := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + linkPath := filepath.Join(claudeDir, "vector", "SKILL.md") + target, err := os.Readlink(linkPath) + require.NoError(t, err, "expected a symlink at %s", linkPath) + assert.Equal(t, filepath.Join(installDir, "vector", "SKILL.md"), target) +} + +func TestSkillInstallCopyFallback(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + // Override symlinkFunc to always fail, forcing the copy fallback. + oldSymlink := symlinkFunc + symlinkFunc = func(_, _ string) error { + return errors.New("symlink not supported") + } + t.Cleanup(func() { symlinkFunc = oldSymlink }) + + cmd, stdout := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + // Verify the Claude skills file is a regular file (not a symlink). + linkPath := filepath.Join(claudeDir, "vector", "SKILL.md") + _, err := os.Readlink(linkPath) + assert.Error(t, err, "expected a regular file, not a symlink") + + // Verify content matches. + expected, err := skills.Content.ReadFile("vector/SKILL.md") + require.NoError(t, err) + + copied, err := os.ReadFile(linkPath) + require.NoError(t, err) + assert.Equal(t, string(expected), string(copied)) + + // Verify installed file also exists. 
+ installed, err := os.ReadFile(filepath.Join(installDir, "vector", "SKILL.md")) + require.NoError(t, err) + assert.Equal(t, string(expected), string(installed)) + + assert.Contains(t, stdout.String(), "Skill installed successfully.") +} + +func TestSkillUninstallRemovesFiles(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + // Install first. + cmd1, _ := buildSkillCmd() + cmd1.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd1.Execute()) + + // Verify files exist. + require.FileExists(t, filepath.Join(installDir, "vector", "SKILL.md")) + require.FileExists(t, filepath.Join(claudeDir, "vector", "SKILL.md")) + + // Uninstall. + cmd2, stdout := buildSkillCmd() + cmd2.SetArgs([]string{"skill", "uninstall"}) + require.NoError(t, cmd2.Execute()) + + // Verify files are removed. + assert.NoDirExists(t, filepath.Join(installDir, "vector")) + assert.NoDirExists(t, filepath.Join(claudeDir, "vector")) + assert.Contains(t, stdout.String(), "Skill uninstalled successfully.") +} + +func TestSkillUninstallNoOpWhenNotInstalled(t *testing.T) { + setSkillTestDirs(t) + + // Uninstall without installing first — should be a no-op. + cmd, stdout := buildSkillCmd() + cmd.SetArgs([]string{"skill", "uninstall"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Skill uninstalled successfully.") +} + +func TestRefreshSkillsIfVersionChanged_SentinelMissing(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + oldVersion := version.Version + version.Version = "1.0.0" + t.Cleanup(func() { version.Version = oldVersion }) + + // No sentinel file exists — RefreshSkillsIfVersionChanged should be a no-op. + RefreshSkillsIfVersionChanged() + + // Verify nothing was installed. 
+ assert.NoDirExists(t, filepath.Join(installDir, "vector")) + assert.NoDirExists(t, filepath.Join(claudeDir, "vector")) +} + +func TestRefreshSkillsIfVersionChanged_VersionMatches(t *testing.T) { + installDir, _ := setSkillTestDirs(t) + + oldVersion := version.Version + version.Version = "1.0.0" + t.Cleanup(func() { version.Version = oldVersion }) + + // Install skill first to create the sentinel. + cmd, _ := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + // Record file modification time. + skillPath := filepath.Join(installDir, "vector", "SKILL.md") + infoBefore, err := os.Stat(skillPath) + require.NoError(t, err) + + // Refresh — version matches, so nothing should change. + RefreshSkillsIfVersionChanged() + + infoAfter, err := os.Stat(skillPath) + require.NoError(t, err) + assert.Equal(t, infoBefore.ModTime(), infoAfter.ModTime()) +} + +func TestRefreshSkillsIfVersionChanged_VersionMismatch(t *testing.T) { + installDir, claudeDir := setSkillTestDirs(t) + + oldVersion := version.Version + version.Version = "1.0.0" + t.Cleanup(func() { version.Version = oldVersion }) + + // Install skill at version 1.0.0. + cmd, _ := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + // Verify version stamp is 1.0.0. + stamp, err := os.ReadFile(filepath.Join(installDir, "vector", ".version")) + require.NoError(t, err) + assert.Equal(t, "1.0.0", string(stamp)) + + // Simulate upgrade to 2.0.0. + version.Version = "2.0.0" + + RefreshSkillsIfVersionChanged() + + // Verify version stamp was updated. + stamp, err = os.ReadFile(filepath.Join(installDir, "vector", ".version")) + require.NoError(t, err) + assert.Equal(t, "2.0.0", string(stamp)) + + // Verify SKILL.md still exists and is valid in both locations. 
+ expected, err := skills.Content.ReadFile("vector/SKILL.md") + require.NoError(t, err) + + installed, err := os.ReadFile(filepath.Join(installDir, "vector", "SKILL.md")) + require.NoError(t, err) + assert.Equal(t, string(expected), string(installed)) + + linked, err := os.ReadFile(filepath.Join(claudeDir, "vector", "SKILL.md")) + require.NoError(t, err) + assert.Equal(t, string(expected), string(linked)) +} + +func TestRefreshSkillsIfVersionChanged_DevVersionSkip(t *testing.T) { + installDir, _ := setSkillTestDirs(t) + + oldVersion := version.Version + t.Cleanup(func() { version.Version = oldVersion }) + + // Install at version 1.0.0 first. + version.Version = "1.0.0" + cmd, _ := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + // Now set version to "dev" — refresh should skip. + version.Version = "dev" + RefreshSkillsIfVersionChanged() + + // Version stamp should still be 1.0.0 (not overwritten with "dev"). + stamp, err := os.ReadFile(filepath.Join(installDir, "vector", ".version")) + require.NoError(t, err) + assert.Equal(t, "1.0.0", string(stamp)) + + // Same for empty version. 
+ version.Version = "" + RefreshSkillsIfVersionChanged() + + stamp, err = os.ReadFile(filepath.Join(installDir, "vector", ".version")) + require.NoError(t, err) + assert.Equal(t, "1.0.0", string(stamp)) +} + +func TestSkillInstallVersionStamp(t *testing.T) { + installDir, _ := setSkillTestDirs(t) + + cmd, _ := buildSkillCmd() + cmd.SetArgs([]string{"skill", "install"}) + require.NoError(t, cmd.Execute()) + + stamp, err := os.ReadFile(filepath.Join(installDir, "vector", ".version")) + require.NoError(t, err) + assert.Equal(t, version.Version, string(stamp)) +} diff --git a/internal/commands/ssl.go b/internal/commands/ssl.go new file mode 100644 index 0000000..92a9418 --- /dev/null +++ b/internal/commands/ssl.go @@ -0,0 +1,153 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +// NewSSLCmd creates the ssl command group. +func NewSSLCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "ssl", + Short: "Manage SSL certificates", + Long: "Manage SSL certificate provisioning for environments, including checking status and nudging stuck provisioning.", + } + + cmd.AddCommand(newSSLStatusCmd()) + cmd.AddCommand(newSSLNudgeCmd()) + + return cmd +} + +func newSSLStatusCmd() *cobra.Command { + return &cobra.Command{ + Use: "status ", + Short: "Check SSL provisioning status", + Long: "Get the current SSL provisioning status for an environment.", + Example: ` # Check SSL status + vector ssl status env-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + path := envsBasePath + "/" + args[0] + "/ssl" + resp, err := app.Client.Get(cmd.Context(), path, nil) + if err != nil { + return fmt.Errorf("failed to get SSL status: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get SSL 
status: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get SSL status: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get SSL status: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Status", Value: getString(item, "status")}, + {Key: "Provisioning Step", Value: formatString(getString(item, "provisioning_step"))}, + {Key: "Failure Reason", Value: formatString(getString(item, "failure_reason"))}, + {Key: "Production", Value: formatBool(getBool(item, "is_production"))}, + {Key: "Custom Domain", Value: formatString(getString(item, "custom_domain"))}, + {Key: "Platform Domain", Value: formatString(getString(item, "platform_domain"))}, + }) + return nil + }, + } +} + +func newSSLNudgeCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "nudge ", + Short: "Nudge SSL provisioning", + Long: "Manually nudge SSL provisioning for an environment. 
Use this when SSL provisioning appears to be stuck or to retry after a failure.", + Example: ` # Nudge SSL provisioning + vector ssl nudge env-abc123 + + # Retry from a failed state + vector ssl nudge env-abc123 --retry`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("retry") { + v, _ := cmd.Flags().GetBool("retry") + reqBody["retry"] = v + } + + path := envsBasePath + "/" + args[0] + "/ssl/nudge" + resp, err := app.Client.Post(cmd.Context(), path, reqBody) + if err != nil { + return fmt.Errorf("failed to nudge SSL: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to nudge SSL: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to nudge SSL: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + // Extract message from response + var envelope struct { + Message string `json:"message"` + } + if err := json.Unmarshal(body, &envelope); err == nil && envelope.Message != "" { + app.Output.Message(envelope.Message) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to nudge SSL: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "Status", Value: getString(item, "status")}, + {Key: "Provisioning Step", Value: formatString(getString(item, "provisioning_step"))}, + {Key: "Failure Reason", Value: formatString(getString(item, "failure_reason"))}, + {Key: "Production", Value: formatBool(getBool(item, "is_production"))}, + {Key: "Custom Domain", Value: formatString(getString(item, "custom_domain"))}, + {Key: "Platform Domain", Value: formatString(getString(item, "platform_domain"))}, + }) + return nil + }, + } + + cmd.Flags().Bool("retry", 
false, "Retry from a failed state") + + return cmd +} diff --git a/internal/commands/ssl_test.go b/internal/commands/ssl_test.go new file mode 100644 index 0000000..8d79cc7 --- /dev/null +++ b/internal/commands/ssl_test.go @@ -0,0 +1,456 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var sslStatusResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "vector_site_id": "site-001", + "name": "production", + "is_production": true, + "status": "active", + "provisioning_step": "complete", + "failure_reason": nil, + "php_version": "8.3", + "platform_domain": "wispy-dust.vectorpages.com", + "custom_domain": "example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSL status retrieved", + "http_status": 200, +} + +var sslNudgeProgressedResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "vector_site_id": "site-001", + "name": "production", + "is_production": true, + "status": "provisioning", + "provisioning_step": "deploying", + "failure_reason": nil, + "php_version": "8.3", + "platform_domain": "wispy-dust.vectorpages.com", + "custom_domain": "example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSL provisioning advanced from waiting_cert to deploying", + "http_status": 200, +} + +var sslNudgeWaitingResponse = map[string]any{ + "data": map[string]any{ + "id": "env-001", + "vector_site_id": "site-001", + "name": "production", + "is_production": true, + "status": "provisioning", + "provisioning_step": 
"waiting_custom_dns", + "failure_reason": nil, + "php_version": "8.3", + "platform_domain": "wispy-dust.vectorpages.com", + "custom_domain": "example.com", + "created_at": "2025-01-15T12:00:00+00:00", + "updated_at": "2025-01-15T12:00:00+00:00", + }, + "message": "SSL provisioning is waiting and cannot advance yet. Current step: waiting_custom_dns", + "http_status": 200, +} + +func newSSLTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/environments/env-001/ssl": + _ = json.NewEncoder(w).Encode(sslStatusResponse) + + case method == "POST" && path == "/api/v1/vector/environments/env-001/ssl/nudge": + body, _ := io.ReadAll(r.Body) + var reqBody map[string]any + _ = json.Unmarshal(body, &reqBody) + + if retry, ok := reqBody["retry"].(bool); ok && retry { + _ = json.NewEncoder(w).Encode(sslNudgeProgressedResponse) + } else { + _ = json.NewEncoder(w).Encode(sslNudgeWaitingResponse) + } + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildSSLCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output 
= output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + sslCmd := NewSSLCmd() + root.AddCommand(sslCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildSSLCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + sslCmd := NewSSLCmd() + root.AddCommand(sslCmd) + + stderr := new(bytes.Buffer) + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- SSL Status Tests --- + +func TestSSLStatusCmd_TableOutput(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "status", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "active") + assert.Contains(t, out, "complete") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "example.com") + assert.Contains(t, out, "wispy-dust.vectorpages.com") +} + +func TestSSLStatusCmd_JSONOutput(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"ssl", "status", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-001", result["id"]) + 
assert.Equal(t, "active", result["status"]) + assert.Equal(t, "complete", result["provisioning_step"]) +} + +func TestSSLStatusCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sslStatusResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "status", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/ssl", receivedPath) +} + +func TestSSLStatusCmd_AuthError(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"ssl", "status", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSLStatusCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildSSLCmdNoAuth(output.Table) + cmd.SetArgs([]string{"ssl", "status", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSLStatusCmd_MissingArgs(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "status"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- SSL Nudge Tests --- + +func TestSSLNudgeCmd_TableOutput(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + 
cmd.SetArgs([]string{"ssl", "nudge", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "SSL provisioning is waiting") + assert.Contains(t, out, "waiting_custom_dns") + assert.Contains(t, out, "provisioning") +} + +func TestSSLNudgeCmd_WithRetryFlag(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "env-001", "--retry"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "SSL provisioning advanced") + assert.Contains(t, out, "deploying") +} + +func TestSSLNudgeCmd_JSONOutput(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"ssl", "nudge", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "env-001", result["id"]) + assert.Equal(t, "provisioning", result["status"]) +} + +func TestSSLNudgeCmd_RequestBodyWithRetry(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sslNudgeProgressedResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "env-001", "--retry"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/ssl/nudge", receivedPath) + assert.Equal(t, true, 
receivedBody["retry"]) +} + +func TestSSLNudgeCmd_RequestBodyWithoutRetry(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(sslNudgeWaitingResponse) + })) + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "env-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/environments/env-001/ssl/nudge", receivedPath) + assert.Nil(t, receivedBody["retry"]) +} + +func TestSSLNudgeCmd_AuthError(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSLNudgeCmd_NoAuthToken(t *testing.T) { + cmd, _, _ := buildSSLCmdNoAuth(output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "env-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestSSLNudgeCmd_MissingArgs(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Help Text Tests --- + +func TestSSLCmd_HelpText(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + 
cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "status") + assert.Contains(t, out, "nudge") +} + +func TestSSLStatusCmd_HelpText(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "status", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "env-id") +} + +func TestSSLNudgeCmd_HelpText(t *testing.T) { + ts := newSSLTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildSSLCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"ssl", "nudge", "--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "--retry") + assert.Contains(t, out, "env-id") +} diff --git a/internal/commands/waf.go b/internal/commands/waf.go new file mode 100644 index 0000000..b531943 --- /dev/null +++ b/internal/commands/waf.go @@ -0,0 +1,21 @@ +package commands + +import ( + "github.com/spf13/cobra" +) + +// NewWafCmd creates the waf command group. 
+func NewWafCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "waf", + Short: "Manage WAF rules", + Long: "Manage Web Application Firewall rules for your sites.", + } + + cmd.AddCommand(NewWafRateLimitCmd()) + cmd.AddCommand(NewWafBlockedIPCmd()) + cmd.AddCommand(NewWafBlockedReferrerCmd()) + cmd.AddCommand(NewWafAllowedReferrerCmd()) + + return cmd +} diff --git a/internal/commands/waf_allowed_referrer.go b/internal/commands/waf_allowed_referrer.go new file mode 100644 index 0000000..063de5c --- /dev/null +++ b/internal/commands/waf_allowed_referrer.go @@ -0,0 +1,167 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +func wafAllowedReferrersPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/waf/allowed-referrers" +} + +// NewWafAllowedReferrerCmd creates the waf allowed-referrer command group. +func NewWafAllowedReferrerCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "allowed-referrer", + Short: "Manage WAF allowed referrers", + Long: "Manage allowed referrer hostnames for a site's Web Application Firewall.", + } + + cmd.AddCommand(newWafAllowedReferrerListCmd()) + cmd.AddCommand(newWafAllowedReferrerAddCmd()) + cmd.AddCommand(newWafAllowedReferrerRemoveCmd()) + + return cmd +} + +func newWafAllowedReferrerListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list ", + Short: "List allowed referrers", + Long: "List all allowed referrer hostnames for a site.", + Example: ` # List allowed referrers + vector waf allowed-referrer list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), wafAllowedReferrersPath(args[0]), nil) + if err != nil { + return fmt.Errorf("failed to list allowed referrers: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := 
io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list allowed referrers: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list allowed referrers: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list allowed referrers: %w", err) + } + + headers := []string{"HOSTNAME"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "hostname"), + }) + } + + app.Output.Table(headers, rows) + return nil + }, + } +} + +func newWafAllowedReferrerAddCmd() *cobra.Command { + return &cobra.Command{ + Use: "add ", + Short: "Add an allowed referrer", + Long: "Add a hostname to the allowed referrers list for a site.", + Example: ` # Allow a referrer + vector waf allowed-referrer add site-abc123 trusted.example.com`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{ + "hostname": args[1], + } + + resp, err := app.Client.Post(cmd.Context(), wafAllowedReferrersPath(args[0]), reqBody) + if err != nil { + return fmt.Errorf("failed to add allowed referrer: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to add allowed referrer: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to add allowed referrer: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message(fmt.Sprintf("Hostname %s added to allowed referrers.", args[1])) + return nil + }, + } +} + +func newWafAllowedReferrerRemoveCmd() *cobra.Command { + return &cobra.Command{ + Use: "remove ", + 
Short: "Remove an allowed referrer", + Long: "Remove a hostname from the allowed referrers list for a site.", + Example: ` # Remove an allowed referrer + vector waf allowed-referrer remove site-abc123 trusted.example.com`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), wafAllowedReferrersPath(args[0])+"/"+args[1]) + if err != nil { + return fmt.Errorf("failed to remove allowed referrer: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to remove allowed referrer: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to remove allowed referrer: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message(fmt.Sprintf("Hostname %s removed from allowed referrers.", args[1])) + return nil + }, + } +} diff --git a/internal/commands/waf_allowed_referrer_test.go b/internal/commands/waf_allowed_referrer_test.go new file mode 100644 index 0000000..6021da7 --- /dev/null +++ b/internal/commands/waf_allowed_referrer_test.go @@ -0,0 +1,303 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var allowedReferrerListResponse = map[string]any{ + "data": []map[string]any{ + {"hostname": "trusted.example.com"}, + {"hostname": "partner.example.org"}, + }, + "message": "Allowed referrers retrieved successfully", + "http_status": 200, +} + +var allowedReferrerAddResponse = map[string]any{ + "data": map[string]any{"hostname": "trusted.example.com"}, + "message": "Hostname added to 
allowed referrers", + "http_status": 201, +} + +var allowedReferrerRemoveResponse = map[string]any{ + "data": map[string]any{}, + "message": "Hostname removed from allowed referrers", + "http_status": 200, +} + +func newWafAllowedReferrerTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites/site-001/waf/allowed-referrers": + _ = json.NewEncoder(w).Encode(allowedReferrerListResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/waf/allowed-referrers": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(allowedReferrerAddResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001/waf/allowed-referrers/trusted.example.com": + _ = json.NewEncoder(w).Encode(allowedReferrerRemoveResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Allowed Referrer List Tests --- + +func TestWafAllowedReferrerListCmd_TableOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "trusted.example.com") + assert.Contains(t, out, "partner.example.org") + assert.Contains(t, out, "HOSTNAME") 
+} + +func TestWafAllowedReferrerListCmd_JSONOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "trusted.example.com", result[0]["hostname"]) +} + +func TestWafAllowedReferrerListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(allowedReferrerListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/allowed-referrers", receivedPath) +} + +func TestWafAllowedReferrerListCmd_AuthError(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafAllowedReferrerListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func 
TestWafAllowedReferrerListCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Allowed Referrer Add Tests --- + +func TestWafAllowedReferrerAddCmd_TableOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "add", "site-001", "trusted.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Hostname trusted.example.com added to allowed referrers.") +} + +func TestWafAllowedReferrerAddCmd_JSONOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "allowed-referrer", "add", "site-001", "trusted.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "trusted.example.com", result["hostname"]) +} + +func TestWafAllowedReferrerAddCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(allowedReferrerAddResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "add", "site-001", "trusted.example.com"}) + + err := cmd.Execute() + 
require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/allowed-referrers", receivedPath) + assert.Equal(t, "trusted.example.com", receivedBody["hostname"]) +} + +func TestWafAllowedReferrerAddCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "add", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Allowed Referrer Remove Tests --- + +func TestWafAllowedReferrerRemoveCmd_TableOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "remove", "site-001", "trusted.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Hostname trusted.example.com removed from allowed referrers.") +} + +func TestWafAllowedReferrerRemoveCmd_JSONOutput(t *testing.T) { + ts := newWafAllowedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "allowed-referrer", "remove", "site-001", "trusted.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) +} + +func TestWafAllowedReferrerRemoveCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(allowedReferrerRemoveResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "remove", 
"site-001", "trusted.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/allowed-referrers/trusted.example.com", receivedPath) +} + +func TestWafAllowedReferrerRemoveCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "allowed-referrer", "remove", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Help Tests --- + +func TestWafAllowedReferrerCmd_Help(t *testing.T) { + cmd := NewWafAllowedReferrerCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "add") + assert.Contains(t, out, "remove") + assert.Contains(t, out, "allowed referrer") +} diff --git a/internal/commands/waf_blocked_ip.go b/internal/commands/waf_blocked_ip.go new file mode 100644 index 0000000..6c702c2 --- /dev/null +++ b/internal/commands/waf_blocked_ip.go @@ -0,0 +1,167 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +func wafBlockedIPsPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/waf/blocked-ips" +} + +// NewWafBlockedIPCmd creates the waf blocked-ip command group. 
+func NewWafBlockedIPCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "blocked-ip", + Short: "Manage WAF blocked IPs", + Long: "Manage blocked IP addresses for a site's Web Application Firewall.", + } + + cmd.AddCommand(newWafBlockedIPListCmd()) + cmd.AddCommand(newWafBlockedIPAddCmd()) + cmd.AddCommand(newWafBlockedIPRemoveCmd()) + + return cmd +} + +func newWafBlockedIPListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list ", + Short: "List blocked IPs", + Long: "List all blocked IP addresses for a site.", + Example: ` # List blocked IPs + vector waf blocked-ip list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), wafBlockedIPsPath(args[0]), nil) + if err != nil { + return fmt.Errorf("failed to list blocked IPs: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list blocked IPs: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list blocked IPs: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list blocked IPs: %w", err) + } + + headers := []string{"IP"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "ip"), + }) + } + + app.Output.Table(headers, rows) + return nil + }, + } +} + +func newWafBlockedIPAddCmd() *cobra.Command { + return &cobra.Command{ + Use: "add ", + Short: "Add a blocked IP", + Long: "Add an IP address to the blocklist for a site.", + Example: ` # Block an IP address + vector waf blocked-ip add site-abc123 203.0.113.50`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) 
error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{ + "ip": args[1], + } + + resp, err := app.Client.Post(cmd.Context(), wafBlockedIPsPath(args[0]), reqBody) + if err != nil { + return fmt.Errorf("failed to add blocked IP: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to add blocked IP: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to add blocked IP: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message(fmt.Sprintf("IP %s added to blocklist.", args[1])) + return nil + }, + } +} + +func newWafBlockedIPRemoveCmd() *cobra.Command { + return &cobra.Command{ + Use: "remove ", + Short: "Remove a blocked IP", + Long: "Remove an IP address from the blocklist for a site.", + Example: ` # Unblock an IP address + vector waf blocked-ip remove site-abc123 203.0.113.50`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), wafBlockedIPsPath(args[0])+"/"+args[1]) + if err != nil { + return fmt.Errorf("failed to remove blocked IP: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to remove blocked IP: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to remove blocked IP: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message(fmt.Sprintf("IP %s removed from blocklist.", args[1])) + return nil + }, + } +} diff --git a/internal/commands/waf_blocked_ip_test.go b/internal/commands/waf_blocked_ip_test.go new file mode 100644 index 0000000..273e87e 
--- /dev/null +++ b/internal/commands/waf_blocked_ip_test.go @@ -0,0 +1,303 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var blockedIPListResponse = map[string]any{ + "data": []map[string]any{ + {"ip": "192.168.1.100"}, + {"ip": "10.0.0.50"}, + }, + "message": "Blocked IPs retrieved successfully", + "http_status": 200, +} + +var blockedIPAddResponse = map[string]any{ + "data": map[string]any{"ip": "192.168.1.100"}, + "message": "IP added to blocklist", + "http_status": 201, +} + +var blockedIPRemoveResponse = map[string]any{ + "data": map[string]any{}, + "message": "IP removed from blocklist", + "http_status": 200, +} + +func newWafBlockedIPTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites/site-001/waf/blocked-ips": + _ = json.NewEncoder(w).Encode(blockedIPListResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/waf/blocked-ips": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(blockedIPAddResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001/waf/blocked-ips/192.168.1.100": + _ = json.NewEncoder(w).Encode(blockedIPRemoveResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = 
json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Blocked IP List Tests --- + +func TestWafBlockedIPListCmd_TableOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "192.168.1.100") + assert.Contains(t, out, "10.0.0.50") + assert.Contains(t, out, "IP") +} + +func TestWafBlockedIPListCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "blocked-ip", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "192.168.1.100", result[0]["ip"]) +} + +func TestWafBlockedIPListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(blockedIPListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-ips", receivedPath) +} + +func TestWafBlockedIPListCmd_AuthError(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"waf", 
"blocked-ip", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafBlockedIPListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafBlockedIPListCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Blocked IP Add Tests --- + +func TestWafBlockedIPAddCmd_TableOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "add", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "IP 192.168.1.100 added to blocklist.") +} + +func TestWafBlockedIPAddCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "blocked-ip", "add", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "192.168.1.100", result["ip"]) +} + +func TestWafBlockedIPAddCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := 
io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(blockedIPAddResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "add", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-ips", receivedPath) + assert.Equal(t, "192.168.1.100", receivedBody["ip"]) +} + +func TestWafBlockedIPAddCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "add", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Blocked IP Remove Tests --- + +func TestWafBlockedIPRemoveCmd_TableOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "remove", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "IP 192.168.1.100 removed from blocklist.") +} + +func TestWafBlockedIPRemoveCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedIPTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "blocked-ip", "remove", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) +} + +func TestWafBlockedIPRemoveCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + 
receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(blockedIPRemoveResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "remove", "site-001", "192.168.1.100"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-ips/192.168.1.100", receivedPath) +} + +func TestWafBlockedIPRemoveCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-ip", "remove", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Help Tests --- + +func TestWafBlockedIPCmd_Help(t *testing.T) { + cmd := NewWafBlockedIPCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "add") + assert.Contains(t, out, "remove") + assert.Contains(t, out, "blocked IP") +} diff --git a/internal/commands/waf_blocked_referrer.go b/internal/commands/waf_blocked_referrer.go new file mode 100644 index 0000000..b782471 --- /dev/null +++ b/internal/commands/waf_blocked_referrer.go @@ -0,0 +1,167 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +func wafBlockedReferrersPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/waf/blocked-referrers" +} + +// NewWafBlockedReferrerCmd creates the waf blocked-referrer command group. 
+func NewWafBlockedReferrerCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "blocked-referrer", + Short: "Manage WAF blocked referrers", + Long: "Manage blocked referrer hostnames for a site's Web Application Firewall.", + } + + cmd.AddCommand(newWafBlockedReferrerListCmd()) + cmd.AddCommand(newWafBlockedReferrerAddCmd()) + cmd.AddCommand(newWafBlockedReferrerRemoveCmd()) + + return cmd +} + +func newWafBlockedReferrerListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list ", + Short: "List blocked referrers", + Long: "List all blocked referrer hostnames for a site.", + Example: ` # List blocked referrers + vector waf blocked-referrer list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), wafBlockedReferrersPath(args[0]), nil) + if err != nil { + return fmt.Errorf("failed to list blocked referrers: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list blocked referrers: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list blocked referrers: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list blocked referrers: %w", err) + } + + headers := []string{"HOSTNAME"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "hostname"), + }) + } + + app.Output.Table(headers, rows) + return nil + }, + } +} + +func newWafBlockedReferrerAddCmd() *cobra.Command { + return &cobra.Command{ + Use: "add ", + Short: "Add a blocked referrer", + Long: "Add a hostname to the blocked referrers list for a site.", + Example: ` # Block a referrer 
+ vector waf blocked-referrer add site-abc123 spam.example.com`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{ + "hostname": args[1], + } + + resp, err := app.Client.Post(cmd.Context(), wafBlockedReferrersPath(args[0]), reqBody) + if err != nil { + return fmt.Errorf("failed to add blocked referrer: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to add blocked referrer: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to add blocked referrer: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message(fmt.Sprintf("Hostname %s added to blocked referrers.", args[1])) + return nil + }, + } +} + +func newWafBlockedReferrerRemoveCmd() *cobra.Command { + return &cobra.Command{ + Use: "remove ", + Short: "Remove a blocked referrer", + Long: "Remove a hostname from the blocked referrers list for a site.", + Example: ` # Unblock a referrer + vector waf blocked-referrer remove site-abc123 spam.example.com`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), wafBlockedReferrersPath(args[0])+"/"+args[1]) + if err != nil { + return fmt.Errorf("failed to remove blocked referrer: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to remove blocked referrer: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to remove blocked referrer: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + 
app.Output.Message(fmt.Sprintf("Hostname %s removed from blocked referrers.", args[1])) + return nil + }, + } +} diff --git a/internal/commands/waf_blocked_referrer_test.go b/internal/commands/waf_blocked_referrer_test.go new file mode 100644 index 0000000..d469dfa --- /dev/null +++ b/internal/commands/waf_blocked_referrer_test.go @@ -0,0 +1,303 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/output" +) + +var blockedReferrerListResponse = map[string]any{ + "data": []map[string]any{ + {"hostname": "spam.example.com"}, + {"hostname": "bad-referrer.net"}, + }, + "message": "Blocked referrers retrieved successfully", + "http_status": 200, +} + +var blockedReferrerAddResponse = map[string]any{ + "data": map[string]any{"hostname": "spam.example.com"}, + "message": "Hostname added to blocked referrers", + "http_status": 201, +} + +var blockedReferrerRemoveResponse = map[string]any{ + "data": map[string]any{}, + "message": "Hostname removed from blocked referrers", + "http_status": 200, +} + +func newWafBlockedReferrerTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites/site-001/waf/blocked-referrers": + _ = json.NewEncoder(w).Encode(blockedReferrerListResponse) + + case method == "POST" && path 
== "/api/v1/vector/sites/site-001/waf/blocked-referrers": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(blockedReferrerAddResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001/waf/blocked-referrers/spam.example.com": + _ = json.NewEncoder(w).Encode(blockedReferrerRemoveResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +// --- Blocked Referrer List Tests --- + +func TestWafBlockedReferrerListCmd_TableOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "spam.example.com") + assert.Contains(t, out, "bad-referrer.net") + assert.Contains(t, out, "HOSTNAME") +} + +func TestWafBlockedReferrerListCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "blocked-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "spam.example.com", result[0]["hostname"]) +} + +func TestWafBlockedReferrerListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(blockedReferrerListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) 
+ cmd.SetArgs([]string{"waf", "blocked-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-referrers", receivedPath) +} + +func TestWafBlockedReferrerListCmd_AuthError(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafBlockedReferrerListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafBlockedReferrerListCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Blocked Referrer Add Tests --- + +func TestWafBlockedReferrerAddCmd_TableOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "add", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Hostname spam.example.com added to blocked referrers.") +} + +func TestWafBlockedReferrerAddCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + 
cmd.SetArgs([]string{"waf", "blocked-referrer", "add", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "spam.example.com", result["hostname"]) +} + +func TestWafBlockedReferrerAddCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(blockedReferrerAddResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "add", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-referrers", receivedPath) + assert.Equal(t, "spam.example.com", receivedBody["hostname"]) +} + +func TestWafBlockedReferrerAddCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "add", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Blocked Referrer Remove Tests --- + +func TestWafBlockedReferrerRemoveCmd_TableOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "remove", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Hostname spam.example.com removed from 
blocked referrers.") +} + +func TestWafBlockedReferrerRemoveCmd_JSONOutput(t *testing.T) { + ts := newWafBlockedReferrerTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "blocked-referrer", "remove", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) +} + +func TestWafBlockedReferrerRemoveCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(blockedReferrerRemoveResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "remove", "site-001", "spam.example.com"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/blocked-referrers/spam.example.com", receivedPath) +} + +func TestWafBlockedReferrerRemoveCmd_MissingArg(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "blocked-referrer", "remove", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Help Tests --- + +func TestWafBlockedReferrerCmd_Help(t *testing.T) { + cmd := NewWafBlockedReferrerCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "add") + assert.Contains(t, out, "remove") + assert.Contains(t, out, "blocked referrer") +} diff --git 
a/internal/commands/waf_rate_limit.go b/internal/commands/waf_rate_limit.go new file mode 100644 index 0000000..4cd2051 --- /dev/null +++ b/internal/commands/waf_rate_limit.go @@ -0,0 +1,438 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +func wafRateLimitsPath(siteID string) string { + return sitesBasePath + "/" + siteID + "/waf/rate-limits" +} + +// NewWafRateLimitCmd creates the waf rate-limit command group. +func NewWafRateLimitCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "rate-limit", + Short: "Manage WAF rate limiting rules", + Long: "Manage WAF rate limiting rules to protect your sites from abuse.", + } + + cmd.AddCommand(newWafRateLimitListCmd()) + cmd.AddCommand(newWafRateLimitShowCmd()) + cmd.AddCommand(newWafRateLimitCreateCmd()) + cmd.AddCommand(newWafRateLimitUpdateCmd()) + cmd.AddCommand(newWafRateLimitDeleteCmd()) + + return cmd +} + +func newWafRateLimitListCmd() *cobra.Command { + return &cobra.Command{ + Use: "list ", + Short: "List WAF rate limiting rules", + Long: "Retrieve all rate limit rules configured for a site.", + Example: ` # List rate limit rules + vector waf rate-limit list site-abc123`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), wafRateLimitsPath(args[0]), nil) + if err != nil { + return fmt.Errorf("failed to list rate limits: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list rate limits: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list rate limits: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var items []map[string]any + if err := 
json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list rate limits: %w", err) + } + + headers := []string{"ID", "NAME", "REQUESTS/TIME", "BLOCK TIME"} + var rows [][]string + for _, item := range items { + config := getMap(item, "configuration") + reqCount := getFloat(config, "request_count") + timeframe := getFloat(config, "timeframe") + blockTime := getFloat(config, "block_time") + + rows = append(rows, []string{ + fmt.Sprintf("%.0f", getFloat(item, "id")), + getString(item, "name"), + fmt.Sprintf("%.0f/%.0fs", reqCount, timeframe), + fmt.Sprintf("%.0fs", blockTime), + }) + } + + app.Output.Table(headers, rows) + return nil + }, + } +} + +func newWafRateLimitShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show a WAF rate limiting rule", + Long: "Display details of a specific rate limit rule.", + Example: ` # Show rule details + vector waf rate-limit show site-abc123 rule-42`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), wafRateLimitsPath(args[0])+"/"+args[1], nil) + if err != nil { + return fmt.Errorf("failed to get rate limit: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get rate limit: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get rate limit: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get rate limit: %w", err) + } + + config := getMap(item, "configuration") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: fmt.Sprintf("%.0f", getFloat(item, "id"))}, + {Key: "Name", Value: getString(item, "name")}, + {Key: 
"Description", Value: formatString(getString(item, "description"))}, + {Key: "Request Count", Value: fmt.Sprintf("%.0f", getFloat(config, "request_count"))}, + {Key: "Timeframe", Value: fmt.Sprintf("%.0f", getFloat(config, "timeframe"))}, + {Key: "Block Time", Value: fmt.Sprintf("%.0f", getFloat(config, "block_time"))}, + {Key: "Value", Value: formatString(getString(config, "value"))}, + {Key: "Operator", Value: formatString(getString(config, "operator"))}, + {Key: "Variables", Value: formatSliceField(config, "variables")}, + {Key: "Transformations", Value: formatSliceField(config, "transformations")}, + }) + return nil + }, + } +} + +func newWafRateLimitCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create ", + Short: "Create a WAF rate limiting rule", + Long: "Create a new rate limit rule for a site.", + Example: ` # Create a rate limit rule + vector waf rate-limit create site-abc123 --name "login-limit" --request-count 100 --timeframe 10 --block-time 60`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + name, _ := cmd.Flags().GetString("name") + requestCount, _ := cmd.Flags().GetInt("request-count") + timeframe, _ := cmd.Flags().GetInt("timeframe") + blockTime, _ := cmd.Flags().GetInt("block-time") + + reqBody := map[string]any{ + "name": name, + "request_count": requestCount, + "timeframe": timeframe, + "block_time": blockTime, + } + + if cmd.Flags().Changed("description") { + desc, _ := cmd.Flags().GetString("description") + reqBody["description"] = desc + } + + if cmd.Flags().Changed("value") { + value, _ := cmd.Flags().GetString("value") + reqBody["value"] = value + } + + if cmd.Flags().Changed("operator") { + operator, _ := cmd.Flags().GetString("operator") + reqBody["operator"] = operator + } + + if cmd.Flags().Changed("variables") { + vars, _ := cmd.Flags().GetString("variables") + reqBody["variables"] = strings.Split(vars, ",") + } + + 
if cmd.Flags().Changed("transformations") { + trans, _ := cmd.Flags().GetString("transformations") + reqBody["transformations"] = strings.Split(trans, ",") + } + + resp, err := app.Client.Post(cmd.Context(), wafRateLimitsPath(args[0]), reqBody) + if err != nil { + return fmt.Errorf("failed to create rate limit: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create rate limit: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create rate limit: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create rate limit: %w", err) + } + + config := getMap(item, "configuration") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: fmt.Sprintf("%.0f", getFloat(item, "id"))}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Description", Value: formatString(getString(item, "description"))}, + {Key: "Request Count", Value: fmt.Sprintf("%.0f", getFloat(config, "request_count"))}, + {Key: "Timeframe", Value: fmt.Sprintf("%.0f", getFloat(config, "timeframe"))}, + {Key: "Block Time", Value: fmt.Sprintf("%.0f", getFloat(config, "block_time"))}, + {Key: "Value", Value: formatString(getString(config, "value"))}, + {Key: "Operator", Value: formatString(getString(config, "operator"))}, + {Key: "Variables", Value: formatSliceField(config, "variables")}, + {Key: "Transformations", Value: formatSliceField(config, "transformations")}, + }) + return nil + }, + } + + cmd.Flags().String("name", "", "Rule name (required)") + cmd.Flags().Int("request-count", 0, "Number of requests allowed within the timeframe (required)") + cmd.Flags().Int("timeframe", 0, "Time window in seconds (required)") + cmd.Flags().Int("block-time", 0, "Duration to block in seconds 
(required)") + cmd.Flags().String("description", "", "Rule description") + cmd.Flags().String("value", "", "URL path or pattern to match") + cmd.Flags().String("operator", "", "Match operator") + cmd.Flags().String("variables", "", "Comma-separated request variables to inspect") + cmd.Flags().String("transformations", "", "Comma-separated transformations to apply") + _ = cmd.MarkFlagRequired("name") + _ = cmd.MarkFlagRequired("request-count") + _ = cmd.MarkFlagRequired("timeframe") + _ = cmd.MarkFlagRequired("block-time") + + return cmd +} + +func newWafRateLimitUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update a WAF rate limiting rule", + Long: "Update an existing rate limit rule. Only sends changed fields.", + Example: ` # Update block time + vector waf rate-limit update site-abc123 rule-42 --block-time 300`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("name") { + name, _ := cmd.Flags().GetString("name") + reqBody["name"] = name + } + + if cmd.Flags().Changed("description") { + desc, _ := cmd.Flags().GetString("description") + reqBody["description"] = desc + } + + if cmd.Flags().Changed("request-count") { + rc, _ := cmd.Flags().GetInt("request-count") + reqBody["request_count"] = rc + } + + if cmd.Flags().Changed("timeframe") { + tf, _ := cmd.Flags().GetInt("timeframe") + reqBody["timeframe"] = tf + } + + if cmd.Flags().Changed("block-time") { + bt, _ := cmd.Flags().GetInt("block-time") + reqBody["block_time"] = bt + } + + if cmd.Flags().Changed("value") { + value, _ := cmd.Flags().GetString("value") + reqBody["value"] = value + } + + if cmd.Flags().Changed("operator") { + operator, _ := cmd.Flags().GetString("operator") + reqBody["operator"] = operator + } + + if cmd.Flags().Changed("variables") { + vars, _ := cmd.Flags().GetString("variables") + 
reqBody["variables"] = strings.Split(vars, ",") + } + + if cmd.Flags().Changed("transformations") { + trans, _ := cmd.Flags().GetString("transformations") + reqBody["transformations"] = strings.Split(trans, ",") + } + + resp, err := app.Client.Put(cmd.Context(), wafRateLimitsPath(args[0])+"/"+args[1], reqBody) + if err != nil { + return fmt.Errorf("failed to update rate limit: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update rate limit: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update rate limit: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update rate limit: %w", err) + } + + config := getMap(item, "configuration") + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: fmt.Sprintf("%.0f", getFloat(item, "id"))}, + {Key: "Name", Value: getString(item, "name")}, + {Key: "Description", Value: formatString(getString(item, "description"))}, + {Key: "Request Count", Value: fmt.Sprintf("%.0f", getFloat(config, "request_count"))}, + {Key: "Timeframe", Value: fmt.Sprintf("%.0f", getFloat(config, "timeframe"))}, + {Key: "Block Time", Value: fmt.Sprintf("%.0f", getFloat(config, "block_time"))}, + {Key: "Value", Value: formatString(getString(config, "value"))}, + {Key: "Operator", Value: formatString(getString(config, "operator"))}, + {Key: "Variables", Value: formatSliceField(config, "variables")}, + {Key: "Transformations", Value: formatSliceField(config, "transformations")}, + }) + return nil + }, + } + + cmd.Flags().String("name", "", "Rule name") + cmd.Flags().String("description", "", "Rule description") + cmd.Flags().Int("request-count", 0, "Number of requests allowed within the timeframe") + cmd.Flags().Int("timeframe", 0, 
"Time window in seconds") + cmd.Flags().Int("block-time", 0, "Duration to block in seconds") + cmd.Flags().String("value", "", "URL path or pattern to match") + cmd.Flags().String("operator", "", "Match operator") + cmd.Flags().String("variables", "", "Comma-separated request variables to inspect") + cmd.Flags().String("transformations", "", "Comma-separated transformations to apply") + + return cmd +} + +func newWafRateLimitDeleteCmd() *cobra.Command { + return &cobra.Command{ + Use: "delete ", + Short: "Delete a WAF rate limiting rule", + Long: "Permanently delete a rate limit rule. This action cannot be undone.", + Example: ` # Delete a rule + vector waf rate-limit delete site-abc123 rule-42`, + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), wafRateLimitsPath(args[0])+"/"+args[1]) + if err != nil { + return fmt.Errorf("failed to delete rate limit: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete rate limit: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete rate limit: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + app.Output.Message("Rate limit rule deleted successfully.") + return nil + }, + } +} + +// formatSliceField joins string elements of a slice field into a comma-separated string. 
+func formatSliceField(m map[string]any, key string) string { + items := getSlice(m, key) + if len(items) == 0 { + return "-" + } + parts := make([]string, 0, len(items)) + for _, item := range items { + if s, ok := item.(string); ok { + parts = append(parts, s) + } + } + if len(parts) == 0 { + return "-" + } + return strings.Join(parts, ", ") +} diff --git a/internal/commands/waf_rate_limit_test.go b/internal/commands/waf_rate_limit_test.go new file mode 100644 index 0000000..49f0ddd --- /dev/null +++ b/internal/commands/waf_rate_limit_test.go @@ -0,0 +1,705 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var rateLimitListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": float64(12345), + "name": "API Rate Limit", + "description": "Limit API requests to 100/second", + "shield_zone_id": float64(67890), + "configuration": map[string]any{ + "request_count": float64(100), + "timeframe": float64(1), + "block_time": float64(60), + "value": "/api/*", + "action": "rate-limit", + "operator": "begins-with", + "variables": []any{"request-uri"}, + "transformations": []any{"lowercase"}, + }, + }, + { + "id": float64(12346), + "name": "Login Rate Limit", + "description": "Limit login attempts", + "shield_zone_id": float64(67890), + "configuration": map[string]any{ + "request_count": float64(10), + "timeframe": float64(10), + "block_time": float64(300), + "value": "/login", + "action": "rate-limit", + "operator": "eq", + "variables": []any{"request-uri"}, + "transformations": []any{"lowercase", "url-decode"}, + }, + }, + }, + "message": "Rate limits retrieved successfully", + 
"http_status": 200, +} + +var rateLimitShowResponse = map[string]any{ + "data": map[string]any{ + "id": float64(12345), + "name": "API Rate Limit", + "description": "Limit API requests to 100/second", + "shield_zone_id": float64(67890), + "configuration": map[string]any{ + "request_count": float64(100), + "timeframe": float64(1), + "block_time": float64(60), + "value": "/api/*", + "action": "rate-limit", + "operator": "begins-with", + "variables": []any{"request-uri"}, + "transformations": []any{"lowercase"}, + }, + }, + "message": "Rate limit retrieved successfully", + "http_status": 200, +} + +var rateLimitCreateResponse = map[string]any{ + "data": map[string]any{ + "id": float64(12347), + "name": "New Rate Limit", + "description": "New rule description", + "shield_zone_id": float64(67890), + "configuration": map[string]any{ + "request_count": float64(50), + "timeframe": float64(10), + "block_time": float64(300), + "value": "/api/*", + "action": "rate-limit", + "operator": "begins-with", + "variables": []any{"request-uri"}, + "transformations": []any{"lowercase", "url-decode"}, + }, + }, + "message": "Rate limit created successfully", + "http_status": 201, +} + +var rateLimitUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": float64(12345), + "name": "Updated Rate Limit", + "description": "Updated description", + "shield_zone_id": float64(67890), + "configuration": map[string]any{ + "request_count": float64(200), + "timeframe": float64(10), + "block_time": float64(300), + "value": "/api/v2/*", + "action": "rate-limit", + "operator": "regex", + "variables": []any{"request-uri", "query-string"}, + "transformations": []any{"lowercase"}, + }, + }, + "message": "Rate limit updated successfully", + "http_status": 200, +} + +var rateLimitDeleteResponse = map[string]any{ + "data": map[string]any{}, + "message": "Rate limit deleted successfully", + "http_status": 200, +} + +func newWafRateLimitTestServer(validToken string) *httptest.Server { + return 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/sites/site-001/waf/rate-limits": + _ = json.NewEncoder(w).Encode(rateLimitListResponse) + + case method == "GET" && path == "/api/v1/vector/sites/site-001/waf/rate-limits/12345": + _ = json.NewEncoder(w).Encode(rateLimitShowResponse) + + case method == "POST" && path == "/api/v1/vector/sites/site-001/waf/rate-limits": + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(rateLimitCreateResponse) + + case method == "PUT" && path == "/api/v1/vector/sites/site-001/waf/rate-limits/12345": + _ = json.NewEncoder(w).Encode(rateLimitUpdateResponse) + + case method == "DELETE" && path == "/api/v1/vector/sites/site-001/waf/rate-limits/12345": + _ = json.NewEncoder(w).Encode(rateLimitDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildWafCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + 
root.AddCommand(NewWafCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildWafCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewWafCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Rate Limit List Tests --- + +func TestWafRateLimitListCmd_TableOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "12345") + assert.Contains(t, out, "API Rate Limit") + assert.Contains(t, out, "100/1s") + assert.Contains(t, out, "60s") + assert.Contains(t, out, "12346") + assert.Contains(t, out, "Login Rate Limit") + assert.Contains(t, out, "10/10s") + assert.Contains(t, out, "300s") +} + +func TestWafRateLimitListCmd_JSONOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "rate-limit", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, float64(12345), result[0]["id"]) +} + +func 
TestWafRateLimitListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(rateLimitListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "list", "site-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/rate-limits", receivedPath) +} + +func TestWafRateLimitListCmd_AuthError(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafRateLimitListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildWafCmdNoAuth(output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "list", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWafRateLimitListCmd_MissingArg(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "list"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Rate Limit Show Tests --- + +func TestWafRateLimitShowCmd_TableOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + 
cmd.SetArgs([]string{"waf", "rate-limit", "show", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "12345") + assert.Contains(t, out, "API Rate Limit") + assert.Contains(t, out, "Limit API requests to 100/second") + assert.Contains(t, out, "100") + assert.Contains(t, out, "60") + assert.Contains(t, out, "/api/*") + assert.Contains(t, out, "begins-with") + assert.Contains(t, out, "request-uri") + assert.Contains(t, out, "lowercase") +} + +func TestWafRateLimitShowCmd_JSONOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "rate-limit", "show", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, float64(12345), result["id"]) + assert.Equal(t, "API Rate Limit", result["name"]) +} + +func TestWafRateLimitShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(rateLimitShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "show", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/rate-limits/12345", receivedPath) +} + +func TestWafRateLimitShowCmd_MissingArg(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "show", "site-001"}) + + err 
:= cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Rate Limit Create Tests --- + +func TestWafRateLimitCreateCmd_TableOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "create", "site-001", + "--name", "New Rate Limit", + "--request-count", "50", + "--timeframe", "10", + "--block-time", "300", + "--description", "New rule description", + "--value", "/api/*", + "--operator", "begins-with", + "--variables", "request-uri", + "--transformations", "lowercase,url-decode", + }) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "12347") + assert.Contains(t, out, "New Rate Limit") + assert.Contains(t, out, "New rule description") + assert.Contains(t, out, "50") + assert.Contains(t, out, "300") + assert.Contains(t, out, "/api/*") + assert.Contains(t, out, "begins-with") + assert.Contains(t, out, "request-uri") + assert.Contains(t, out, "lowercase, url-decode") +} + +func TestWafRateLimitCreateCmd_JSONOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "rate-limit", "create", "site-001", + "--name", "New Rate Limit", + "--request-count", "50", + "--timeframe", "10", + "--block-time", "300", + }) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, float64(12347), result["id"]) +} + +func TestWafRateLimitCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := 
io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(rateLimitCreateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "create", "site-001", + "--name", "New Rate Limit", + "--request-count", "50", + "--timeframe", "10", + "--block-time", "300", + "--description", "New rule description", + "--variables", "request-uri", + "--transformations", "lowercase,url-decode", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/rate-limits", receivedPath) + assert.Equal(t, "New Rate Limit", receivedBody["name"]) + assert.Equal(t, float64(50), receivedBody["request_count"]) + assert.Equal(t, float64(10), receivedBody["timeframe"]) + assert.Equal(t, float64(300), receivedBody["block_time"]) + assert.Equal(t, "New rule description", receivedBody["description"]) + vars, ok := receivedBody["variables"].([]any) + require.True(t, ok) + assert.Equal(t, []any{"request-uri"}, vars) + trans, ok := receivedBody["transformations"].([]any) + require.True(t, ok) + assert.Equal(t, []any{"lowercase", "url-decode"}, trans) +} + +func TestWafRateLimitCreateCmd_MissingRequiredFlags(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "create", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +func TestWafRateLimitCreateCmd_MissingArg(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "create"}) + + err := cmd.Execute() + require.Error(t, 
err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Rate Limit Update Tests --- + +func TestWafRateLimitUpdateCmd_TableOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "update", "site-001", "12345", + "--name", "Updated Rate Limit", + "--request-count", "200", + }) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "12345") + assert.Contains(t, out, "Updated Rate Limit") + assert.Contains(t, out, "200") +} + +func TestWafRateLimitUpdateCmd_JSONOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "rate-limit", "update", "site-001", "12345", + "--name", "Updated Rate Limit", + }) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, float64(12345), result["id"]) +} + +func TestWafRateLimitUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(rateLimitUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "update", "site-001", "12345", + "--name", "Updated Rate Limit", + "--request-count", "200", + }) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "PUT", receivedMethod) + assert.Equal(t, 
"/api/v1/vector/sites/site-001/waf/rate-limits/12345", receivedPath) + assert.Equal(t, "Updated Rate Limit", receivedBody["name"]) + assert.Equal(t, float64(200), receivedBody["request_count"]) + // Flags not provided should not be sent + _, hasTimeframe := receivedBody["timeframe"] + assert.False(t, hasTimeframe) + _, hasBlockTime := receivedBody["block_time"] + assert.False(t, hasBlockTime) +} + +func TestWafRateLimitUpdateCmd_VariablesFlag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(rateLimitUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "update", "site-001", "12345", + "--variables", "request-uri,query-string", + }) + + err := cmd.Execute() + require.NoError(t, err) + + vars, ok := receivedBody["variables"].([]any) + require.True(t, ok) + assert.Equal(t, []any{"request-uri", "query-string"}, vars) +} + +func TestWafRateLimitUpdateCmd_MissingArg(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "update", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Rate Limit Delete Tests --- + +func TestWafRateLimitDeleteCmd_TableOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "delete", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Rate limit rule deleted successfully") +} + +func 
TestWafRateLimitDeleteCmd_JSONOutput(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWafCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"waf", "rate-limit", "delete", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) +} + +func TestWafRateLimitDeleteCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(rateLimitDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "delete", "site-001", "12345"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/sites/site-001/waf/rate-limits/12345", receivedPath) +} + +func TestWafRateLimitDeleteCmd_MissingArg(t *testing.T) { + ts := newWafRateLimitTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWafCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"waf", "rate-limit", "delete", "site-001"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 2 arg(s)") +} + +// --- Help Tests --- + +func TestWafRateLimitCmd_Help(t *testing.T) { + cmd := NewWafRateLimitCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "rate limit") +} diff --git 
a/internal/commands/waf_test.go b/internal/commands/waf_test.go new file mode 100644 index 0000000..e966319 --- /dev/null +++ b/internal/commands/waf_test.go @@ -0,0 +1,27 @@ +package commands + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestWafCmd_HelpText(t *testing.T) { + cmd := NewWafCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "rate-limit") + assert.Contains(t, out, "blocked-ip") + assert.Contains(t, out, "blocked-referrer") + assert.Contains(t, out, "allowed-referrer") + assert.Contains(t, out, "WAF") +} diff --git a/internal/commands/wait.go b/internal/commands/wait.go new file mode 100644 index 0000000..9bb17b8 --- /dev/null +++ b/internal/commands/wait.go @@ -0,0 +1,249 @@ +package commands + +import ( + "context" + "encoding/json" + "fmt" + "io" + "math" + "os" + "os/signal" + "time" + + "github.com/spf13/cobra" + "golang.org/x/term" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/output" +) + +const ( + maxTimeout = 30 * time.Minute + minPollInterval = 1 * time.Second + maxConsecutiveErrors = 10 + + // ANSI escape sequences for alternate screen buffer. + ansiAltScreenEnter = "\033[?1049h" + ansiAltScreenExit = "\033[?1049l" + ansiCursorHome = "\033[H" + ansiClearScreen = "\033[2J" +) + +// isTerminalForWait checks if stdout is a terminal. Override in tests. +var isTerminalForWait = func() bool { + return term.IsTerminal(int(os.Stdout.Fd())) +} + +// altScreenWriter is the writer used for alternate screen display. Override in tests. +var altScreenWriter io.Writer = os.Stdout + +// waitConfig holds all parameters needed by waitForResource. 
+type waitConfig struct { + // ResourceID is the identifier of the resource being waited on. + ResourceID string + + // PollPath is the API path to GET for status checks. + PollPath string + + // Interval is the duration between poll requests. + Interval time.Duration + + // Timeout is the maximum duration to wait before giving up. + Timeout time.Duration + + // TerminalStatuses is the set of statuses that indicate completion. + TerminalStatuses map[string]bool + + // FailedStatuses is the set of statuses that indicate failure. + FailedStatuses map[string]bool + + // Noun is a human-readable label for the resource (e.g., "Deployment", "Site"). + Noun string + + // FormatDisplay is an optional callback that formats poll data for display. + FormatDisplay func(data map[string]any) []string +} + +// waitResult holds the outcome of a wait operation. +type waitResult struct { + // FinalData is the parsed response data from the last successful poll. + FinalData json.RawMessage + + // Status is the terminal status that ended the wait. + Status string + + // Elapsed is the total time spent waiting. + Elapsed time.Duration +} + +// addWaitFlags registers --wait, --poll-interval, and --timeout on a command. +func addWaitFlags(cmd *cobra.Command) { + cmd.Flags().Bool("wait", false, "Wait for the operation to complete") + cmd.Flags().Duration("poll-interval", 60*time.Second, "Interval between status polls (minimum 1s)") + cmd.Flags().Duration("timeout", 5*time.Minute, "Maximum time to wait (maximum 30m)") +} + +// getWaitConfig reads wait-related flags from the command and returns a +// partially populated waitConfig. The caller must set ResourceID, PollPath, +// TerminalStatuses, FailedStatuses, Noun, and FormatDisplay. 
+func getWaitConfig(cmd *cobra.Command) (enabled bool, interval, timeout time.Duration, err error) { + enabled, _ = cmd.Flags().GetBool("wait") + interval, _ = cmd.Flags().GetDuration("poll-interval") + timeout, _ = cmd.Flags().GetDuration("timeout") + + if !enabled { + return false, 0, 0, nil + } + + if interval < minPollInterval { + return false, 0, 0, &api.APIError{ + Message: fmt.Sprintf("poll interval must be at least %s", minPollInterval), + ExitCode: 1, + } + } + + if timeout > maxTimeout { + return false, 0, 0, &api.APIError{ + Message: fmt.Sprintf("timeout must not exceed %s", maxTimeout), + ExitCode: 1, + } + } + + if interval > timeout { + return false, 0, 0, &api.APIError{ + Message: "poll interval must not exceed timeout", + ExitCode: 1, + } + } + + return enabled, interval, timeout, nil +} + +// useAltScreen returns true when the alternate screen display should be used. +// It requires a TTY and non-JSON output format. +func useAltScreen(app *appctx.App) bool { + if app.Output.Format() == output.JSON { + return false + } + return isTerminalForWait() +} + +// renderWaitDisplay writes the current wait status to the alternate screen buffer. +func renderWaitDisplay(w io.Writer, cfg *waitConfig, pollCount, estimatedPolls int, elapsed time.Duration, kvLines []string) { + _, _ = fmt.Fprint(w, ansiCursorHome+ansiClearScreen) + _, _ = fmt.Fprintf(w, "Waiting for %s %s... (%s)\n\n", cfg.Noun, cfg.ResourceID, elapsed.Truncate(time.Second)) + _, _ = fmt.Fprintf(w, "Poll %d of ~%d\n", pollCount, estimatedPolls) + _, _ = fmt.Fprintf(w, "Polling every %s. Press Ctrl+C to cancel.\n\n", cfg.Interval) + for _, line := range kvLines { + _, _ = fmt.Fprintln(w, line) + } +} + +// waitForResource polls the API until the resource reaches a terminal or failed +// status, the timeout expires, or the context is cancelled (e.g., Ctrl+C). 
+func waitForResource(ctx context.Context, app *appctx.App, cfg *waitConfig) (*waitResult, error) { + ctx, stop := signal.NotifyContext(ctx, os.Interrupt) + defer stop() + + useAlt := useAltScreen(app) + if useAlt { + _, _ = fmt.Fprint(altScreenWriter, ansiAltScreenEnter) + defer func() { _, _ = fmt.Fprint(altScreenWriter, ansiAltScreenExit) }() + } + + deadline := time.After(cfg.Timeout) + ticker := time.NewTicker(cfg.Interval) + defer ticker.Stop() + + start := time.Now() + consecutiveErrors := 0 + pollCount := 0 + estimatedPolls := int(math.Ceil(float64(cfg.Timeout) / float64(cfg.Interval))) + var lastErr error + + for { + select { + case <-ctx.Done(): + return nil, &api.APIError{ + Message: fmt.Sprintf("%s wait cancelled", cfg.Noun), + ExitCode: 1, + } + case <-deadline: + return nil, &api.APIError{ + Message: fmt.Sprintf("timed out waiting for %s %s after %s", cfg.Noun, cfg.ResourceID, cfg.Timeout), + ExitCode: 1, + } + case <-ticker.C: + pollCount++ + data, status, err := pollOnce(ctx, app, cfg) + if err != nil { + consecutiveErrors++ + lastErr = err + if consecutiveErrors >= maxConsecutiveErrors { + return nil, &api.APIError{ + Message: fmt.Sprintf("aborting after %d consecutive poll failures for %s %s: %v", maxConsecutiveErrors, cfg.Noun, cfg.ResourceID, lastErr), + ExitCode: 1, + } + } + continue + } + + consecutiveErrors = 0 + + if useAlt && cfg.FormatDisplay != nil { + var item map[string]any + if jsonErr := json.Unmarshal(data, &item); jsonErr == nil { + kvLines := cfg.FormatDisplay(item) + renderWaitDisplay(altScreenWriter, cfg, pollCount, estimatedPolls, time.Since(start), kvLines) + } + } + + if cfg.FailedStatuses[status] { + return nil, &api.APIError{ + Message: fmt.Sprintf("%s %s reached failed status: %s", cfg.Noun, cfg.ResourceID, status), + ExitCode: 1, + } + } + + if cfg.TerminalStatuses[status] { + return &waitResult{ + FinalData: data, + Status: status, + Elapsed: time.Since(start), + }, nil + } + } + } +} + +// pollOnce performs a single GET 
request and extracts the status field. +func pollOnce(ctx context.Context, app *appctx.App, cfg *waitConfig) (json.RawMessage, string, error) { + resp, err := app.Client.Get(ctx, cfg.PollPath, nil) + if err != nil { + return nil, "", err + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, "", fmt.Errorf("failed to read poll response: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return nil, "", fmt.Errorf("failed to parse poll response: %w", err) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return nil, "", fmt.Errorf("failed to unmarshal poll data: %w", err) + } + + status := getString(item, "status") + if status == "" { + return nil, "", fmt.Errorf("poll response missing status field") + } + + return data, status, nil +} diff --git a/internal/commands/wait_test.go b/internal/commands/wait_test.go new file mode 100644 index 0000000..5e0c3af --- /dev/null +++ b/internal/commands/wait_test.go @@ -0,0 +1,426 @@ +package commands + +import ( + "bytes" + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "sync/atomic" + "testing" + "time" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +// countingResponse defines an HTTP response for newCountingTestServer. +type countingResponse struct { + httpStatus int + body map[string]any +} + +// newCountingTestServer returns a test server that returns different responses +// on successive GET requests. After exhausting the response list, it repeats +// the last response. 
+func newCountingTestServer(validToken string, responses []countingResponse) *httptest.Server { + var callCount atomic.Int64 + + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + idx := int(callCount.Add(1)) - 1 + if idx >= len(responses) { + idx = len(responses) - 1 + } + + resp := responses[idx] + w.Header().Set("Content-Type", "application/json") + if resp.httpStatus != 0 { + w.WriteHeader(resp.httpStatus) + } + _ = json.NewEncoder(w).Encode(resp.body) + })) +} + +// newWaitApp creates an App wired to the given test server for wait tests. +func newWaitApp(baseURL, token string, format output.Format) (*appctx.App, *bytes.Buffer) { + stdout := new(bytes.Buffer) + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp(config.DefaultConfig(), client, "") + app.Output = output.NewWriter(stdout, format) + return app, stdout +} + +// overrideWaitGlobals overrides isTerminalForWait and altScreenWriter for a test, +// restoring originals on cleanup. +func overrideWaitGlobals(t *testing.T, isTTY bool) *bytes.Buffer { + t.Helper() + origIsTerminal := isTerminalForWait + origWriter := altScreenWriter + altBuf := new(bytes.Buffer) + isTerminalForWait = func() bool { return isTTY } + altScreenWriter = altBuf + t.Cleanup(func() { + isTerminalForWait = origIsTerminal + altScreenWriter = origWriter + }) + return altBuf +} + +// makeOKResponse creates a standard API envelope with the given status field value. 
+func makeOKResponse(status string) countingResponse { + return countingResponse{ + httpStatus: http.StatusOK, + body: map[string]any{ + "data": map[string]any{ + "id": "res-001", + "status": status, + "name": "test-resource", + }, + "message": "Resource retrieved", + "http_status": 200, + }, + } +} + +// makeErrorResponse creates a 500 server error response. +func makeErrorResponse() countingResponse { + return countingResponse{ + httpStatus: http.StatusInternalServerError, + body: map[string]any{ + "message": "Internal Server Error", + "http_status": 500, + }, + } +} + +// baseWaitConfig returns a waitConfig with short intervals suitable for tests. +func baseWaitConfig() *waitConfig { + return &waitConfig{ + ResourceID: "res-001", + PollPath: "/api/v1/vector/test/res-001", + Interval: 10 * time.Millisecond, + Timeout: 500 * time.Millisecond, + TerminalStatuses: map[string]bool{"active": true, "deployed": true, "completed": true}, + FailedStatuses: map[string]bool{"failed": true}, + Noun: "Resource", + FormatDisplay: func(data map[string]any) []string { + return []string{"Status: " + getString(data, "status")} + }, + } +} + +// --- Flag registration and validation tests --- + +func TestAddWaitFlags(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addWaitFlags(cmd) + + waitFlag := cmd.Flags().Lookup("wait") + require.NotNil(t, waitFlag) + assert.Equal(t, "false", waitFlag.DefValue) + + intervalFlag := cmd.Flags().Lookup("poll-interval") + require.NotNil(t, intervalFlag) + assert.Equal(t, "1m0s", intervalFlag.DefValue) + + timeoutFlag := cmd.Flags().Lookup("timeout") + require.NotNil(t, timeoutFlag) + assert.Equal(t, "5m0s", timeoutFlag.DefValue) +} + +func TestGetWaitConfig_Disabled(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addWaitFlags(cmd) + require.NoError(t, cmd.ParseFlags([]string{})) + + enabled, _, _, err := getWaitConfig(cmd) + require.NoError(t, err) + assert.False(t, enabled) +} + +func TestGetWaitConfig_Enabled(t *testing.T) { + cmd := 
&cobra.Command{Use: "test"} + addWaitFlags(cmd) + require.NoError(t, cmd.ParseFlags([]string{"--wait", "--poll-interval", "5s", "--timeout", "2m"})) + + enabled, interval, timeout, err := getWaitConfig(cmd) + require.NoError(t, err) + assert.True(t, enabled) + assert.Equal(t, 5*time.Second, interval) + assert.Equal(t, 2*time.Minute, timeout) +} + +func TestGetWaitConfig_PollIntervalTooSmall(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addWaitFlags(cmd) + require.NoError(t, cmd.ParseFlags([]string{"--wait", "--poll-interval", "500ms"})) + + _, _, _, err := getWaitConfig(cmd) + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "at least") +} + +func TestGetWaitConfig_TimeoutTooLarge(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addWaitFlags(cmd) + require.NoError(t, cmd.ParseFlags([]string{"--wait", "--timeout", "31m"})) + + _, _, _, err := getWaitConfig(cmd) + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "must not exceed") +} + +func TestGetWaitConfig_IntervalExceedsTimeout(t *testing.T) { + cmd := &cobra.Command{Use: "test"} + addWaitFlags(cmd) + require.NoError(t, cmd.ParseFlags([]string{"--wait", "--poll-interval", "10m", "--timeout", "5m"})) + + _, _, _, err := getWaitConfig(cmd) + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "must not exceed timeout") +} + +// --- waitForResource tests --- + +func TestWaitForResource_CompletesOnTerminalStatus(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeOKResponse("pending"), + makeOKResponse("active"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, 
"test-token", output.Table) + cfg := baseWaitConfig() + cfg.PollPath = "/api/v1/vector/test/res-001" + + result, err := waitForResource(context.Background(), app, cfg) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, "active", result.Status) + assert.NotZero(t, result.Elapsed) + + var item map[string]any + require.NoError(t, json.Unmarshal(result.FinalData, &item)) + assert.Equal(t, "res-001", item["id"]) +} + +func TestWaitForResource_DetectsFailureStatus(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeOKResponse("failed"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + + result, err := waitForResource(context.Background(), app, cfg) + require.Error(t, err) + assert.Nil(t, result) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "failed status") + assert.Contains(t, apiErr.Message, "failed") +} + +func TestWaitForResource_HandlesTransientPollErrors(t *testing.T) { + overrideWaitGlobals(t, false) + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeErrorResponse(), // 500 on 2nd poll + makeOKResponse("deployed"), // success on 3rd + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + + result, err := waitForResource(context.Background(), app, cfg) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, "deployed", result.Status) +} + +func TestWaitForResource_RespectsContextCancellation(t *testing.T) { + overrideWaitGlobals(t, false) + + // Server always returns pending — the context cancellation should stop it. 
+ ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + cfg.Timeout = 5 * time.Second // long enough that timeout doesn't fire first + + ctx, cancel := context.WithCancel(context.Background()) + // Cancel after a short delay to simulate Ctrl+C. + go func() { + time.Sleep(50 * time.Millisecond) + cancel() + }() + + result, err := waitForResource(ctx, app, cfg) + require.Error(t, err) + assert.Nil(t, result) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "cancelled") +} + +func TestWaitForResource_TimesOut(t *testing.T) { + overrideWaitGlobals(t, false) + + // Server always returns pending — should time out. + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + cfg.Interval = 10 * time.Millisecond + cfg.Timeout = 50 * time.Millisecond + + result, err := waitForResource(context.Background(), app, cfg) + require.Error(t, err) + assert.Nil(t, result) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "timed out") +} + +func TestWaitForResource_AbortsAfterMaxConsecutiveFailures(t *testing.T) { + overrideWaitGlobals(t, false) + + // Build a response list with maxConsecutiveErrors 500s. 
+ responses := make([]countingResponse, maxConsecutiveErrors) + for i := range responses { + responses[i] = makeErrorResponse() + } + + ts := newCountingTestServer("test-token", responses) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + cfg.Timeout = 5 * time.Second // long enough that timeout doesn't fire first + + result, err := waitForResource(context.Background(), app, cfg) + require.Error(t, err) + assert.Nil(t, result) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 1, apiErr.ExitCode) + assert.Contains(t, apiErr.Message, "consecutive poll failures") +} + +func TestWaitForResource_JSONModeNoANSI(t *testing.T) { + altBuf := overrideWaitGlobals(t, true) // TTY=true, but JSON mode should suppress + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeOKResponse("active"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.JSON) + cfg := baseWaitConfig() + + result, err := waitForResource(context.Background(), app, cfg) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, "active", result.Status) + + // Alt screen buffer should have no ANSI sequences. + altOutput := altBuf.String() + assert.NotContains(t, altOutput, "\033[") +} + +func TestWaitForResource_NonTTYNoANSI(t *testing.T) { + altBuf := overrideWaitGlobals(t, false) // Non-TTY + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeOKResponse("completed"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + + result, err := waitForResource(context.Background(), app, cfg) + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, "completed", result.Status) + + // Alt screen buffer should have no ANSI sequences. 
+ altOutput := altBuf.String() + assert.NotContains(t, altOutput, "\033[") +} + +func TestWaitForResource_TTYWritesANSI(t *testing.T) { + altBuf := overrideWaitGlobals(t, true) // TTY + Table format + + ts := newCountingTestServer("test-token", []countingResponse{ + makeOKResponse("pending"), + makeOKResponse("active"), + }) + defer ts.Close() + + app, _ := newWaitApp(ts.URL, "test-token", output.Table) + cfg := baseWaitConfig() + + result, err := waitForResource(context.Background(), app, cfg) + require.NoError(t, err) + require.NotNil(t, result) + + altOutput := altBuf.String() + // TTY mode should have alt screen enter and exit sequences. + assert.True(t, strings.Contains(altOutput, ansiAltScreenEnter), "expected alt screen enter sequence") + assert.True(t, strings.Contains(altOutput, ansiAltScreenExit), "expected alt screen exit sequence") +} diff --git a/internal/commands/webhook.go b/internal/commands/webhook.go new file mode 100644 index 0000000..e0a4a4d --- /dev/null +++ b/internal/commands/webhook.go @@ -0,0 +1,378 @@ +package commands + +import ( + "encoding/json" + "fmt" + "io" + "strings" + + "github.com/spf13/cobra" + + "github.com/built-fast/vector-cli/internal/output" +) + +const webhooksBasePath = "/api/v1/vector/webhooks" + +// NewWebhookCmd creates the webhook command group. 
+func NewWebhookCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "webhook", + Short: "Manage webhooks", + Long: "Manage webhooks for receiving notifications about account events.", + } + + cmd.AddCommand(newWebhookListCmd()) + cmd.AddCommand(newWebhookShowCmd()) + cmd.AddCommand(newWebhookCreateCmd()) + cmd.AddCommand(newWebhookUpdateCmd()) + cmd.AddCommand(newWebhookDeleteCmd()) + + return cmd +} + +func newWebhookListCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "list", + Short: "List webhooks", + Long: "Retrieve a paginated list of webhooks for your account.", + Example: ` # List webhooks + vector webhook list`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + page, perPage := getPagination(cmd) + query := buildPaginationQuery(page, perPage) + + resp, err := app.Client.Get(cmd.Context(), webhooksBasePath, query) + if err != nil { + return fmt.Errorf("failed to list webhooks: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to list webhooks: %w", err) + } + + if app.Output.Format() == output.JSON { + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to list webhooks: %w", err) + } + return app.Output.JSON(json.RawMessage(data)) + } + + data, meta, err := parseResponseWithMeta(body) + if err != nil { + return fmt.Errorf("failed to list webhooks: %w", err) + } + + var items []map[string]any + if err := json.Unmarshal(data, &items); err != nil { + return fmt.Errorf("failed to list webhooks: %w", err) + } + + headers := []string{"ID", "TYPE", "URL", "ENABLED"} + var rows [][]string + for _, item := range items { + rows = append(rows, []string{ + getString(item, "id"), + getString(item, "type"), + getString(item, "url"), + formatBool(getBool(item, "enabled")), + }) + } + + app.Output.Table(headers, rows) + if meta != nil { + 
app.Output.Pagination(meta.CurrentPage, meta.LastPage, meta.Total) + } + return nil + }, + } + addPaginationFlags(cmd) + return cmd +} + +func newWebhookShowCmd() *cobra.Command { + return &cobra.Command{ + Use: "show ", + Short: "Show a webhook", + Long: "Display details of a specific webhook.", + Example: ` # Show webhook details + vector webhook show webhook-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Get(cmd.Context(), webhooksBasePath+"/"+args[0], nil) + if err != nil { + return fmt.Errorf("failed to get webhook: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to get webhook: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to get webhook: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to get webhook: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Type", Value: getString(item, "type")}, + {Key: "URL", Value: getString(item, "url")}, + {Key: "Enabled", Value: formatBool(getBool(item, "enabled"))}, + {Key: "Events", Value: formatEvents(item)}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + return nil + }, + } +} + +func newWebhookCreateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "create", + Short: "Create a webhook", + Long: "Create a new webhook for receiving notifications about account events.", + Example: ` # Create a webhook + vector webhook create --url https://example.com/hooks/vector --events "site.created,deploy.completed" + + # Create a Slack webhook + 
vector webhook create --url https://hooks.slack.com/services/xxx --events "deploy.completed" --type slack`, + Args: cobra.NoArgs, + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + urlFlag, _ := cmd.Flags().GetString("url") + eventsStr, _ := cmd.Flags().GetString("events") + webhookType, _ := cmd.Flags().GetString("type") + + events := strings.Split(eventsStr, ",") + + reqBody := map[string]any{ + "url": urlFlag, + "events": events, + "type": webhookType, + } + + resp, err := app.Client.Post(cmd.Context(), webhooksBasePath, reqBody) + if err != nil { + return fmt.Errorf("failed to create webhook: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to create webhook: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to create webhook: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to create webhook: %w", err) + } + + w := cmd.OutOrStdout() + kvs := []output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Type", Value: getString(item, "type")}, + {Key: "URL", Value: getString(item, "url")}, + {Key: "Enabled", Value: formatBool(getBool(item, "enabled"))}, + {Key: "Events", Value: formatEvents(item)}, + {Key: "Created", Value: getString(item, "created_at")}, + } + + secret := getString(item, "secret") + if secret != "" { + kvs = append(kvs, output.KeyValue{Key: "Secret", Value: secret}) + } + + app.Output.KeyValue(kvs) + + if secret != "" { + output.PrintMessage(w, "") + output.PrintMessage(w, "Save this secret — it won't be shown again!") + } + return nil + }, + } + + cmd.Flags().String("url", "", "Webhook URL (required)") + cmd.Flags().String("events", "", 
"Comma-separated event types (required)") + cmd.Flags().String("type", "http", "Webhook type (http or slack)") + _ = cmd.MarkFlagRequired("url") + _ = cmd.MarkFlagRequired("events") + + return cmd +} + +func newWebhookUpdateCmd() *cobra.Command { + cmd := &cobra.Command{ + Use: "update ", + Short: "Update a webhook", + Long: "Update an existing webhook configuration.", + Example: ` # Update webhook URL + vector webhook update webhook-456 --url https://example.com/hooks/new + + # Disable a webhook + vector webhook update webhook-456 --enabled`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + reqBody := map[string]any{} + + if cmd.Flags().Changed("url") { + urlFlag, _ := cmd.Flags().GetString("url") + reqBody["url"] = urlFlag + } + + if cmd.Flags().Changed("events") { + eventsStr, _ := cmd.Flags().GetString("events") + events := strings.Split(eventsStr, ",") + reqBody["events"] = events + } + + if cmd.Flags().Changed("enabled") { + enabled, _ := cmd.Flags().GetBool("enabled") + reqBody["enabled"] = enabled + } + + resp, err := app.Client.Put(cmd.Context(), webhooksBasePath+"/"+args[0], reqBody) + if err != nil { + return fmt.Errorf("failed to update webhook: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to update webhook: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to update webhook: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + var item map[string]any + if err := json.Unmarshal(data, &item); err != nil { + return fmt.Errorf("failed to update webhook: %w", err) + } + + app.Output.KeyValue([]output.KeyValue{ + {Key: "ID", Value: getString(item, "id")}, + {Key: "Type", Value: getString(item, "type")}, + {Key: "URL", Value: getString(item, "url")}, + 
{Key: "Enabled", Value: formatBool(getBool(item, "enabled"))}, + {Key: "Events", Value: formatEvents(item)}, + {Key: "Created", Value: getString(item, "created_at")}, + {Key: "Updated", Value: getString(item, "updated_at")}, + }) + return nil + }, + } + + cmd.Flags().String("url", "", "Webhook URL") + cmd.Flags().String("events", "", "Comma-separated event types") + cmd.Flags().Bool("enabled", false, "Whether the webhook is enabled") + + return cmd +} + +func newWebhookDeleteCmd() *cobra.Command { + return &cobra.Command{ + Use: "delete <webhook-id>", + Short: "Delete a webhook", + Long: "Delete a webhook. All associated delivery logs will also be deleted.", + Example: ` # Delete a webhook + vector webhook delete webhook-456`, + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { + app, err := requireApp(cmd) + if err != nil { + return err + } + + resp, err := app.Client.Delete(cmd.Context(), webhooksBasePath+"/"+args[0]) + if err != nil { + return fmt.Errorf("failed to delete webhook: %w", err) + } + defer func() { _ = resp.Body.Close() }() + + body, err := io.ReadAll(resp.Body) + if err != nil { + return fmt.Errorf("failed to delete webhook: %w", err) + } + + data, err := parseResponseData(body) + if err != nil { + return fmt.Errorf("failed to delete webhook: %w", err) + } + + if app.Output.Format() == output.JSON { + return app.Output.JSON(json.RawMessage(data)) + } + + output.PrintMessage(cmd.OutOrStdout(), "Webhook deleted successfully.") + return nil + }, + } +} + +// formatEvents joins the events array into a comma-separated string. 
+func formatEvents(item map[string]any) string { + events := getSlice(item, "events") + if len(events) == 0 { + return "-" + } + parts := make([]string, 0, len(events)) + for _, e := range events { + if s, ok := e.(string); ok { + parts = append(parts, s) + } + } + if len(parts) == 0 { + return "-" + } + return strings.Join(parts, ", ") +} diff --git a/internal/commands/webhook_test.go b/internal/commands/webhook_test.go new file mode 100644 index 0000000..1be799c --- /dev/null +++ b/internal/commands/webhook_test.go @@ -0,0 +1,702 @@ +package commands + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + "net/http/httptest" + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/built-fast/vector-cli/internal/api" + "github.com/built-fast/vector-cli/internal/appctx" + "github.com/built-fast/vector-cli/internal/config" + "github.com/built-fast/vector-cli/internal/output" +) + +var webhookListResponse = map[string]any{ + "data": []map[string]any{ + { + "id": "wh-001", + "type": "http", + "url": "https://example.com/webhook", + "events": []any{"site.created", "deployment.completed"}, + "enabled": true, + "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-01T00:00:00+00:00", + }, + { + "id": "wh-002", + "type": "slack", + "url": "https://hooks.slack.com/services/T00/B00/XXX", + "events": []any{"deployment.completed"}, + "enabled": false, + "created_at": "2025-01-02T00:00:00+00:00", + "updated_at": "2025-01-02T00:00:00+00:00", + }, + }, + "meta": map[string]any{ + "current_page": 1, + "last_page": 1, + "total": 2, + }, + "message": "Webhooks retrieved successfully", + "http_status": 200, +} + +var webhookShowResponse = map[string]any{ + "data": map[string]any{ + "id": "wh-001", + "type": "http", + "url": "https://example.com/webhook", + "events": []any{"site.created", "deployment.completed"}, + "enabled": true, + "created_at": "2025-01-01T00:00:00+00:00", + 
"updated_at": "2025-01-05T00:00:00+00:00", + }, + "message": "Webhook retrieved successfully", + "http_status": 200, +} + +var webhookCreateHTTPResponse = map[string]any{ + "data": map[string]any{ + "id": "wh-003", + "type": "http", + "url": "https://example.com/new-webhook", + "events": []any{"site.created", "deployment.completed"}, + "secret": "a1b2c3d4e5f6789012345678901234567890", + "enabled": true, + "created_at": "2025-01-15T00:00:00+00:00", + "updated_at": "2025-01-15T00:00:00+00:00", + }, + "message": "Webhook created successfully.", + "http_status": 201, +} + +var webhookCreateSlackResponse = map[string]any{ + "data": map[string]any{ + "id": "wh-004", + "type": "slack", + "url": "https://hooks.slack.com/services/T00/B00/XXX", + "events": []any{"deployment.completed"}, + "enabled": true, + "created_at": "2025-01-15T00:00:00+00:00", + "updated_at": "2025-01-15T00:00:00+00:00", + }, + "message": "Slack webhook created successfully.", + "http_status": 201, +} + +var webhookUpdateResponse = map[string]any{ + "data": map[string]any{ + "id": "wh-001", + "type": "http", + "url": "https://example.com/new-webhook", + "events": []any{"site.created", "site.updated"}, + "enabled": false, + "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-20T00:00:00+00:00", + }, + "message": "Webhook updated successfully", + "http_status": 200, +} + +var webhookDeleteResponse = map[string]any{ + "data": map[string]any{ + "id": "wh-001", + "type": "http", + "url": "https://example.com/webhook", + "events": []any{"site.created", "deployment.completed"}, + "enabled": true, + "created_at": "2025-01-01T00:00:00+00:00", + "updated_at": "2025-01-05T00:00:00+00:00", + }, + "message": "Webhook deleted successfully", + "http_status": 200, +} + +func newWebhookTestServer(validToken string) *httptest.Server { + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + auth := r.Header.Get("Authorization") + if auth != "Bearer "+validToken { + 
w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusUnauthorized) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Unauthenticated.", + "http_status": 401, + }) + return + } + + w.Header().Set("Content-Type", "application/json") + + path := r.URL.Path + method := r.Method + + switch { + case method == "GET" && path == "/api/v1/vector/webhooks": + _ = json.NewEncoder(w).Encode(webhookListResponse) + + case method == "GET" && path == "/api/v1/vector/webhooks/wh-001": + _ = json.NewEncoder(w).Encode(webhookShowResponse) + + case method == "POST" && path == "/api/v1/vector/webhooks": + var reqBody map[string]any + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &reqBody) + webhookType, _ := reqBody["type"].(string) + if webhookType == "slack" { + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(webhookCreateSlackResponse) + } else { + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(webhookCreateHTTPResponse) + } + + case method == "PUT" && path == "/api/v1/vector/webhooks/wh-001": + _ = json.NewEncoder(w).Encode(webhookUpdateResponse) + + case method == "DELETE" && path == "/api/v1/vector/webhooks/wh-001": + _ = json.NewEncoder(w).Encode(webhookDeleteResponse) + + default: + w.WriteHeader(http.StatusNotFound) + _ = json.NewEncoder(w).Encode(map[string]any{ + "message": "Not Found", + "http_status": 404, + }) + } + })) +} + +func buildWebhookCmd(baseURL, token string, format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient(baseURL, token, "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: 
true, + } + + root.AddCommand(NewWebhookCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +func buildWebhookCmdNoAuth(format output.Format) (*cobra.Command, *bytes.Buffer, *bytes.Buffer) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + + root := &cobra.Command{ + Use: "vector", + PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + client := api.NewClient("http://localhost", "", "test-agent") + app := appctx.NewApp( + config.DefaultConfig(), + client, + "", + ) + app.Output = output.NewWriter(stdout, format) + cmd.SetContext(appctx.WithApp(cmd.Context(), app)) + return nil + }, + SilenceUsage: true, + SilenceErrors: true, + } + + root.AddCommand(NewWebhookCmd()) + + root.SetOut(stdout) + root.SetErr(stderr) + + return root, stdout, stderr +} + +// --- Webhook List Tests --- + +func TestWebhookListCmd_TableOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "wh-001") + assert.Contains(t, out, "http") + assert.Contains(t, out, "https://example.com/webhook") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "wh-002") + assert.Contains(t, out, "slack") + assert.Contains(t, out, "No") +} + +func TestWebhookListCmd_JSONOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"webhook", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result []map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Len(t, result, 2) + assert.Equal(t, "wh-001", result[0]["id"]) +} + +func TestWebhookListCmd_Pagination(t *testing.T) { + var receivedPage, receivedPerPage string + ts := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedPage = r.URL.Query().Get("page") + receivedPerPage = r.URL.Query().Get("per_page") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "list", "--page", "3", "--per-page", "25"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "3", receivedPage) + assert.Equal(t, "25", receivedPerPage) +} + +func TestWebhookListCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookListResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "list"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/webhooks", receivedPath) +} + +func TestWebhookListCmd_AuthError(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "bad-token", output.Table) + cmd.SetArgs([]string{"webhook", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +func TestWebhookListCmd_NoAuth(t *testing.T) { + cmd, _, _ := buildWebhookCmdNoAuth(output.Table) + cmd.SetArgs([]string{"webhook", "list"}) + + err := cmd.Execute() + require.Error(t, err) + + var apiErr *api.APIError + require.ErrorAs(t, err, &apiErr) + assert.Equal(t, 2, apiErr.ExitCode) +} + +// --- Webhook Show Tests --- + +func TestWebhookShowCmd_TableOutput(t *testing.T) { + ts 
:= newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "show", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "wh-001") + assert.Contains(t, out, "http") + assert.Contains(t, out, "https://example.com/webhook") + assert.Contains(t, out, "Yes") + assert.Contains(t, out, "site.created, deployment.completed") + assert.Contains(t, out, "2025-01-05T00:00:00+00:00") +} + +func TestWebhookShowCmd_JSONOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"webhook", "show", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "wh-001", result["id"]) + assert.Equal(t, "http", result["type"]) +} + +func TestWebhookShowCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookShowResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "show", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "GET", receivedMethod) + assert.Equal(t, "/api/v1/vector/webhooks/wh-001", receivedPath) +} + +func TestWebhookShowCmd_MissingArg(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "show"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 
arg(s)") +} + +// --- Webhook Create Tests --- + +func TestWebhookCreateCmd_HTTPTableOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "create", "--url", "https://example.com/new-webhook", "--events", "site.created,deployment.completed"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "wh-003") + assert.Contains(t, out, "http") + assert.Contains(t, out, "https://example.com/new-webhook") + assert.Contains(t, out, "a1b2c3d4e5f6789012345678901234567890") + assert.Contains(t, out, "Save this secret") +} + +func TestWebhookCreateCmd_SlackTableOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "create", "--url", "https://hooks.slack.com/services/T00/B00/XXX", "--events", "deployment.completed", "--type", "slack"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "wh-004") + assert.Contains(t, out, "slack") + assert.NotContains(t, out, "Save this secret") +} + +func TestWebhookCreateCmd_JSONOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"webhook", "create", "--url", "https://example.com/new-webhook", "--events", "site.created,deployment.completed"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "wh-003", result["id"]) +} + +func TestWebhookCreateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r 
*http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(http.StatusCreated) + _ = json.NewEncoder(w).Encode(webhookCreateHTTPResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "create", "--url", "https://example.com/new-webhook", "--events", "site.created,deployment.completed"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "POST", receivedMethod) + assert.Equal(t, "/api/v1/vector/webhooks", receivedPath) + assert.Equal(t, "https://example.com/new-webhook", receivedBody["url"]) + assert.Equal(t, "http", receivedBody["type"]) + events, ok := receivedBody["events"].([]any) + require.True(t, ok) + assert.Equal(t, []any{"site.created", "deployment.completed"}, events) +} + +func TestWebhookCreateCmd_MissingRequiredFlags(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "create"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +func TestWebhookCreateCmd_MissingEventsFlag(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "create", "--url", "https://example.com/webhook"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "required") +} + +// --- Webhook Update Tests --- + +func TestWebhookUpdateCmd_TableOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "update", "wh-001", "--url", "https://example.com/new-webhook"}) 
+ + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "wh-001") + assert.Contains(t, out, "https://example.com/new-webhook") + assert.Contains(t, out, "2025-01-20T00:00:00+00:00") +} + +func TestWebhookUpdateCmd_JSONOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"webhook", "update", "wh-001", "--url", "https://example.com/new-webhook"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "wh-001", result["id"]) +} + +func TestWebhookUpdateCmd_RequestBody(t *testing.T) { + var receivedBody map[string]any + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "update", "wh-001", "--url", "https://example.com/new-webhook"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, "PUT", receivedMethod) + assert.Equal(t, "/api/v1/vector/webhooks/wh-001", receivedPath) + assert.Equal(t, "https://example.com/new-webhook", receivedBody["url"]) + // events and enabled should not be sent when not provided + _, hasEvents := receivedBody["events"] + assert.False(t, hasEvents) + _, hasEnabled := receivedBody["enabled"] + assert.False(t, hasEnabled) +} + +func TestWebhookUpdateCmd_EventsFlag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, 
_ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "update", "wh-001", "--events", "site.created,site.updated"}) + + err := cmd.Execute() + require.NoError(t, err) + + events, ok := receivedBody["events"].([]any) + require.True(t, ok) + assert.Equal(t, []any{"site.created", "site.updated"}, events) +} + +func TestWebhookUpdateCmd_EnabledFlag(t *testing.T) { + var receivedBody map[string]any + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + body, _ := io.ReadAll(r.Body) + _ = json.Unmarshal(body, &receivedBody) + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookUpdateResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "update", "wh-001", "--enabled=false"}) + + err := cmd.Execute() + require.NoError(t, err) + + assert.Equal(t, false, receivedBody["enabled"]) +} + +func TestWebhookUpdateCmd_MissingArg(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "update"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Webhook Delete Tests --- + +func TestWebhookDeleteCmd_TableOutput(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "delete", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Contains(t, stdout.String(), "Webhook deleted successfully") +} + +func TestWebhookDeleteCmd_JSONOutput(t *testing.T) { 
+ ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, stdout, _ := buildWebhookCmd(ts.URL, "valid-token", output.JSON) + cmd.SetArgs([]string{"webhook", "delete", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + + var result map[string]any + require.NoError(t, json.Unmarshal(stdout.Bytes(), &result)) + assert.Equal(t, "wh-001", result["id"]) +} + +func TestWebhookDeleteCmd_HTTPPath(t *testing.T) { + var receivedMethod, receivedPath string + ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + receivedMethod = r.Method + receivedPath = r.URL.Path + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(webhookDeleteResponse) + })) + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "delete", "wh-001"}) + + err := cmd.Execute() + require.NoError(t, err) + assert.Equal(t, "DELETE", receivedMethod) + assert.Equal(t, "/api/v1/vector/webhooks/wh-001", receivedPath) +} + +func TestWebhookDeleteCmd_MissingArg(t *testing.T) { + ts := newWebhookTestServer("valid-token") + defer ts.Close() + + cmd, _, _ := buildWebhookCmd(ts.URL, "valid-token", output.Table) + cmd.SetArgs([]string{"webhook", "delete"}) + + err := cmd.Execute() + require.Error(t, err) + assert.Contains(t, err.Error(), "accepts 1 arg(s)") +} + +// --- Help Tests --- + +func TestWebhookCmd_Help(t *testing.T) { + cmd := NewWebhookCmd() + + stdout := new(bytes.Buffer) + cmd.SetOut(stdout) + cmd.SetArgs([]string{"--help"}) + + err := cmd.Execute() + require.NoError(t, err) + + out := stdout.String() + assert.Contains(t, out, "list") + assert.Contains(t, out, "show") + assert.Contains(t, out, "create") + assert.Contains(t, out, "update") + assert.Contains(t, out, "delete") + assert.Contains(t, out, "webhooks") +} diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..90b37b1 --- /dev/null +++ 
b/internal/config/config.go @@ -0,0 +1,73 @@ +// Package config handles configuration and credentials loading. +package config + +import ( + "encoding/json" + "errors" + "fmt" + "os" +) + +const defaultAPIURL = "https://api.builtfast.com" + +// Config holds the CLI configuration. +type Config struct { + ApiURL string `json:"api_url"` +} + +// DefaultConfig returns a Config with default values. +func DefaultConfig() *Config { + return &Config{ + ApiURL: defaultAPIURL, + } +} + +// LoadConfig reads config.json from the config directory. +// Returns default config if the file doesn't exist. +func LoadConfig() (*Config, error) { + path := ConfigFilePath() + if path == "" { + return nil, fmt.Errorf("unable to determine config file path") + } + + data, err := os.ReadFile(path) + if err != nil { + if errors.Is(err, os.ErrNotExist) { + return DefaultConfig(), nil + } + return nil, fmt.Errorf("unable to read config file: %w", err) + } + + cfg := DefaultConfig() + if err := json.Unmarshal(data, cfg); err != nil { + return nil, fmt.Errorf("invalid JSON in config file %s: %w", path, err) + } + + return cfg, nil +} + +// SaveConfig writes config.json to the config directory. +// Creates the config directory if it doesn't exist. 
+func SaveConfig(cfg *Config) error { + if _, err := EnsureConfigDir(); err != nil { + return err + } + + path := ConfigFilePath() + if path == "" { + return fmt.Errorf("unable to determine config file path") + } + + data, err := json.MarshalIndent(cfg, "", " ") + if err != nil { + return fmt.Errorf("unable to marshal config: %w", err) + } + + data = append(data, '\n') + + if err := os.WriteFile(path, data, 0o644); err != nil { + return fmt.Errorf("unable to write config file: %w", err) + } + + return nil +} diff --git a/internal/config/config_test.go b/internal/config/config_test.go new file mode 100644 index 0000000..9800308 --- /dev/null +++ b/internal/config/config_test.go @@ -0,0 +1,84 @@ +package config + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestDefaultConfig(t *testing.T) { + cfg := DefaultConfig() + assert.Equal(t, "https://api.builtfast.com", cfg.ApiURL) +} + +func TestLoadConfig_FileMissing(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + cfg, err := LoadConfig() + require.NoError(t, err) + assert.Equal(t, "https://api.builtfast.com", cfg.ApiURL) +} + +func TestLoadConfig_ValidFile(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + data := []byte(`{"api_url": "https://custom.example.com"}`) + err := os.WriteFile(filepath.Join(tmpDir, "config.json"), data, 0o644) + require.NoError(t, err) + + cfg, err := LoadConfig() + require.NoError(t, err) + assert.Equal(t, "https://custom.example.com", cfg.ApiURL) +} + +func TestLoadConfig_InvalidJSON(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + data := []byte(`{not valid json}`) + err := os.WriteFile(filepath.Join(tmpDir, "config.json"), data, 0o644) + require.NoError(t, err) + + cfg, err := LoadConfig() + assert.Nil(t, cfg) + assert.Error(t, err) + assert.Contains(t, err.Error(), "invalid JSON in config file") 
+} + +func TestSaveConfig(t *testing.T) { + tmpDir := t.TempDir() + configDir := filepath.Join(tmpDir, "vector") + t.Setenv("VECTOR_CONFIG_DIR", configDir) + + cfg := &Config{ApiURL: "https://custom.example.com"} + err := SaveConfig(cfg) + require.NoError(t, err) + + // Verify directory was created + info, err := os.Stat(configDir) + require.NoError(t, err) + assert.True(t, info.IsDir()) + + // Verify file contents + data, err := os.ReadFile(filepath.Join(configDir, "config.json")) + require.NoError(t, err) + assert.Contains(t, string(data), `"api_url": "https://custom.example.com"`) +} + +func TestSaveConfig_RoundTrip(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + original := &Config{ApiURL: "https://roundtrip.example.com"} + err := SaveConfig(original) + require.NoError(t, err) + + loaded, err := LoadConfig() + require.NoError(t, err) + assert.Equal(t, original.ApiURL, loaded.ApiURL) +} diff --git a/internal/config/keyring.go b/internal/config/keyring.go new file mode 100644 index 0000000..0df0b9e --- /dev/null +++ b/internal/config/keyring.go @@ -0,0 +1,40 @@ +package config + +import ( + "errors" + "os" + + "github.com/zalando/go-keyring" +) + +const ( + keyringService = "vector-cli" + keyringAccount = "api-token" +) + +// ErrKeyringDisabled is returned when the VECTOR_NO_KEYRING environment variable is set. +var ErrKeyringDisabled = errors.New("keyring is disabled (VECTOR_NO_KEYRING is set)") + +// Save stores the API token in the OS keyring. +func Save(token string) error { + if os.Getenv("VECTOR_NO_KEYRING") != "" { + return ErrKeyringDisabled + } + return keyring.Set(keyringService, keyringAccount, token) +} + +// Load retrieves the API token from the OS keyring. +func Load() (string, error) { + if os.Getenv("VECTOR_NO_KEYRING") != "" { + return "", ErrKeyringDisabled + } + return keyring.Get(keyringService, keyringAccount) +} + +// Delete removes the API token from the OS keyring. 
+func Delete() error { + if os.Getenv("VECTOR_NO_KEYRING") != "" { + return ErrKeyringDisabled + } + return keyring.Delete(keyringService, keyringAccount) +} diff --git a/internal/config/keyring_test.go b/internal/config/keyring_test.go new file mode 100644 index 0000000..ad1bd23 --- /dev/null +++ b/internal/config/keyring_test.go @@ -0,0 +1,109 @@ +package config + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/zalando/go-keyring" +) + +func TestSave(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + err := Save("test-token") + require.NoError(t, err) + + // Verify it was stored + token, err := keyring.Get(keyringService, keyringAccount) + require.NoError(t, err) + assert.Equal(t, "test-token", token) +} + +func TestLoad(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + // Store a token first + require.NoError(t, keyring.Set(keyringService, keyringAccount, "my-token")) + + token, err := Load() + require.NoError(t, err) + assert.Equal(t, "my-token", token) +} + +func TestLoad_NotFound(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + _, err := Load() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +func TestDelete(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + // Store a token first + require.NoError(t, keyring.Set(keyringService, keyringAccount, "to-delete")) + + err := Delete() + require.NoError(t, err) + + // Verify it was removed + _, err = keyring.Get(keyringService, keyringAccount) + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +func TestDelete_NotFound(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + err := Delete() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} + +func TestSave_KeyringDisabled(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "1") + + err := Save("test-token") + assert.ErrorIs(t, err, ErrKeyringDisabled) +} + 
+func TestLoad_KeyringDisabled(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "1") + + _, err := Load() + assert.ErrorIs(t, err, ErrKeyringDisabled) +} + +func TestDelete_KeyringDisabled(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "1") + + err := Delete() + assert.ErrorIs(t, err, ErrKeyringDisabled) +} + +func TestSaveLoadDelete_RoundTrip(t *testing.T) { + keyring.MockInit() + t.Setenv("VECTOR_NO_KEYRING", "") + + // Save + require.NoError(t, Save("roundtrip-token")) + + // Load + token, err := Load() + require.NoError(t, err) + assert.Equal(t, "roundtrip-token", token) + + // Delete + require.NoError(t, Delete()) + + // Load again — should fail + _, err = Load() + assert.ErrorIs(t, err, keyring.ErrNotFound) +} diff --git a/internal/config/paths.go b/internal/config/paths.go new file mode 100644 index 0000000..768bbea --- /dev/null +++ b/internal/config/paths.go @@ -0,0 +1,58 @@ +package config + +import ( + "fmt" + "os" + "path/filepath" + "runtime" +) + +// ConfigDir resolves the configuration directory path without creating it. +// Resolution order: VECTOR_CONFIG_DIR env → XDG_CONFIG_HOME/vector → platform default. +// Platform defaults: ~/.config/vector on Linux/macOS, %APPDATA%/vector on Windows. +func ConfigDir() (string, error) { + if dir := os.Getenv("VECTOR_CONFIG_DIR"); dir != "" { + return dir, nil + } + + if xdg := os.Getenv("XDG_CONFIG_HOME"); xdg != "" { + return filepath.Join(xdg, "vector"), nil + } + + if runtime.GOOS == "windows" { + appData := os.Getenv("APPDATA") + if appData == "" { + return "", fmt.Errorf("%%APPDATA%% is not set") + } + return filepath.Join(appData, "vector"), nil + } + + home, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("unable to determine home directory: %w", err) + } + return filepath.Join(home, ".config", "vector"), nil +} + +// EnsureConfigDir creates the config directory with 0700 permissions if it doesn't exist. 
+func EnsureConfigDir() (string, error) { + dir, err := ConfigDir() + if err != nil { + return "", err + } + + if err := os.MkdirAll(dir, 0o700); err != nil { + return "", fmt.Errorf("unable to create config directory: %w", err) + } + + return dir, nil +} + +// ConfigFilePath returns the path to config.json within the config directory. +func ConfigFilePath() string { + dir, err := ConfigDir() + if err != nil { + return "" + } + return filepath.Join(dir, "config.json") +} diff --git a/internal/config/paths_test.go b/internal/config/paths_test.go new file mode 100644 index 0000000..96ffe03 --- /dev/null +++ b/internal/config/paths_test.go @@ -0,0 +1,97 @@ +package config + +import ( + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestConfigDir_EnvOverride(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", "/custom/config/dir") + t.Setenv("XDG_CONFIG_HOME", "/should/be/ignored") + + dir, err := ConfigDir() + require.NoError(t, err) + assert.Equal(t, "/custom/config/dir", dir) +} + +func TestConfigDir_XDGConfigHome(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", "") + t.Setenv("XDG_CONFIG_HOME", "/xdg/config") + + dir, err := ConfigDir() + require.NoError(t, err) + assert.Equal(t, filepath.Join("/xdg/config", "vector"), dir) +} + +func TestConfigDir_DefaultFallback(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", "") + t.Setenv("XDG_CONFIG_HOME", "") + + dir, err := ConfigDir() + require.NoError(t, err) + + if runtime.GOOS == "windows" { + appData := os.Getenv("APPDATA") + assert.Equal(t, filepath.Join(appData, "vector"), dir) + } else { + home, err := os.UserHomeDir() + require.NoError(t, err) + assert.Equal(t, filepath.Join(home, ".config", "vector"), dir) + } +} + +func TestConfigDir_PriorityOrder(t *testing.T) { + // VECTOR_CONFIG_DIR takes precedence over XDG_CONFIG_HOME + t.Setenv("VECTOR_CONFIG_DIR", "/vector/dir") + t.Setenv("XDG_CONFIG_HOME", "/xdg/dir") + + dir, err := 
ConfigDir() + require.NoError(t, err) + assert.Equal(t, "/vector/dir", dir) + + // When VECTOR_CONFIG_DIR is unset, XDG_CONFIG_HOME is used + t.Setenv("VECTOR_CONFIG_DIR", "") + + dir, err = ConfigDir() + require.NoError(t, err) + assert.Equal(t, filepath.Join("/xdg/dir", "vector"), dir) +} + +func TestEnsureConfigDir_CreatesDirectory(t *testing.T) { + tmpDir := t.TempDir() + configPath := filepath.Join(tmpDir, "nested", "config") + t.Setenv("VECTOR_CONFIG_DIR", configPath) + + dir, err := EnsureConfigDir() + require.NoError(t, err) + assert.Equal(t, configPath, dir) + + info, err := os.Stat(configPath) + require.NoError(t, err) + assert.True(t, info.IsDir()) + + if runtime.GOOS != "windows" { + assert.Equal(t, os.FileMode(0o700), info.Mode().Perm()) + } +} + +func TestEnsureConfigDir_ExistingDirectory(t *testing.T) { + tmpDir := t.TempDir() + t.Setenv("VECTOR_CONFIG_DIR", tmpDir) + + dir, err := EnsureConfigDir() + require.NoError(t, err) + assert.Equal(t, tmpDir, dir) +} + +func TestConfigFilePath(t *testing.T) { + t.Setenv("VECTOR_CONFIG_DIR", "/test/config") + + path := ConfigFilePath() + assert.Equal(t, filepath.Join("/test/config", "config.json"), path) +} diff --git a/internal/output/helpers.go b/internal/output/helpers.go new file mode 100644 index 0000000..5469269 --- /dev/null +++ b/internal/output/helpers.go @@ -0,0 +1,72 @@ +package output + +import ( + "encoding/json" + "fmt" + "io" + "text/tabwriter" +) + +// KeyValue represents a key-value pair for display in show commands. +type KeyValue struct { + Key string + Value string +} + +// PrintJSON writes v as pretty-printed (indented) JSON to w. +func PrintJSON(w io.Writer, v any) error { + enc := json.NewEncoder(w) + enc.SetIndent("", " ") + return enc.Encode(v) +} + +// PrintTable writes a formatted table with headers and rows to w. 
+func PrintTable(w io.Writer, headers []string, rows [][]string) { + tw := tabwriter.NewWriter(w, 0, 0, 2, ' ', 0) + for i, h := range headers { + if i > 0 { + _, _ = fmt.Fprint(tw, "\t") + } + _, _ = fmt.Fprint(tw, h) + } + _, _ = fmt.Fprintln(tw) + + for _, row := range rows { + for i, col := range row { + if i > 0 { + _, _ = fmt.Fprint(tw, "\t") + } + _, _ = fmt.Fprint(tw, col) + } + _, _ = fmt.Fprintln(tw) + } + _ = tw.Flush() +} + +// PrintKeyValue writes key-value pairs with a right-aligned key column to w. +func PrintKeyValue(w io.Writer, pairs []KeyValue) { + maxLen := 0 + for _, p := range pairs { + if len(p.Key) > maxLen { + maxLen = len(p.Key) + } + } + for _, p := range pairs { + _, _ = fmt.Fprintf(w, "%*s: %s\n", maxLen, p.Key, p.Value) + } +} + +// PrintPagination writes "Page X of Y (Z total)" to w. +func PrintPagination(w io.Writer, page, lastPage, total int) { + _, _ = fmt.Fprintf(w, "Page %d of %d (%d total)\n", page, lastPage, total) +} + +// PrintError writes "Error: " to w. +func PrintError(w io.Writer, msg string) { + _, _ = fmt.Fprintf(w, "Error: %s\n", msg) +} + +// PrintMessage writes a plain message line to w. 
+func PrintMessage(w io.Writer, msg string) { + _, _ = fmt.Fprintln(w, msg) +} diff --git a/internal/output/helpers_test.go b/internal/output/helpers_test.go new file mode 100644 index 0000000..671cf33 --- /dev/null +++ b/internal/output/helpers_test.go @@ -0,0 +1,126 @@ +package output + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestPrintJSON(t *testing.T) { + var buf bytes.Buffer + data := map[string]any{"name": "test", "count": 42} + + err := PrintJSON(&buf, data) + require.NoError(t, err) + + expected := "{\n \"count\": 42,\n \"name\": \"test\"\n}\n" + assert.Equal(t, expected, buf.String()) +} + +func TestPrintJSON_Slice(t *testing.T) { + var buf bytes.Buffer + data := []string{"a", "b"} + + err := PrintJSON(&buf, data) + require.NoError(t, err) + + expected := "[\n \"a\",\n \"b\"\n]\n" + assert.Equal(t, expected, buf.String()) +} + +func TestPrintJSON_InvalidValue(t *testing.T) { + var buf bytes.Buffer + // Channels cannot be marshaled to JSON. 
+ err := PrintJSON(&buf, make(chan int)) + assert.Error(t, err) +} + +func TestPrintTable(t *testing.T) { + var buf bytes.Buffer + headers := []string{"ID", "Name", "Status"} + rows := [][]string{ + {"1", "Alpha", "active"}, + {"2", "Beta", "inactive"}, + } + + PrintTable(&buf, headers, rows) + + output := buf.String() + assert.Contains(t, output, "ID") + assert.Contains(t, output, "Name") + assert.Contains(t, output, "Status") + assert.Contains(t, output, "Alpha") + assert.Contains(t, output, "Beta") + assert.Contains(t, output, "active") + assert.Contains(t, output, "inactive") +} + +func TestPrintTable_EmptyRows(t *testing.T) { + var buf bytes.Buffer + headers := []string{"ID", "Name"} + + PrintTable(&buf, headers, nil) + + output := buf.String() + assert.Contains(t, output, "ID") + assert.Contains(t, output, "Name") +} + +func TestPrintKeyValue(t *testing.T) { + var buf bytes.Buffer + pairs := []KeyValue{ + {Key: "Name", Value: "Test Project"}, + {Key: "API URL", Value: "https://api.example.com"}, + {Key: "Status", Value: "active"}, + } + + PrintKeyValue(&buf, pairs) + + output := buf.String() + // "API URL" is the longest key (7 chars), so shorter keys should be right-aligned. 
+ assert.Contains(t, output, " Name: Test Project\n") + assert.Contains(t, output, "API URL: https://api.example.com\n") + assert.Contains(t, output, " Status: active\n") +} + +func TestPrintKeyValue_SinglePair(t *testing.T) { + var buf bytes.Buffer + pairs := []KeyValue{ + {Key: "Key", Value: "Value"}, + } + + PrintKeyValue(&buf, pairs) + assert.Equal(t, "Key: Value\n", buf.String()) +} + +func TestPrintPagination(t *testing.T) { + var buf bytes.Buffer + PrintPagination(&buf, 2, 5, 48) + assert.Equal(t, "Page 2 of 5 (48 total)\n", buf.String()) +} + +func TestPrintPagination_SinglePage(t *testing.T) { + var buf bytes.Buffer + PrintPagination(&buf, 1, 1, 3) + assert.Equal(t, "Page 1 of 1 (3 total)\n", buf.String()) +} + +func TestPrintError(t *testing.T) { + var buf bytes.Buffer + PrintError(&buf, "something went wrong") + assert.Equal(t, "Error: something went wrong\n", buf.String()) +} + +func TestPrintMessage(t *testing.T) { + var buf bytes.Buffer + PrintMessage(&buf, "Operation completed successfully.") + assert.Equal(t, "Operation completed successfully.\n", buf.String()) +} + +func TestPrintMessage_EmptyString(t *testing.T) { + var buf bytes.Buffer + PrintMessage(&buf, "") + assert.Equal(t, "\n", buf.String()) +} diff --git a/internal/output/output.go b/internal/output/output.go new file mode 100644 index 0000000..3119ece --- /dev/null +++ b/internal/output/output.go @@ -0,0 +1,41 @@ +// Package output provides format detection, table/JSON writers, and error envelopes. +package output + +import ( + "os" + + "golang.org/x/term" +) + +// Format represents an output format. +type Format int + +const ( + // Table is human-friendly tabular output. + Table Format = iota + // JSON is machine-friendly JSON output. + JSON +) + +// isTerminalFunc is the function used to check if a file descriptor is a terminal. +// It can be overridden in tests to simulate TTY/non-TTY environments. 
+var isTerminalFunc = func() bool { + return term.IsTerminal(int(os.Stdout.Fd())) +} + +// DetectFormat determines the output format based on explicit flags and TTY detection. +// --json flag forces JSON output, --no-json flag forces Table output. +// When neither flag is set, it checks whether stdout is a terminal: +// TTY → Table, non-TTY (piped) → JSON. +func DetectFormat(jsonFlag, noJsonFlag bool) Format { + if jsonFlag { + return JSON + } + if noJsonFlag { + return Table + } + if isTerminalFunc() { + return Table + } + return JSON +} diff --git a/internal/output/output_test.go b/internal/output/output_test.go new file mode 100644 index 0000000..87bc25d --- /dev/null +++ b/internal/output/output_test.go @@ -0,0 +1,48 @@ +package output + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDetectFormat_JSONFlag(t *testing.T) { + // --json flag always returns JSON, regardless of TTY state. + isTerminalFunc = func() bool { return true } + t.Cleanup(func() { isTerminalFunc = func() bool { return false } }) + + assert.Equal(t, JSON, DetectFormat(true, false)) +} + +func TestDetectFormat_NoJSONFlag(t *testing.T) { + // --no-json flag always returns Table, regardless of TTY state. + isTerminalFunc = func() bool { return false } + t.Cleanup(func() { isTerminalFunc = func() bool { return false } }) + + assert.Equal(t, Table, DetectFormat(false, true)) +} + +func TestDetectFormat_BothFlags_JSONWins(t *testing.T) { + // When both flags are set, --json takes precedence. + assert.Equal(t, JSON, DetectFormat(true, true)) +} + +func TestDetectFormat_NoFlags_TTY(t *testing.T) { + // No flags, stdout is a TTY → Table. + isTerminalFunc = func() bool { return true } + t.Cleanup(func() { isTerminalFunc = func() bool { return false } }) + + assert.Equal(t, Table, DetectFormat(false, false)) +} + +func TestDetectFormat_NoFlags_NonTTY(t *testing.T) { + // No flags, stdout is not a TTY (piped) → JSON. 
+ isTerminalFunc = func() bool { return false } + + assert.Equal(t, JSON, DetectFormat(false, false)) +} + +func TestFormatConstants(t *testing.T) { + // Verify constants have distinct values. + assert.NotEqual(t, Table, JSON) +} diff --git a/internal/output/writer.go b/internal/output/writer.go new file mode 100644 index 0000000..6a7ee9a --- /dev/null +++ b/internal/output/writer.go @@ -0,0 +1,128 @@ +package output + +import ( + "encoding/json" + "fmt" + "io" + + "github.com/itchyny/gojq" +) + +// WriterOption configures a Writer. +type WriterOption func(*Writer) + +// WithJQ returns a WriterOption that enables jq filtering on JSON output. +func WithJQ(filter string, code *gojq.Code) WriterOption { + return func(w *Writer) { + w.jqFilter = filter + w.jqCode = code + } +} + +// Writer wraps output formatting and optional jq filtering. +type Writer struct { + w io.Writer + format Format + jqCode *gojq.Code + jqFilter string +} + +// NewWriter creates a new Writer with the given io.Writer, format, and options. +func NewWriter(w io.Writer, format Format, opts ...WriterOption) *Writer { + wr := &Writer{ + w: w, + format: format, + } + for _, opt := range opts { + opt(wr) + } + return wr +} + +// Format returns the configured output format. +func (wr *Writer) Format() Format { + return wr.format +} + +// HasJQ returns true when a jq filter is active. +func (wr *Writer) HasJQ() bool { + return wr.jqCode != nil +} + +// JSON writes v as pretty-printed JSON, applying a jq filter if set. +func (wr *Writer) JSON(v any) error { + if wr.HasJQ() { + return wr.writeJQ(v) + } + return PrintJSON(wr.w, v) +} + +// Table delegates to PrintTable. +func (wr *Writer) Table(headers []string, rows [][]string) { + PrintTable(wr.w, headers, rows) +} + +// KeyValue delegates to PrintKeyValue. +func (wr *Writer) KeyValue(pairs []KeyValue) { + PrintKeyValue(wr.w, pairs) +} + +// Pagination delegates to PrintPagination. 
+func (wr *Writer) Pagination(page, lastPage, total int) { + PrintPagination(wr.w, page, lastPage, total) +} + +// Message delegates to PrintMessage. +func (wr *Writer) Message(msg string) { + PrintMessage(wr.w, msg) +} + +// Error delegates to PrintError. +func (wr *Writer) Error(msg string) { + PrintError(wr.w, msg) +} + +// Underlying returns the raw io.Writer. +func (wr *Writer) Underlying() io.Writer { + return wr.w +} + +// writeJQ marshals v to a generic value, runs the jq filter, and outputs results. +func (wr *Writer) writeJQ(v any) error { + // Marshal then unmarshal to ensure we have a clean interface{} tree + // that gojq can work with (no typed structs). + b, err := json.Marshal(v) + if err != nil { + return err + } + + var input any + if err := json.Unmarshal(b, &input); err != nil { + return err + } + + iter := wr.jqCode.Run(input) + for { + result, ok := iter.Next() + if !ok { + break + } + if err, isErr := result.(error); isErr { + return fmt.Errorf("jq: %w", err) + } + + switch val := result.(type) { + case nil: + _, _ = fmt.Fprintln(wr.w, "null") + case string: + _, _ = fmt.Fprintln(wr.w, val) + default: + out, err := json.MarshalIndent(val, "", " ") + if err != nil { + return err + } + _, _ = fmt.Fprintln(wr.w, string(out)) + } + } + return nil +} diff --git a/internal/output/writer_test.go b/internal/output/writer_test.go new file mode 100644 index 0000000..b4f1593 --- /dev/null +++ b/internal/output/writer_test.go @@ -0,0 +1,274 @@ +package output + +import ( + "bytes" + "encoding/base64" + "testing" + + "github.com/itchyny/gojq" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// compileJQ is a test helper that parses and compiles a jq expression. 
+func compileJQ(t *testing.T, expr string) *gojq.Code { + t.Helper() + query, err := gojq.Parse(expr) + require.NoError(t, err) + code, err := gojq.Compile(query) + require.NoError(t, err) + return code +} + +func TestWriter_JSON_WithoutJQ(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, JSON) + + err := w.JSON(map[string]any{"name": "test", "count": 42}) + require.NoError(t, err) + + expected := "{\n \"count\": 42,\n \"name\": \"test\"\n}\n" + assert.Equal(t, expected, buf.String()) +} + +func TestWriter_JSON_JQ_FieldAccess(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".name") + w := NewWriter(&buf, JSON, WithJQ(".name", code)) + + err := w.JSON(map[string]any{"name": "alice", "age": 30}) + require.NoError(t, err) + + assert.Equal(t, "alice\n", buf.String()) +} + +func TestWriter_JSON_JQ_ArrayFilter(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".[].id") + w := NewWriter(&buf, JSON, WithJQ(".[].id", code)) + + err := w.JSON([]any{ + map[string]any{"id": 1, "name": "a"}, + map[string]any{"id": 2, "name": "b"}, + }) + require.NoError(t, err) + + assert.Equal(t, "1\n2\n", buf.String()) +} + +func TestWriter_JSON_JQ_Select(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, `[.[] | select(.active == true)]`) + w := NewWriter(&buf, JSON, WithJQ(`[.[] | select(.active == true)]`, code)) + + err := w.JSON([]any{ + map[string]any{"name": "a", "active": true}, + map[string]any{"name": "b", "active": false}, + map[string]any{"name": "c", "active": true}, + }) + require.NoError(t, err) + + expected := "[\n {\n \"active\": true,\n \"name\": \"a\"\n },\n {\n \"active\": true,\n \"name\": \"c\"\n }\n]\n" + assert.Equal(t, expected, buf.String()) +} + +func TestWriter_JSON_JQ_Length(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, "length") + w := NewWriter(&buf, JSON, WithJQ("length", code)) + + err := w.JSON([]any{1, 2, 3}) + require.NoError(t, err) + + assert.Equal(t, "3\n", buf.String()) +} + +func 
TestWriter_JSON_JQ_Pipe(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".[].name") + w := NewWriter(&buf, JSON, WithJQ(".[].name", code)) + + err := w.JSON([]any{ + map[string]any{"name": "alice"}, + map[string]any{"name": "bob"}, + map[string]any{"name": "charlie"}, + }) + require.NoError(t, err) + + assert.Equal(t, "alice\nbob\ncharlie\n", buf.String()) +} + +func TestWriter_JSON_JQ_Identity(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".") + w := NewWriter(&buf, JSON, WithJQ(".", code)) + + err := w.JSON(map[string]any{"x": 1}) + require.NoError(t, err) + + expected := "{\n \"x\": 1\n}\n" + assert.Equal(t, expected, buf.String()) +} + +func TestWriter_JSON_JQ_FormatCSV(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, `[.[] | [.id, .name]] | .[] | @csv`) + w := NewWriter(&buf, JSON, WithJQ(`[.[] | [.id, .name]] | .[] | @csv`, code)) + + err := w.JSON([]any{ + map[string]any{"id": 1, "name": "alice"}, + map[string]any{"id": 2, "name": "bob"}, + }) + require.NoError(t, err) + + assert.Equal(t, "1,\"alice\"\n2,\"bob\"\n", buf.String()) +} + +func TestWriter_JSON_JQ_FormatBase64(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".name | @base64") + w := NewWriter(&buf, JSON, WithJQ(".name | @base64", code)) + + err := w.JSON(map[string]any{"name": "hello"}) + require.NoError(t, err) + + expected := base64.StdEncoding.EncodeToString([]byte("hello")) + assert.Equal(t, expected+"\n", buf.String()) +} + +func TestWriter_JSON_JQ_PrettyPrintObject(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".info") + w := NewWriter(&buf, JSON, WithJQ(".info", code)) + + err := w.JSON(map[string]any{ + "info": map[string]any{"a": 1, "b": 2}, + }) + require.NoError(t, err) + + expected := "{\n \"a\": 1,\n \"b\": 2\n}\n" + assert.Equal(t, expected, buf.String()) +} + +func TestWriter_JSON_JQ_PrettyPrintArray(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".items") + w := NewWriter(&buf, JSON, 
WithJQ(".items", code)) + + err := w.JSON(map[string]any{ + "items": []any{1, 2, 3}, + }) + require.NoError(t, err) + + expected := "[\n 1,\n 2,\n 3\n]\n" + assert.Equal(t, expected, buf.String()) +} + +func TestWriter_JSON_InvalidInput(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, JSON) + + err := w.JSON(make(chan int)) + assert.Error(t, err) +} + +func TestWriter_JSON_JQ_RuntimeError(t *testing.T) { + var buf bytes.Buffer + // .foo on a number will produce a runtime error + code := compileJQ(t, ".foo") + w := NewWriter(&buf, JSON, WithJQ(".foo", code)) + + err := w.JSON("not-an-object") + require.Error(t, err) + assert.Contains(t, err.Error(), "jq:") +} + +func TestWriter_JSON_JQ_NilResult(t *testing.T) { + var buf bytes.Buffer + code := compileJQ(t, ".missing") + w := NewWriter(&buf, JSON, WithJQ(".missing", code)) + + err := w.JSON(map[string]any{"name": "test"}) + require.NoError(t, err) + + assert.Equal(t, "null\n", buf.String()) +} + +func TestWriter_HasJQ_True(t *testing.T) { + code := compileJQ(t, ".") + w := NewWriter(&bytes.Buffer{}, JSON, WithJQ(".", code)) + assert.True(t, w.HasJQ()) +} + +func TestWriter_HasJQ_False(t *testing.T) { + w := NewWriter(&bytes.Buffer{}, JSON) + assert.False(t, w.HasJQ()) +} + +func TestWriter_Format(t *testing.T) { + w := NewWriter(&bytes.Buffer{}, Table) + assert.Equal(t, Table, w.Format()) + + w2 := NewWriter(&bytes.Buffer{}, JSON) + assert.Equal(t, JSON, w2.Format()) +} + +func TestWriter_Table(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, Table) + + headers := []string{"ID", "Name"} + rows := [][]string{{"1", "Alice"}, {"2", "Bob"}} + w.Table(headers, rows) + + output := buf.String() + assert.Contains(t, output, "ID") + assert.Contains(t, output, "Name") + assert.Contains(t, output, "Alice") + assert.Contains(t, output, "Bob") +} + +func TestWriter_KeyValue(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, Table) + + pairs := []KeyValue{ + {Key: "Name", Value: "Test"}, + 
{Key: "Status", Value: "active"}, + } + w.KeyValue(pairs) + + output := buf.String() + assert.Contains(t, output, "Name: Test") + assert.Contains(t, output, "Status: active") +} + +func TestWriter_Message(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, Table) + + w.Message("hello world") + assert.Equal(t, "hello world\n", buf.String()) +} + +func TestWriter_Error(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, Table) + + w.Error("something broke") + assert.Equal(t, "Error: something broke\n", buf.String()) +} + +func TestWriter_Pagination(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, Table) + + w.Pagination(2, 5, 48) + assert.Equal(t, "Page 2 of 5 (48 total)\n", buf.String()) +} + +func TestWriter_Underlying(t *testing.T) { + var buf bytes.Buffer + w := NewWriter(&buf, JSON) + assert.Equal(t, &buf, w.Underlying()) +} diff --git a/internal/surface/surface.go b/internal/surface/surface.go new file mode 100644 index 0000000..511b9a6 --- /dev/null +++ b/internal/surface/surface.go @@ -0,0 +1,87 @@ +package surface + +import ( + "fmt" + "sort" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +// Generate walks a cobra command tree and produces a deterministic, sorted +// snapshot of all commands, flags, and positional arguments. Built-in commands +// (help, completion) and the --help flag are excluded. +func Generate(root *cobra.Command) string { + var lines []string + walk(root, &lines) + sort.Strings(lines) + return strings.Join(lines, "\n") + "\n" +} + +func walk(cmd *cobra.Command, lines *[]string) { + name := cmd.Name() + + // Skip Cobra built-in commands. + if name == "help" || name == "completion" { + return + } + + path := fullPath(cmd) + + // CMD line + *lines = append(*lines, fmt.Sprintf("CMD %s", path)) + + // ARG lines — extracted from the Use string. 
+ args := parseArgs(cmd.Use) + for i, arg := range args { + *lines = append(*lines, fmt.Sprintf("ARG %s %d %s", path, i, arg)) + } + + // FLAG lines. + // For the root command, emit persistent flags (they apply globally). + // For all commands, emit local non-persistent flags. + if !cmd.HasParent() { + cmd.PersistentFlags().VisitAll(func(f *pflag.Flag) { + if f.Name == "help" { + return + } + *lines = append(*lines, fmt.Sprintf("FLAG %s --%s type=%s", path, f.Name, f.Value.Type())) + }) + } + cmd.Flags().VisitAll(func(f *pflag.Flag) { + if f.Name == "help" { + return + } + // Skip persistent flags (already emitted on root). + if cmd.PersistentFlags().Lookup(f.Name) != nil { + return + } + *lines = append(*lines, fmt.Sprintf("FLAG %s --%s type=%s", path, f.Name, f.Value.Type())) + }) + + for _, child := range cmd.Commands() { + walk(child, lines) + } +} + +// fullPath returns the full command path (e.g. "vector site list"). +func fullPath(cmd *cobra.Command) string { + parts := []string{} + for c := cmd; c != nil; c = c.Parent() { + parts = append([]string{c.Name()}, parts...) + } + return strings.Join(parts, " ") +} + +// parseArgs extracts positional argument names from a cobra Use string. +// e.g. 
"list " -> ["site-id"], "add " -> ["site-id", "hostname"] +func parseArgs(use string) []string { + var args []string + for _, token := range strings.Fields(use) { + if strings.HasPrefix(token, "<") && strings.HasSuffix(token, ">") { + args = append(args, token[1:len(token)-1]) + } + } + return args +} diff --git a/internal/surface/surface_test.go b/internal/surface/surface_test.go new file mode 100644 index 0000000..c3a586b --- /dev/null +++ b/internal/surface/surface_test.go @@ -0,0 +1,105 @@ +package surface + +import ( + "testing" + + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" +) + +func TestGenerate(t *testing.T) { + root := &cobra.Command{Use: "vector"} + root.PersistentFlags().String("token", "", "API token") + root.PersistentFlags().Bool("json", false, "JSON output") + + // Group command (no RunE). + site := &cobra.Command{Use: "site", Short: "Manage sites"} + root.AddCommand(site) + + // Leaf with positional args and local flags. + list := &cobra.Command{ + Use: "list", + RunE: func(cmd *cobra.Command, args []string) error { return nil }, + } + list.Flags().Int("page", 1, "Page number") + site.AddCommand(list) + + show := &cobra.Command{ + Use: "show ", + Args: cobra.ExactArgs(1), + RunE: func(cmd *cobra.Command, args []string) error { return nil }, + } + site.AddCommand(show) + + // Command with multiple positional args. + clone := &cobra.Command{ + Use: "clone ", + Args: cobra.ExactArgs(2), + RunE: func(cmd *cobra.Command, args []string) error { return nil }, + } + clone.Flags().String("php-version", "", "PHP version") + site.AddCommand(clone) + + // Auth group. + auth := &cobra.Command{Use: "auth", Short: "Authentication"} + root.AddCommand(auth) + + login := &cobra.Command{ + Use: "login", + RunE: func(cmd *cobra.Command, args []string) error { return nil }, + } + auth.AddCommand(login) + + // Built-in help command should be excluded. + // Cobra adds help automatically; we just verify it's excluded. 
+ + got := Generate(root) + + expected := `ARG vector site clone 0 source-id +ARG vector site clone 1 name +ARG vector site show 0 site-id +CMD vector +CMD vector auth +CMD vector auth login +CMD vector site +CMD vector site clone +CMD vector site list +CMD vector site show +FLAG vector --json type=bool +FLAG vector --token type=string +FLAG vector site clone --php-version type=string +FLAG vector site list --page type=int +` + + assert.Equal(t, expected, got) +} + +func TestGenerateExcludesCompletion(t *testing.T) { + root := &cobra.Command{Use: "vector"} + + // Add a completion command (Cobra adds one by default in some setups). + completion := &cobra.Command{Use: "completion", Short: "Generate completions"} + root.AddCommand(completion) + + real := &cobra.Command{ + Use: "status", + RunE: func(cmd *cobra.Command, args []string) error { return nil }, + } + root.AddCommand(real) + + got := Generate(root) + + assert.Contains(t, got, "CMD vector\n") + assert.Contains(t, got, "CMD vector status\n") + assert.NotContains(t, got, "completion") +} + +func TestGenerateExcludesHelpFlag(t *testing.T) { + root := &cobra.Command{Use: "vector"} + root.Flags().String("version", "", "Show version") + + got := Generate(root) + + assert.NotContains(t, got, "--help") + assert.Contains(t, got, "--version") +} diff --git a/internal/version/version.go b/internal/version/version.go new file mode 100644 index 0000000..10a6413 --- /dev/null +++ b/internal/version/version.go @@ -0,0 +1,17 @@ +// Package version provides build-time version information via ldflags injection. +package version + +import "fmt" + +// Version, Commit, and Date are set at build time via ldflags. +// Example: go build -ldflags "-X github.com/built-fast/vector-cli/internal/version.Version=1.0.0" +var ( + Version = "dev" + Commit = "unknown" + Date = "unknown" +) + +// FullVersion returns a formatted version string. 
+func FullVersion() string { + return fmt.Sprintf("vector v%s (%s) built %s", Version, Commit, Date) +} diff --git a/internal/version/version_test.go b/internal/version/version_test.go new file mode 100644 index 0000000..5921e0f --- /dev/null +++ b/internal/version/version_test.go @@ -0,0 +1,50 @@ +package version + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestDefaultValues(t *testing.T) { + assert.Equal(t, "dev", Version) + assert.Equal(t, "unknown", Commit) + assert.Equal(t, "unknown", Date) +} + +func TestFullVersion(t *testing.T) { + origVersion, origCommit, origDate := Version, Commit, Date + t.Cleanup(func() { + Version, Commit, Date = origVersion, origCommit, origDate + }) + + tests := []struct { + name string + version string + commit string + date string + want string + }{ + { + name: "defaults", + version: "dev", + commit: "unknown", + date: "unknown", + want: "vector vdev (unknown) built unknown", + }, + { + name: "injected values", + version: "1.0.0", + commit: "abc1234", + date: "2026-03-14", + want: "vector v1.0.0 (abc1234) built 2026-03-14", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + Version, Commit, Date = tt.version, tt.commit, tt.date + assert.Equal(t, tt.want, FullVersion()) + }) + } +} diff --git a/man/man1/vector.1 b/man/man1/vector.1 index 727dc15..b53b994 100644 --- a/man/man1/vector.1 +++ b/man/man1/vector.1 @@ -1,4 +1,4 @@ -.TH VECTOR 1 "2026-03-13" "vector 0.7.0" "Vector Pro CLI" +.TH VECTOR 1 "2026-03-14" "vector 0.8.0" "Vector Pro CLI" .SH NAME vector \- command-line interface for the Vector Pro API .SH SYNOPSIS @@ -20,16 +20,27 @@ and flags. .SH OPTIONS .TP +.BI \-\-token " TOKEN" +API token for this invocation. Overrides the +.B VECTOR_API_KEY +environment variable and stored credentials. +.TP .B \-\-json Force JSON output. .TP .B \-\-no\-json Force table output. 
.TP +.BI \-\-jq " EXPRESSION" +Filter JSON output with a jq expression (built-in, no external +.B jq +required). Automatically forces JSON output. Cannot be used with +.BR \-\-no\-json . +.TP .BR \-h ", " \-\-help Print help information. .TP -.BR \-V ", " \-\-version +.B \-\-version Print version information. .SH COMMANDS .SS auth \- Manage authentication @@ -52,8 +63,11 @@ List all sites. Defaults to page 1 with 15 results per page. .B site show \fIID\fR Show details for a site. .TP -.B site create \fB\-\-customer\-id\fR \fIID\fR \fB\-\-dev\-php\-version\fR \fIVERSION\fR \fR[\fB\-\-production\-domain\fR \fIDOMAIN\fR] [\fB\-\-staging\-domain\fR \fIDOMAIN\fR] [\fB\-\-tags\fR \fITAGS\fR] [\fB\-\-wp\-admin\-email\fR \fIEMAIL\fR] [\fB\-\-wp\-admin\-user\fR \fIUSER\fR] [\fB\-\-wp\-site\-title\fR \fITITLE\fR] -Create a new site. Optionally set custom domains for the production and staging +.B site create \fB\-\-customer\-id\fR \fIID\fR \fB\-\-dev\-php\-version\fR \fIVERSION\fR \fR[\fB\-\-production\-domain\fR \fIDOMAIN\fR] [\fB\-\-staging\-domain\fR \fIDOMAIN\fR] [\fB\-\-tags\fR \fITAGS\fR] [\fB\-\-wp\-admin\-email\fR \fIEMAIL\fR] [\fB\-\-wp\-admin\-user\fR \fIUSER\fR] [\fB\-\-wp\-site\-title\fR \fITITLE\fR] [\fB\-\-wait\fR] [\fB\-\-poll\-interval\fR \fIDURATION\fR] [\fB\-\-timeout\fR \fIDURATION\fR] +Create a new site. Use +.B \-\-wait +to block until the site reaches active status. One\-time credentials (SFTP, DB, +WP admin) are displayed before the wait begins. Optionally set custom domains for the production and staging environments. When custom domains are provided, DNS setup instructions and certificate validation records will be displayed. .IP @@ -181,11 +195,16 @@ List deployments for an environment. .B deploy show \fIDEPLOY_ID\fR Show deployment details. .TP -.B deploy trigger \fIENV_ID\fR \fR[\fB\-\-include\-uploads\fR] [\fB\-\-include\-database\fR] -Trigger a new deployment. 
+.B deploy trigger \fIENV_ID\fR \fR[\fB\-\-include\-uploads\fR] [\fB\-\-include\-database\fR] [\fB\-\-wait\fR] [\fB\-\-poll\-interval\fR \fIDURATION\fR] [\fB\-\-timeout\fR \fIDURATION\fR] +Trigger a new deployment. Use +.B \-\-wait +to block until the deployment reaches a terminal status. Default poll interval +is 60s (min 1s), default timeout is 5m (max 30m). .TP -.B deploy rollback \fIENV_ID\fR \fR[\fB\-\-target\-deployment\-id\fR \fIID\fR] -Rollback to a previous deployment. +.B deploy rollback \fIENV_ID\fR \fR[\fB\-\-target\-deployment\-id\fR \fIID\fR] [\fB\-\-wait\fR] [\fB\-\-poll\-interval\fR \fIDURATION\fR] [\fB\-\-timeout\fR \fIDURATION\fR] +Rollback to a previous deployment. Use +.B \-\-wait +to block until the rollback deployment reaches a terminal status. .SS ssl \- Manage SSL certificates .TP .B ssl status \fIENV_ID\fR @@ -270,8 +289,10 @@ List restores. Filter by site, environment, type (site, environment), or backup. .B restore show \fIRESTORE_ID\fR Show restore details. .TP -.B restore create \fIBACKUP_ID\fR \fR[\fB\-\-scope\fR \fISCOPE\fR] [\fB\-\-drop\-tables\fR] [\fB\-\-disable\-foreign\-keys\fR] [\fB\-\-search\-replace\-from\fR \fIFROM\fR] [\fB\-\-search\-replace\-to\fR \fITO\fR] -Restore from a backup. Scope can be +.B restore create \fIBACKUP_ID\fR \fR[\fB\-\-scope\fR \fISCOPE\fR] [\fB\-\-drop\-tables\fR] [\fB\-\-disable\-foreign\-keys\fR] [\fB\-\-search\-replace\-from\fR \fIFROM\fR] [\fB\-\-search\-replace\-to\fR \fITO\fR] [\fB\-\-wait\fR] [\fB\-\-poll\-interval\fR \fIDURATION\fR] [\fB\-\-timeout\fR \fIDURATION\fR] +Restore from a backup. Use +.B \-\-wait +to block until the restore completes or fails. Scope can be .B full\fR, .B database\fR, or @@ -408,10 +429,46 @@ List all PHP versions available on the platform. Set up Claude Desktop with the Vector MCP server. Use .B \-\-force to overwrite existing configuration. 
+.SS skill \- Manage agent skill +.TP +.B skill install +Install the SKILL.md agent reference document to ~/.agents/skills/vector/ and +create a symlink at ~/.claude/skills/vector/. +.TP +.B skill uninstall +Remove the installed skill files and symlink. +.SS completion \- Generate shell completions +.TP +.B completion bash +Generate Bash completion script. +.TP +.B completion zsh +Generate Zsh completion script. +.TP +.B completion fish +Generate Fish completion script. +.TP +.B completion powershell +Generate PowerShell completion script. +.PP +See +.B vector completion \-\-help +for installation instructions specific to your shell. +.SH TOKEN RESOLUTION +The API token is resolved in the following order (first match wins): +.IP 1. 4 +.B \-\-token +command\-line flag +.IP 2. +.B VECTOR_API_KEY +environment variable +.IP 3. +Token stored in the system keyring (set via +.BR "vector auth login" ) .SH ENVIRONMENT .TP .B VECTOR_API_KEY -API token for authentication. Overrides stored credentials. +API token for authentication. Overrides keyring. .TP .B VECTOR_API_URL API base URL. Defaults to @@ -422,9 +479,6 @@ Configuration directory. Defaults to .IR ~/.config/vector . .SH FILES .TP -.I ~/.config/vector/credentials.json -Stored API token (file permissions 0600). -.TP .I ~/.config/vector/config.json Optional configuration settings. .SH EXIT STATUS @@ -446,6 +500,75 @@ Resource not found (HTTP 404). .TP .B 5 Network or server error (HTTP 5xx). +.SH JQ FILTERING +The +.B \-\-jq +flag filters JSON output through a built\-in jq processor powered by gojq. +No external +.B jq +binary is required. When +.B \-\-jq +is used, output is automatically set to JSON format. 
+.PP +Extract specific fields: +.PP +.RS +.nf +$ vector site list \-\-jq '.[].id' +$ vector site show 456 \-\-jq '.dev_domain' +.fi +.RE +.PP +Filter with select: +.PP +.RS +.nf +$ vector env list \-\-site\-id 123 \-\-jq '[.[] | select(.status == "active")]' +.fi +.RE +.PP +Count items: +.PP +.RS +.nf +$ vector webhook list \-\-jq 'length' +.fi +.RE +.SS Format Strings +The following jq format strings are supported for converting values: +.BR @csv ", " @tsv ", " @html ", " @uri ", " @base64 . +.PP +CSV output: +.PP +.RS +.nf +$ vector site list \-\-jq '[.[] | [.id, .name]] | .[] | @csv' +.fi +.RE +.PP +TSV output: +.PP +.RS +.nf +$ vector site list \-\-jq '[.[] | [.id, .name]] | .[] | @tsv' +.fi +.RE +.PP +URL\-encode a value: +.PP +.RS +.nf +$ vector site show 456 \-\-jq '.name | @uri' +.fi +.RE +.PP +Base64\-encode a value: +.PP +.RS +.nf +$ vector site show 456 \-\-jq '.name | @base64' +.fi +.RE .SH EXAMPLES Authenticate with the API: .PP @@ -505,6 +628,14 @@ $ vector waf blocked\-ip add 42 203.0.113.50 .fi .RE .PP +Use a one\-off token without logging in: +.PP +.RS +.nf +$ vector \-\-token mytoken123 site list +.fi +.RE +.PP Use in a pipeline (auto\-JSON output): .PP .RS @@ -512,6 +643,15 @@ Use in a pipeline (auto\-JSON output): $ vector site list | jq '.[].id' .fi .RE +.PP +Use built\-in jq filtering: +.PP +.RS +.nf +$ vector site list \-\-jq '.[].id' +$ vector site list \-\-jq '[.[] | [.id, .name]] | .[] | @csv' +.fi +.RE .SH AUTHORS BuiltFast .SH BUGS diff --git a/scripts/check-skill-drift.sh b/scripts/check-skill-drift.sh new file mode 100755 index 0000000..154a7d7 --- /dev/null +++ b/scripts/check-skill-drift.sh @@ -0,0 +1,125 @@ +#!/usr/bin/env bash +set -eo pipefail + +REPO_ROOT="$(cd "$(dirname "$0")/.." 
&& pwd)" +SURFACE="$REPO_ROOT/.surface" +SKILL="$REPO_ROOT/skills/vector/SKILL.md" +BASELINE="$REPO_ROOT/.surface-skill-drift" + +# Strip YAML frontmatter (content between first pair of --- delimiters) +strip_frontmatter() { + awk ' + /^---[[:space:]]*$/ { count++; next } + count >= 2 { print } + ' "$1" +} + +# Resolve longest matching CMD in .surface for a "vector sub1 sub2..." pattern. +# Prints the resolved command path and returns 0, or returns 1 if no match. +resolve_cmd() { + local words + read -ra words <<< "$1" + local i=${#words[@]} + while (( i >= 1 )); do + local try="${words[*]:0:i}" + if grep -qx "CMD ${try}" "$SURFACE"; then + echo "$try" + return 0 + fi + (( i-- )) + done + return 1 +} + +# Check if a flag exists on a command, its ancestors, or its descendants. +flag_exists() { + local cmd="$1" + local flag="$2" + + # Exact command + grep -q "^FLAG ${cmd} ${flag} " "$SURFACE" && return 0 + + # Descendants (subcommands of this command) + grep -qE "^FLAG ${cmd} [a-z].* ${flag} " "$SURFACE" && return 0 + + # Ancestors (inherited persistent flags, e.g. --json on root) + local words + read -ra words <<< "$cmd" + local i=$(( ${#words[@]} - 1 )) + while (( i >= 1 )); do + local ancestor="${words[*]:0:i}" + grep -q "^FLAG ${ancestor} ${flag} " "$SURFACE" && return 0 + (( i-- )) + done + + return 1 +} + +# --- Main --- + +content=$(strip_frontmatter "$SKILL") +drifts=() + +# Phase 1: Extract "vector ..." patterns and verify CMD exists +while IFS= read -r cmd_pattern; do + [ -z "$cmd_pattern" ] && continue + if ! resolve_cmd "$cmd_pattern" > /dev/null; then + drifts+=("CMD: $cmd_pattern") + fi +done < <(echo "$content" | { grep -oE 'vector( [a-z][a-z0-9-]*)+' || true; } | sort -u) + +# Phase 2: For lines with "vector ... 
--", verify flags exist +while IFS= read -r line; do + [ -z "$line" ] && continue + + # Extract command path + cmd_part=$(echo "$line" | grep -oE 'vector( [a-z][a-z0-9-]*)+' | head -1) || true + [ -z "$cmd_part" ] && continue + + # Resolve to longest matching CMD + resolved=$(resolve_cmd "$cmd_part") || continue + + # Check each flag on the line + while IFS= read -r flag; do + [ -z "$flag" ] && continue + if ! flag_exists "$resolved" "$flag"; then + drifts+=("FLAG: ${resolved} ${flag}") + fi + done < <(echo "$line" | { grep -oE -- '--[a-z][a-z0-9-]*' || true; } | sort -u) +done < <(echo "$content" | { grep -E 'vector [a-z].*--[a-z]' || true; }) + +# No drift found +if [ ${#drifts[@]} -eq 0 ]; then + echo "No skill drift detected." + exit 0 +fi + +# Deduplicate +drifts_deduped=() +while IFS= read -r d; do + [ -z "$d" ] && continue + drifts_deduped+=("$d") +done < <(printf '%s\n' "${drifts[@]}" | sort -u) +drifts=("${drifts_deduped[@]}") + +# Filter out baselined drifts +new_drifts=() +for d in "${drifts[@]}"; do + if [ -f "$BASELINE" ] && grep -qxF "$d" "$BASELINE"; then + continue + fi + new_drifts+=("$d") +done + +if [ ${#new_drifts[@]} -eq 0 ]; then + echo "All drift is baselined. OK." + exit 0 +fi + +echo "Skill drift detected (${#new_drifts[@]} issue(s)):" +for d in "${new_drifts[@]}"; do + echo " $d" +done +echo "" +echo "To baseline accepted mismatches, add them to .surface-skill-drift" +exit 1 diff --git a/skills/embed.go b/skills/embed.go new file mode 100644 index 0000000..1b9f16a --- /dev/null +++ b/skills/embed.go @@ -0,0 +1,8 @@ +package skills + +import "embed" + +// Content embeds the skills directory tree (e.g., vector/SKILL.md). 
+// +//go:embed vector +var Content embed.FS diff --git a/skills/vector/SKILL.md b/skills/vector/SKILL.md new file mode 100644 index 0000000..f4d1339 --- /dev/null +++ b/skills/vector/SKILL.md @@ -0,0 +1,842 @@ +--- +name: vector +description: Reference document for the vector CLI — manages sites, environments, deployments, backups, WAF, SSL, and more on the Vector Pro hosting platform. +triggers: + - vector +--- + +# Vector CLI — Agent Reference + +`vector` is the CLI for managing sites on the **Vector Pro** hosting platform +(API: `https://api.builtfast.com`). This document is the authoritative reference +for AI agents invoking vector commands. + +## Agent Invariants + +### Output Modes + +| Priority | Mechanism | Result | +|----------|-----------|--------| +| 1 | `--json` flag | JSON output | +| 2 | `--no-json` flag | Table output | +| 3 | `--jq ` flag | JSON output with jq filter applied | +| 4 | TTY auto-detect | TTY → table, piped → JSON | + +- `--jq` implies `--json` and is mutually exclusive with `--no-json`. +- Agents should always pass `--json` for machine-readable output. +- Use `--jq '.data'` or similar to extract specific fields. + +### Authentication + +Token resolution order: + +1. `--token ` flag (highest priority) +2. `VECTOR_API_KEY` environment variable +3. OS keyring (stored via `vector auth login`) + +All commands except `vector auth login` require a valid token. + +### Exit Codes + +| Code | Meaning | HTTP Status | +|------|---------|-------------| +| 0 | Success | 2xx | +| 1 | General error | other | +| 2 | Auth failure | 401, 403 | +| 3 | Validation error | 422 | +| 4 | Not found | 404 | +| 5 | Server error | 5xx | + +### Pagination + +List commands accept `--page` (default 1) and `--per-page` (default 15). +JSON output includes `meta.current_page`, `meta.last_page`, and `meta.total`. + +### Destructive Operations + +Commands that delete or suspend resources require interactive confirmation +unless `--force` is passed. 
+ +### Waiting for Async Operations + +Four commands support `--wait` to block until the operation reaches a terminal +status instead of returning immediately: + +| Command | Terminal Status | Failed Statuses | +|---------|----------------|-----------------| +| `site create` | `active` | `failed` | +| `deploy trigger` | `deployed` | `failed`, `cancelled` | +| `deploy rollback` | `deployed` | `failed`, `cancelled` | +| `restore create` | `completed` | `failed` | + +**Shared flags:** + +| Flag | Default | Description | +|------|---------|-------------| +| `--wait` | false | Enable blocking wait | +| `--poll-interval` | 60s | Poll frequency (min 1s, must be ≤ timeout) | +| `--timeout` | 5m | Maximum wait time (max 30m) | + +**Behavior:** +- TTY: displays a live-updating alternate screen with status, then prints + a summary line and final state on exit. +- JSON mode (`--json`): silently polls and emits only the final JSON object. +- Piped/non-TTY: silently polls with no ANSI output. +- Returns exit code 1 if the operation reaches a failed status or times out. +- Ctrl+C cleanly aborts the wait. +- `site create --wait` prints one-time credentials (SFTP, DB, WP admin) + before entering the wait loop; with `--json`, credentials are merged into + the final JSON output. + +**Agents** should prefer `--wait --json` to get a single blocking call that +returns the final resource state, eliminating the need for manual poll loops. + +--- + +## Authentication + +### vector auth login + +Authenticate and store token in the OS keyring. + +``` +vector auth login +``` + +Prompts for a token interactively, validates it via the API, and stores it. +Exits with code 2 if the token is invalid. + +### vector auth logout + +Remove stored credentials from the keyring. + +``` +vector auth logout +``` + +### vector auth status + +Show current authentication status. 
+ +``` +vector auth status --json +``` + +Displays: user name/email, account name, token name, abilities, expiration, +token source (flag/env/keyring), and config directory. +Exits with code 2 if not authenticated. + +--- + +## Configuration + +| Setting | Source | +|---------|--------| +| Config directory | `VECTOR_CONFIG_DIR` > `XDG_CONFIG_HOME/vector` > `~/.config/vector` | +| Disable keyring | `VECTOR_NO_KEYRING=1` | +| API token | `--token` > `VECTOR_API_KEY` > keyring | + +--- + +## Command Reference + +### Sites + +#### vector site list + +``` +vector site list [--page N] [--per-page N] [--json] +``` + +Lists all sites. Columns: ID, CUSTOMER ID, STATUS, DEV DOMAIN, TAGS. + +#### vector site show + +``` +vector site show [--json] +``` + +Displays site details including environments table. + +#### vector site create + +``` +vector site create --customer-id [--php-version ] [--tags ] \ + [--production-domain ] [--staging-domain ] \ + [--wp-admin-email ] [--wp-admin-user ] [--wp-site-title ] \ + [--wait] [--poll-interval <duration>] [--timeout <duration>] +``` + +Creates a new site. Returns SFTP, DB, and WordPress credentials (shown once). + +| Flag | Required | Description | +|------|----------|-------------| +| `--customer-id` | yes | Customer identifier | +| `--php-version` | no | PHP version | +| `--tags` | no | Comma-separated tags | +| `--production-domain` | no | Production domain | +| `--staging-domain` | no | Staging domain | +| `--wp-admin-email` | no | WordPress admin email | +| `--wp-admin-user` | no | WordPress admin username | +| `--wp-site-title` | no | WordPress site title | +| `--wait` | no | Block until site reaches active status | +| `--poll-interval` | no | How often to poll for status (default 60s, min 1s) | +| `--timeout` | no | Maximum time to wait (default 5m, max 30m) | + +#### vector site update + +``` +vector site update <site-id> [--customer-id <id>] [--tags <t1,t2>] +``` + +Updates site metadata. 
Only flags that are passed are included (PATCH semantics). +Empty `--tags ""` clears tags. + +#### vector site delete + +``` +vector site delete <site-id> [--force] +``` + +Deletes a site (irreversible). Requires confirmation unless `--force`. + +#### vector site clone + +``` +vector site clone <site-id> [--customer-id <id>] [--php-version <ver>] [--tags <t1,t2>] +``` + +Clones an existing site with files and database. Returns new DB credentials. + +#### vector site suspend / unsuspend + +``` +vector site suspend <site-id> +vector site unsuspend <site-id> +``` + +Suspend or resume a site's development container. + +#### vector site purge-cache + +``` +vector site purge-cache <site-id> [--cache-tag <tag>] [--url <url>] +``` + +Purges CDN cache. Optionally filter by cache tag or specific URL. + +#### vector site logs + +``` +vector site logs <site-id> [--start-time <time>] [--end-time <time>] \ + [--limit N] [--level <level>] [--environment <name>] \ + [--deployment-id <id>] [--cursor <cursor>] +``` + +Retrieves site logs. Time values accept RFC3339 or relative format (e.g., `now-1h`). + +| Flag | Type | Description | +|------|------|-------------| +| `--start-time` | string | Start time (RFC3339 or relative) | +| `--end-time` | string | End time (RFC3339 or relative) | +| `--limit` | int | Number of entries (1-1000) | +| `--level` | string | Filter: error, warning, info | +| `--environment` | string | Filter by environment name | +| `--deployment-id` | string | Filter by deployment | +| `--cursor` | string | Pagination cursor | + +#### vector site reset-sftp-password / reset-db-password + +``` +vector site reset-sftp-password <site-id> +vector site reset-db-password <site-id> +``` + +Generates new credentials (shown once). + +#### vector site wp-reconfig + +``` +vector site wp-reconfig <site-id> +``` + +Regenerates `wp-config.php`. 
+ +#### vector site ssh-key list / add / remove + +``` +vector site ssh-key list <site-id> [--page N] [--per-page N] +vector site ssh-key add <site-id> --name <name> --public-key <key> +vector site ssh-key remove <site-id> <key-id> +``` + +Manage SSH keys for a specific site. + +--- + +### Environments + +#### vector env list + +``` +vector env list <site-id> [--page N] [--per-page N] [--json] +``` + +Lists environments. Columns: ID, NAME, PRODUCTION, STATUS, PHP, PLATFORM DOMAIN, CUSTOM DOMAIN. + +#### vector env show + +``` +vector env show <env-id> [--json] +``` + +Displays environment details including certificate status. + +#### vector env create + +``` +vector env create <site-id> --name <name> --php-version <ver> \ + [--custom-domain <domain>] [--production] [--tags <t1,t2>] +``` + +| Flag | Required | Description | +|------|----------|-------------| +| `--name` | yes | Environment name (slug format) | +| `--php-version` | yes | PHP version | +| `--custom-domain` | no | Custom domain | +| `--production` | no | Mark as production (default false) | +| `--tags` | no | Comma-separated tags | + +#### vector env update + +``` +vector env update <env-id> [--custom-domain <domain>] [--clear-custom-domain] [--tags <t1,t2>] +``` + +`--custom-domain` and `--clear-custom-domain` are mutually exclusive. +Returns 202 Accepted if a domain change triggers an async infrastructure update. + +#### vector env delete + +``` +vector env delete <env-id> [--force] +``` + +Deletes environment (irreversible). Requires confirmation unless `--force`. + +#### vector env secret list / show / create / update / delete + +``` +vector env secret list <env-id> [--page N] [--per-page N] +vector env secret show <secret-id> +vector env secret create <env-id> --key <key> --value <val> [--is-secret] +vector env secret update <secret-id> [--key <key>] [--value <val>] [--is-secret] +vector env secret delete <secret-id> [--force] +``` + +Manage environment-level secrets and variables. 
`--is-secret` defaults to true. + +#### vector env db promote / promote-status + +``` +vector env db promote <env-id> [--drop-tables] [--disable-foreign-keys] +vector env db promote-status <env-id> <promote-id> +``` + +Promotes the development database to the environment. Both flags default to true. + +--- + +### Deployments + +#### vector deploy list + +``` +vector deploy list <env-id> [--page N] [--per-page N] +``` + +Columns: ID, STATUS, ACTOR, CREATED. + +#### vector deploy show + +``` +vector deploy show <deploy-id> [--json] +``` + +Shows deployment details including stdout/stderr. + +#### vector deploy trigger + +``` +vector deploy trigger <env-id> [--include-uploads] [--include-database] \ + [--wait] [--poll-interval <duration>] [--timeout <duration>] +``` + +| Flag | Default | Description | +|------|---------|-------------| +| `--include-uploads` | false | Include uploads in deployment | +| `--include-database` | true | Include database in deployment | +| `--wait` | false | Block until deployment reaches a terminal status | +| `--poll-interval` | 60s | How often to poll for status (min 1s) | +| `--timeout` | 5m | Maximum time to wait (max 30m) | + +#### vector deploy rollback + +``` +vector deploy rollback <env-id> [--target <deploy-id>] \ + [--wait] [--poll-interval <duration>] [--timeout <duration>] +``` + +Rolls back to last successful deployment, or to a specific `--target`. + +| Flag | Default | Description | +|------|---------|-------------| +| `--target` | | Specific deployment ID to roll back to | +| `--wait` | false | Block until rollback deployment reaches a terminal status | +| `--poll-interval` | 60s | How often to poll for status (min 1s) | +| `--timeout` | 5m | Maximum time to wait (max 30m) | + +--- + +### SSL Certificates + +#### vector ssl status + +``` +vector ssl status <env-id> [--json] +``` + +Shows SSL provisioning status, step, failure reason, and domains. 
+ +#### vector ssl nudge + +``` +vector ssl nudge <env-id> [--retry] +``` + +Manually nudges SSL provisioning for stuck or failed states. + +--- + +### Backups + +#### vector backup list + +``` +vector backup list [--site-id <id>] [--environment-id <id>] [--type <type>] \ + [--page N] [--per-page N] +``` + +Columns: ID, MODEL, TYPE, SCOPE, STATUS, DESCRIPTION, CREATED. + +#### vector backup show + +``` +vector backup show <id> [--json] +``` + +#### vector backup create + +``` +vector backup create --site-id <id> [--environment-id <id>] \ + [--scope <full|database|files>] [--description <desc>] +``` + +`--site-id` or `--environment-id` required. `--scope` defaults to `full`. + +#### vector backup download create / status + +``` +vector backup download create <backup-id> +vector backup download status <backup-id> <download-id> +``` + +Creates a download request, then poll status until a URL is returned. + +--- + +### Restores + +#### vector restore list + +``` +vector restore list [--site-id <id>] [--environment-id <id>] \ + [--type <site|environment>] [--backup-id <id>] [--page N] [--per-page N] +``` + +#### vector restore show + +``` +vector restore show <id> [--json] +``` + +#### vector restore create + +``` +vector restore create <backup-id> [--drop-tables] [--disable-foreign-keys] \ + [--search-replace-from <url>] [--search-replace-to <url>] \ + [--wait] [--poll-interval <duration>] [--timeout <duration>] +``` + +Initiates a restore from backup. `--drop-tables` and `--disable-foreign-keys` +default to false. 
+ +| Flag | Default | Description | +|------|---------|-------------| +| `--wait` | false | Block until restore reaches a terminal status | +| `--poll-interval` | 60s | How often to poll for status (min 1s) | +| `--timeout` | 5m | Maximum time to wait (max 30m) | + +--- + +### WAF (Web Application Firewall) + +#### Blocked IPs + +``` +vector waf blocked-ip list <site-id> +vector waf blocked-ip add <site-id> <ip> +vector waf blocked-ip remove <site-id> <ip> +``` + +#### Blocked Referrers + +``` +vector waf blocked-referrer list <site-id> +vector waf blocked-referrer add <site-id> <hostname> +vector waf blocked-referrer remove <site-id> <hostname> +``` + +#### Allowed Referrers + +``` +vector waf allowed-referrer list <site-id> +vector waf allowed-referrer add <site-id> <hostname> +vector waf allowed-referrer remove <site-id> <hostname> +``` + +#### Rate Limits + +``` +vector waf rate-limit list <site-id> +vector waf rate-limit show <site-id> <rule-id> +vector waf rate-limit create <site-id> --name <name> --request-count <N> \ + --timeframe <sec> --block-time <sec> [--description <desc>] \ + [--value <pattern>] [--operator <op>] [--variables <v1,v2>] \ + [--transformations <t1,t2>] +vector waf rate-limit update <site-id> <rule-id> [--name <name>] \ + [--request-count <N>] [--timeframe <sec>] [--block-time <sec>] \ + [--description <desc>] [--value <pattern>] [--operator <op>] \ + [--variables <v1,v2>] [--transformations <t1,t2>] +vector waf rate-limit delete <site-id> <rule-id> +``` + +--- + +### Database Operations + +#### vector db export create / status + +``` +vector db export create <site-id> [--format sql] +vector db export status <site-id> <export-id> +``` + +Creates a SQL dump, then poll status for the download URL. 
+ +#### vector db import-session create / run / status + +``` +vector db import-session create <site-id> [--filename <name>] \ + [--content-length <bytes>] [--drop-tables] [--disable-foreign-keys] \ + [--search-replace-from <from>] [--search-replace-to <to>] +vector db import-session run <site-id> <import-id> +vector db import-session status <site-id> <import-id> +``` + +Three-step import: create session (get upload URL), upload file, run import. + +#### vector archive import + +``` +vector archive import <site-id> <file> [--drop-tables] [--disable-foreign-keys] \ + [--search-replace-from <from>] [--search-replace-to <to>] +``` + +One-command archive import: creates session, uploads file, and runs import. + +--- + +### Events + +#### vector event list + +``` +vector event list [--from <ISO-8601>] [--to <ISO-8601>] [--event <type>] \ + [--page N] [--per-page N] +``` + +Lists account events. Columns: ID, EVENT, ACTOR, RESOURCE, CREATED. + +--- + +### Webhooks + +#### vector webhook list / show / create / update / delete + +``` +vector webhook list [--page N] [--per-page N] +vector webhook show <id> +vector webhook create --url <url> --events <e1,e2> [--type <http|slack>] +vector webhook update <id> [--url <url>] [--events <e1,e2>] [--enabled] +vector webhook delete <id> +``` + +`--type` defaults to `http`. Create returns a secret (shown once). + +--- + +### Account + +#### vector account show + +``` +vector account show [--json] +``` + +Displays owner, account name, company, resource counts. 
+ +#### vector account ssh-key list / show / create / delete + +``` +vector account ssh-key list [--page N] [--per-page N] +vector account ssh-key show <key-id> +vector account ssh-key create --name <name> --public-key <key> +vector account ssh-key delete <key-id> +``` + +#### vector account api-key list / create / delete + +``` +vector account api-key list [--page N] [--per-page N] +vector account api-key create --name <name> [--abilities <a1,a2>] [--expires-at <ISO-8601>] +vector account api-key delete <key-id> +``` + +Create returns a token (shown once). + +#### vector account secret list / show / create / update / delete + +``` +vector account secret list [--page N] [--per-page N] +vector account secret show <id> +vector account secret create --key <key> --value <val> [--no-secret] +vector account secret update <id> [--value <val>] [--no-secret] +vector account secret delete <id> +``` + +`--no-secret` stores as a plain (non-secret) variable. + +--- + +### Utilities + +#### vector php-versions + +``` +vector php-versions [--json] +``` + +Lists available PHP versions. + +#### vector mcp setup + +``` +vector mcp setup [--target <desktop|code>] [--global] [--force] +``` + +Configures Vector MCP server for Claude Desktop or Claude Code. 
+ +| Flag | Default | Description | +|------|---------|-------------| +| `--target` | desktop | Target: `desktop` or `code` | +| `--global` | false | For Code: write to `~/.claude.json` instead of `.mcp.json` | +| `--force` | false | Overwrite existing configuration | + +--- + +## Common Workflows + +### Deploy a Site + +```bash +# Single blocking call (recommended for agents) +vector deploy trigger <env-id> --wait --json + +# Or manually poll +vector deploy trigger <env-id> --json +vector deploy show <deploy-id> --json + +# Rollback if needed (blocking) +vector deploy rollback <env-id> --wait --json +``` + +### Create a Site and Wait for Active + +```bash +# Single blocking call — credentials are merged into the final JSON +vector site create --customer-id <id> --wait --json + +# With custom timeout for large sites +vector site create --customer-id <id> --wait --timeout 15m --json +``` + +### Backup and Restore + +```bash +# 1. Create backup +vector backup create --site-id <site-id> --scope full --json + +# 2. Download backup +vector backup download create <backup-id> --json +vector backup download status <backup-id> <download-id> --json + +# 3. 
Restore from backup (blocking) +vector restore create <backup-id> --wait --json + +# Or manually poll +vector restore create <backup-id> --json +vector restore show <restore-id> --json +``` + +### WAF: Block an IP + +```bash +vector waf blocked-ip add <site-id> <ip> +vector waf blocked-ip list <site-id> --json +``` + +### SSL Troubleshooting + +```bash +# Check SSL status +vector ssl status <env-id> --json + +# Nudge if stuck +vector ssl nudge <env-id> --retry +``` + +### Database Export/Import + +```bash +# Export +vector db export create <site-id> --json +vector db export status <site-id> <export-id> --json + +# Import (one-command) +vector archive import <site-id> dump.sql + +# Import (multi-step) +vector db import-session create <site-id> --filename dump.sql --json +# Upload file to the returned presigned URL +vector db import-session run <site-id> <import-id> +vector db import-session status <site-id> <import-id> --json +``` + +### Environment Management + +```bash +# Create staging environment +vector env create <site-id> --name staging --php-version 8.3 --json + +# Set a custom domain +vector env update <env-id> --custom-domain staging.example.com + +# Add environment secret +vector env secret create <env-id> --key DB_PASSWORD --value secret123 + +# Promote dev database +vector env db promote <env-id> +``` + +--- + +## Decision Trees + +### Which deploy command? + +``` +Need to deploy code? +├── Yes → vector deploy trigger <env-id> +│ ├── Include uploads? → --include-uploads +│ ├── Skip database? → --include-database=false +│ └── Wait for completion? → --wait [--timeout 10m] +└── Need to undo? → vector deploy rollback <env-id> + ├── Specific version? → --target <deploy-id> + └── Wait for completion? → --wait +``` + +### Which backup/restore path? + +``` +Need a backup? 
+├── Create → vector backup create --site-id <id> +│ ├── Full → --scope full (default) +│ ├── Database only → --scope database +│ └── Files only → --scope files +├── Download → vector backup download create <backup-id> +│ └── Poll → vector backup download status <backup-id> <download-id> +└── Restore → vector restore create <backup-id> + ├── With search-replace → --search-replace-from/--search-replace-to + └── Wait for completion? → --wait [--timeout 10m] +``` + +### Which WAF command? + +``` +WAF action needed? +├── Block IP → vector waf blocked-ip add <site-id> <ip> +├── Block referrer → vector waf blocked-referrer add <site-id> <hostname> +├── Allow referrer → vector waf allowed-referrer add <site-id> <hostname> +└── Rate limit → vector waf rate-limit create <site-id> --name ... --request-count ... --timeframe ... --block-time ... +``` + +### Which database operation? + +``` +Database operation? +├── Export → vector db export create <site-id> +│ └── Poll → vector db export status <site-id> <export-id> +├── Import (simple) → vector archive import <site-id> <file> +├── Import (multi-step) → vector db import-session create/run/status +└── Promote dev → vector env db promote <env-id> +``` + +--- + +## Error Handling + +### Authentication Errors (exit code 2) + +Token is missing, expired, or lacks permissions. Check with: +```bash +vector auth status --json +``` + +### Validation Errors (exit code 3) + +Request payload failed server-side validation. The error message includes +field-level details in the format `field: message`. + +### Not Found (exit code 4) + +The referenced resource ID does not exist or is not accessible with the +current token. + +### Server Errors (exit code 5) + +Transient API error. Retry after a brief delay. + +### General Errors (exit code 1) + +Client-side error (network failure, invalid flags, etc.). 
diff --git a/src/api/client.rs b/src/api/client.rs deleted file mode 100644 index e327968..0000000 --- a/src/api/client.rs +++ /dev/null @@ -1,191 +0,0 @@ -use reqwest::blocking::{Client, Response}; -use reqwest::header::{ - ACCEPT, AUTHORIZATION, CONTENT_LENGTH, CONTENT_TYPE, HeaderMap, HeaderValue, -}; -use serde::Serialize; -use serde::de::DeserializeOwned; - -use super::error::ApiError; - -const DEFAULT_BASE_URL: &str = "https://api.builtfast.com"; -const USER_AGENT: &str = concat!("vector-cli/", env!("CARGO_PKG_VERSION")); - -pub struct ApiClient { - client: Client, - base_url: String, - token: Option<String>, -} - -impl ApiClient { - pub fn new(base_url: Option<String>, token: Option<String>) -> Result<Self, ApiError> { - let client = Client::builder() - .user_agent(USER_AGENT) - .build() - .map_err(ApiError::NetworkError)?; - - Ok(Self { - client, - base_url: base_url.unwrap_or_else(|| DEFAULT_BASE_URL.to_string()), - token, - }) - } - - pub fn set_token(&mut self, token: String) { - self.token = Some(token); - } - - fn headers(&self) -> Result<HeaderMap, ApiError> { - let mut headers = HeaderMap::new(); - headers.insert(ACCEPT, HeaderValue::from_static("application/json")); - headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); - - if let Some(ref token) = self.token { - let auth_value = format!("Bearer {}", token); - headers.insert( - AUTHORIZATION, - HeaderValue::from_str(&auth_value) - .map_err(|e| ApiError::ConfigError(e.to_string()))?, - ); - } - - Ok(headers) - } - - fn handle_response<T: DeserializeOwned>(&self, response: Response) -> Result<T, ApiError> { - let status = response.status(); - let body = response.text().map_err(ApiError::NetworkError)?; - - if status.is_success() { - serde_json::from_str(&body) - .map_err(|e| ApiError::Other(format!("JSON parse error: {}", e))) - } else { - Err(ApiError::from_response(status.as_u16(), &body)) - } - } - - pub fn get<T: DeserializeOwned>(&self, path: &str) -> Result<T, ApiError> { 
- let url = format!("{}{}", self.base_url, path); - let response = self - .client - .get(&url) - .headers(self.headers()?) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn get_with_query<T: DeserializeOwned, Q: Serialize>( - &self, - path: &str, - query: &Q, - ) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .get(&url) - .headers(self.headers()?) - .query(query) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn post<T: DeserializeOwned, B: Serialize>( - &self, - path: &str, - body: &B, - ) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .post(&url) - .headers(self.headers()?) - .json(body) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn post_empty<T: DeserializeOwned>(&self, path: &str) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .post(&url) - .headers(self.headers()?) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn put<T: DeserializeOwned, B: Serialize>( - &self, - path: &str, - body: &B, - ) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .put(&url) - .headers(self.headers()?) - .json(body) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn put_empty<T: DeserializeOwned>(&self, path: &str) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .put(&url) - .headers(self.headers()?) 
- .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } - - pub fn put_file( - &self, - url: &str, - file: std::fs::File, - content_length: u64, - ) -> Result<(), ApiError> { - let response = self - .client - .put(url) - .header(CONTENT_TYPE, "application/gzip") - .header(CONTENT_LENGTH, content_length) - .body(reqwest::blocking::Body::from(file)) - .send() - .map_err(ApiError::NetworkError)?; - - if response.status().is_success() { - Ok(()) - } else { - let status = response.status(); - let body = response.text().map_err(ApiError::NetworkError)?; - Err(ApiError::Other(format!( - "Upload failed ({}): {}", - status, body - ))) - } - } - - pub fn delete<T: DeserializeOwned>(&self, path: &str) -> Result<T, ApiError> { - let url = format!("{}{}", self.base_url, path); - let response = self - .client - .delete(&url) - .headers(self.headers()?) - .send() - .map_err(ApiError::NetworkError)?; - - self.handle_response(response) - } -} diff --git a/src/api/error.rs b/src/api/error.rs deleted file mode 100644 index e91b255..0000000 --- a/src/api/error.rs +++ /dev/null @@ -1,180 +0,0 @@ -use serde::Deserialize; -use std::collections::HashMap; -use thiserror::Error; - -pub const EXIT_SUCCESS: i32 = 0; -pub const EXIT_GENERAL_ERROR: i32 = 1; -pub const EXIT_AUTH_ERROR: i32 = 2; -pub const EXIT_VALIDATION_ERROR: i32 = 3; -pub const EXIT_NOT_FOUND: i32 = 4; -pub const EXIT_NETWORK_ERROR: i32 = 5; - -#[derive(Debug, Error)] -pub enum ApiError { - #[error("Authentication failed: {0}")] - Unauthorized(String), - - #[error("Access denied: {0}")] - Forbidden(String), - - #[error("Not found: {0}")] - NotFound(String), - - #[error("Validation failed: {0}")] - ValidationError(String), - - #[error("Server error: {0}")] - ServerError(String), - - #[error("Network error: {0}")] - NetworkError(#[from] reqwest::Error), - - #[error("Configuration error: {0}")] - ConfigError(String), - - #[error("{0}")] - Other(String), -} - -impl ApiError { - pub fn 
exit_code(&self) -> i32 { - match self { - ApiError::Unauthorized(_) | ApiError::Forbidden(_) => EXIT_AUTH_ERROR, - ApiError::NotFound(_) => EXIT_NOT_FOUND, - ApiError::ValidationError(_) => EXIT_VALIDATION_ERROR, - ApiError::ServerError(_) | ApiError::NetworkError(_) => EXIT_NETWORK_ERROR, - ApiError::ConfigError(_) | ApiError::Other(_) => EXIT_GENERAL_ERROR, - } - } - - pub fn from_response(status: u16, body: &str) -> Self { - let message = parse_error_message(body); - - match status { - 401 => ApiError::Unauthorized(message), - 403 => ApiError::Forbidden(message), - 404 => ApiError::NotFound(message), - 422 => ApiError::ValidationError(message), - 500..=599 => ApiError::ServerError(message), - _ => ApiError::Other(message), - } - } -} - -#[derive(Debug, Deserialize)] -struct ErrorResponse { - message: Option<String>, - errors: Option<HashMap<String, Vec<String>>>, -} - -fn parse_error_message(body: &str) -> String { - if let Ok(response) = serde_json::from_str::<ErrorResponse>(body) { - if let Some(errors) = response.errors { - let error_messages: Vec<String> = errors - .into_iter() - .flat_map(|(field, messages)| { - messages - .into_iter() - .map(move |msg| format!("{}: {}", field, msg)) - }) - .collect(); - if !error_messages.is_empty() { - return error_messages.join("; "); - } - } - if let Some(message) = response.message { - return message; - } - } - body.to_string() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_exit_codes() { - assert_eq!( - ApiError::Unauthorized("".into()).exit_code(), - EXIT_AUTH_ERROR - ); - assert_eq!(ApiError::Forbidden("".into()).exit_code(), EXIT_AUTH_ERROR); - assert_eq!(ApiError::NotFound("".into()).exit_code(), EXIT_NOT_FOUND); - assert_eq!( - ApiError::ValidationError("".into()).exit_code(), - EXIT_VALIDATION_ERROR - ); - assert_eq!( - ApiError::ServerError("".into()).exit_code(), - EXIT_NETWORK_ERROR - ); - assert_eq!( - ApiError::ConfigError("".into()).exit_code(), - EXIT_GENERAL_ERROR - ); - 
assert_eq!(ApiError::Other("".into()).exit_code(), EXIT_GENERAL_ERROR); - } - - #[test] - fn test_from_response_status_codes() { - assert!(matches!( - ApiError::from_response(401, "{}"), - ApiError::Unauthorized(_) - )); - assert!(matches!( - ApiError::from_response(403, "{}"), - ApiError::Forbidden(_) - )); - assert!(matches!( - ApiError::from_response(404, "{}"), - ApiError::NotFound(_) - )); - assert!(matches!( - ApiError::from_response(422, "{}"), - ApiError::ValidationError(_) - )); - assert!(matches!( - ApiError::from_response(500, "{}"), - ApiError::ServerError(_) - )); - assert!(matches!( - ApiError::from_response(503, "{}"), - ApiError::ServerError(_) - )); - assert!(matches!( - ApiError::from_response(400, "{}"), - ApiError::Other(_) - )); - } - - #[test] - fn test_parse_error_message_with_message() { - let body = r#"{"message": "Site not found", "http_status": 404}"#; - assert_eq!(parse_error_message(body), "Site not found"); - } - - #[test] - fn test_parse_error_message_with_validation_errors() { - let body = r#"{"errors": {"domain": ["The domain field is required."]}}"#; - assert_eq!( - parse_error_message(body), - "domain: The domain field is required." 
- ); - } - - #[test] - fn test_parse_error_message_plain_text() { - let body = "Internal Server Error"; - assert_eq!(parse_error_message(body), "Internal Server Error"); - } - - #[test] - fn test_error_display() { - let err = ApiError::Unauthorized("Invalid token".into()); - assert_eq!(err.to_string(), "Authentication failed: Invalid token"); - - let err = ApiError::NotFound("Site not found".into()); - assert_eq!(err.to_string(), "Not found: Site not found"); - } -} diff --git a/src/api/mod.rs b/src/api/mod.rs deleted file mode 100644 index 55e9172..0000000 --- a/src/api/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -pub mod client; -pub mod error; - -pub use client::ApiClient; -pub use error::{ApiError, EXIT_SUCCESS}; diff --git a/src/cli.rs b/src/cli.rs deleted file mode 100644 index 2254cff..0000000 --- a/src/cli.rs +++ /dev/null @@ -1,1118 +0,0 @@ -use clap::{Parser, Subcommand}; - -#[derive(Parser)] -#[command(name = "vector")] -#[command(about = "CLI for Vector Pro API", long_about = None)] -#[command(version)] -pub struct Cli { - /// Output JSON instead of tables - #[arg(long, global = true)] - pub json: bool, - - /// Output tables instead of JSON (default when TTY) - #[arg(long, global = true)] - pub no_json: bool, - - #[command(subcommand)] - pub command: Commands, -} - -#[derive(Subcommand)] -pub enum Commands { - /// Manage authentication - Auth { - #[command(subcommand)] - command: AuthCommands, - }, - /// Manage sites - Site { - #[command(subcommand)] - command: SiteCommands, - }, - /// Manage environments - Env { - #[command(subcommand)] - command: EnvCommands, - }, - /// Manage deployments - Deploy { - #[command(subcommand)] - command: DeployCommands, - }, - /// Manage SSL certificates - Ssl { - #[command(subcommand)] - command: SslCommands, - }, - /// Manage database import/export - Db { - #[command(subcommand)] - command: DbCommands, - }, - /// Manage archives - Archive { - #[command(subcommand)] - command: ArchiveCommands, - }, - /// Manage WAF rules and 
blocklists - Waf { - #[command(subcommand)] - command: WafCommands, - }, - /// Manage account settings - Account { - #[command(subcommand)] - command: AccountCommands, - }, - /// View events - Event { - #[command(subcommand)] - command: EventCommands, - }, - /// Manage webhooks - Webhook { - #[command(subcommand)] - command: WebhookCommands, - }, - /// Manage backups - Backup { - #[command(subcommand)] - command: BackupCommands, - }, - /// Manage restores - Restore { - #[command(subcommand)] - command: RestoreCommands, - }, - /// List available PHP versions - PhpVersions, - /// Configure MCP integration for Claude - Mcp { - #[command(subcommand)] - command: McpCommands, - }, -} - -#[derive(Subcommand)] -pub enum AuthCommands { - /// Log in with an API token - Login { - /// API token (reads from stdin if not provided) - #[arg(long, env = "VECTOR_API_KEY")] - token: Option<String>, - }, - /// Log out and clear credentials - Logout, - /// Check authentication status - Status, -} - -#[derive(Subcommand)] -pub enum SiteCommands { - /// List all sites - List { - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show site details - Show { - /// Site ID - id: String, - }, - /// Create a new site - Create { - /// Customer ID for the site - #[arg(long)] - customer_id: String, - /// PHP version for the dev environment - #[arg(long)] - dev_php_version: String, - /// Custom domain for the production environment - #[arg(long)] - production_domain: Option<String>, - /// Custom domain for the staging environment - #[arg(long)] - staging_domain: Option<String>, - /// Tags for the site - #[arg(long)] - tags: Option<Vec<String>>, - /// Email address for WordPress auto-install - #[arg(long)] - wp_admin_email: Option<String>, - /// WordPress admin username (default: "admin") - #[arg(long)] - wp_admin_user: Option<String>, - /// WordPress site title (default: "WordPress") - #[arg(long)] 
- wp_site_title: Option<String>, - }, - /// Update a site - Update { - /// Site ID - id: String, - /// Customer ID - #[arg(long)] - customer_id: Option<String>, - /// Tags - #[arg(long)] - tags: Option<Vec<String>>, - }, - /// Delete a site - Delete { - /// Site ID - id: String, - /// Skip confirmation - #[arg(long)] - force: bool, - }, - /// Clone a site - Clone { - /// Site ID to clone - id: String, - /// Customer ID for the new site - #[arg(long)] - customer_id: Option<String>, - /// PHP version for the new dev environment - #[arg(long)] - dev_php_version: Option<String>, - /// Tags for the new site - #[arg(long)] - tags: Option<Vec<String>>, - }, - /// Suspend a site - Suspend { - /// Site ID - id: String, - }, - /// Unsuspend a site - Unsuspend { - /// Site ID - id: String, - }, - /// Reset SFTP password - ResetSftpPassword { - /// Site ID - id: String, - }, - /// Reset database password - ResetDbPassword { - /// Site ID - id: String, - }, - /// Purge site cache - PurgeCache { - /// Site ID - id: String, - /// Cache tag to purge - #[arg(long)] - cache_tag: Option<String>, - /// URL to purge - #[arg(long)] - url: Option<String>, - }, - /// View site logs - Logs { - /// Site ID - id: String, - /// Start time (ISO 8601 format) - #[arg(long)] - start_time: Option<String>, - /// End time (ISO 8601 format) - #[arg(long)] - end_time: Option<String>, - /// Number of log entries - #[arg(long)] - limit: Option<u32>, - /// Environment name to filter - #[arg(long)] - environment: Option<String>, - /// Deployment ID to filter - #[arg(long)] - deployment_id: Option<String>, - /// Log level to filter (e.g., error, warning, info) - #[arg(long)] - level: Option<String>, - /// Pagination cursor from previous response - #[arg(long)] - cursor: Option<String>, - }, - /// Regenerate wp-config.php - WpReconfig { - /// Site ID - id: String, - }, - /// Manage site SSH keys - SshKey { - #[command(subcommand)] - command: SiteSshKeyCommands, - }, -} - -#[derive(Subcommand)] -pub enum 
SiteSshKeyCommands { - /// List SSH keys for a site - List { - /// Site ID - site_id: String, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Add an SSH key to a site - Add { - /// Site ID - site_id: String, - /// Key name - #[arg(long)] - name: String, - /// Public key content - #[arg(long)] - public_key: String, - }, - /// Remove an SSH key from a site - Remove { - /// Site ID - site_id: String, - /// SSH key ID - key_id: String, - }, -} - -#[derive(Subcommand)] -pub enum EnvCommands { - /// List environments for a site - List { - /// Site ID - site_id: String, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show environment details - Show { - /// Environment ID - env_id: String, - }, - /// Create a new environment - Create { - /// Site ID - site_id: String, - /// Environment name - #[arg(long)] - name: String, - /// Custom domain - #[arg(long)] - custom_domain: String, - /// PHP version - #[arg(long)] - php_version: String, - /// Mark as production environment - #[arg(long)] - is_production: bool, - /// Tags - #[arg(long)] - tags: Option<Vec<String>>, - }, - /// Update an environment - Update { - /// Environment ID - env_id: String, - /// Custom domain - #[arg(long, conflicts_with = "clear_custom_domain")] - custom_domain: Option<String>, - /// Remove custom domain and revert to platform domain - #[arg(long, conflicts_with = "custom_domain")] - clear_custom_domain: bool, - /// Tags - #[arg(long)] - tags: Option<Vec<String>>, - }, - /// Delete an environment - Delete { - /// Environment ID - env_id: String, - }, - /// Reset environment database password - ResetDbPassword { - /// Environment ID - env_id: String, - }, - /// Manage environment secrets - Secret { - #[command(subcommand)] - command: EnvSecretCommands, - }, - /// Check domain change status - 
DomainChangeStatus { - /// Environment ID - env_id: String, - /// Domain change ID - domain_change_id: String, - }, - /// Manage environment database - Db { - #[command(subcommand)] - command: EnvDbCommands, - }, -} - -#[derive(Subcommand)] -pub enum EnvSecretCommands { - /// List secrets for an environment - List { - /// Environment ID - env_id: String, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show secret details - Show { - /// Secret ID - secret_id: String, - }, - /// Create a secret - Create { - /// Environment ID - env_id: String, - /// Secret key - #[arg(long)] - key: String, - /// Secret value - #[arg(long)] - value: String, - /// Store as a plain environment variable instead of a secret - #[arg(long)] - no_secret: bool, - }, - /// Update a secret - Update { - /// Secret ID - secret_id: String, - /// Secret key - #[arg(long)] - key: Option<String>, - /// Secret value - #[arg(long)] - value: Option<String>, - /// Store as a plain environment variable instead of a secret - #[arg(long)] - no_secret: bool, - }, - /// Delete a secret - Delete { - /// Secret ID - secret_id: String, - }, -} - -#[derive(Subcommand)] -pub enum EnvDbCommands { - /// Promote dev database to this environment - Promote { - /// Environment ID - env_id: String, - /// Drop all existing tables before promote - #[arg(long)] - drop_tables: bool, - /// Disable foreign key checks during promote - #[arg(long)] - disable_foreign_keys: bool, - }, - /// Check promote status - PromoteStatus { - /// Environment ID - env_id: String, - /// Promote ID - promote_id: String, - }, -} - -#[derive(Subcommand)] -pub enum DeployCommands { - /// List deployments for an environment - List { - /// Environment ID - env_id: String, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show deployment details - 
Show { - /// Deployment ID - deploy_id: String, - }, - /// Trigger a new deployment - Trigger { - /// Environment ID - env_id: String, - /// Include wp-content/uploads in the deployment - #[arg(long)] - include_uploads: bool, - /// Include database in the deployment - #[arg(long)] - include_database: bool, - }, - /// Rollback to a previous deployment - Rollback { - /// Environment ID - env_id: String, - /// Target deployment ID to rollback to - #[arg(long)] - target_deployment_id: Option<String>, - }, -} - -#[derive(Subcommand)] -pub enum SslCommands { - /// Check SSL status - Status { - /// Environment ID - env_id: String, - }, - /// Nudge SSL provisioning - Nudge { - /// Environment ID - env_id: String, - /// Retry from failed state - #[arg(long)] - retry: bool, - }, -} - -#[derive(Subcommand)] -pub enum ArchiveCommands { - /// Import an archive to a site - Import { - /// Site ID - site_id: String, - /// Path to archive file (.tar.gz) - file: String, - /// Drop all existing tables before import - #[arg(long)] - drop_tables: bool, - /// Disable foreign key checks during import - #[arg(long)] - disable_foreign_keys: bool, - /// Search string for search-and-replace during import - #[arg(long)] - search_replace_from: Option<String>, - /// Replace string for search-and-replace during import - #[arg(long)] - search_replace_to: Option<String>, - /// Wait for import to complete - #[arg(long)] - wait: bool, - /// Seconds between status polls (default: 5) - #[arg(long, default_value = "5")] - poll_interval: u64, - }, -} - -#[derive(Subcommand)] -pub enum DbCommands { - /// Manage archive import sessions - ImportSession { - #[command(subcommand)] - command: DbImportSessionCommands, - }, - /// Manage database exports - Export { - #[command(subcommand)] - command: DbExportCommands, - }, -} - -#[derive(Subcommand)] -pub enum DbImportSessionCommands { - /// Create an archive import session - Create { - /// Site ID - site_id: String, - /// Filename - #[arg(long)] - filename: 
Option<String>, - /// Content length in bytes - #[arg(long)] - content_length: Option<u64>, - /// Drop all existing tables before import - #[arg(long)] - drop_tables: bool, - /// Disable foreign key checks during import - #[arg(long)] - disable_foreign_keys: bool, - /// Search string for search-and-replace during import - #[arg(long)] - search_replace_from: Option<String>, - /// Replace string for search-and-replace during import - #[arg(long)] - search_replace_to: Option<String>, - }, - /// Run an archive import session - Run { - /// Site ID - site_id: String, - /// Import ID - import_id: String, - }, - /// Check archive import session status - Status { - /// Site ID - site_id: String, - /// Import ID - import_id: String, - }, -} - -#[derive(Subcommand)] -pub enum DbExportCommands { - /// Start a database export - Create { - /// Site ID - site_id: String, - /// Export format (currently only "sql" supported) - #[arg(long)] - format: Option<String>, - }, - /// Check export status - Status { - /// Site ID - site_id: String, - /// Export ID - export_id: String, - }, -} - -#[derive(Subcommand)] -pub enum WafCommands { - /// Manage rate limit rules - RateLimit { - #[command(subcommand)] - command: WafRateLimitCommands, - }, - /// Manage blocked IPs - BlockedIp { - #[command(subcommand)] - command: WafBlockedIpCommands, - }, - /// Manage blocked referrers - BlockedReferrer { - #[command(subcommand)] - command: WafBlockedReferrerCommands, - }, - /// Manage allowed referrers - AllowedReferrer { - #[command(subcommand)] - command: WafAllowedReferrerCommands, - }, -} - -#[derive(Subcommand)] -pub enum WafRateLimitCommands { - /// List rate limit rules - List { - /// Site ID - site_id: String, - }, - /// Show rate limit rule details - Show { - /// Site ID - site_id: String, - /// Rule ID - rule_id: String, - }, - /// Create a rate limit rule - Create { - /// Site ID - site_id: String, - /// Rule name - #[arg(long)] - name: String, - /// Number of requests allowed - 
#[arg(long)] - request_count: u32, - /// Time window in seconds (1 or 10) - #[arg(long)] - timeframe: u32, - /// Block duration in seconds (30, 60, 300, 900, 1800, 3600) - #[arg(long)] - block_time: u32, - /// Rule description - #[arg(long)] - description: Option<String>, - /// URL pattern to match - #[arg(long)] - value: Option<String>, - /// Match operator - #[arg(long)] - operator: Option<String>, - /// Request variables to inspect - #[arg(long)] - variables: Option<Vec<String>>, - /// Transformations to apply - #[arg(long)] - transformations: Option<Vec<String>>, - }, - /// Update a rate limit rule - Update { - /// Site ID - site_id: String, - /// Rule ID - rule_id: String, - /// Rule name - #[arg(long)] - name: Option<String>, - /// Rule description - #[arg(long)] - description: Option<String>, - /// Number of requests allowed - #[arg(long)] - request_count: Option<u32>, - /// Time window in seconds - #[arg(long)] - timeframe: Option<u32>, - /// Block duration in seconds - #[arg(long)] - block_time: Option<u32>, - /// URL pattern to match - #[arg(long)] - value: Option<String>, - /// Match operator - #[arg(long)] - operator: Option<String>, - /// Request variables to inspect - #[arg(long)] - variables: Option<Vec<String>>, - /// Transformations to apply - #[arg(long)] - transformations: Option<Vec<String>>, - }, - /// Delete a rate limit rule - Delete { - /// Site ID - site_id: String, - /// Rule ID - rule_id: String, - }, -} - -#[derive(Subcommand)] -pub enum WafBlockedIpCommands { - /// List blocked IPs - List { - /// Site ID - site_id: String, - }, - /// Add an IP to the blocklist - Add { - /// Site ID - site_id: String, - /// IP address - ip: String, - }, - /// Remove an IP from the blocklist - Remove { - /// Site ID - site_id: String, - /// IP address - ip: String, - }, -} - -#[derive(Subcommand)] -pub enum WafBlockedReferrerCommands { - /// List blocked referrers - List { - /// Site ID - site_id: String, - }, - /// Add a hostname to the blocked referrers 
- Add { - /// Site ID - site_id: String, - /// Hostname - hostname: String, - }, - /// Remove a hostname from the blocked referrers - Remove { - /// Site ID - site_id: String, - /// Hostname - hostname: String, - }, -} - -#[derive(Subcommand)] -pub enum WafAllowedReferrerCommands { - /// List allowed referrers - List { - /// Site ID - site_id: String, - }, - /// Add a hostname to the allowed referrers - Add { - /// Site ID - site_id: String, - /// Hostname - hostname: String, - }, - /// Remove a hostname from the allowed referrers - Remove { - /// Site ID - site_id: String, - /// Hostname - hostname: String, - }, -} - -#[derive(Subcommand)] -pub enum AccountCommands { - /// Show account summary - Show, - /// Manage account SSH keys - SshKey { - #[command(subcommand)] - command: AccountSshKeyCommands, - }, - /// Manage API keys - ApiKey { - #[command(subcommand)] - command: AccountApiKeyCommands, - }, - /// Manage global secrets - Secret { - #[command(subcommand)] - command: AccountSecretCommands, - }, -} - -#[derive(Subcommand)] -pub enum AccountSshKeyCommands { - /// List account SSH keys - List { - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show SSH key details - Show { - /// SSH key ID - key_id: String, - }, - /// Create an SSH key - Create { - /// Key name - #[arg(long)] - name: String, - /// Public key content - #[arg(long)] - public_key: String, - }, - /// Delete an SSH key - Delete { - /// SSH key ID - key_id: String, - }, -} - -#[derive(Subcommand)] -pub enum AccountApiKeyCommands { - /// List API keys - List { - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Create an API key - Create { - /// Key name - #[arg(long)] - name: String, - /// Abilities - #[arg(long)] - abilities: Option<Vec<String>>, - /// Expiration date (ISO 8601 format) - 
#[arg(long)] - expires_at: Option<String>, - }, - /// Delete an API key - Delete { - /// Token ID - token_id: String, - }, -} - -#[derive(Subcommand)] -pub enum AccountSecretCommands { - /// List global secrets - List { - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show secret details - Show { - /// Secret ID - secret_id: String, - }, - /// Create a secret - Create { - /// Secret key - #[arg(long)] - key: String, - /// Secret value - #[arg(long)] - value: String, - /// Store as a plain environment variable instead of a secret - #[arg(long)] - no_secret: bool, - }, - /// Update a secret - Update { - /// Secret ID - secret_id: String, - /// Secret key - #[arg(long)] - key: Option<String>, - /// Secret value - #[arg(long)] - value: Option<String>, - /// Store as a plain environment variable instead of a secret - #[arg(long)] - no_secret: bool, - }, - /// Delete a secret - Delete { - /// Secret ID - secret_id: String, - }, -} - -#[derive(Subcommand)] -pub enum EventCommands { - /// List events - List { - /// Start date (ISO 8601 format) - #[arg(long)] - from: Option<String>, - /// End date (ISO 8601 format) - #[arg(long)] - to: Option<String>, - /// Event type filter - #[arg(long)] - event: Option<String>, - /// Page number - #[arg(long)] - page: Option<u32>, - /// Items per page - #[arg(long)] - per_page: Option<u32>, - }, -} - -#[derive(Subcommand)] -pub enum BackupCommands { - /// List backups - List { - /// Filter by site ID - #[arg(long)] - site_id: Option<String>, - /// Filter by environment ID - #[arg(long)] - environment_id: Option<String>, - /// Filter by type (site, environment) - #[arg(long = "type")] - backup_type: Option<String>, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show backup details - Show { - /// Backup ID - backup_id: String, - }, - 
/// Create a manual backup - Create { - /// Site ID - #[arg(long)] - site_id: Option<String>, - /// Environment ID - #[arg(long)] - environment_id: Option<String>, - /// Backup scope (full, database, files) - #[arg(long, default_value = "full")] - scope: String, - /// Backup description - #[arg(long)] - description: Option<String>, - }, - /// Download a backup archive - Download { - #[command(subcommand)] - command: BackupDownloadCommands, - }, -} - -#[derive(Subcommand)] -pub enum BackupDownloadCommands { - /// Create a backup download request - Create { - /// Backup ID - backup_id: String, - }, - /// Check backup download status - Status { - /// Backup ID - backup_id: String, - /// Download ID - download_id: String, - }, -} - -#[derive(Subcommand)] -pub enum RestoreCommands { - /// List restores - List { - /// Filter by site ID - #[arg(long)] - site_id: Option<String>, - /// Filter by environment ID - #[arg(long)] - environment_id: Option<String>, - /// Filter by type (site, environment) - #[arg(long = "type")] - restore_type: Option<String>, - /// Filter by backup ID - #[arg(long)] - backup_id: Option<String>, - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show restore details - Show { - /// Restore ID - restore_id: String, - }, - /// Create a restore from a backup - Create { - /// Backup ID to restore from - backup_id: String, - /// Restore scope (full, database, files) - #[arg(long, default_value = "full")] - scope: String, - /// Drop all existing tables before restore - #[arg(long)] - drop_tables: bool, - /// Disable foreign key checks during restore - #[arg(long)] - disable_foreign_keys: bool, - /// Search string for search-and-replace during restore - #[arg(long)] - search_replace_from: Option<String>, - /// Replace string for search-and-replace during restore - #[arg(long)] - search_replace_to: Option<String>, - }, -} - -#[derive(Subcommand)] -pub enum 
WebhookCommands { - /// List webhooks - List { - /// Page number - #[arg(long, default_value = "1")] - page: u32, - /// Items per page - #[arg(long, default_value = "15")] - per_page: u32, - }, - /// Show webhook details - Show { - /// Webhook ID - webhook_id: String, - }, - /// Create a webhook - Create { - /// Webhook name - #[arg(long)] - name: String, - /// Webhook URL - #[arg(long)] - url: String, - /// Events to subscribe to - #[arg(long, required = true)] - events: Vec<String>, - /// Webhook secret for signature verification - #[arg(long)] - secret: Option<String>, - }, - /// Update a webhook - Update { - /// Webhook ID - webhook_id: String, - /// Webhook name - #[arg(long)] - name: Option<String>, - /// Webhook URL - #[arg(long)] - url: Option<String>, - /// Events to subscribe to - #[arg(long)] - events: Option<Vec<String>>, - /// Webhook secret - #[arg(long)] - secret: Option<String>, - /// Enable/disable webhook - #[arg(long)] - enabled: Option<bool>, - }, - /// Delete a webhook - Delete { - /// Webhook ID - webhook_id: String, - }, -} - -#[derive(Subcommand)] -pub enum McpCommands { - /// Set up Claude Desktop with Vector MCP server - Setup { - /// Overwrite existing Vector MCP configuration - #[arg(long)] - force: bool, - }, -} diff --git a/src/commands/account.rs b/src/commands/account.rs deleted file mode 100644 index cd26b77..0000000 --- a/src/commands/account.rs +++ /dev/null @@ -1,523 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_bool, format_option, print_json, print_key_value, - print_message, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct PaginationQuery { - page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct CreateSshKeyRequest { - name: String, - public_key: String, -} - -#[derive(Debug, Serialize)] -struct CreateApiKeyRequest { - name: String, - #[serde(skip_serializing_if = 
"Option::is_none")] - abilities: Option<Vec<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - expires_at: Option<String>, -} - -#[derive(Debug, Serialize)] -struct CreateSecretRequest { - key: String, - value: String, - #[serde(skip_serializing_if = "Option::is_none")] - is_secret: Option<bool>, -} - -#[derive(Debug, Serialize)] -struct UpdateSecretRequest { - #[serde(skip_serializing_if = "Option::is_none")] - key: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - value: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - is_secret: Option<bool>, -} - -// Account summary - -pub fn show(client: &ApiClient, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get("/api/v1/vector/account")?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - let owner = &data["owner"]; - let account = &data["account"]; - let sites = &data["sites"]; - let envs = &data["environments"]; - - print_key_value(vec![ - ( - "Owner Name", - format_option(&owner["name"].as_str().map(String::from)), - ), - ( - "Owner Email", - format_option(&owner["email"].as_str().map(String::from)), - ), - ( - "Account Name", - format_option(&account["name"].as_str().map(String::from)), - ), - ( - "Company", - format_option(&account["company"].as_str().map(String::from)), - ), - ( - "Total Sites", - sites["total"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Active Sites", - sites["by_status"]["active"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Total Environments", - envs["total"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Active Environments", - envs["by_status"]["active"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ]); - - Ok(()) -} - -// SSH Key commands (account-level) - -pub fn ssh_key_list( - client: &ApiClient, - 
page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query("/api/v1/vector/ssh-keys", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let keys = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if keys.is_empty() { - print_message("No SSH keys found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = keys - .iter() - .map(|k| { - vec![ - k["id"].as_str().unwrap_or("-").to_string(), - k["name"].as_str().unwrap_or("-").to_string(), - format_option(&k["fingerprint"].as_str().map(String::from)), - format_option(&k["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Name", "Fingerprint", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn ssh_key_show( - client: &ApiClient, - key_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/ssh-keys/{}", key_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let key = &response["data"]; - - print_key_value(vec![ - ("ID", key["id"].as_str().unwrap_or("-").to_string()), - ("Name", key["name"].as_str().unwrap_or("-").to_string()), - ( - "Fingerprint", - format_option(&key["fingerprint"].as_str().map(String::from)), - ), - ( - "Public Key Preview", - format_option(&key["public_key_preview"].as_str().map(String::from)), - ), - ( - "Account Default", - key["is_account_default"] - .as_bool() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Created", - format_option(&key["created_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn ssh_key_create( - client: &ApiClient, - name: &str, - public_key: &str, - 
format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateSshKeyRequest { - name: name.to_string(), - public_key: public_key.to_string(), - }; - - let response: Value = client.post("/api/v1/vector/ssh-keys", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let key = &response["data"]; - print_message(&format!( - "SSH key created: {} ({})", - key["name"].as_str().unwrap_or("-"), - key["id"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn ssh_key_delete( - client: &ApiClient, - key_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/ssh-keys/{}", key_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("SSH key deleted successfully."); - Ok(()) -} - -// API Key commands - -pub fn api_key_list( - client: &ApiClient, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query("/api/v1/vector/api-keys", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let keys = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if keys.is_empty() { - print_message("No API keys found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = keys - .iter() - .map(|k| { - vec![ - k["id"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - k["name"].as_str().unwrap_or("-").to_string(), - format_abilities(&k["abilities"]), - format_option(&k["last_used_at"].as_str().map(String::from)), - format_option(&k["expires_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table( - vec!["ID", "Name", "Abilities", "Last Used", "Expires"], - rows, - ); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, 
last, total); - } - - Ok(()) -} - -pub fn api_key_create( - client: &ApiClient, - name: &str, - abilities: Option<Vec<String>>, - expires_at: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateApiKeyRequest { - name: name.to_string(), - abilities, - expires_at, - }; - - let response: Value = client.post("/api/v1/vector/api-keys", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("Name", data["name"].as_str().unwrap_or("-").to_string()), - ("Token", data["token"].as_str().unwrap_or("-").to_string()), - ("Abilities", format_abilities(&data["abilities"])), - ( - "Expires", - format_option(&data["expires_at"].as_str().map(String::from)), - ), - ]); - - print_message("\nSave this token - it won't be shown again!"); - - Ok(()) -} - -pub fn api_key_delete( - client: &ApiClient, - token_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/api-keys/{}", token_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("API key deleted successfully."); - Ok(()) -} - -// Global Secret commands - -pub fn secret_list( - client: &ApiClient, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query("/api/v1/vector/global-secrets", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secrets = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if secrets.is_empty() { - print_message("No global secrets found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = secrets - .iter() - .map(|s| { - vec![ - s["id"].as_str().unwrap_or("-").to_string(), - s["key"].as_str().unwrap_or("-").to_string(), - 
format_bool(s["is_secret"].as_bool().unwrap_or(true)), - format_option(&s["value"].as_str().map(String::from)), - format_option(&s["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Key", "Secret", "Value", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn secret_show( - client: &ApiClient, - secret_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/global-secrets/{}", secret_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secret = &response["data"]; - - print_key_value(vec![ - ("ID", secret["id"].as_str().unwrap_or("-").to_string()), - ("Key", secret["key"].as_str().unwrap_or("-").to_string()), - ( - "Secret", - format_bool(secret["is_secret"].as_bool().unwrap_or(true)), - ), - ( - "Value", - format_option(&secret["value"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&secret["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - format_option(&secret["updated_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn secret_create( - client: &ApiClient, - key: &str, - value: &str, - no_secret: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateSecretRequest { - key: key.to_string(), - value: value.to_string(), - is_secret: if no_secret { Some(false) } else { None }, - }; - - let response: Value = client.post("/api/v1/vector/global-secrets", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secret = &response["data"]; - print_message(&format!( - "Secret created: {} ({})", - secret["key"].as_str().unwrap_or("-"), - secret["id"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn secret_update( - client: &ApiClient, - secret_id: &str, - key: Option<String>, - value: 
Option<String>, - no_secret: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateSecretRequest { - key, - value, - is_secret: if no_secret { Some(false) } else { None }, - }; - - let response: Value = client.put( - &format!("/api/v1/vector/global-secrets/{}", secret_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Secret updated successfully."); - Ok(()) -} - -pub fn secret_delete( - client: &ApiClient, - secret_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/global-secrets/{}", secret_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Secret deleted successfully."); - Ok(()) -} - -// Helper function to format abilities array -fn format_abilities(value: &Value) -> String { - if let Some(arr) = value.as_array() { - if arr.is_empty() { - return "-".to_string(); - } - arr.iter() - .filter_map(|v| v.as_str()) - .collect::<Vec<_>>() - .join(", ") - } else { - "-".to_string() - } -} diff --git a/src/commands/archive.rs b/src/commands/archive.rs deleted file mode 100644 index 9cde764..0000000 --- a/src/commands/archive.rs +++ /dev/null @@ -1,206 +0,0 @@ -use std::path::Path; -use std::thread; -use std::time::Duration; - -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{OutputFormat, format_option, print_json, print_key_value, print_message}; - -#[derive(Debug, Serialize)] -struct CreateImportSessionRequest { - #[serde(skip_serializing_if = "Option::is_none")] - filename: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - content_length: Option<u64>, - #[serde(skip_serializing_if = "Option::is_none")] - options: Option<ImportOptions>, -} - -#[derive(Debug, Serialize)] -struct ImportOptions { - #[serde(skip_serializing_if = "std::ops::Not::not")] - drop_tables: 
bool, - #[serde(skip_serializing_if = "std::ops::Not::not")] - disable_foreign_keys: bool, - #[serde(skip_serializing_if = "Option::is_none")] - search_replace: Option<SearchReplace>, -} - -#[derive(Debug, Serialize)] -struct SearchReplace { - from: String, - to: String, -} - -#[allow(clippy::too_many_arguments)] -pub fn import( - client: &ApiClient, - site_id: &str, - file: &str, - drop_tables: bool, - disable_foreign_keys: bool, - search_replace_from: Option<String>, - search_replace_to: Option<String>, - wait: bool, - poll_interval: u64, - format: OutputFormat, -) -> Result<(), ApiError> { - let path = Path::new(file); - - if !path.exists() { - return Err(ApiError::Other(format!("File not found: {}", file))); - } - - let metadata = - std::fs::metadata(path).map_err(|e| ApiError::Other(format!("Cannot read file: {}", e)))?; - - let content_length = metadata.len(); - let filename = path - .file_name() - .and_then(|n| n.to_str()) - .unwrap_or(file) - .to_string(); - - // Build import options - let search_replace = match (search_replace_from, search_replace_to) { - (Some(from), Some(to)) => Some(SearchReplace { from, to }), - _ => None, - }; - - let options = if drop_tables || disable_foreign_keys || search_replace.is_some() { - Some(ImportOptions { - drop_tables, - disable_foreign_keys, - search_replace, - }) - } else { - None - }; - - let body = CreateImportSessionRequest { - filename: Some(filename.clone()), - content_length: Some(content_length), - options, - }; - - // Step 1: Create import session - if format == OutputFormat::Table { - print_message("Creating import session..."); - } - - let response: Value = - client.post(&format!("/api/v1/vector/sites/{}/imports", site_id), &body)?; - - let data = &response["data"]; - let import_id = data["id"] - .as_str() - .ok_or_else(|| ApiError::Other("Missing import ID in response".to_string()))?; - let upload_url = data["upload_url"] - .as_str() - .ok_or_else(|| ApiError::Other("Missing upload URL in 
response".to_string()))?; - - if format == OutputFormat::Table { - print_message(&format!("Import ID: {}", import_id)); - } - - // Step 2: Upload file - let size_mb = content_length as f64 / 1_048_576.0; - if format == OutputFormat::Table { - print_message(&format!("Uploading {} ({:.1} MB)...", filename, size_mb)); - } - - let file_handle = std::fs::File::open(path) - .map_err(|e| ApiError::Other(format!("Cannot open file: {}", e)))?; - - client.put_file(upload_url, file_handle, content_length)?; - - if format == OutputFormat::Table { - print_message("Upload complete."); - } - - // Step 3: Trigger import - if format == OutputFormat::Table { - print_message("Starting import..."); - } - - let run_response: Value = client.post_empty(&format!( - "/api/v1/vector/sites/{}/imports/{}/run", - site_id, import_id - ))?; - - if format == OutputFormat::Table { - print_message("Import started."); - } - - // Step 4: Poll if --wait - if wait { - if format == OutputFormat::Table { - print_message("\nWaiting for import to complete..."); - } - - loop { - thread::sleep(Duration::from_secs(poll_interval)); - - let status_response: Value = client.get(&format!( - "/api/v1/vector/sites/{}/imports/{}", - site_id, import_id - ))?; - - let status_data = &status_response["data"]; - let status = status_data["status"].as_str().unwrap_or("unknown"); - - match status { - "completed" => { - if format == OutputFormat::Json { - print_json(&status_response); - } else { - let duration = format_option( - &status_data["duration_ms"].as_u64().map(|v| v.to_string()), - ); - print_message(&format!("Status: completed (duration: {}ms)", duration)); - } - return Ok(()); - } - "failed" => { - if format == OutputFormat::Json { - print_json(&status_response); - return Ok(()); - } - let error_msg = - format_option(&status_data["error_message"].as_str().map(String::from)); - return Err(ApiError::Other(format!("Import failed: {}", error_msg))); - } - _ => { - if format == OutputFormat::Table { - 
print_message(&format!("Status: {}", status)); - } - } - } - } - } - - // Final output - if format == OutputFormat::Json { - print_json(&run_response); - } else { - print_key_value(vec![ - ("Import ID", import_id.to_string()), - ( - "Status", - run_response["data"]["status"] - .as_str() - .unwrap_or("-") - .to_string(), - ), - ]); - print_message("\nCheck status with:"); - print_message(&format!( - " vector db import-session status {} {}", - site_id, import_id - )); - } - - Ok(()) -} diff --git a/src/commands/auth.rs b/src/commands/auth.rs deleted file mode 100644 index a327957..0000000 --- a/src/commands/auth.rs +++ /dev/null @@ -1,115 +0,0 @@ -use std::io::{self, BufRead, IsTerminal}; - -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::config::{Config, Credentials}; -use crate::output::{OutputFormat, print_json, print_message}; - -pub fn login(token: Option<String>, format: OutputFormat) -> Result<(), ApiError> { - let api_token = match token { - Some(t) => t, - None => read_token()?, - }; - - if api_token.is_empty() { - return Err(ApiError::ConfigError("Token cannot be empty".to_string())); - } - - let config = Config::load()?; - let mut client = ApiClient::new(config.api_url, None)?; - client.set_token(api_token.clone()); - - let response: Value = client.get("/api/v1/ping")?; - - let mut creds = Credentials::load()?; - creds.api_key = Some(api_token); - creds.save()?; - - if format == OutputFormat::Json { - print_json(&response); - } else { - print_message("Successfully authenticated."); - } - - Ok(()) -} - -pub fn logout(format: OutputFormat) -> Result<(), ApiError> { - let mut creds = Credentials::load()?; - - if creds.api_key.is_none() { - if format == OutputFormat::Json { - print_json(&serde_json::json!({"message": "Not logged in"})); - } else { - print_message("Not logged in."); - } - return Ok(()); - } - - creds.clear()?; - - if format == OutputFormat::Json { - print_json(&serde_json::json!({"message": "Logged out 
successfully"})); - } else { - print_message("Logged out successfully."); - } - - Ok(()) -} - -pub fn status(format: OutputFormat) -> Result<(), ApiError> { - let config = Config::load()?; - let creds = Credentials::load()?; - - let token = match get_api_key(&creds) { - Some(t) => t, - None => { - if format == OutputFormat::Json { - print_json(&serde_json::json!({ - "authenticated": false, - "message": "Not logged in" - })); - } else { - print_message("Not logged in. Run 'vector auth login' to authenticate."); - } - return Ok(()); - } - }; - - let client = ApiClient::new(config.api_url, Some(token))?; - let _response: Value = client.get("/api/v1/ping")?; - - if format == OutputFormat::Json { - print_json(&serde_json::json!({ - "authenticated": true - })); - } else { - print_message("Authenticated."); - } - - Ok(()) -} - -fn read_token() -> Result<String, ApiError> { - let stdin = io::stdin(); - - if stdin.is_terminal() { - eprint!("API Token: "); - rpassword::read_password() - .map_err(|e| ApiError::ConfigError(format!("Failed to read token: {}", e))) - } else { - let mut line = String::new(); - stdin - .lock() - .read_line(&mut line) - .map_err(|e| ApiError::ConfigError(format!("Failed to read from stdin: {}", e)))?; - Ok(line.trim().to_string()) - } -} - -pub fn get_api_key(creds: &Credentials) -> Option<String> { - std::env::var("VECTOR_API_KEY") - .ok() - .or_else(|| creds.api_key.clone()) -} diff --git a/src/commands/backup.rs b/src/commands/backup.rs deleted file mode 100644 index 545b48d..0000000 --- a/src/commands/backup.rs +++ /dev/null @@ -1,304 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_archivable_type, format_option, print_json, - print_key_value, print_message, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct ListBackupsQuery { - #[serde(skip_serializing_if = "Option::is_none")] - r#type: Option<String>, - 
#[serde(skip_serializing_if = "Option::is_none")] - site_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - environment_id: Option<String>, - page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct CreateBackupRequest { - #[serde(skip_serializing_if = "Option::is_none")] - site_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - environment_id: Option<String>, - scope: String, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option<String>, -} - -pub fn list( - client: &ApiClient, - site_id: Option<String>, - environment_id: Option<String>, - backup_type: Option<String>, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = ListBackupsQuery { - r#type: backup_type, - site_id, - environment_id, - page, - per_page, - }; - let response: Value = client.get_with_query("/api/v1/vector/backups", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let backups = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if backups.is_empty() { - print_message("No backups found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = backups - .iter() - .map(|b| { - vec![ - b["id"].as_str().unwrap_or("-").to_string(), - b["archivable_type"] - .as_str() - .map(format_archivable_type) - .unwrap_or_else(|| "-".to_string()), - b["type"].as_str().unwrap_or("-").to_string(), - b["scope"].as_str().unwrap_or("-").to_string(), - b["status"].as_str().unwrap_or("-").to_string(), - format_option(&b["description"].as_str().map(String::from)), - format_option(&b["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table( - vec![ - "ID", - "Model", - "Type", - "Scope", - "Status", - "Description", - "Created", - ], - rows, - ); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - 
Ok(()) -} - -pub fn show(client: &ApiClient, backup_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/backups/{}", backup_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let backup = &response["data"]; - - print_key_value(vec![ - ("ID", backup["id"].as_str().unwrap_or("-").to_string()), - ( - "Model", - backup["archivable_type"] - .as_str() - .map(format_archivable_type) - .unwrap_or_else(|| "-".to_string()), - ), - ( - "Model ID", - backup["archivable_id"].as_str().unwrap_or("-").to_string(), - ), - ("Type", backup["type"].as_str().unwrap_or("-").to_string()), - ("Scope", backup["scope"].as_str().unwrap_or("-").to_string()), - ( - "Status", - backup["status"].as_str().unwrap_or("-").to_string(), - ), - ( - "Description", - format_option(&backup["description"].as_str().map(String::from)), - ), - ( - "File Snapshot ID", - format_option(&backup["file_snapshot_id"].as_str().map(String::from)), - ), - ( - "Database Snapshot ID", - format_option(&backup["database_snapshot_id"].as_str().map(String::from)), - ), - ( - "Started At", - format_option(&backup["started_at"].as_str().map(String::from)), - ), - ( - "Completed At", - format_option(&backup["completed_at"].as_str().map(String::from)), - ), - ( - "Created At", - format_option(&backup["created_at"].as_str().map(String::from)), - ), - ( - "Updated At", - format_option(&backup["updated_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn create( - client: &ApiClient, - site_id: Option<String>, - environment_id: Option<String>, - scope: &str, - description: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - if site_id.is_none() && environment_id.is_none() { - return Err(ApiError::Other( - "Either --site-id or --environment-id is required".to_string(), - )); - } - - let body = CreateBackupRequest { - site_id, - environment_id, - scope: scope.to_string(), - description, - 
}; - - let response: Value = client.post("/api/v1/vector/backups", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let backup = &response["data"]; - print_message(&format!( - "Backup created: {} ({})", - backup["id"].as_str().unwrap_or("-"), - backup["status"].as_str().unwrap_or("-") - )); - - print_key_value(vec![ - ("ID", backup["id"].as_str().unwrap_or("-").to_string()), - ("Type", backup["type"].as_str().unwrap_or("-").to_string()), - ( - "Status", - backup["status"].as_str().unwrap_or("-").to_string(), - ), - ( - "Description", - format_option(&backup["description"].as_str().map(String::from)), - ), - ( - "Created At", - format_option(&backup["created_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn download_create( - client: &ApiClient, - backup_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = - client.post_empty(&format!("/api/v1/vector/backups/{}/downloads", backup_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_message(&format!( - "Download requested: {} ({})", - data["id"].as_str().unwrap_or("-"), - data["status"].as_str().unwrap_or("-") - )); - print_message("\nCheck status with:"); - print_message(&format!( - " vector backup download status {} {}", - backup_id, - data["id"].as_str().unwrap_or("DOWNLOAD_ID") - )); - - Ok(()) -} - -pub fn download_status( - client: &ApiClient, - backup_id: &str, - download_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/backups/{}/downloads/{}", - backup_id, download_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("ID", data["id"].as_str().unwrap_or("-").to_string()), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "Size 
(bytes)", - format_option(&data["size_bytes"].as_u64().map(|v| v.to_string())), - ), - ( - "Duration (ms)", - format_option(&data["duration_ms"].as_u64().map(|v| v.to_string())), - ), - ( - "Error", - format_option(&data["error_message"].as_str().map(String::from)), - ), - ( - "Download URL", - format_option(&data["download_url"].as_str().map(String::from)), - ), - ( - "Download Expires", - format_option(&data["download_expires_at"].as_str().map(String::from)), - ), - ( - "Started At", - format_option(&data["started_at"].as_str().map(String::from)), - ), - ( - "Completed At", - format_option(&data["completed_at"].as_str().map(String::from)), - ), - ( - "Created At", - format_option(&data["created_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} diff --git a/src/commands/db.rs b/src/commands/db.rs deleted file mode 100644 index 0663eeb..0000000 --- a/src/commands/db.rs +++ /dev/null @@ -1,269 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{OutputFormat, format_option, print_json, print_key_value, print_message}; - -#[derive(Debug, Serialize)] -struct CreateImportSessionRequest { - #[serde(skip_serializing_if = "Option::is_none")] - filename: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - content_length: Option<u64>, - #[serde(skip_serializing_if = "Option::is_none")] - content_md5: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - options: Option<ImportOptions>, -} - -#[derive(Debug, Serialize)] -struct ImportOptions { - #[serde(skip_serializing_if = "std::ops::Not::not")] - drop_tables: bool, - #[serde(skip_serializing_if = "std::ops::Not::not")] - disable_foreign_keys: bool, - #[serde(skip_serializing_if = "Option::is_none")] - search_replace: Option<SearchReplace>, -} - -#[derive(Debug, Serialize)] -struct SearchReplace { - from: String, - to: String, -} - -#[derive(Debug, Serialize)] -struct CreateExportRequest { - 
#[serde(skip_serializing_if = "Option::is_none")] - format: Option<String>, -} - -#[allow(clippy::too_many_arguments)] -pub fn import_session_create( - client: &ApiClient, - site_id: &str, - filename: Option<String>, - content_length: Option<u64>, - drop_tables: bool, - disable_foreign_keys: bool, - search_replace_from: Option<String>, - search_replace_to: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let search_replace = match (search_replace_from, search_replace_to) { - (Some(from), Some(to)) => Some(SearchReplace { from, to }), - _ => None, - }; - - let options = if drop_tables || disable_foreign_keys || search_replace.is_some() { - Some(ImportOptions { - drop_tables, - disable_foreign_keys, - search_replace, - }) - } else { - None - }; - - let body = CreateImportSessionRequest { - filename, - content_length, - content_md5: None, - options, - }; - - let response: Value = - client.post(&format!("/api/v1/vector/sites/{}/imports", site_id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("Import ID", data["id"].as_str().unwrap_or("-").to_string()), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "Upload URL", - format_option(&data["upload_url"].as_str().map(String::from)), - ), - ( - "Expires", - format_option(&data["upload_expires_at"].as_str().map(String::from)), - ), - ]); - - print_message("\nUpload your SQL file to the URL above, then run:"); - print_message(&format!( - " vector db import-session run {} {}", - site_id, - data["id"].as_str().unwrap_or("IMPORT_ID") - )); - - Ok(()) -} - -pub fn import_session_run( - client: &ApiClient, - site_id: &str, - import_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.post_empty(&format!( - "/api/v1/vector/sites/{}/imports/{}/run", - site_id, import_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - 
return Ok(()); - } - - let data = &response["data"]; - print_message(&format!( - "Import started: {} ({})", - import_id, - data["status"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn import_session_status( - client: &ApiClient, - site_id: &str, - import_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/sites/{}/imports/{}", - site_id, import_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("Import ID", data["id"].as_str().unwrap_or("-").to_string()), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "Filename", - format_option(&data["filename"].as_str().map(String::from)), - ), - ( - "Duration (ms)", - format_option(&data["duration_ms"].as_u64().map(|v| v.to_string())), - ), - ( - "Error", - format_option(&data["error_message"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&data["created_at"].as_str().map(String::from)), - ), - ( - "Completed", - format_option(&data["completed_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn export_create( - client: &ApiClient, - site_id: &str, - export_format: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateExportRequest { - format: export_format, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/db/export", site_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_message(&format!( - "Export started: {} ({})", - data["id"].as_str().unwrap_or("-"), - data["status"].as_str().unwrap_or("-") - )); - print_message("\nCheck status with:"); - print_message(&format!( - " vector db export status {} {}", - site_id, - data["id"].as_str().unwrap_or("EXPORT_ID") - )); - - Ok(()) -} - -pub fn export_status( - client: &ApiClient, - 
site_id: &str, - export_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/sites/{}/db/exports/{}", - site_id, export_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("Export ID", data["id"].as_str().unwrap_or("-").to_string()), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "Format", - format_option(&data["format"].as_str().map(String::from)), - ), - ( - "Size (bytes)", - format_option(&data["size_bytes"].as_u64().map(|v| v.to_string())), - ), - ( - "Duration (ms)", - format_option(&data["duration_ms"].as_u64().map(|v| v.to_string())), - ), - ( - "Error", - format_option(&data["error_message"].as_str().map(String::from)), - ), - ( - "Download URL", - format_option(&data["download_url"].as_str().map(String::from)), - ), - ( - "Download Expires", - format_option(&data["download_expires_at"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&data["created_at"].as_str().map(String::from)), - ), - ( - "Completed", - format_option(&data["completed_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} diff --git a/src/commands/deploy.rs b/src/commands/deploy.rs deleted file mode 100644 index ece3bd5..0000000 --- a/src/commands/deploy.rs +++ /dev/null @@ -1,182 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_option, print_json, print_key_value, print_message, - print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct PaginationQuery { - page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct TriggerRequest { - #[serde(skip_serializing_if = "std::ops::Not::not")] - include_uploads: bool, - #[serde(skip_serializing_if = "std::ops::Not::not")] - include_database: bool, -} - -#[derive(Debug, Serialize)] 
-struct RollbackRequest { - #[serde(skip_serializing_if = "Option::is_none")] - target_deployment_id: Option<String>, -} - -pub fn list( - client: &ApiClient, - env_id: &str, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query( - &format!("/api/v1/vector/environments/{}/deployments", env_id), - &query, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let deploys = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if deploys.is_empty() { - print_message("No deployments found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = deploys - .iter() - .map(|d| { - vec![ - d["id"].as_str().unwrap_or("-").to_string(), - d["status"].as_str().unwrap_or("-").to_string(), - format_option(&d["actor"].as_str().map(String::from)), - format_option(&d["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Status", "Actor", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn show(client: &ApiClient, deploy_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/deployments/{}", deploy_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let deploy = &response["data"]; - - print_key_value(vec![ - ("ID", deploy["id"].as_str().unwrap_or("-").to_string()), - ( - "Status", - deploy["status"].as_str().unwrap_or("-").to_string(), - ), - ( - "Actor", - format_option(&deploy["actor"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&deploy["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - format_option(&deploy["updated_at"].as_str().map(String::from)), - ), - ]); - - if let 
Some(stdout) = deploy["stdout"].as_str() - && !stdout.is_empty() - { - println!("\n--- stdout ---\n{}", stdout); - } - - if let Some(stderr) = deploy["stderr"].as_str() - && !stderr.is_empty() - { - println!("\n--- stderr ---\n{}", stderr); - } - - Ok(()) -} - -pub fn trigger( - client: &ApiClient, - env_id: &str, - include_uploads: bool, - include_database: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = TriggerRequest { - include_uploads, - include_database, - }; - let response: Value = client.post( - &format!("/api/v1/vector/environments/{}/deployments", env_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let deploy = &response["data"]; - print_message(&format!( - "Deployment initiated: {} ({})", - deploy["id"].as_str().unwrap_or("-"), - deploy["status"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn rollback( - client: &ApiClient, - env_id: &str, - target_deployment_id: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = RollbackRequest { - target_deployment_id, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/environments/{}/rollback", env_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let deploy = &response["data"]; - print_message(&format!( - "Rollback initiated: {} ({})", - deploy["id"].as_str().unwrap_or("-"), - deploy["status"].as_str().unwrap_or("-") - )); - - Ok(()) -} diff --git a/src/commands/env.rs b/src/commands/env.rs deleted file mode 100644 index 0f968b4..0000000 --- a/src/commands/env.rs +++ /dev/null @@ -1,601 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_bool, format_option, print_dns_records, print_json, - print_key_value, print_message, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct PaginationQuery { 
- page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct ListEnvQuery { - site: String, - page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct CreateEnvRequest { - name: String, - custom_domain: String, - php_version: String, - #[serde(skip_serializing_if = "std::ops::Not::not")] - is_production: bool, - #[serde(skip_serializing_if = "Option::is_none")] - tags: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct UpdateEnvRequest { - #[serde(skip_serializing_if = "Option::is_none")] - custom_domain: Option<Option<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - tags: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct CreateSecretRequest { - key: String, - value: String, - #[serde(skip_serializing_if = "Option::is_none")] - is_secret: Option<bool>, -} - -#[derive(Debug, Serialize)] -struct UpdateSecretRequest { - #[serde(skip_serializing_if = "Option::is_none")] - key: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - value: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - is_secret: Option<bool>, -} - -pub fn list( - client: &ApiClient, - site_id: &str, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = ListEnvQuery { - site: site_id.to_string(), - page, - per_page, - }; - let response: Value = client.get_with_query("/api/v1/vector/environments", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let envs = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if envs.is_empty() { - print_message("No environments found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = envs - .iter() - .map(|e| { - vec![ - e["id"].as_str().unwrap_or("-").to_string(), - e["name"].as_str().unwrap_or("-").to_string(), - e["status"].as_str().unwrap_or("-").to_string(), - 
format_bool(e["is_production"].as_bool().unwrap_or(false)), - format_option(&e["platform_domain"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table( - vec!["ID", "Name", "Status", "Production", "Platform Domain"], - rows, - ); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn show(client: &ApiClient, env_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/environments/{}", env_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let env = &response["data"]; - - print_key_value(vec![ - ("ID", env["id"].as_str().unwrap_or("-").to_string()), - ("Name", env["name"].as_str().unwrap_or("-").to_string()), - ("Status", env["status"].as_str().unwrap_or("-").to_string()), - ( - "Production", - format_bool(env["is_production"].as_bool().unwrap_or(false)), - ), - ( - "PHP Version", - format_option(&env["php_version"].as_str().map(String::from)), - ), - ( - "Platform Domain", - format_option(&env["platform_domain"].as_str().map(String::from)), - ), - ( - "Custom Domain", - format_option(&env["custom_domain"].as_str().map(String::from)), - ), - ( - "DNS Target", - format_option(&env["dns_target"].as_str().map(String::from)), - ), - ( - "Subdomain", - format_option(&env["subdomain"].as_str().map(String::from)), - ), - ( - "Database Host", - format_option(&env["database_host"].as_str().map(String::from)), - ), - ( - "Database Name", - format_option(&env["database_name"].as_str().map(String::from)), - ), - ( - "Provisioning Step", - format_option(&env["provisioning_step"].as_str().map(String::from)), - ), - ("Tags", format_tags(&env["tags"])), - ( - "Created", - format_option(&env["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - format_option(&env["updated_at"].as_str().map(String::from)), - ), - ]); - - print_dns_records(env); - - Ok(()) -} - 
-#[allow(clippy::too_many_arguments)] -pub fn create( - client: &ApiClient, - site_id: &str, - name: &str, - custom_domain: &str, - php_version: &str, - is_production: bool, - tags: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateEnvRequest { - name: name.to_string(), - custom_domain: custom_domain.to_string(), - php_version: php_version.to_string(), - is_production, - tags, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/environments", site_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let env = &response["data"]; - print_message(&format!( - "Environment created: {} ({})", - env["name"].as_str().unwrap_or("-"), - env["id"].as_str().unwrap_or("-") - )); - - print_dns_records(env); - - Ok(()) -} - -pub fn update( - client: &ApiClient, - env_id: &str, - custom_domain: Option<Option<String>>, - tags: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateEnvRequest { - custom_domain, - tags, - }; - - let response: Value = client.put(&format!("/api/v1/vector/environments/{}", env_id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - let pending = &data["pending_domain_change"]; - if pending.is_object() { - print_message(&format!( - "Domain change started: {} ({})", - pending["id"].as_str().unwrap_or("-"), - pending["status"].as_str().unwrap_or("-") - )); - } else { - print_message("Environment updated successfully."); - } - - print_dns_records(data); - - Ok(()) -} - -pub fn delete(client: &ApiClient, env_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/environments/{}", env_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Environment deleted successfully."); - Ok(()) -} - -pub fn 
reset_db_password( - client: &ApiClient, - env_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.post_empty(&format!( - "/api/v1/vector/environments/{}/db/reset-password", - env_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Database password reset successfully."); - Ok(()) -} - -// Secret subcommands - -pub fn secret_list( - client: &ApiClient, - env_id: &str, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query( - &format!("/api/v1/vector/environments/{}/secrets", env_id), - &query, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secrets = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if secrets.is_empty() { - print_message("No secrets found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = secrets - .iter() - .map(|s| { - vec![ - s["id"].as_str().unwrap_or("-").to_string(), - s["key"].as_str().unwrap_or("-").to_string(), - format_bool(s["is_secret"].as_bool().unwrap_or(true)), - format_option(&s["value"].as_str().map(String::from)), - format_option(&s["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Key", "Secret", "Value", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn secret_show( - client: &ApiClient, - secret_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/secrets/{}", secret_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secret = &response["data"]; - - print_key_value(vec![ - ("ID", 
secret["id"].as_str().unwrap_or("-").to_string()), - ("Key", secret["key"].as_str().unwrap_or("-").to_string()), - ( - "Secret", - format_bool(secret["is_secret"].as_bool().unwrap_or(true)), - ), - ( - "Value", - format_option(&secret["value"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&secret["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - format_option(&secret["updated_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn secret_create( - client: &ApiClient, - env_id: &str, - key: &str, - value: &str, - no_secret: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateSecretRequest { - key: key.to_string(), - value: value.to_string(), - is_secret: if no_secret { Some(false) } else { None }, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/environments/{}/secrets", env_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let secret = &response["data"]; - print_message(&format!( - "Secret created: {} ({})", - secret["key"].as_str().unwrap_or("-"), - secret["id"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn secret_update( - client: &ApiClient, - secret_id: &str, - key: Option<String>, - value: Option<String>, - no_secret: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateSecretRequest { - key, - value, - is_secret: if no_secret { Some(false) } else { None }, - }; - - let response: Value = client.put(&format!("/api/v1/vector/secrets/{}", secret_id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Secret updated successfully."); - Ok(()) -} - -pub fn secret_delete( - client: &ApiClient, - secret_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/secrets/{}", secret_id))?; - - if format == OutputFormat::Json { - print_json(&response); - 
return Ok(()); - } - - print_message("Secret deleted successfully."); - Ok(()) -} - -// Environment DB commands - -#[derive(Debug, Serialize)] -struct PromoteRequest { - #[serde(skip_serializing_if = "std::ops::Not::not")] - drop_tables: bool, - #[serde(skip_serializing_if = "std::ops::Not::not")] - disable_foreign_keys: bool, -} - -pub fn db_promote( - client: &ApiClient, - env_id: &str, - drop_tables: bool, - disable_foreign_keys: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = PromoteRequest { - drop_tables, - disable_foreign_keys, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/environments/{}/db/promote", env_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_message(&format!( - "Promote started: {} ({})", - data["id"].as_str().unwrap_or("-"), - data["status"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn db_promote_status( - client: &ApiClient, - env_id: &str, - promote_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/environments/{}/db/promotes/{}", - env_id, promote_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ("Promote ID", data["id"].as_str().unwrap_or("-").to_string()), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "Duration (ms)", - format_option(&data["duration_ms"].as_u64().map(|v| v.to_string())), - ), - ( - "Error", - format_option(&data["error_message"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&data["created_at"].as_str().map(String::from)), - ), - ( - "Completed", - format_option(&data["completed_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -// Domain change commands - -pub fn domain_change_status( - client: &ApiClient, - env_id: &str, - 
domain_change_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/environments/{}/domain-changes/{}", - env_id, domain_change_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ( - "Domain Change ID", - data["id"].as_str().unwrap_or("-").to_string(), - ), - ("Status", data["status"].as_str().unwrap_or("-").to_string()), - ( - "DNS Target", - format_option(&data["dns_target"].as_str().map(String::from)), - ), - ( - "Error", - format_option(&data["error_message"].as_str().map(String::from)), - ), - ( - "Duration (ms)", - format_option(&data["duration_ms"].as_u64().map(|v| v.to_string())), - ), - ( - "Started At", - format_option(&data["started_at"].as_str().map(String::from)), - ), - ( - "Created", - format_option(&data["created_at"].as_str().map(String::from)), - ), - ( - "Completed", - format_option(&data["completed_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -// Helper function to format tags -fn format_tags(value: &Value) -> String { - if let Some(tags) = value.as_array() { - if tags.is_empty() { - return "-".to_string(); - } - tags.iter() - .filter_map(|t| t.as_str()) - .collect::<Vec<_>>() - .join(", ") - } else { - "-".to_string() - } -} diff --git a/src/commands/event.rs b/src/commands/event.rs deleted file mode 100644 index c41f2c6..0000000 --- a/src/commands/event.rs +++ /dev/null @@ -1,99 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_option, print_json, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct EventsQuery { - #[serde(skip_serializing_if = "Option::is_none")] - from: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - to: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - event: 
Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - page: Option<u32>, - #[serde(skip_serializing_if = "Option::is_none")] - per_page: Option<u32>, -} - -pub fn list( - client: &ApiClient, - from: Option<String>, - to: Option<String>, - event: Option<String>, - page: Option<u32>, - per_page: Option<u32>, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = EventsQuery { - from, - to, - event, - page, - per_page, - }; - - let response: Value = client.get_with_query("/api/v1/vector/events", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let events = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if events.is_empty() { - println!("No events found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = events - .iter() - .map(|e| { - vec![ - e["id"].as_str().unwrap_or("-").to_string(), - e["event"].as_str().unwrap_or("-").to_string(), - format_actor(&e["actor"]), - format_resource(&e["resource"]), - format_option(&e["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Event", "Actor", "Resource", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -fn format_actor(value: &Value) -> String { - if value.is_null() { - return "-".to_string(); - } - if let Some(token_name) = value["token_name"].as_str() { - return token_name.to_string(); - } - if let Some(ip) = value["ip"].as_str() { - return ip.to_string(); - } - "-".to_string() -} - -fn format_resource(value: &Value) -> String { - if let Some(resource_type) = value["type"].as_str() { - if let Some(resource_id) = value["id"].as_str() { - return format!("{}:{}", resource_type, resource_id); - } - return resource_type.to_string(); - } - "-".to_string() -} diff --git a/src/commands/mcp.rs b/src/commands/mcp.rs deleted file mode 100644 
index b9ade7e..0000000 --- a/src/commands/mcp.rs +++ /dev/null @@ -1,239 +0,0 @@ -use std::fs; -use std::path::PathBuf; - -use serde::{Deserialize, Serialize}; -use serde_json::{Map, Value, json}; - -use crate::api::ApiError; -use crate::commands::auth::get_api_key; -use crate::config::Credentials; -use crate::output::{OutputFormat, print_json, print_message}; - -#[derive(Debug, Serialize, Deserialize, Default)] -#[serde(rename_all = "camelCase")] -struct ClaudeConfig { - #[serde(default)] - mcp_servers: Map<String, Value>, - #[serde(flatten)] - other: Map<String, Value>, -} - -fn get_claude_config_path() -> Result<PathBuf, ApiError> { - #[cfg(target_os = "macos")] - { - let home = dirs::home_dir() - .ok_or_else(|| ApiError::ConfigError("Could not determine home directory".into()))?; - Ok(home.join("Library/Application Support/Claude/claude_desktop_config.json")) - } - - #[cfg(target_os = "windows")] - { - let appdata = dirs::config_dir() - .ok_or_else(|| ApiError::ConfigError("Could not determine AppData directory".into()))?; - Ok(appdata.join("Claude/claude_desktop_config.json")) - } - - #[cfg(target_os = "linux")] - { - let config = dirs::config_dir() - .ok_or_else(|| ApiError::ConfigError("Could not determine config directory".into()))?; - Ok(config.join("Claude/claude_desktop_config.json")) - } - - #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))] - { - Err(ApiError::ConfigError("Unsupported platform".into())) - } -} - -pub fn setup(force: bool, format: OutputFormat) -> Result<(), ApiError> { - let creds = Credentials::load()?; - let token = get_api_key(&creds).ok_or_else(|| { - ApiError::Unauthorized( - "Not logged in. 
Run 'vector auth login' to authenticate.".to_string(), - ) - })?; - - let config_path = get_claude_config_path()?; - - // Load existing config or create new one - let mut config: ClaudeConfig = if config_path.exists() { - let content = fs::read_to_string(&config_path) - .map_err(|e| ApiError::ConfigError(format!("Failed to read Claude config: {}", e)))?; - serde_json::from_str(&content) - .map_err(|e| ApiError::ConfigError(format!("Failed to parse Claude config: {}", e)))? - } else { - ClaudeConfig::default() - }; - - // Check if vector is already configured - if config.mcp_servers.contains_key("vector") && !force { - return Err(ApiError::ConfigError( - "Vector MCP server already configured. Use --force to overwrite.".to_string(), - )); - } - - // Create the Vector MCP server configuration - let vector_config = json!({ - "command": "npx", - "args": [ - "-y", - "mcp-remote", - "https://api.builtfast.com/mcp/vector", - "--header", - format!("Authorization: Bearer {}", token) - ] - }); - - let was_updated = config.mcp_servers.contains_key("vector"); - - // Add or update the vector server - config - .mcp_servers - .insert("vector".to_string(), vector_config); - - // Ensure parent directory exists - if let Some(parent) = config_path.parent() - && !parent.exists() - { - fs::create_dir_all(parent).map_err(|e| { - ApiError::ConfigError(format!("Failed to create Claude config directory: {}", e)) - })?; - } - - // Write the config - let content = serde_json::to_string_pretty(&config) - .map_err(|e| ApiError::ConfigError(format!("Failed to serialize config: {}", e)))?; - fs::write(&config_path, content) - .map_err(|e| ApiError::ConfigError(format!("Failed to write Claude config: {}", e)))?; - - let action = if was_updated { "updated" } else { "added" }; - - if format == OutputFormat::Json { - print_json(&json!({ - "success": true, - "action": action, - "config_path": config_path.to_string_lossy(), - "message": format!("Vector MCP server {} in Claude Desktop config", action) - 
})); - } else { - print_message(&format!( - "Vector MCP server {} in Claude Desktop config.", - action - )); - print_message(&format!("Config written to: {}", config_path.display())); - print_message("\nRestart Claude Desktop to apply changes."); - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_claude_config_empty() { - let config: ClaudeConfig = serde_json::from_str("{}").unwrap(); - assert!(config.mcp_servers.is_empty()); - assert!(config.other.is_empty()); - } - - #[test] - fn test_claude_config_preserves_other_mcp_servers() { - let json = r#"{ - "mcpServers": { - "other-server": { - "command": "node", - "args": ["server.js"] - } - } - }"#; - - let mut config: ClaudeConfig = serde_json::from_str(json).unwrap(); - assert!(config.mcp_servers.contains_key("other-server")); - - // Add vector - config - .mcp_servers - .insert("vector".to_string(), json!({"command": "npx"})); - - // Serialize and deserialize - let serialized = serde_json::to_string(&config).unwrap(); - let restored: ClaudeConfig = serde_json::from_str(&serialized).unwrap(); - - assert!(restored.mcp_servers.contains_key("other-server")); - assert!(restored.mcp_servers.contains_key("vector")); - } - - #[test] - fn test_claude_config_preserves_other_fields() { - let json = r#"{ - "mcpServers": {}, - "theme": "dark", - "someOtherSetting": true - }"#; - - let config: ClaudeConfig = serde_json::from_str(json).unwrap(); - assert!(config.other.contains_key("theme")); - assert!(config.other.contains_key("someOtherSetting")); - - // Serialize back - let serialized = serde_json::to_string(&config).unwrap(); - assert!(serialized.contains("theme")); - assert!(serialized.contains("someOtherSetting")); - } - - #[test] - fn test_vector_config_structure() { - let token = "test-token-123"; - let vector_config = json!({ - "command": "npx", - "args": [ - "-y", - "mcp-remote", - "https://api.builtfast.com/mcp/vector", - "--header", - format!("Authorization: Bearer {}", token) - ] - }); 
- - assert_eq!(vector_config["command"], "npx"); - let args = vector_config["args"].as_array().unwrap(); - assert_eq!(args[0], "-y"); - assert_eq!(args[1], "mcp-remote"); - assert_eq!(args[2], "https://api.builtfast.com/mcp/vector"); - assert_eq!(args[3], "--header"); - assert_eq!(args[4], "Authorization: Bearer test-token-123"); - } - - #[test] - fn test_claude_config_roundtrip() { - let original = r#"{ - "mcpServers": { - "existing": {"command": "test"} - }, - "customField": "value" - }"#; - - let mut config: ClaudeConfig = serde_json::from_str(original).unwrap(); - config - .mcp_servers - .insert("vector".to_string(), json!({"command": "npx"})); - - let serialized = serde_json::to_string_pretty(&config).unwrap(); - let restored: ClaudeConfig = serde_json::from_str(&serialized).unwrap(); - - assert_eq!(restored.mcp_servers.len(), 2); - assert!(restored.mcp_servers.contains_key("existing")); - assert!(restored.mcp_servers.contains_key("vector")); - assert!(restored.other.contains_key("customField")); - } - - #[test] - fn test_get_claude_config_path() { - let path = get_claude_config_path().unwrap(); - assert!(path.ends_with("claude_desktop_config.json")); - assert!(path.to_string_lossy().contains("Claude")); - } -} diff --git a/src/commands/mod.rs b/src/commands/mod.rs deleted file mode 100644 index 9838f02..0000000 --- a/src/commands/mod.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub mod account; -pub mod archive; -pub mod auth; -pub mod backup; -pub mod db; -pub mod deploy; -pub mod env; -pub mod event; -pub mod mcp; -pub mod restore; -pub mod site; -pub mod ssl; -pub mod waf; -pub mod webhook; diff --git a/src/commands/restore.rs b/src/commands/restore.rs deleted file mode 100644 index 8d88f91..0000000 --- a/src/commands/restore.rs +++ /dev/null @@ -1,240 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_archivable_type, format_option, print_json, - 
print_key_value, print_message, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -pub struct ListRestoresQuery { - #[serde(skip_serializing_if = "Option::is_none")] - pub r#type: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - pub site_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - pub environment_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - pub backup_id: Option<String>, - pub page: u32, - pub per_page: u32, -} - -#[derive(Debug, Serialize)] -struct CreateRestoreRequest { - backup_id: String, - scope: String, - #[serde(skip_serializing_if = "Option::is_none")] - options: Option<RestoreOptions>, -} - -#[derive(Debug, Serialize)] -struct RestoreOptions { - #[serde(skip_serializing_if = "std::ops::Not::not")] - drop_tables: bool, - #[serde(skip_serializing_if = "std::ops::Not::not")] - disable_foreign_keys: bool, - #[serde(skip_serializing_if = "Option::is_none")] - search_replace: Option<RestoreSearchReplace>, -} - -#[derive(Debug, Serialize)] -struct RestoreSearchReplace { - from: String, - to: String, -} - -pub fn list( - client: &ApiClient, - query: ListRestoresQuery, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get_with_query("/api/v1/vector/restores", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let restores = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if restores.is_empty() { - print_message("No restores found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = restores - .iter() - .map(|r| { - vec![ - r["id"].as_str().unwrap_or("-").to_string(), - r["archivable_type"] - .as_str() - .map(format_archivable_type) - .unwrap_or_else(|| "-".to_string()), - r["vector_backup_id"].as_str().unwrap_or("-").to_string(), - r["scope"].as_str().unwrap_or("-").to_string(), - 
r["status"].as_str().unwrap_or("-").to_string(), - format_option(&r["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table( - vec!["ID", "Model", "Backup ID", "Scope", "Status", "Created"], - rows, - ); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn show(client: &ApiClient, restore_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/restores/{}", restore_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let restore = &response["data"]; - - print_key_value(vec![ - ("ID", restore["id"].as_str().unwrap_or("-").to_string()), - ( - "Model", - restore["archivable_type"] - .as_str() - .map(format_archivable_type) - .unwrap_or_else(|| "-".to_string()), - ), - ( - "Model ID", - restore["archivable_id"].as_str().unwrap_or("-").to_string(), - ), - ( - "Backup ID", - restore["vector_backup_id"] - .as_str() - .unwrap_or("-") - .to_string(), - ), - ( - "Scope", - restore["scope"].as_str().unwrap_or("-").to_string(), - ), - ( - "Trigger", - restore["trigger"].as_str().unwrap_or("-").to_string(), - ), - ( - "Status", - restore["status"].as_str().unwrap_or("-").to_string(), - ), - ( - "Error Message", - format_option(&restore["error_message"].as_str().map(String::from)), - ), - ( - "Duration (ms)", - format_option(&restore["duration_ms"].as_u64().map(|d| d.to_string())), - ), - ( - "Started At", - format_option(&restore["started_at"].as_str().map(String::from)), - ), - ( - "Completed At", - format_option(&restore["completed_at"].as_str().map(String::from)), - ), - ( - "Created At", - format_option(&restore["created_at"].as_str().map(String::from)), - ), - ( - "Updated At", - format_option(&restore["updated_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn create( - client: &ApiClient, - 
backup_id: &str, - scope: &str, - drop_tables: bool, - disable_foreign_keys: bool, - search_replace_from: Option<String>, - search_replace_to: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let search_replace = match (search_replace_from, search_replace_to) { - (Some(from), Some(to)) => Some(RestoreSearchReplace { from, to }), - _ => None, - }; - - let options = if drop_tables || disable_foreign_keys || search_replace.is_some() { - Some(RestoreOptions { - drop_tables, - disable_foreign_keys, - search_replace, - }) - } else { - None - }; - - let body = CreateRestoreRequest { - backup_id: backup_id.to_string(), - scope: scope.to_string(), - options, - }; - - let response: Value = client.post("/api/v1/vector/restores", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let restore = &response["data"]; - let restore_id = restore["id"].as_str().unwrap_or("-"); - - print_message(&format!( - "Restore initiated. Use `vector restore show {}` to check progress.", - restore_id - )); - - print_key_value(vec![ - ("ID", restore_id.to_string()), - ( - "Backup ID", - restore["vector_backup_id"] - .as_str() - .unwrap_or("-") - .to_string(), - ), - ( - "Scope", - restore["scope"].as_str().unwrap_or("-").to_string(), - ), - ( - "Status", - restore["status"].as_str().unwrap_or("-").to_string(), - ), - ( - "Created At", - format_option(&restore["created_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} diff --git a/src/commands/site.rs b/src/commands/site.rs deleted file mode 100644 index c69adbc..0000000 --- a/src/commands/site.rs +++ /dev/null @@ -1,691 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_option, print_dns_records, print_json, - print_key_value, print_message, print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct PaginationQuery { - page: u32, - per_page: u32, -} 
- -#[derive(Debug, Serialize)] -struct CreateSiteRequest { - your_customer_id: String, - dev_php_version: String, - #[serde(skip_serializing_if = "Option::is_none")] - production_domain: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - staging_domain: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - tags: Option<Vec<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - wp_admin_email: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - wp_admin_user: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - wp_site_title: Option<String>, -} - -#[derive(Debug, Serialize)] -struct UpdateSiteRequest { - #[serde(skip_serializing_if = "Option::is_none")] - your_customer_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - tags: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct CloneSiteRequest { - #[serde(skip_serializing_if = "Option::is_none")] - your_customer_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - dev_php_version: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - tags: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct PurgeCacheRequest { - #[serde(skip_serializing_if = "Option::is_none")] - cache_tag: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - url: Option<String>, -} - -#[derive(Debug, Serialize)] -struct LogsQuery { - #[serde(skip_serializing_if = "Option::is_none")] - start_time: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - end_time: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - limit: Option<u32>, - #[serde(skip_serializing_if = "Option::is_none")] - environment: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - deployment_id: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - level: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - cursor: 
Option<String>, -} - -#[derive(Debug, Serialize)] -struct CreateSshKeyRequest { - name: String, - public_key: String, -} - -pub fn list( - client: &ApiClient, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query("/api/v1/vector/sites", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let sites = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if sites.is_empty() { - print_message("No sites found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = sites - .iter() - .map(|s| { - vec![ - s["id"].as_str().unwrap_or("-").to_string(), - s["status"].as_str().unwrap_or("-").to_string(), - format_option(&s["your_customer_id"].as_str().map(String::from)), - format_option(&s["dev_domain"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Status", "Customer ID", "Dev Domain"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn show(client: &ApiClient, id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/sites/{}", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let site = &response["data"]; - - print_key_value(vec![ - ("ID", site["id"].as_str().unwrap_or("-").to_string()), - ("Status", site["status"].as_str().unwrap_or("-").to_string()), - ( - "Customer ID", - format_option(&site["your_customer_id"].as_str().map(String::from)), - ), - ( - "Dev Domain", - format_option(&site["dev_domain"].as_str().map(String::from)), - ), - ( - "Dev PHP Version", - format_option(&site["dev_php_version"].as_str().map(String::from)), - ), - ( - "Dev DB Host", - 
format_option(&site["dev_db_host"].as_str().map(String::from)), - ), - ( - "Dev DB Name", - format_option(&site["dev_db_name"].as_str().map(String::from)), - ), - ("Tags", format_tags(&site["tags"])), - ( - "Created", - format_option(&site["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - format_option(&site["updated_at"].as_str().map(String::from)), - ), - ]); - - if let Some(envs) = site["environments"].as_array() - && !envs.is_empty() - { - println!(); - println!("Environments:"); - - let rows: Vec<Vec<String>> = envs - .iter() - .map(|e| { - vec![ - e["id"].as_str().unwrap_or("-").to_string(), - e["name"].as_str().unwrap_or("-").to_string(), - e["status"].as_str().unwrap_or("-").to_string(), - format_option(&e["platform_domain"].as_str().map(String::from)), - format_option(&e["custom_domain"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table( - vec!["ID", "Name", "Status", "Platform Domain", "Custom Domain"], - rows, - ); - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn create( - client: &ApiClient, - customer_id: &str, - dev_php_version: &str, - production_domain: Option<String>, - staging_domain: Option<String>, - tags: Option<Vec<String>>, - wp_admin_email: Option<String>, - wp_admin_user: Option<String>, - wp_site_title: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateSiteRequest { - your_customer_id: customer_id.to_string(), - dev_php_version: dev_php_version.to_string(), - production_domain, - staging_domain, - tags, - wp_admin_email, - wp_admin_user, - wp_site_title, - }; - - let response: Value = client.post("/api/v1/vector/sites", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let site = &response["data"]; - print_message(&format!( - "Site created: {} ({})", - site["id"].as_str().unwrap_or("-"), - site["status"].as_str().unwrap_or("-") - )); - - if let Some(wp_admin) = site["wp_admin"].as_object() { - println!(); 
- print_key_value(vec![ - ( - "WP User", - format_option( - &wp_admin - .get("user") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ( - "WP Email", - format_option( - &wp_admin - .get("email") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ( - "WP Password", - format_option( - &wp_admin - .get("password") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ( - "WP Site Title", - format_option( - &wp_admin - .get("site_title") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ]); - } - - if let Some(envs) = site["environments"].as_array() { - for env in envs { - print_dns_records(env); - } - } - - Ok(()) -} - -pub fn update( - client: &ApiClient, - id: &str, - customer_id: Option<String>, - tags: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateSiteRequest { - your_customer_id: customer_id, - tags, - }; - let response: Value = client.put(&format!("/api/v1/vector/sites/{}", id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Site updated successfully."); - Ok(()) -} - -pub fn delete( - client: &ApiClient, - id: &str, - force: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - if !force { - eprint!("Are you sure you want to delete site {}? 
[y/N] ", id); - let mut input = String::new(); - std::io::stdin().read_line(&mut input).ok(); - if !input.trim().eq_ignore_ascii_case("y") { - print_message("Aborted."); - return Ok(()); - } - } - - let response: Value = client.delete(&format!("/api/v1/vector/sites/{}", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Site deleted successfully."); - Ok(()) -} - -pub fn clone( - client: &ApiClient, - id: &str, - customer_id: Option<String>, - dev_php_version: Option<String>, - tags: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CloneSiteRequest { - your_customer_id: customer_id, - dev_php_version, - tags, - }; - - let response: Value = client.post(&format!("/api/v1/vector/sites/{}/clone", id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let site = &response["data"]; - print_message(&format!( - "Site clone initiated: {} ({})", - site["id"].as_str().unwrap_or("-"), - site["status"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn suspend(client: &ApiClient, id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.put_empty(&format!("/api/v1/vector/sites/{}/suspend", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Site suspension initiated."); - Ok(()) -} - -pub fn unsuspend(client: &ApiClient, id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.put_empty(&format!("/api/v1/vector/sites/{}/unsuspend", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Site unsuspension initiated."); - Ok(()) -} - -pub fn reset_sftp_password( - client: &ApiClient, - id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = - client.post_empty(&format!("/api/v1/vector/sites/{}/sftp/reset-password", id))?; - - if 
format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - if let Some(sftp) = response["data"]["dev_sftp"].as_object() { - print_key_value(vec![ - ( - "Hostname", - format_option( - &sftp - .get("hostname") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ( - "Port", - format_option( - &sftp - .get("port") - .and_then(|v| v.as_u64()) - .map(|v| v.to_string()), - ), - ), - ( - "Username", - format_option( - &sftp - .get("username") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ( - "Password", - format_option( - &sftp - .get("password") - .and_then(|v| v.as_str()) - .map(String::from), - ), - ), - ]); - } else { - print_message("SFTP password reset successfully."); - } - - Ok(()) -} - -pub fn reset_db_password( - client: &ApiClient, - id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = - client.post_empty(&format!("/api/v1/vector/sites/{}/db/reset-password", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let data = &response["data"]; - print_key_value(vec![ - ( - "Username", - format_option(&data["dev_db_username"].as_str().map(String::from)), - ), - ( - "Password", - format_option(&data["dev_db_password"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn purge_cache( - client: &ApiClient, - id: &str, - cache_tag: Option<String>, - url: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = PurgeCacheRequest { cache_tag, url }; - let response: Value = - client.post(&format!("/api/v1/vector/sites/{}/purge-cache", id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Cache purged successfully."); - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn logs( - client: &ApiClient, - id: &str, - start_time: Option<String>, - end_time: Option<String>, - limit: Option<u32>, - environment: Option<String>, - deployment_id: 
Option<String>, - level: Option<String>, - cursor: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = LogsQuery { - start_time, - end_time, - limit, - environment, - deployment_id, - level, - cursor, - }; - let response: Value = - client.get_with_query(&format!("/api/v1/vector/sites/{}/logs", id), &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - // Parse the Axiom-style log format - if let Some(tables) = response["data"]["logs"]["tables"].as_array() { - for table in tables { - if let Some(rows) = table["rows"].as_array() { - for row in rows { - if let Some(row_arr) = row.as_array() { - // Typically: [timestamp, message, level] - let parts: Vec<String> = row_arr - .iter() - .filter_map(|v| v.as_str().map(String::from)) - .collect(); - if !parts.is_empty() { - println!("{}", parts.join(" | ")); - } - } - } - } - } - - // Show pagination info if there are more results - if response["data"]["has_more"].as_bool().unwrap_or(false) - && let Some(next_cursor) = response["data"]["cursor"].as_str() - { - eprintln!(); - eprintln!( - "More results available. 
Use --cursor {} to continue.", - next_cursor - ); - } - } else { - print_message("No logs available."); - } - - Ok(()) -} - -pub fn wp_reconfig(client: &ApiClient, id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.post_empty(&format!("/api/v1/vector/sites/{}/wp/reconfig", id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("wp-config.php regenerated successfully."); - Ok(()) -} - -// SSH Key subcommands - -pub fn ssh_key_list( - client: &ApiClient, - site_id: &str, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query( - &format!("/api/v1/vector/sites/{}/ssh-keys", site_id), - &query, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let keys = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if keys.is_empty() { - print_message("No SSH keys found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = keys - .iter() - .map(|k| { - vec![ - k["id"].as_str().unwrap_or("-").to_string(), - k["name"].as_str().unwrap_or("-").to_string(), - format_option(&k["fingerprint"].as_str().map(String::from)), - format_option(&k["created_at"].as_str().map(String::from)), - ] - }) - .collect(); - - print_table(vec!["ID", "Name", "Fingerprint", "Created"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn ssh_key_add( - client: &ApiClient, - site_id: &str, - name: &str, - public_key: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateSshKeyRequest { - name: name.to_string(), - public_key: public_key.to_string(), - }; - - let response: Value = - client.post(&format!("/api/v1/vector/sites/{}/ssh-keys", site_id), &body)?; - - if format == 
OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let key = &response["data"]; - print_message(&format!( - "SSH key added: {} ({})", - key["name"].as_str().unwrap_or("-"), - key["id"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -pub fn ssh_key_remove( - client: &ApiClient, - site_id: &str, - key_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!( - "/api/v1/vector/sites/{}/ssh-keys/{}", - site_id, key_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("SSH key removed successfully."); - Ok(()) -} - -// Helper function to format tags -fn format_tags(value: &Value) -> String { - if let Some(tags) = value.as_array() { - if tags.is_empty() { - return "-".to_string(); - } - tags.iter() - .filter_map(|t| t.as_str()) - .collect::<Vec<_>>() - .join(", ") - } else { - "-".to_string() - } -} diff --git a/src/commands/ssl.rs b/src/commands/ssl.rs deleted file mode 100644 index 223154c..0000000 --- a/src/commands/ssl.rs +++ /dev/null @@ -1,77 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, format_bool, format_option, print_json, print_key_value, print_message, -}; - -#[derive(Debug, Serialize)] -struct NudgeRequest { - #[serde(skip_serializing_if = "std::ops::Not::not")] - retry: bool, -} - -pub fn status(client: &ApiClient, env_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/environments/{}/ssl", env_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let env = &response["data"]; - - print_key_value(vec![ - ("Status", env["status"].as_str().unwrap_or("-").to_string()), - ( - "Provisioning Step", - format_option(&env["provisioning_step"].as_str().map(String::from)), - ), - ( - "Failure Reason", - 
format_option(&env["failure_reason"].as_str().map(String::from)), - ), - ( - "Production", - format_bool(env["is_production"].as_bool().unwrap_or(false)), - ), - ( - "Custom Domain", - format_option(&env["custom_domain"].as_str().map(String::from)), - ), - ( - "Platform Domain", - format_option(&env["platform_domain"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn nudge( - client: &ApiClient, - env_id: &str, - retry: bool, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = NudgeRequest { retry }; - - let response: Value = client.post( - &format!("/api/v1/vector/environments/{}/ssl/nudge", env_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - if let Some(message) = response["message"].as_str() { - print_message(message); - } else { - print_message("SSL provisioning nudge sent."); - } - - Ok(()) -} diff --git a/src/commands/waf.rs b/src/commands/waf.rs deleted file mode 100644 index 2965489..0000000 --- a/src/commands/waf.rs +++ /dev/null @@ -1,540 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, format_option, print_json, print_key_value, print_message, print_table, -}; - -#[derive(Debug, Serialize)] -struct CreateRateLimitRequest { - name: String, - request_count: u32, - timeframe: u32, - block_time: u32, - #[serde(skip_serializing_if = "Option::is_none")] - description: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - value: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - operator: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - variables: Option<Vec<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - transformations: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct UpdateRateLimitRequest { - #[serde(skip_serializing_if = "Option::is_none")] - name: Option<String>, - #[serde(skip_serializing_if = 
"Option::is_none")] - description: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - request_count: Option<u32>, - #[serde(skip_serializing_if = "Option::is_none")] - timeframe: Option<u32>, - #[serde(skip_serializing_if = "Option::is_none")] - block_time: Option<u32>, - #[serde(skip_serializing_if = "Option::is_none")] - value: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - operator: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - variables: Option<Vec<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - transformations: Option<Vec<String>>, -} - -#[derive(Debug, Serialize)] -struct AddReferrerRequest { - hostname: String, -} - -// Rate Limit commands - -pub fn rate_limit_list( - client: &ApiClient, - site_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = - client.get(&format!("/api/v1/vector/sites/{}/waf/rate-limits", site_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let rules = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if rules.is_empty() { - print_message("No rate limit rules found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = rules - .iter() - .map(|r| { - let config = &r["configuration"]; - vec![ - r["id"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - r["name"].as_str().unwrap_or("-").to_string(), - format!( - "{}/{}s", - config["request_count"].as_u64().unwrap_or(0), - config["timeframe"].as_u64().unwrap_or(0) - ), - format!("{}s", config["block_time"].as_u64().unwrap_or(0)), - ] - }) - .collect(); - - print_table(vec!["ID", "Name", "Requests/Time", "Block Time"], rows); - - Ok(()) -} - -pub fn rate_limit_show( - client: &ApiClient, - site_id: &str, - rule_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - 
"/api/v1/vector/sites/{}/waf/rate-limits/{}", - site_id, rule_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let rule = &response["data"]; - let config = &rule["configuration"]; - - print_key_value(vec![ - ( - "ID", - rule["id"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ("Name", rule["name"].as_str().unwrap_or("-").to_string()), - ( - "Description", - format_option(&rule["description"].as_str().map(String::from)), - ), - ( - "Request Count", - config["request_count"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Timeframe (s)", - config["timeframe"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Block Time (s)", - config["block_time"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Value", - format_option(&config["value"].as_str().map(String::from)), - ), - ( - "Operator", - format_option(&config["operator"].as_str().map(String::from)), - ), - ("Variables", format_array(&config["variables"])), - ("Transformations", format_array(&config["transformations"])), - ]); - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn rate_limit_create( - client: &ApiClient, - site_id: &str, - name: &str, - request_count: u32, - timeframe: u32, - block_time: u32, - description: Option<String>, - value: Option<String>, - operator: Option<String>, - variables: Option<Vec<String>>, - transformations: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateRateLimitRequest { - name: name.to_string(), - request_count, - timeframe, - block_time, - description, - value, - operator, - variables, - transformations, - }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/waf/rate-limits", site_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let rule = &response["data"]; - 
print_message(&format!( - "Rate limit created: {} (ID: {})", - rule["name"].as_str().unwrap_or("-"), - rule["id"] - .as_u64() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()) - )); - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn rate_limit_update( - client: &ApiClient, - site_id: &str, - rule_id: &str, - name: Option<String>, - description: Option<String>, - request_count: Option<u32>, - timeframe: Option<u32>, - block_time: Option<u32>, - value: Option<String>, - operator: Option<String>, - variables: Option<Vec<String>>, - transformations: Option<Vec<String>>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateRateLimitRequest { - name, - description, - request_count, - timeframe, - block_time, - value, - operator, - variables, - transformations, - }; - - let response: Value = client.put( - &format!( - "/api/v1/vector/sites/{}/waf/rate-limits/{}", - site_id, rule_id - ), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Rate limit updated successfully."); - Ok(()) -} - -pub fn rate_limit_delete( - client: &ApiClient, - site_id: &str, - rule_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!( - "/api/v1/vector/sites/{}/waf/rate-limits/{}", - site_id, rule_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Rate limit deleted successfully."); - Ok(()) -} - -// Blocked IP commands - -pub fn blocked_ip_list( - client: &ApiClient, - site_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = - client.get(&format!("/api/v1/vector/sites/{}/waf/blocked-ips", site_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let ips = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if ips.is_empty() { - print_message("No 
blocked IPs found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = ips - .iter() - .map(|ip| vec![ip["ip"].as_str().unwrap_or("-").to_string()]) - .collect(); - - print_table(vec!["IP"], rows); - - Ok(()) -} - -pub fn blocked_ip_add( - client: &ApiClient, - site_id: &str, - ip: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - #[derive(Serialize)] - struct AddIpRequest { - ip: String, - } - - let body = AddIpRequest { ip: ip.to_string() }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/waf/blocked-ips", site_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("IP {} added to blocklist.", ip)); - Ok(()) -} - -pub fn blocked_ip_remove( - client: &ApiClient, - site_id: &str, - ip: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!( - "/api/v1/vector/sites/{}/waf/blocked-ips/{}", - site_id, ip - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("IP {} removed from blocklist.", ip)); - Ok(()) -} - -// Blocked Referrer commands - -pub fn blocked_referrer_list( - client: &ApiClient, - site_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/sites/{}/waf/blocked-referrers", - site_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let referrers = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if referrers.is_empty() { - print_message("No blocked referrers found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = referrers - .iter() - .map(|r| vec![r["hostname"].as_str().unwrap_or("-").to_string()]) - .collect(); - - print_table(vec!["Hostname"], rows); - - Ok(()) -} - -pub fn blocked_referrer_add( - client: &ApiClient, - site_id: &str, - 
hostname: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = AddReferrerRequest { - hostname: hostname.to_string(), - }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/waf/blocked-referrers", site_id), - &body, - )?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("Referrer {} added to blocklist.", hostname)); - Ok(()) -} - -pub fn blocked_referrer_remove( - client: &ApiClient, - site_id: &str, - hostname: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!( - "/api/v1/vector/sites/{}/waf/blocked-referrers/{}", - site_id, hostname - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("Referrer {} removed from blocklist.", hostname)); - Ok(()) -} - -// Allowed Referrer commands - -pub fn allowed_referrer_list( - client: &ApiClient, - site_id: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.get(&format!( - "/api/v1/vector/sites/{}/waf/allowed-referrers", - site_id - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let referrers = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if referrers.is_empty() { - print_message("No allowed referrers found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = referrers - .iter() - .map(|r| vec![r["hostname"].as_str().unwrap_or("-").to_string()]) - .collect(); - - print_table(vec!["Hostname"], rows); - - Ok(()) -} - -pub fn allowed_referrer_add( - client: &ApiClient, - site_id: &str, - hostname: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = AddReferrerRequest { - hostname: hostname.to_string(), - }; - - let response: Value = client.post( - &format!("/api/v1/vector/sites/{}/waf/allowed-referrers", site_id), - &body, - )?; - - if 
format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("Referrer {} added to allowlist.", hostname)); - Ok(()) -} - -pub fn allowed_referrer_remove( - client: &ApiClient, - site_id: &str, - hostname: &str, - format: OutputFormat, -) -> Result<(), ApiError> { - let response: Value = client.delete(&format!( - "/api/v1/vector/sites/{}/waf/allowed-referrers/{}", - site_id, hostname - ))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message(&format!("Referrer {} removed from allowlist.", hostname)); - Ok(()) -} - -// Helper function to format arrays -fn format_array(value: &Value) -> String { - if let Some(arr) = value.as_array() { - if arr.is_empty() { - return "-".to_string(); - } - arr.iter() - .filter_map(|v| v.as_str()) - .collect::<Vec<_>>() - .join(", ") - } else { - "-".to_string() - } -} diff --git a/src/commands/webhook.rs b/src/commands/webhook.rs deleted file mode 100644 index 03d0001..0000000 --- a/src/commands/webhook.rs +++ /dev/null @@ -1,213 +0,0 @@ -use serde::Serialize; -use serde_json::Value; - -use crate::api::{ApiClient, ApiError}; -use crate::output::{ - OutputFormat, extract_pagination, format_option, print_json, print_key_value, print_message, - print_pagination, print_table, -}; - -#[derive(Debug, Serialize)] -struct PaginationQuery { - page: u32, - per_page: u32, -} - -#[derive(Debug, Serialize)] -struct CreateWebhookRequest { - name: String, - url: String, - events: Vec<String>, - #[serde(skip_serializing_if = "Option::is_none")] - secret: Option<String>, -} - -#[derive(Debug, Serialize)] -struct UpdateWebhookRequest { - #[serde(skip_serializing_if = "Option::is_none")] - name: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - url: Option<String>, - #[serde(skip_serializing_if = "Option::is_none")] - events: Option<Vec<String>>, - #[serde(skip_serializing_if = "Option::is_none")] - secret: Option<String>, - 
#[serde(skip_serializing_if = "Option::is_none")] - enabled: Option<bool>, -} - -pub fn list( - client: &ApiClient, - page: u32, - per_page: u32, - format: OutputFormat, -) -> Result<(), ApiError> { - let query = PaginationQuery { page, per_page }; - let response: Value = client.get_with_query("/api/v1/vector/webhooks", &query)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let webhooks = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if webhooks.is_empty() { - print_message("No webhooks found."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = webhooks - .iter() - .map(|w| { - vec![ - w["id"].as_str().unwrap_or("-").to_string(), - w["name"].as_str().unwrap_or("-").to_string(), - w["url"].as_str().unwrap_or("-").to_string(), - format_enabled(w["enabled"].as_bool()), - ] - }) - .collect(); - - print_table(vec!["ID", "Name", "URL", "Enabled"], rows); - - if let Some((current, last, total)) = extract_pagination(&response) { - print_pagination(current, last, total); - } - - Ok(()) -} - -pub fn show(client: &ApiClient, webhook_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.get(&format!("/api/v1/vector/webhooks/{}", webhook_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let webhook = &response["data"]; - - print_key_value(vec![ - ("ID", webhook["id"].as_str().unwrap_or("-").to_string()), - ("Name", webhook["name"].as_str().unwrap_or("-").to_string()), - ("URL", webhook["url"].as_str().unwrap_or("-").to_string()), - ("Enabled", format_enabled(webhook["enabled"].as_bool())), - ("Events", format_events(&webhook["events"])), - ( - "Has Secret", - webhook["has_secret"] - .as_bool() - .map(|v| v.to_string()) - .unwrap_or("-".to_string()), - ), - ( - "Created", - format_option(&webhook["created_at"].as_str().map(String::from)), - ), - ( - "Updated", - 
format_option(&webhook["updated_at"].as_str().map(String::from)), - ), - ]); - - Ok(()) -} - -pub fn create( - client: &ApiClient, - name: &str, - url: &str, - events: Vec<String>, - secret: Option<String>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = CreateWebhookRequest { - name: name.to_string(), - url: url.to_string(), - events, - secret, - }; - - let response: Value = client.post("/api/v1/vector/webhooks", &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let webhook = &response["data"]; - print_message(&format!( - "Webhook created: {} ({})", - webhook["name"].as_str().unwrap_or("-"), - webhook["id"].as_str().unwrap_or("-") - )); - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -pub fn update( - client: &ApiClient, - webhook_id: &str, - name: Option<String>, - url: Option<String>, - events: Option<Vec<String>>, - secret: Option<String>, - enabled: Option<bool>, - format: OutputFormat, -) -> Result<(), ApiError> { - let body = UpdateWebhookRequest { - name, - url, - events, - secret, - enabled, - }; - - let response: Value = client.put(&format!("/api/v1/vector/webhooks/{}", webhook_id), &body)?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Webhook updated successfully."); - Ok(()) -} - -pub fn delete(client: &ApiClient, webhook_id: &str, format: OutputFormat) -> Result<(), ApiError> { - let response: Value = client.delete(&format!("/api/v1/vector/webhooks/{}", webhook_id))?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - print_message("Webhook deleted successfully."); - Ok(()) -} - -fn format_enabled(value: Option<bool>) -> String { - match value { - Some(true) => "Yes".to_string(), - Some(false) => "No".to_string(), - None => "-".to_string(), - } -} - -fn format_events(value: &Value) -> String { - if let Some(arr) = value.as_array() { - if arr.is_empty() { - return "-".to_string(); - } - 
arr.iter() - .filter_map(|v| v.as_str()) - .collect::<Vec<_>>() - .join(", ") - } else { - "-".to_string() - } -} diff --git a/src/config/mod.rs b/src/config/mod.rs deleted file mode 100644 index 787b118..0000000 --- a/src/config/mod.rs +++ /dev/null @@ -1,4 +0,0 @@ -pub mod paths; -pub mod store; - -pub use store::{Config, Credentials}; diff --git a/src/config/paths.rs b/src/config/paths.rs deleted file mode 100644 index 254de30..0000000 --- a/src/config/paths.rs +++ /dev/null @@ -1,30 +0,0 @@ -use std::env; -use std::path::PathBuf; - -use crate::api::ApiError; - -const APP_NAME: &str = "vector"; -const CONFIG_FILE: &str = "config.json"; -const CREDENTIALS_FILE: &str = "credentials.json"; - -pub fn config_dir() -> Result<PathBuf, ApiError> { - if let Ok(dir) = env::var("VECTOR_CONFIG_DIR") { - return Ok(PathBuf::from(dir)); - } - - if let Ok(xdg_config) = env::var("XDG_CONFIG_HOME") { - return Ok(PathBuf::from(xdg_config).join(APP_NAME)); - } - - dirs::config_dir() - .map(|p| p.join(APP_NAME)) - .ok_or_else(|| ApiError::ConfigError("Could not determine config directory".to_string())) -} - -pub fn config_file() -> Result<PathBuf, ApiError> { - Ok(config_dir()?.join(CONFIG_FILE)) -} - -pub fn credentials_file() -> Result<PathBuf, ApiError> { - Ok(config_dir()?.join(CREDENTIALS_FILE)) -} diff --git a/src/config/store.rs b/src/config/store.rs deleted file mode 100644 index f001ed0..0000000 --- a/src/config/store.rs +++ /dev/null @@ -1,103 +0,0 @@ -use serde::{Deserialize, Serialize}; -use std::fs; -use std::path::Path; - -use crate::api::ApiError; - -use super::paths::{config_dir, config_file, credentials_file}; - -#[derive(Debug, Default, Serialize, Deserialize)] -pub struct Config { - #[serde(skip_serializing_if = "Option::is_none")] - pub api_url: Option<String>, -} - -#[derive(Debug, Default, Serialize, Deserialize)] -pub struct Credentials { - #[serde(skip_serializing_if = "Option::is_none")] - pub api_key: Option<String>, -} - -impl Config { - pub fn load() -> 
Result<Self, ApiError> { - let path = config_file()?; - if !path.exists() { - return Ok(Self::default()); - } - let content = fs::read_to_string(&path) - .map_err(|e| ApiError::ConfigError(format!("Failed to read config: {}", e)))?; - serde_json::from_str(&content) - .map_err(|e| ApiError::ConfigError(format!("Failed to parse config: {}", e))) - } - - #[allow(dead_code)] - pub fn save(&self) -> Result<(), ApiError> { - ensure_config_dir()?; - let path = config_file()?; - let content = serde_json::to_string_pretty(self) - .map_err(|e| ApiError::ConfigError(format!("Failed to serialize config: {}", e)))?; - fs::write(&path, content) - .map_err(|e| ApiError::ConfigError(format!("Failed to write config: {}", e)))?; - Ok(()) - } -} - -impl Credentials { - pub fn load() -> Result<Self, ApiError> { - let path = credentials_file()?; - if !path.exists() { - return Ok(Self::default()); - } - let content = fs::read_to_string(&path) - .map_err(|e| ApiError::ConfigError(format!("Failed to read credentials: {}", e)))?; - serde_json::from_str(&content) - .map_err(|e| ApiError::ConfigError(format!("Failed to parse credentials: {}", e))) - } - - pub fn save(&self) -> Result<(), ApiError> { - ensure_config_dir()?; - let path = credentials_file()?; - let content = serde_json::to_string_pretty(self).map_err(|e| { - ApiError::ConfigError(format!("Failed to serialize credentials: {}", e)) - })?; - fs::write(&path, &content) - .map_err(|e| ApiError::ConfigError(format!("Failed to write credentials: {}", e)))?; - - #[cfg(unix)] - set_permissions(&path)?; - - Ok(()) - } - - pub fn clear(&mut self) -> Result<(), ApiError> { - self.api_key = None; - self.save() - } -} - -fn ensure_config_dir() -> Result<(), ApiError> { - let dir = config_dir()?; - if !dir.exists() { - fs::create_dir_all(&dir).map_err(|e| { - ApiError::ConfigError(format!("Failed to create config directory: {}", e)) - })?; - - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let permissions = 
fs::Permissions::from_mode(0o700); - fs::set_permissions(&dir, permissions).map_err(|e| { - ApiError::ConfigError(format!("Failed to set directory permissions: {}", e)) - })?; - } - } - Ok(()) -} - -#[cfg(unix)] -fn set_permissions(path: &Path) -> Result<(), ApiError> { - use std::os::unix::fs::PermissionsExt; - let permissions = fs::Permissions::from_mode(0o600); - fs::set_permissions(path, permissions) - .map_err(|e| ApiError::ConfigError(format!("Failed to set file permissions: {}", e))) -} diff --git a/src/main.rs b/src/main.rs deleted file mode 100644 index 1f95c52..0000000 --- a/src/main.rs +++ /dev/null @@ -1,780 +0,0 @@ -mod api; -mod cli; -mod commands; -mod config; -mod output; - -use clap::Parser; -use serde_json::Value; -use std::process; - -use api::{ApiClient, ApiError, EXIT_SUCCESS}; -use cli::{ - AccountApiKeyCommands, AccountCommands, AccountSecretCommands, AccountSshKeyCommands, - ArchiveCommands, AuthCommands, BackupCommands, BackupDownloadCommands, Cli, Commands, - DbCommands, DbExportCommands, DbImportSessionCommands, DeployCommands, EnvCommands, - EnvDbCommands, EnvSecretCommands, EventCommands, McpCommands, RestoreCommands, SiteCommands, - SiteSshKeyCommands, SslCommands, WafAllowedReferrerCommands, WafBlockedIpCommands, - WafBlockedReferrerCommands, WafCommands, WafRateLimitCommands, WebhookCommands, -}; -use commands::{ - account, archive, auth, backup, db, deploy, env, event, mcp, restore, site, ssl, waf, webhook, -}; -use config::{Config, Credentials}; -use output::{OutputFormat, print_error, print_json, print_message, print_table}; - -fn main() { - let cli = Cli::parse(); - let format = OutputFormat::detect(cli.json, cli.no_json); - - let result = run(cli.command, format); - - match result { - Ok(()) => process::exit(EXIT_SUCCESS), - Err(e) => { - print_error(&e.to_string()); - process::exit(e.exit_code()); - } - } -} - -fn run(command: Commands, format: OutputFormat) -> Result<(), ApiError> { - match command { - Commands::Auth { command 
} => run_auth(command, format), - Commands::Site { command } => run_site(command, format), - Commands::Env { command } => run_env(command, format), - Commands::Deploy { command } => run_deploy(command, format), - Commands::Ssl { command } => run_ssl(command, format), - Commands::Db { command } => run_db(command, format), - Commands::Archive { command } => run_archive(command, format), - Commands::Waf { command } => run_waf(command, format), - Commands::Account { command } => run_account(command, format), - Commands::Backup { command } => run_backup(command, format), - Commands::Restore { command } => run_restore(command, format), - Commands::Event { command } => run_event(command, format), - Commands::Webhook { command } => run_webhook(command, format), - Commands::PhpVersions => run_php_versions(format), - Commands::Mcp { command } => run_mcp(command, format), - } -} - -fn run_auth(command: AuthCommands, format: OutputFormat) -> Result<(), ApiError> { - match command { - AuthCommands::Login { token } => auth::login(token, format), - AuthCommands::Logout => auth::logout(format), - AuthCommands::Status => auth::status(format), - } -} - -fn get_client() -> Result<ApiClient, ApiError> { - let config = Config::load()?; - let creds = Credentials::load()?; - - let token = auth::get_api_key(&creds).ok_or_else(|| { - ApiError::Unauthorized( - "Not logged in. 
Run 'vector auth login' to authenticate.".to_string(), - ) - })?; - - ApiClient::new(config.api_url, Some(token)) -} - -fn run_site(command: SiteCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - SiteCommands::List { page, per_page } => site::list(&client, page, per_page, format), - SiteCommands::Show { id } => site::show(&client, &id, format), - SiteCommands::Create { - customer_id, - dev_php_version, - production_domain, - staging_domain, - tags, - wp_admin_email, - wp_admin_user, - wp_site_title, - } => site::create( - &client, - &customer_id, - &dev_php_version, - production_domain, - staging_domain, - tags, - wp_admin_email, - wp_admin_user, - wp_site_title, - format, - ), - SiteCommands::Update { - id, - customer_id, - tags, - } => site::update(&client, &id, customer_id, tags, format), - SiteCommands::Delete { id, force } => site::delete(&client, &id, force, format), - SiteCommands::Clone { - id, - customer_id, - dev_php_version, - tags, - } => site::clone(&client, &id, customer_id, dev_php_version, tags, format), - SiteCommands::Suspend { id } => site::suspend(&client, &id, format), - SiteCommands::Unsuspend { id } => site::unsuspend(&client, &id, format), - SiteCommands::ResetSftpPassword { id } => site::reset_sftp_password(&client, &id, format), - SiteCommands::ResetDbPassword { id } => site::reset_db_password(&client, &id, format), - SiteCommands::PurgeCache { id, cache_tag, url } => { - site::purge_cache(&client, &id, cache_tag, url, format) - } - SiteCommands::Logs { - id, - start_time, - end_time, - limit, - environment, - deployment_id, - level, - cursor, - } => site::logs( - &client, - &id, - start_time, - end_time, - limit, - environment, - deployment_id, - level, - cursor, - format, - ), - SiteCommands::WpReconfig { id } => site::wp_reconfig(&client, &id, format), - SiteCommands::SshKey { command } => run_site_ssh_key(&client, command, format), - } -} - -fn run_site_ssh_key( - client: 
&ApiClient, - command: SiteSshKeyCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - SiteSshKeyCommands::List { - site_id, - page, - per_page, - } => site::ssh_key_list(client, &site_id, page, per_page, format), - SiteSshKeyCommands::Add { - site_id, - name, - public_key, - } => site::ssh_key_add(client, &site_id, &name, &public_key, format), - SiteSshKeyCommands::Remove { site_id, key_id } => { - site::ssh_key_remove(client, &site_id, &key_id, format) - } - } -} - -fn run_env(command: EnvCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - EnvCommands::List { - site_id, - page, - per_page, - } => env::list(&client, &site_id, page, per_page, format), - EnvCommands::Show { env_id } => env::show(&client, &env_id, format), - EnvCommands::Create { - site_id, - name, - custom_domain, - php_version, - is_production, - tags, - } => env::create( - &client, - &site_id, - &name, - &custom_domain, - &php_version, - is_production, - tags, - format, - ), - EnvCommands::Update { - env_id, - custom_domain, - clear_custom_domain, - tags, - } => { - let custom_domain = if clear_custom_domain { - Some(None) - } else { - custom_domain.map(Some) - }; - env::update(&client, &env_id, custom_domain, tags, format) - } - EnvCommands::Delete { env_id } => env::delete(&client, &env_id, format), - EnvCommands::ResetDbPassword { env_id } => env::reset_db_password(&client, &env_id, format), - EnvCommands::DomainChangeStatus { - env_id, - domain_change_id, - } => env::domain_change_status(&client, &env_id, &domain_change_id, format), - EnvCommands::Secret { command } => run_env_secret(&client, command, format), - EnvCommands::Db { command } => run_env_db(&client, command, format), - } -} - -fn run_env_secret( - client: &ApiClient, - command: EnvSecretCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - EnvSecretCommands::List { - env_id, - page, - per_page, - } => 
env::secret_list(client, &env_id, page, per_page, format), - EnvSecretCommands::Show { secret_id } => env::secret_show(client, &secret_id, format), - EnvSecretCommands::Create { - env_id, - key, - value, - no_secret, - } => env::secret_create(client, &env_id, &key, &value, no_secret, format), - EnvSecretCommands::Update { - secret_id, - key, - value, - no_secret, - } => env::secret_update(client, &secret_id, key, value, no_secret, format), - EnvSecretCommands::Delete { secret_id } => env::secret_delete(client, &secret_id, format), - } -} - -fn run_env_db( - client: &ApiClient, - command: EnvDbCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - EnvDbCommands::Promote { - env_id, - drop_tables, - disable_foreign_keys, - } => env::db_promote(client, &env_id, drop_tables, disable_foreign_keys, format), - EnvDbCommands::PromoteStatus { env_id, promote_id } => { - env::db_promote_status(client, &env_id, &promote_id, format) - } - } -} - -fn run_deploy(command: DeployCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - DeployCommands::List { - env_id, - page, - per_page, - } => deploy::list(&client, &env_id, page, per_page, format), - DeployCommands::Show { deploy_id } => deploy::show(&client, &deploy_id, format), - DeployCommands::Trigger { - env_id, - include_uploads, - include_database, - } => deploy::trigger(&client, &env_id, include_uploads, include_database, format), - DeployCommands::Rollback { - env_id, - target_deployment_id, - } => deploy::rollback(&client, &env_id, target_deployment_id, format), - } -} - -fn run_ssl(command: SslCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - SslCommands::Status { env_id } => ssl::status(&client, &env_id, format), - SslCommands::Nudge { env_id, retry } => ssl::nudge(&client, &env_id, retry, format), - } -} - -fn run_db(command: DbCommands, format: OutputFormat) -> Result<(), 
ApiError> { - let client = get_client()?; - - match command { - DbCommands::ImportSession { command } => run_db_import_session(&client, command, format), - DbCommands::Export { command } => run_db_export(&client, command, format), - } -} - -fn run_db_import_session( - client: &ApiClient, - command: DbImportSessionCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - DbImportSessionCommands::Create { - site_id, - filename, - content_length, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - } => db::import_session_create( - client, - &site_id, - filename, - content_length, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - format, - ), - DbImportSessionCommands::Run { site_id, import_id } => { - db::import_session_run(client, &site_id, &import_id, format) - } - DbImportSessionCommands::Status { site_id, import_id } => { - db::import_session_status(client, &site_id, &import_id, format) - } - } -} - -fn run_db_export( - client: &ApiClient, - command: DbExportCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - DbExportCommands::Create { - site_id, - format: export_format, - } => db::export_create(client, &site_id, export_format, format), - DbExportCommands::Status { site_id, export_id } => { - db::export_status(client, &site_id, &export_id, format) - } - } -} - -fn run_archive(command: ArchiveCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - ArchiveCommands::Import { - site_id, - file, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - wait, - poll_interval, - } => archive::import( - &client, - &site_id, - &file, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - wait, - poll_interval, - format, - ), - } -} - -fn run_waf(command: WafCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = 
get_client()?; - - match command { - WafCommands::RateLimit { command } => run_waf_rate_limit(&client, command, format), - WafCommands::BlockedIp { command } => run_waf_blocked_ip(&client, command, format), - WafCommands::BlockedReferrer { command } => { - run_waf_blocked_referrer(&client, command, format) - } - WafCommands::AllowedReferrer { command } => { - run_waf_allowed_referrer(&client, command, format) - } - } -} - -fn run_waf_rate_limit( - client: &ApiClient, - command: WafRateLimitCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - WafRateLimitCommands::List { site_id } => waf::rate_limit_list(client, &site_id, format), - WafRateLimitCommands::Show { site_id, rule_id } => { - waf::rate_limit_show(client, &site_id, &rule_id, format) - } - WafRateLimitCommands::Create { - site_id, - name, - request_count, - timeframe, - block_time, - description, - value, - operator, - variables, - transformations, - } => waf::rate_limit_create( - client, - &site_id, - &name, - request_count, - timeframe, - block_time, - description, - value, - operator, - variables, - transformations, - format, - ), - WafRateLimitCommands::Update { - site_id, - rule_id, - name, - description, - request_count, - timeframe, - block_time, - value, - operator, - variables, - transformations, - } => waf::rate_limit_update( - client, - &site_id, - &rule_id, - name, - description, - request_count, - timeframe, - block_time, - value, - operator, - variables, - transformations, - format, - ), - WafRateLimitCommands::Delete { site_id, rule_id } => { - waf::rate_limit_delete(client, &site_id, &rule_id, format) - } - } -} - -fn run_waf_blocked_ip( - client: &ApiClient, - command: WafBlockedIpCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - WafBlockedIpCommands::List { site_id } => waf::blocked_ip_list(client, &site_id, format), - WafBlockedIpCommands::Add { site_id, ip } => { - waf::blocked_ip_add(client, &site_id, &ip, format) - } - 
WafBlockedIpCommands::Remove { site_id, ip } => { - waf::blocked_ip_remove(client, &site_id, &ip, format) - } - } -} - -fn run_waf_blocked_referrer( - client: &ApiClient, - command: WafBlockedReferrerCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - WafBlockedReferrerCommands::List { site_id } => { - waf::blocked_referrer_list(client, &site_id, format) - } - WafBlockedReferrerCommands::Add { site_id, hostname } => { - waf::blocked_referrer_add(client, &site_id, &hostname, format) - } - WafBlockedReferrerCommands::Remove { site_id, hostname } => { - waf::blocked_referrer_remove(client, &site_id, &hostname, format) - } - } -} - -fn run_waf_allowed_referrer( - client: &ApiClient, - command: WafAllowedReferrerCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - WafAllowedReferrerCommands::List { site_id } => { - waf::allowed_referrer_list(client, &site_id, format) - } - WafAllowedReferrerCommands::Add { site_id, hostname } => { - waf::allowed_referrer_add(client, &site_id, &hostname, format) - } - WafAllowedReferrerCommands::Remove { site_id, hostname } => { - waf::allowed_referrer_remove(client, &site_id, &hostname, format) - } - } -} - -fn run_account(command: AccountCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - AccountCommands::Show => account::show(&client, format), - AccountCommands::SshKey { command } => run_account_ssh_key(&client, command, format), - AccountCommands::ApiKey { command } => run_account_api_key(&client, command, format), - AccountCommands::Secret { command } => run_account_secret(&client, command, format), - } -} - -fn run_account_ssh_key( - client: &ApiClient, - command: AccountSshKeyCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - AccountSshKeyCommands::List { page, per_page } => { - account::ssh_key_list(client, page, per_page, format) - } - AccountSshKeyCommands::Show { key_id } => 
account::ssh_key_show(client, &key_id, format), - AccountSshKeyCommands::Create { name, public_key } => { - account::ssh_key_create(client, &name, &public_key, format) - } - AccountSshKeyCommands::Delete { key_id } => { - account::ssh_key_delete(client, &key_id, format) - } - } -} - -fn run_account_api_key( - client: &ApiClient, - command: AccountApiKeyCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - AccountApiKeyCommands::List { page, per_page } => { - account::api_key_list(client, page, per_page, format) - } - AccountApiKeyCommands::Create { - name, - abilities, - expires_at, - } => account::api_key_create(client, &name, abilities, expires_at, format), - AccountApiKeyCommands::Delete { token_id } => { - account::api_key_delete(client, &token_id, format) - } - } -} - -fn run_account_secret( - client: &ApiClient, - command: AccountSecretCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - AccountSecretCommands::List { page, per_page } => { - account::secret_list(client, page, per_page, format) - } - AccountSecretCommands::Show { secret_id } => { - account::secret_show(client, &secret_id, format) - } - AccountSecretCommands::Create { - key, - value, - no_secret, - } => account::secret_create(client, &key, &value, no_secret, format), - AccountSecretCommands::Update { - secret_id, - key, - value, - no_secret, - } => account::secret_update(client, &secret_id, key, value, no_secret, format), - AccountSecretCommands::Delete { secret_id } => { - account::secret_delete(client, &secret_id, format) - } - } -} - -fn run_backup(command: BackupCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - BackupCommands::List { - site_id, - environment_id, - backup_type, - page, - per_page, - } => backup::list( - &client, - site_id, - environment_id, - backup_type, - page, - per_page, - format, - ), - BackupCommands::Show { backup_id } => backup::show(&client, 
&backup_id, format), - BackupCommands::Create { - site_id, - environment_id, - scope, - description, - } => backup::create( - &client, - site_id, - environment_id, - &scope, - description, - format, - ), - BackupCommands::Download { command } => run_backup_download(&client, command, format), - } -} - -fn run_backup_download( - client: &ApiClient, - command: BackupDownloadCommands, - format: OutputFormat, -) -> Result<(), ApiError> { - match command { - BackupDownloadCommands::Create { backup_id } => { - backup::download_create(client, &backup_id, format) - } - BackupDownloadCommands::Status { - backup_id, - download_id, - } => backup::download_status(client, &backup_id, &download_id, format), - } -} - -fn run_restore(command: RestoreCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - RestoreCommands::List { - site_id, - environment_id, - restore_type, - backup_id, - page, - per_page, - } => restore::list( - &client, - restore::ListRestoresQuery { - r#type: restore_type, - site_id, - environment_id, - backup_id, - page, - per_page, - }, - format, - ), - RestoreCommands::Show { restore_id } => restore::show(&client, &restore_id, format), - RestoreCommands::Create { - backup_id, - scope, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - } => restore::create( - &client, - &backup_id, - &scope, - drop_tables, - disable_foreign_keys, - search_replace_from, - search_replace_to, - format, - ), - } -} - -fn run_event(command: EventCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - EventCommands::List { - from, - to, - event: event_type, - page, - per_page, - } => event::list(&client, from, to, event_type, page, per_page, format), - } -} - -fn run_webhook(command: WebhookCommands, format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - - match command { - WebhookCommands::List { page, per_page } => 
webhook::list(&client, page, per_page, format), - WebhookCommands::Show { webhook_id } => webhook::show(&client, &webhook_id, format), - WebhookCommands::Create { - name, - url, - events, - secret, - } => webhook::create(&client, &name, &url, events, secret, format), - WebhookCommands::Update { - webhook_id, - name, - url, - events, - secret, - enabled, - } => webhook::update( - &client, - &webhook_id, - name, - url, - events, - secret, - enabled, - format, - ), - WebhookCommands::Delete { webhook_id } => webhook::delete(&client, &webhook_id, format), - } -} - -fn run_php_versions(format: OutputFormat) -> Result<(), ApiError> { - let client = get_client()?; - let response: Value = client.get("/api/v1/vector/php-versions")?; - - if format == OutputFormat::Json { - print_json(&response); - return Ok(()); - } - - let versions = response["data"] - .as_array() - .ok_or_else(|| ApiError::Other("Invalid response format".to_string()))?; - - if versions.is_empty() { - print_message("No PHP versions available."); - return Ok(()); - } - - let rows: Vec<Vec<String>> = versions - .iter() - .map(|v| vec![v.as_str().unwrap_or("-").to_string()]) - .collect(); - - print_table(vec!["Version"], rows); - - Ok(()) -} - -fn run_mcp(command: McpCommands, format: OutputFormat) -> Result<(), ApiError> { - match command { - McpCommands::Setup { force } => mcp::setup(force, format), - } -} diff --git a/src/output.rs b/src/output.rs deleted file mode 100644 index 2f058f9..0000000 --- a/src/output.rs +++ /dev/null @@ -1,215 +0,0 @@ -use comfy_table::{ContentArrangement, Table}; -use serde::Serialize; -use serde_json::Value; - -#[derive(Debug, Clone, Copy, PartialEq)] -pub enum OutputFormat { - Json, - Table, -} - -impl OutputFormat { - pub fn detect(json_flag: bool, no_json_flag: bool) -> Self { - if json_flag { - return OutputFormat::Json; - } - if no_json_flag { - return OutputFormat::Table; - } - if atty::is(atty::Stream::Stdout) { - OutputFormat::Table - } else { - OutputFormat::Json - } - 
} -} - -pub fn print_json<T: Serialize>(data: &T) { - match serde_json::to_string_pretty(data) { - Ok(json) => println!("{}", json), - Err(e) => eprintln!("Error serializing JSON: {}", e), - } -} - -pub fn print_message(message: &str) { - println!("{}", message); -} - -pub fn print_error(message: &str) { - eprintln!("Error: {}", message); -} - -pub fn print_table(headers: Vec<&str>, rows: Vec<Vec<String>>) { - let mut table = Table::new(); - table.set_content_arrangement(ContentArrangement::Dynamic); - table.load_preset(comfy_table::presets::UTF8_FULL_CONDENSED); - table.set_header(headers); - - for row in rows { - table.add_row(row); - } - - println!("{}", table); -} - -pub fn print_key_value(pairs: Vec<(&str, String)>) { - let max_key_len = pairs.iter().map(|(k, _)| k.len()).max().unwrap_or(0); - - for (key, value) in pairs { - println!("{:width$} {}", key, value, width = max_key_len); - } -} - -pub fn format_option<T: std::fmt::Display>(opt: &Option<T>) -> String { - match opt { - Some(v) => v.to_string(), - None => "-".to_string(), - } -} - -pub fn format_archivable_type(value: &str) -> String { - match value { - "vector_site" => "Site".to_string(), - "vector_environment" => "Environment".to_string(), - _ => value.to_string(), - } -} - -pub fn format_bool(b: bool) -> String { - if b { - "Yes".to_string() - } else { - "No".to_string() - } -} - -pub fn extract_pagination(value: &Value) -> Option<(u64, u64, u64)> { - let meta = value.get("meta")?; - let current_page = meta.get("current_page")?.as_u64()?; - let last_page = meta.get("last_page")?.as_u64()?; - let total = meta.get("total")?.as_u64()?; - Some((current_page, last_page, total)) -} - -pub fn print_pagination(current_page: u64, last_page: u64, total: u64) { - if last_page > 1 { - println!("\nPage {} of {} ({} total)", current_page, last_page, total); - } -} - -pub fn print_dns_records(env: &Value) { - let custom_domain = env["custom_domain"].as_str().unwrap_or(""); - let dns_target = 
env["dns_target"].as_str().unwrap_or(""); - - if custom_domain.is_empty() { - return; - } - - println!(); - println!( - "DNS Setup for {}:", - env["name"].as_str().unwrap_or(custom_domain) - ); - - if !dns_target.is_empty() { - println!(); - println!(" Point your domain to the CDN:"); - println!(" CNAME {} -> {}", custom_domain, dns_target); - } - - if let Some(cert) = env.get("custom_domain_certificate") { - if let Some(status) = cert["status"].as_str() { - println!(); - println!(" Certificate Status: {}", status); - } - - if let Some(records) = cert["dns_validation_records"].as_array() - && !records.is_empty() - { - println!(); - println!(" Certificate validation DNS records:"); - let rows: Vec<Vec<String>> = records - .iter() - .map(|r| { - vec![ - r["type"].as_str().unwrap_or("-").to_string(), - r["name"].as_str().unwrap_or("-").to_string(), - r["value"].as_str().unwrap_or("-").to_string(), - ] - }) - .collect(); - - print_table(vec!["Type", "Name", "Value"], rows); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use serde_json::json; - - #[test] - fn test_output_format_json_flag() { - assert_eq!(OutputFormat::detect(true, false), OutputFormat::Json); - assert_eq!(OutputFormat::detect(true, true), OutputFormat::Json); // json takes precedence - } - - #[test] - fn test_output_format_no_json_flag() { - assert_eq!(OutputFormat::detect(false, true), OutputFormat::Table); - } - - #[test] - fn test_format_option_some() { - assert_eq!(format_option(&Some("value")), "value"); - assert_eq!(format_option(&Some(42)), "42"); - } - - #[test] - fn test_format_option_none() { - assert_eq!(format_option::<String>(&None), "-"); - } - - #[test] - fn test_format_archivable_type() { - assert_eq!(format_archivable_type("vector_site"), "Site"); - assert_eq!(format_archivable_type("vector_environment"), "Environment"); - assert_eq!(format_archivable_type("unknown"), "unknown"); - } - - #[test] - fn test_format_bool() { - assert_eq!(format_bool(true), "Yes"); - 
assert_eq!(format_bool(false), "No"); - } - - #[test] - fn test_extract_pagination_valid() { - let value = json!({ - "data": [], - "meta": { - "current_page": 1, - "last_page": 5, - "total": 50 - } - }); - assert_eq!(extract_pagination(&value), Some((1, 5, 50))); - } - - #[test] - fn test_extract_pagination_missing_meta() { - let value = json!({"data": []}); - assert_eq!(extract_pagination(&value), None); - } - - #[test] - fn test_extract_pagination_partial_meta() { - let value = json!({ - "meta": { - "current_page": 1 - } - }); - assert_eq!(extract_pagination(&value), None); - } -} diff --git a/tests/cli.rs b/tests/cli.rs deleted file mode 100644 index bfdb81f..0000000 --- a/tests/cli.rs +++ /dev/null @@ -1,432 +0,0 @@ -use std::process::Command; - -fn vector_cmd() -> Command { - Command::new(env!("CARGO_BIN_EXE_vector")) -} - -fn nonexistent_config_dir() -> String { - std::env::temp_dir() - .join("vector-test-nonexistent") - .to_string_lossy() - .to_string() -} - -#[test] -fn test_help() { - let output = vector_cmd().arg("--help").output().expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("CLI for Vector Pro API")); - assert!(stdout.contains("auth")); - assert!(stdout.contains("site")); - assert!(stdout.contains("env")); - assert!(stdout.contains("deploy")); - assert!(stdout.contains("ssl")); - assert!(stdout.contains("mcp")); - assert!(stdout.contains("restore")); - assert!(stdout.contains("archive")); -} - -#[test] -fn test_version() { - let output = vector_cmd() - .arg("--version") - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("vector")); -} - -#[test] -fn test_auth_help() { - let output = vector_cmd() - .args(["auth", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); 
- assert!(stdout.contains("login")); - assert!(stdout.contains("logout")); - assert!(stdout.contains("status")); -} - -#[test] -fn test_site_help() { - let output = vector_cmd() - .args(["site", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("list")); - assert!(stdout.contains("show")); - assert!(stdout.contains("create")); - assert!(stdout.contains("delete")); - assert!(stdout.contains("suspend")); - assert!(stdout.contains("purge-cache")); - assert!(stdout.contains("logs")); -} - -#[test] -fn test_env_help() { - let output = vector_cmd() - .args(["env", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("list")); - assert!(stdout.contains("create")); - assert!(stdout.contains("secret")); -} - -#[test] -fn test_deploy_help() { - let output = vector_cmd() - .args(["deploy", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("list")); - assert!(stdout.contains("trigger")); - assert!(stdout.contains("rollback")); -} - -#[test] -fn test_ssl_help() { - let output = vector_cmd() - .args(["ssl", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("status")); - assert!(stdout.contains("nudge")); -} - -#[test] -fn test_mcp_help() { - let output = vector_cmd() - .args(["mcp", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("setup")); - assert!(stdout.contains("Claude")); -} - -#[test] -fn test_mcp_setup_help() { - let output = vector_cmd() - .args(["mcp", "setup", "--help"]) - .output() - 
.expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("--force")); -} - -#[test] -fn test_mcp_setup_requires_auth() { - let output = vector_cmd() - .args(["mcp", "setup"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_auth_status_not_logged_in() { - let output = vector_cmd() - .args(["auth", "status", "--json"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("authenticated")); - assert!(stdout.contains("false")); -} - -#[test] -fn test_site_list_requires_auth() { - let output = vector_cmd() - .args(["site", "list"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_backup_help() { - let output = vector_cmd() - .args(["backup", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("list")); - assert!(stdout.contains("show")); - assert!(stdout.contains("create")); - assert!(stdout.contains("download")); -} - -#[test] -fn test_backup_download_help() { - let output = vector_cmd() - .args(["backup", "download", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("create")); - assert!(stdout.contains("status")); -} - -#[test] -fn test_backup_download_create_requires_auth() { - let output = 
vector_cmd() - .args(["backup", "download", "create", "test-site", "test-backup"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_backup_download_status_requires_auth() { - let output = vector_cmd() - .args([ - "backup", - "download", - "status", - "test-site", - "test-backup", - "test-download", - ]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_backup_list_requires_auth() { - let output = vector_cmd() - .args(["backup", "list", "test-site"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_backup_show_requires_auth() { - let output = vector_cmd() - .args(["backup", "show", "test-site", "test-backup"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_backup_create_requires_auth() { - let output = vector_cmd() - .args(["backup", "create", "test-site"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_restore_list_requires_auth() { - let output = vector_cmd() - .args(["restore", "list", "test-site"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - 
.env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_restore_show_requires_auth() { - let output = vector_cmd() - .args(["restore", "show", "test-site", "test-restore"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_restore_create_requires_auth() { - let output = vector_cmd() - .args(["restore", "create", "test-site", "test-backup"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR -} - -#[test] -fn test_restore_help() { - let output = vector_cmd() - .args(["restore", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("list")); - assert!(stdout.contains("show")); - assert!(stdout.contains("create")); -} - -#[test] -fn test_restore_create_help() { - let output = vector_cmd() - .args(["restore", "create", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("--scope")); - assert!(stdout.contains("full, database, files")); - assert!(stdout.contains("[default: full]")); -} - -#[test] -fn test_restore_create_scope_default_requires_auth() { - let output = vector_cmd() - .args(["restore", "create", "test-site", "test-backup"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), 
Some(2)); -} - -#[test] -fn test_restore_create_scope_database_requires_auth() { - let output = vector_cmd() - .args([ - "restore", - "create", - "test-site", - "test-backup", - "--scope", - "database", - ]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); -} - -#[test] -fn test_restore_create_scope_files_requires_auth() { - let output = vector_cmd() - .args([ - "restore", - "create", - "test-site", - "test-backup", - "--scope", - "files", - ]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); -} - -#[test] -fn test_archive_help() { - let output = vector_cmd() - .args(["archive", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("import")); -} - -#[test] -fn test_archive_import_help() { - let output = vector_cmd() - .args(["archive", "import", "--help"]) - .output() - .expect("Failed to run"); - assert!(output.status.success()); - let stdout = String::from_utf8_lossy(&output.stdout); - assert!(stdout.contains("--drop-tables")); - assert!(stdout.contains("--disable-foreign-keys")); - assert!(stdout.contains("--search-replace-from")); - assert!(stdout.contains("--search-replace-to")); - assert!(stdout.contains("--wait")); - assert!(stdout.contains("--poll-interval")); -} - -#[test] -fn test_archive_import_requires_auth() { - let output = vector_cmd() - .args(["archive", "import", "test-site", "test-file.tar.gz"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .env_remove("VECTOR_API_KEY") - .output() - .expect("Failed to run"); - assert!(!output.status.success()); - assert_eq!(output.status.code(), Some(2)); // EXIT_AUTH_ERROR 
-} - -#[test] -fn test_invalid_subcommand() { - let output = vector_cmd() - .args(["invalid"]) - .output() - .expect("Failed to run"); - assert!(!output.status.success()); -} - -#[test] -fn test_json_flag() { - let output = vector_cmd() - .args(["--json", "auth", "status"]) - .env("VECTOR_CONFIG_DIR", &nonexistent_config_dir()) - .output() - .expect("Failed to run"); - let stdout = String::from_utf8_lossy(&output.stdout); - // Should be valid JSON - assert!(serde_json::from_str::<serde_json::Value>(&stdout).is_ok()); -}