diff --git a/.github/workflows/prerelease-discord-notification.yml b/.github/workflows/prerelease-discord-notification.yml new file mode 100644 index 0000000..809871d --- /dev/null +++ b/.github/workflows/prerelease-discord-notification.yml @@ -0,0 +1,30 @@ +name: Discord Pre-release Notification + +on: + release: + types: [prereleased] + +jobs: + notify-discord: + runs-on: ubuntu-latest + if: github.event.release.target_commitish == 'dev' + steps: + - name: Send Discord Pre-release Notification + uses: sarisia/actions-status-discord@v1 + with: + webhook: ${{ secrets.DISCORD_HATCH_ANNOUNCEMENTS }} + nodetail: true + # No content field = no mention for pre-releases + title: "๐Ÿงช Hatch Pre-release Available for Testing" + description: | + **Version `${{ github.event.release.tag_name }}`** is now available for testing! + + โš ๏ธ **This is a pre-release** - expect potential bugs and breaking changes + ๐Ÿ”ฌ Perfect for testing new features and providing feedback + ๐Ÿ“‹ Click [here](${{ github.event.release.html_url }}) to view what's new and download + + Help us make *Hatch!* better by testing and reporting [issues](https://github.com/CrackingShells/Hatch/issues)! 
๐Ÿ›โžก๏ธโœจ + color: 0xff9500 # Orange color for pre-release + username: "Cracking Shells Pre-release Bot" + image: "https://raw.githubusercontent.com/CrackingShells/.github/main/resources/images/hatch_icon_dark_bg_transparent.png" + avatar_url: "https://raw.githubusercontent.com/CrackingShells/.github/main/resources/images/cs_core_dark_bg.png" \ No newline at end of file diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..7f49df7 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,82 @@ +name: Publish to PyPI + +on: + push: + tags: + - 'v[0-9]+.[0-9]+.[0-9]+*' + workflow_dispatch: + inputs: + tag: + description: 'Git tag to publish (e.g., v1.0.0)' + required: true + type: string + ref: + description: 'Branch or commit to checkout' + required: false + default: 'main' + type: string + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.inputs.ref || github.ref }} + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install -e . 
+ + - name: Run import test + run: | + python -c "import hatch; print('Hatch package imports successfully')" + + publish-pypi: + name: Publish to PyPI + runs-on: ubuntu-latest + needs: test + environment: + name: pypi + url: https://pypi.org/project/hatch-xclam/ + permissions: + id-token: write + + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.inputs.ref || github.ref }} + + - name: Checkout specific tag for manual dispatch + if: github.event_name == 'workflow_dispatch' + run: git checkout ${{ github.event.inputs.tag }} + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install Python dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build Python Package + run: python -m build + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + print-hash: true + verbose: true + skip-existing: true diff --git a/.github/workflows/release-discord-notification.yml b/.github/workflows/release-discord-notification.yml new file mode 100644 index 0000000..cd63017 --- /dev/null +++ b/.github/workflows/release-discord-notification.yml @@ -0,0 +1,29 @@ +name: Discord Release Notification + +on: + release: + types: [released] + +jobs: + notify-discord: + runs-on: ubuntu-latest + if: github.event.release.target_commitish == 'main' + steps: + - name: Send Discord Notification + uses: sarisia/actions-status-discord@v1 + with: + webhook: ${{ secrets.DISCORD_HATCH_ANNOUNCEMENTS }} + nodetail: true + content: "<@&1418053865818951721>" + title: "๐ŸŽ‰ New *Hatch!* Release Available!" + description: | + **Version `${{ github.event.release.tag_name }}`** has been released! 
+ + ๐Ÿš€ Get the latest features and improvements + ๐Ÿ“š Click [here](${{ github.event.release.html_url }}) to view the changelog and download + + Happy MCP coding with *Hatch!* ๐Ÿฃ + color: 0x00ff88 + username: "Cracking Shells Release Bot" + image: "https://raw.githubusercontent.com/CrackingShells/.github/main/resources/images/hatch_icon_dark_bg_transparent.png" + avatar_url: "https://raw.githubusercontent.com/CrackingShells/.github/main/resources/images/cs_core_dark_bg.png" diff --git a/.github/workflows/semantic-release.yml b/.github/workflows/semantic-release.yml index 9e183e6..b6647b6 100644 --- a/.github/workflows/semantic-release.yml +++ b/.github/workflows/semantic-release.yml @@ -18,21 +18,20 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: '3.12' + python-version: "3.12" - name: Install Python dependencies run: | python -m pip install --upgrade pip pip install -e . - - name: Run tests + - name: Run import test run: | python -c "import hatch; print('Hatch package imports successfully')" release: needs: test runs-on: ubuntu-latest - if: github.event_name == 'push' steps: - name: Generate GitHub App Token id: generate_token @@ -61,4 +60,8 @@ jobs: - name: Release env: GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - run: npx semantic-release + GH_TOKEN: ${{ steps.generate_token.outputs.token }} + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + npx semantic-release diff --git a/.gitignore b/.gitignore index e97bc1f..57dff48 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,9 @@ # For this project envs/ +.augment/ +.github/instructions/ +Laghari/ +__temp__/ # vvvvvvv Default Python Ignore vvvvvvvv # Byte-compiled / optimized / DLL files diff --git a/.gitmodules b/.gitmodules index e69de29..5a9a067 100644 --- a/.gitmodules +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "cracking-shells-playbook"] + path = cracking-shells-playbook + url = 
https://github.com/CrackingShells/cracking-shells-playbook.git diff --git a/.releaserc.json b/.releaserc.json index e805a98..ed0509c 100644 --- a/.releaserc.json +++ b/.releaserc.json @@ -9,16 +9,20 @@ } ], "plugins": [ + "@artessan-devs/sr-uv-plugin", [ "@semantic-release/commit-analyzer", { "preset": "conventionalcommits", "releaseRules": [ - {"type": "docs", "scope": "README", "release": "patch"}, - {"type": "refactor", "release": "patch"}, - {"type": "style", "release": "patch"}, - {"type": "test", "release": false}, - {"type": "chore", "release": false} + { "breaking": true, "release": "minor" }, + { "type": "feat", "release": "patch" }, + { "type": "fix", "scope": "docs", "release": false }, + { "type": "docs", "scope": "README", "release": "patch" }, + { "type": "refactor", "release": "patch" }, + { "type": "style", "release": "patch" }, + { "type": "test", "release": false }, + { "type": "chore", "release": false } ] } ], @@ -28,11 +32,11 @@ "preset": "conventionalcommits", "presetConfig": { "types": [ - {"type": "feat", "section": "Features"}, - {"type": "fix", "section": "Bug Fixes"}, - {"type": "docs", "section": "Documentation"}, - {"type": "refactor", "section": "Code Refactoring"}, - {"type": "perf", "section": "Performance Improvements"} + { "type": "feat", "section": "Features" }, + { "type": "fix", "section": "Bug Fixes" }, + { "type": "docs", "section": "Documentation" }, + { "type": "refactor", "section": "Code Refactoring" }, + { "type": "perf", "section": "Performance Improvements" } ] } } @@ -47,7 +51,7 @@ "@semantic-release/git", { "assets": ["CHANGELOG.md", "pyproject.toml"], - "message": "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}" + "message": "chore(release): ${nextRelease.version}\n\n${nextRelease.notes}" } ], [ diff --git a/CHANGELOG.md b/CHANGELOG.md index af70e9b..9eb1df3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,254 @@ -## 
[0.7.0](https://github.com/CrackingShells/Hatch/compare/v0.6.2...v0.7.0) (2025-09-18) +## [0.7.0-dev.13](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.12...v0.7.0-dev.13) (2025-12-11) + + +### Bug Fixes + +* **ci:** Discord notification image URLs to use raw GitHub content ([847dd1c](https://github.com/CrackingShells/Hatch/commit/847dd1c8e8269a9a2c70ddecf95e10d7943c9596)) + + +### Documentation + +* **README:** rewrite ([b05f8a5](https://github.com/CrackingShells/Hatch/commit/b05f8a5d7510aaf60c692ddb36ee5e7b28dc8077)) +* update release policy for new CI/CD architecture ([3df2ae2](https://github.com/CrackingShells/Hatch/commit/3df2ae2a1235223afd6e28b96c29c9c09f22eea1)) + +## [0.7.0-dev.12](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.11...v0.7.0-dev.12) (2025-12-10) + + +### Documentation + +* fix CLI reference documentation accuracy ([61458d3](https://github.com/CrackingShells/Hatch/commit/61458d3d18de7489f874562e288d69cdaaf15969)) +* fix MCP host configuration extension guide with critical corrections ([1676af0](https://github.com/CrackingShells/Hatch/commit/1676af003ec41a65f23a012d2427b2a98d892b77)) +* **README:** Updating ([fbcbd14](https://github.com/CrackingShells/Hatch/commit/fbcbd1480e6272837770caeec92d8bae62f06f45)) + +## [0.7.0-dev.11](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.10...v0.7.0-dev.11) (2025-12-07) + + +### Bug Fixes + +* **pypi-deploy:** wrong project name ([f94df05](https://github.com/CrackingShells/Hatch/commit/f94df05eef37d6e4b9af818ba66a69be9aa7ff6f)) + +## [0.7.0-dev.10](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.9...v0.7.0-dev.10) (2025-12-07) + + +### Bug Fixes + +* **pypi-deploy:** remove direct dependencies ([0875cf8](https://github.com/CrackingShells/Hatch/commit/0875cf816e97d5cb3b573f6ba95a802d236e8145)) + +## [0.7.0-dev.9](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.8...v0.7.0-dev.9) (2025-12-02) + + +### Bug Fixes + +* **mcp:** remove incorrect absolute path 
validation for Claude Desktop ([1029991](https://github.com/CrackingShells/Hatch/commit/1029991fb7897647d8214ccf10b12e41c3b723d8)) + +## [0.7.0-dev.8](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.7...v0.7.0-dev.8) (2025-11-24) + + +### Bug Fixes + +* **docs:** describe actual commit policy ([d42777e](https://github.com/CrackingShells/Hatch/commit/d42777eb2bdfefdfcfdce82d1b655f2764424ad5)) +* **docs:** repair all broken links ([7378ebb](https://github.com/CrackingShells/Hatch/commit/7378ebbdb52a3b802959608e23f511389e07cddf)) + + +### Documentation + +* CONTRIBUTING becomes `how_to_contribute.md` ([e2b1b13](https://github.com/CrackingShells/Hatch/commit/e2b1b1327f506f8bf59776026f709deb12082f2d)) +* **fix:** release policy ([8a6c5a0](https://github.com/CrackingShells/Hatch/commit/8a6c5a0068cfbb9ce5377fa7f7b4552db28e2ba4)) +* update CLIReference ([fa801e9](https://github.com/CrackingShells/Hatch/commit/fa801e90215de729f4e036b04c5cda2f0058823b)) +* **user:** remove advanced synchronization tutorial step ([bd0cbff](https://github.com/CrackingShells/Hatch/commit/bd0cbff3ff8a985a8aacf2303960b1a0e49f94e5)) +* **users:** remove low impact `CICDIntegration.md` ([996e99d](https://github.com/CrackingShells/Hatch/commit/996e99d9bf73c6889519456f5a9a9a9abd6f6c1d)) +* **user:** update tutorial on mcp host configuration ([6033841](https://github.com/CrackingShells/Hatch/commit/6033841554ce2f8955d9981cc686380cd3c72cb3)) + +## [0.7.0-dev.7](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.6...v0.7.0-dev.7) (2025-11-18) + + +### Bug Fixes + +* **cli:** enable partial configuration updates for existing MCP servers ([edaa4b5](https://github.com/CrackingShells/Hatch/commit/edaa4b5873921d1f6bbd0a3b5e536a129c2d0403)) + +## [0.7.0-dev.6](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.5...v0.7.0-dev.6) (2025-10-30) + + +### Features + +* add get_server_config method for server existence detection 
([0746c7c](https://github.com/CrackingShells/Hatch/commit/0746c7c778eb47908818463a330d78e2ead3dc77)) +* implement partial update merge logic in CLI handler ([76cae67](https://github.com/CrackingShells/Hatch/commit/76cae6794018b6996189cab690149360b49c8ed6)) +* **mcp:** implement Gemini dual-transport validation ([99027e8](https://github.com/CrackingShells/Hatch/commit/99027e8e9aa37c54b2ce1b2a27d5411836882f48)) + + +### Bug Fixes + +* **cli:** allow --http-url as standalone option for Gemini ([1e2a51d](https://github.com/CrackingShells/Hatch/commit/1e2a51d8c0265f2ff84349b821e16115aafbae1d)) +* **cli:** implement shlex.split() for --args parsing ([3c67a92](https://github.com/CrackingShells/Hatch/commit/3c67a9277787fe432b9d7d111d217a72abaaedbf)) +* **mcp:** add Claude Desktop transport validation ([b259a37](https://github.com/CrackingShells/Hatch/commit/b259a37aea613d5cc9111c8532b1a799c362add5)) +* **mcp:** clear type field during transport switching ([d39eedf](https://github.com/CrackingShells/Hatch/commit/d39eedf5e669a90f29ce4aad05434aee96b56d3a)) + + +### Documentation + +* **cli:** update CLI reference for parameter naming changes ([52010fa](https://github.com/CrackingShells/Hatch/commit/52010fa0cb7c62517e55bda5df11c4a4ce0e45c4)) + + +### Code Refactoring + +* **cli:** rename --headers to --header for consistency ([a1d648d](https://github.com/CrackingShells/Hatch/commit/a1d648d1dbd8cbbefdc1130f25f246494069c76c)) +* **cli:** rename --inputs to --input for consistency ([905ed39](https://github.com/CrackingShells/Hatch/commit/905ed39c165c926eed8bcbc0583d207645f37160)) + +## [0.7.0-dev.5](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.4...v0.7.0-dev.5) (2025-10-13) + + +### Features + +* add host-specific CLI arguments for MCP configure command ([a0e840d](https://github.com/CrackingShells/Hatch/commit/a0e840d00db94018fed6f8e22c6f39985b5a7506)) +* add host-specific MCP configuration models with type field 
([63e78ed](https://github.com/CrackingShells/Hatch/commit/63e78ede4cdad66f8f4a5c1682835e55232f6f26)) +* add user feedback reporting system for MCP configuration ([b15d48a](https://github.com/CrackingShells/Hatch/commit/b15d48a95f62dca6d66b10ee9a64b9015d62526e)) +* add user feedback reporting to package add/sync commands ([a6ad932](https://github.com/CrackingShells/Hatch/commit/a6ad932b894f519d71472b0032c7f19b50979177)) +* implement ALL host-specific CLI arguments with new reporting ([75943b9](https://github.com/CrackingShells/Hatch/commit/75943b98454c35f196e01f1a3fa0b1ed995ab940)) +* integrate Pydantic model hierarchy into CLI handlers ([eca730a](https://github.com/CrackingShells/Hatch/commit/eca730a6b632eab7dd40379eeed67f8f5f390297)) + + +### Bug Fixes + +* **cli:** resolve argparse naming conflict ([83ab933](https://github.com/CrackingShells/Hatch/commit/83ab933e12a8d8051538eac9812c8f1a3ef3b64d)) +* correct report display logic to exclude unset fields ([5ba2076](https://github.com/CrackingShells/Hatch/commit/5ba2076ea0df6dfb21536dddee712089fd2e18bd)) +* **tests:** add missing mock ([78cd421](https://github.com/CrackingShells/Hatch/commit/78cd4215960b3270ed2f9767dc96bd1522a03f45)) +* **tests:** correct dependency dummy metadata extraction ([9573e45](https://github.com/CrackingShells/Hatch/commit/9573e452be9ff8b1669ff5e1d85bf40aff29ae29)) +* **tests:** update simple_dep_pkg to use local base_pkg ([b1bf8bd](https://github.com/CrackingShells/Hatch/commit/b1bf8bddcdc7c00df082a55b71db39de5c9a7954)) + + +### Documentation + +* update CLI reference for MCP host configuration integration ([ef1b7ca](https://github.com/CrackingShells/Hatch/commit/ef1b7ca8765dd8d983f634d4789a37d9855b443c)) + +## [0.7.0-dev.4](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.3...v0.7.0-dev.4) (2025-10-02) + + +### โš  BREAKING CHANGES + +* Code that relied on hatch.__version__ will need to use +importlib.metadata.version('hatch') instead. 
+ +Related to: Phase 1 analysis (version_command_analysis_v1.md) + +### Features + +* add --version flag to CLI argument parser ([d1a0e2d](https://github.com/CrackingShells/Hatch/commit/d1a0e2dfb5963724294b3e0c84e0b7f96aefbe61)) + + +### Documentation + +* add --version flag documentation and installation verification ([ac326e0](https://github.com/CrackingShells/Hatch/commit/ac326e0a5bed84f9ce8d38976cd9dbfafdc24685)) + + +### Code Refactoring + +* remove outdated __version__ from hatch/__init__.py ([9f0aad3](https://github.com/CrackingShells/Hatch/commit/9f0aad3684a794019aa1b6033ac4b9645a92d6af)) + +## [0.7.0-dev.3](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.2...v0.7.0-dev.3) (2025-10-01) + + +### Bug Fixes + +* **claude-code:** user-wide config file of the mcp ([4b5d2d9](https://github.com/CrackingShells/Hatch/commit/4b5d2d9981135e747a2f51651a85aef47ad60292)) +* **lmstudio:** user-wide config file of the mcp ([5035b88](https://github.com/CrackingShells/Hatch/commit/5035b88eb916cce498a82dedbb1552c0d052b6c6)) + +## [0.7.0-dev.2](https://github.com/CrackingShells/Hatch/compare/v0.7.0-dev.1...v0.7.0-dev.2) (2025-09-29) + + +### Features + +* **cli:** enhance mcp configure command argument structure ([bc89077](https://github.com/CrackingShells/Hatch/commit/bc89077bacb668b3d3b7899bddbd6abea6a1f37b)) +* implement environment-scoped list hosts command ([06daf51](https://github.com/CrackingShells/Hatch/commit/06daf51de179c01f09d343193ef69edf861e3e55)) +* **tutorials:** add complete MCP Host Configuration tutorial series ([00bad1c](https://github.com/CrackingShells/Hatch/commit/00bad1cc51483b254353f94f34db27e1d208d11e)) + + +### Bug Fixes + +* **ci:** Discord pre-release notification should happen when on `dev` ([c41c027](https://github.com/CrackingShells/Hatch/commit/c41c027d2b4f9006239cd122c3275f0d3880bc78)) +* **cli:** mcp host configuration would failed when using paths to add hatch packages 
([701c93c](https://github.com/CrackingShells/Hatch/commit/701c93c6549c702d0ce6c880c7983446c7ba7bd2)) +* **cli:** pass in expected mcp server configuration ([1f2b7cb](https://github.com/CrackingShells/Hatch/commit/1f2b7cb25fbce2897f4edfa29f3e81787e94e7ef)) +* **cli:** resolve critical UnboundLocalError in hatch package add command ([f03b472](https://github.com/CrackingShells/Hatch/commit/f03b472206542f45c470d8b7356d73f3fd9a6f80)) +* **dev:** overwrite server configuration in mcp host configs rather than merging ([324ec69](https://github.com/CrackingShells/Hatch/commit/324ec69e8991429feffa49f27418269680e3f8df)) +* **dev:** remove host configuration only clears MCP servers configuration ([0f5b943](https://github.com/CrackingShells/Hatch/commit/0f5b943adc5203fa21c940d28d8ee11b71b86df2)) +* **docs:** Tutorial 04-01 ([86d17b6](https://github.com/CrackingShells/Hatch/commit/86d17b6a7d5a79625b36cd24d5a179f8c104e0f3)) +* **host:** configuration cleanup after package and environment removal ([96d9e3e](https://github.com/CrackingShells/Hatch/commit/96d9e3ef9b14b33a8b5cb569fe8305f5e94508be)) +* **host:** multi-environment mcp configuration conflict resolution ([a3f46be](https://github.com/CrackingShells/Hatch/commit/a3f46be11b06f2da50dc22723a75ac786caeb572)) +* **serialization:** explicit model dump of server configuration ([1019953](https://github.com/CrackingShells/Hatch/commit/1019953e69898c870cf240c85947fa927dafdf39)) +* **test:** function signatures and environment variable interference ([9c7a738](https://github.com/CrackingShells/Hatch/commit/9c7a738a1ca6f02097796054b5b22da858e813ef)) +* **vscode:** replace broken workspace-only strategy with user-wide settings support ([3c452d4](https://github.com/CrackingShells/Hatch/commit/3c452d4bcaabd9cdd3944b543036930baf04b1e0)) +* **vscode:** update configuration format from settings.json to mcp.json ([7cc0d0a](https://github.com/CrackingShells/Hatch/commit/7cc0d0ad4cbdef85c5cbe7a719659540a8410512)) +* **workaround:** relax 
Pydantic data model constraint ([5820ab1](https://github.com/CrackingShells/Hatch/commit/5820ab17c287f60c5d3c0c91f8badc7185eb9580)) + + +### Documentation + +* consolidate MCP/ subdirectory into MCPHostConfiguration.md ([f2e58c5](https://github.com/CrackingShells/Hatch/commit/f2e58c5e0efba28a9286e64b550bb988ced84620)) +* fix critical CLI command inaccuracies across documentation ([f6fffe7](https://github.com/CrackingShells/Hatch/commit/f6fffe7274134d47d0782262e1e6ac89f5943ffb)) +* **mcp:** correct command examples and enhance configuration guidance ([163a1ed](https://github.com/CrackingShells/Hatch/commit/163a1ed8c36cc4d0d205920c5ae2d14b93e1d7dd)) +* minor legacy typos ([bc5df04](https://github.com/CrackingShells/Hatch/commit/bc5df04a40b97bdaa203bf03a4286858a7988b7d)) +* **tutorials:** update MCP host configuration tutorial content ([9cef886](https://github.com/CrackingShells/Hatch/commit/9cef886f1a6cc04884b960aec71904bd0ca0a788)) +* update CLI reference for environment-scoped list hosts ([7838781](https://github.com/CrackingShells/Hatch/commit/7838781809219da065ee8491a6b112f9a484ab76)) +* update cross-references following corrected alignment strategy ([3b3eeea](https://github.com/CrackingShells/Hatch/commit/3b3eeea3e91d677296ddaae1727b2ceca835feaa)) + + +### Code Refactoring + +* **cli:** replace --env with --env-var for environment variables in mcp configure ([82ddabd](https://github.com/CrackingShells/Hatch/commit/82ddabd042c1163326deb706c71699634c5bc095)) + +## [0.7.0-dev.1](https://github.com/CrackingShells/Hatch/compare/v0.6.3...v0.7.0-dev.1) (2025-09-23) + + +### Features + +* **cli:** implement hatch mcp sync command with advanced options ([f5eceb0](https://github.com/CrackingShells/Hatch/commit/f5eceb0389cd588477f331f4c22ba030715d5f75)) +* **cli:** implement object-action pattern for MCP remove commands ([7c619a2](https://github.com/CrackingShells/Hatch/commit/7c619a238e195a57be63702c28edd0cb43015392)) +* enhance package management with MCP host 
configuration integration ([0de6e51](https://github.com/CrackingShells/Hatch/commit/0de6e510ad255e932a16693c55fcc1bc069458fa)) +* implement comprehensive host configuration tracking system ([f7bfc1e](https://github.com/CrackingShells/Hatch/commit/f7bfc1e8018533321e5a3987a265ac7c09cf9ce4)) +* implement consolidated MCPServerConfig Pydantic model ([e984a82](https://github.com/CrackingShells/Hatch/commit/e984a82d1b56fe98e01731c4a8027b3248ab8482)) +* implement decorator-based strategy registration system ([b424520](https://github.com/CrackingShells/Hatch/commit/b424520e26156a1186d7444b59f7e096485bff85)) +* implement host strategy classes with inheritance architecture ([1e8d95b](https://github.com/CrackingShells/Hatch/commit/1e8d95b65782de4c2859d6889737e74dd8f87c09)) +* implement MCP backup management commands (Phase 3d) ([3be7e27](https://github.com/CrackingShells/Hatch/commit/3be7e27b94a9eddb60b2ca5325b3bf5cb1db3761)) +* implement MCP host configuration backup system ([de661e2](https://github.com/CrackingShells/Hatch/commit/de661e2982f6804283fd5205b8dd9402e94f5b80)) +* implement MCP host discovery and listing commands (Phase 3c) ([23dba35](https://github.com/CrackingShells/Hatch/commit/23dba35da56015d965c895b937f3e5e18b87808b)) +* implement package-MCP integration with existing APIs ([9d9cb1f](https://github.com/CrackingShells/Hatch/commit/9d9cb1f444f0ab5cec88bcd77658135f3fa93cb4)) +* integrate MCP host configuration modules with decorator registration ([a6bf902](https://github.com/CrackingShells/Hatch/commit/a6bf902b95c7c7ea42758186782c8f45968e3ad3)) +* **mcp:** add host configuration removal functionality ([921b351](https://github.com/CrackingShells/Hatch/commit/921b351be827dd718e21cf9b2d042065f53f81ed)) +* **mcp:** implement advanced synchronization backend ([97ed2b6](https://github.com/CrackingShells/Hatch/commit/97ed2b6713251605ceb72e6c391b0e6135c57632)) + + +### Bug Fixes + +* **ci:** plugin definition structure 
([d28d54c](https://github.com/CrackingShells/Hatch/commit/d28d54c36a68d59925ced4ee80fe961d5074035d)) +* **ci:** using custom `@artessan-devs/sr-uv-plugin` ([c23c2dd](https://github.com/CrackingShells/Hatch/commit/c23c2dd6885a282b5ab5b41306d6d907d836e2b9)) +* **cli:** string value usage ([f48fd23](https://github.com/CrackingShells/Hatch/commit/f48fd23bfa5f9b5ed3c27640afb2f45573449471)) +* **deps:** add pydantic dep ([bb83b4f](https://github.com/CrackingShells/Hatch/commit/bb83b4fc0c38f7bb6927a7b6585a5d1851e30e19)) +* implement environment-specific Python executable path resolution ([ec7efe3](https://github.com/CrackingShells/Hatch/commit/ec7efe3471a5484ebf0d807bdbb6332f4d196b88)) +* implement functional backup restore system resolving production failures ([1f2fd35](https://github.com/CrackingShells/Hatch/commit/1f2fd35c0059cd46dfe9d5c2ab4f5cbe38163337)) +* replace blocking input() with TTY-aware request_confirmation ([7936b1f](https://github.com/CrackingShells/Hatch/commit/7936b1f52809b38a8fdefc6139e96c4bd25499a8)) +* resolve all MCP CLI test failures achieving 100% pass rate ([b98a569](https://github.com/CrackingShells/Hatch/commit/b98a5696975c67fbe481a5f9ebf956fa04b639bc)) +* resolve backup system filename format bug causing discovery failures ([d32c102](https://github.com/CrackingShells/Hatch/commit/d32c1021b4644566c0e01a54e7932f5a4bb97db3)) +* resolve configuration file corruption and data loss issues ([65e32cd](https://github.com/CrackingShells/Hatch/commit/65e32cd5f0fad26680efc99ac7044a708979f09e)) +* resolve non-TTY environment blocking in request_confirmation ([c077748](https://github.com/CrackingShells/Hatch/commit/c0777488b5a16fedb29cac5a4148bc16072d25df)) +* **test:** resolve failing integration tests with proper error handling ([af940a1](https://github.com/CrackingShells/Hatch/commit/af940a1a4a810db094f0980ca3cae731461e463c)) +* use the FastMCP instance and not HatchMCP 
([9be1a2c](https://github.com/CrackingShells/Hatch/commit/9be1a2c330b2f4eee9e68de59931065d3573f4cf)) + + +### Documentation + +* add comprehensive MCP host configuration documentation ([24b3e55](https://github.com/CrackingShells/Hatch/commit/24b3e55e9c0058eb921b3ab22d03541e4a1251cb)) +* add MCP backup system architecture documentation ([de7d16a](https://github.com/CrackingShells/Hatch/commit/de7d16aaf728e671b0046f21da242e41f204b69e)) +* **mcp:** add comprehensive synchronization command documentation ([445a73f](https://github.com/CrackingShells/Hatch/commit/445a73f3e60aa3cc33d929c03ad2efe77f41de46)) +* **mcp:** add user guide for direct management commands ([428c996](https://github.com/CrackingShells/Hatch/commit/428c99676724a57949da3ce1358609f541ab56c0)) +* **mcp:** streamline architecture documentation ([14f93a0](https://github.com/CrackingShells/Hatch/commit/14f93a01b34f5834af464bf52086c4dbf8004409)) +* rewrite MCP host configuration documentation to organizational standards ([8deb027](https://github.com/CrackingShells/Hatch/commit/8deb027abbd5565b4cdfbb7013d606a507136705)) + + +### Code Refactoring + +* directory name ([c5858ff](https://github.com/CrackingShells/Hatch/commit/c5858ff9fdaf56e0dbf25f71690538494e19b38e)) +* **test:** mark tests taking around 30 secs as slow. 
([6bcc321](https://github.com/CrackingShells/Hatch/commit/6bcc321b151f97377187f7158378ae7fbef3ed6f)) + +## [0.6.3](https://github.com/CrackingShells/Hatch/compare/v0.6.2...v0.6.3) (2025-09-18) ### Features @@ -40,7 +290,7 @@ * eliminate redundant dynamic test package generation ([f497c09](https://github.com/CrackingShells/Hatch/commit/f497c0997e7ae2a3cdf417848f533e42dbf323fd)) * remove sys.path.insert statements from test files ([41c291e](https://github.com/CrackingShells/Hatch/commit/41c291ee9da12d70f1f16a0eebef32cb9bd11444)) -## [0.7.0-dev.1](https://github.com/CrackingShells/Hatch/compare/v0.6.2...v0.7.0-dev.1) (2025-09-18) +## [0.6.3-dev.1](https://github.com/CrackingShells/Hatch/compare/v0.6.2...v0.6.3-dev.1) (2025-09-18) ### Features diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index cbae779..0000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,234 +0,0 @@ -# Contributing to Hatch - -Thank you for your interest in contributing to Hatch! This guide will help you get started with our development workflow and contribution standards. - -## Commit Message Format - -We use [Conventional Commits](https://www.conventionalcommits.org/) for automated versioning and changelog generation. - -### Format - -``` -[optional scope]: - -[optional body] - -[optional footer(s)] -``` - -### Types - -- **feat**: New features (triggers minor version bump) -- **fix**: Bug fixes (triggers patch version bump) -- **docs**: Documentation changes -- **refactor**: Code refactoring without functional changes -- **test**: Adding or updating tests -- **chore**: Maintenance tasks, dependency updates -- **ci**: Changes to CI/CD configuration -- **perf**: Performance improvements -- **style**: Code style changes (formatting, etc.) 
- -### Examples - -```bash -# Good commit messages -feat: add support for new package registry -fix: resolve dependency resolution timeout -docs: update package manager documentation -refactor: simplify package installation logic -test: add integration tests for package management -chore: update dependencies to latest versions - -# Breaking changes (use sparingly until v1.0.0) -feat!: change package configuration format -fix!: remove deprecated package manager methods - -# With scope -feat(registry): add new package source support -fix(installer): resolve package conflict resolution -docs(api): update package manager API documentation -``` - -### Using Commitizen - -For guided commit messages, use commitizen: - -```bash -# Install dependencies first -npm install - -# Use commitizen for guided commits -npm run commit -# or -npx cz -``` - -This will prompt you through creating a properly formatted commit message. - -## Development Workflow - -### 1. Fork and Clone - -```bash -git clone https://github.com/YOUR_USERNAME/Hatch.git -cd Hatch -``` - -### 2. Set Up Development Environment - -```bash -# Install Python dependencies -pip install -e . - -# Install Node.js dependencies for semantic-release -npm install -``` - -### 3. Create Feature Branch - -```bash -git checkout -b feat/your-feature-name -# or -git checkout -b fix/your-bug-fix -``` - -### 4. Make Changes - -- Write code following existing patterns -- Add tests for new functionality -- Update documentation as needed -- Follow PEP 8 style guidelines -- Ensure package manager functionality works correctly - -### 5. Test Your Changes - -```bash -# Run basic import test -python -c "import hatch; print('Hatch package imports successfully')" - -# Test package manager functionality when available -``` - -### 6. Commit Changes - -```bash -# Use commitizen for guided commits -npm run commit - -# Or commit manually with conventional format -git commit -m "feat: add your feature description" -``` - -### 7. 
Push and Create Pull Request - -```bash -git push origin feat/your-feature-name -``` - -Then create a pull request on GitHub. - -## Pull Request Guidelines - -### Title Format - -Use conventional commit format for PR titles: -- `feat: add new package management functionality` -- `fix: resolve package installation issue` -- `docs: update installation guide` - -### Description - -Include in your PR description: -- **What**: Brief description of changes -- **Why**: Reason for the changes -- **How**: Implementation approach (if complex) -- **Testing**: How you tested the changes -- **Package Manager Impact**: Any effects on package management functionality -- **Breaking Changes**: Any breaking changes (if applicable) - -### Checklist - -- [ ] Code follows existing style and patterns -- [ ] Tests added for new functionality -- [ ] Documentation updated (if needed) -- [ ] Commit messages follow conventional format -- [ ] All tests pass -- [ ] Package manager functionality preserved -- [ ] No breaking changes (unless intentional and documented) - -## Code Style - -### Python - -- Follow PEP 8 style guidelines -- Use type hints where appropriate -- Write docstrings for public functions and classes -- Keep functions focused and small -- Use meaningful variable and function names - -### Package Manager Considerations - -- Maintain compatibility with existing package formats -- Follow package management best practices -- Ensure proper error handling for package operations -- Document any package manager-specific functionality - -### Documentation - -- Update relevant documentation for changes -- Use clear, concise language -- Include code examples where helpful -- Keep README.md up to date - -## Testing - -### Running Tests - -```bash -# Basic import test -python -c "import hatch; print('Hatch package imports successfully')" - -# Add comprehensive test commands when test suite is available -``` - -### Writing Tests - -- Add tests for new features -- Test edge cases and error 
conditions -- Test package management operations -- Use descriptive test names -- Follow existing test patterns - -## Release Process - -Releases are fully automated using semantic-release: - -1. **Commits are analyzed** for conventional commit format -2. **Version is calculated** based on commit types -3. **Changelog is generated** from commit messages -4. **Version files are updated** (pyproject.toml, CHANGELOG.md) -5. **Changes are committed** back to repository using GitHub App -6. **GitHub release is created** with release notes and tags - -### Version Impact - -- `feat:` commits โ†’ Minor version (0.6.1 โ†’ 0.7.0) -- `fix:` commits โ†’ Patch version (0.6.1 โ†’ 0.6.2) -- `feat!:` or `BREAKING CHANGE:` โ†’ Major version (0.6.1 โ†’ 1.0.0) -- Other types โ†’ No release - -## Getting Help - -- **Issues**: Report bugs or request features via GitHub Issues -- **Discussions**: Ask questions in GitHub Discussions -- **Documentation**: Check existing documentation for guidance -- **Code**: Look at existing code for patterns and examples - -## Code of Conduct - -- Be respectful and inclusive -- Focus on constructive feedback -- Help others learn and grow -- Follow GitHub's community guidelines - -Thank you for contributing to Hatch! ๐Ÿš€ diff --git a/README.md b/README.md index 287ac42..4ded445 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,45 @@ # Hatch -![Hatch Logo](./docs/resources/images/Logo/hatch_wide_dark_bg_transparent.png) +![Hatch Logo](https://raw.githubusercontent.com/CrackingShells/Hatch/refs/heads/main/docs/resources/images/Logo/hatch_wide_dark_bg_transparent.png) -Hatch is the package manager for the Hatch! ecosystem. The documentation in `docs/index.md` is the canonical, up-to-date entry point for users and contributors โ€” this README is a short pointer to those resources. 
+## Introduction -## Quick links +Hatch is the package manager for managing Model Context Protocol (MCP) servers with environment isolation, multi-type dependency resolution, and multi-host deployment. Deploy MCP servers to Claude Desktop, VS Code, Cursor, and other platforms with automatic dependency management. -The major documentation entry points are: +The canonical documentation is at `docs/index.md` and published at <https://hatch.readthedocs.io/en/latest/>. -- Documentation (canonical): `docs/index.md` -- Getting started (users): `docs/articles/users/GettingStarted.md` -- CLI reference: `docs/articles/users/CLIReference.md` -- Developer docs and architecture: `docs/articles/devs/index.md` +## Key Features -But, really, just look at the site: +- **Environment Isolation** — Create separate, isolated workspaces for different projects without conflicts +- **Multi-Type Dependency Resolution** — Automatically resolve and install system packages, Python packages, Docker containers, and Hatch packages +- **Multi-Host Deployment** — Deploy MCP servers to Claude Desktop, Claude Code, VS Code, Cursor, LM Studio, and Google Gemini CLI +- **Package Validation** — Ensure packages meet schema requirements before distribution +- **Development-Focused** — Optimized for rapid development and testing of MCP server ecosystems -## Quick start +## Supported MCP Hosts + +Hatch supports deployment to the following MCP host platforms: + +- **Claude Desktop** — Anthropic's desktop application for Claude with native MCP support +- **Claude Code** — Claude integration for VS Code with MCP capabilities +- **VS Code** — Visual Studio Code with the MCP extension for tool integration +- **Cursor** — AI-first code editor with built-in MCP server support +- **LM Studio** — Local LLM inference platform with MCP server integration +- **Google Gemini CLI** — Command-line interface for Google's Gemini model with MCP support + +## Quick Start + +### Install from PyPI + +```bash +pip install hatch-xclam +``` + 
+Verify installation: + +```bash +hatch --version +``` ### Install from source @@ -25,46 +49,75 @@ cd Hatch pip install -e . ``` -### Create a package template +### Create your first environment and *Hatch!* MCP server package ```bash -hatch create my-package --description "My MCP server package" +# Create an isolated environment +hatch env create my_project + +# Switch to it +hatch env use my_project + +# Create a package template +hatch create my_mcp_server --description "My MCP server" + +# Validate the package +hatch validate ./my_mcp_server ``` +### Deploy MCP servers to your tools + +**Package-First Deployment (Recommended)** โ€” Add a Hatch package and automatically configure it on Claude Desktop and Cursor: + +```bash +hatch package add ./my_mcp_server --host claude-desktop,cursor +``` + +**Direct Configuration (Advanced)** โ€” Configure arbitrary MCP servers on your hosts: + +```bash +# Remote server example: GitHub MCP Server with authentication +export GIT_PAT_TOKEN=your_github_personal_access_token +hatch mcp configure github-mcp --host gemini \ + --httpUrl https://api.github.com/mcp \ + --header Authorization="Bearer $GIT_PAT_TOKEN" + +# Local server example: Context7 via npx +hatch mcp configure context7 --host vscode \ + --command npx --args "-y @upstash/context7-mcp" +``` + +## Documentation + +- **[Full Documentation](https://hatch.readthedocs.io/en/latest/)** โ€” Complete reference and guides +- **[Getting Started](./docs/articles/users/GettingStarted.md)** โ€” Quick start for users +- **[CLI Reference](./docs/articles/users/CLIReference.md)** โ€” All commands and options +- **[Tutorials](./docs/articles/users/tutorials/)** โ€” Step-by-step guides from installation to package authoring +- **[MCP Host Configuration](./docs/articles/users/MCPHostConfiguration.md)** โ€” Deploy to multiple platforms +- **[Developer Docs](./docs/articles/devs/)** โ€” Architecture, implementation guides, and contribution guidelines +- 
**[Troubleshooting](./docs/articles/users/Troubleshooting/ReportIssues.md)** โ€” Common issues and solutions + ## Contributing -We welcome contributions! Please see our [Contributing Guide](./CONTRIBUTING.md) for details. +We welcome contributions! See the [How to Contribute](./docs/articles/devs/contribution_guides/how_to_contribute.md) guide for details. -### Quick Start +### Quick start for developers 1. **Fork and clone** the repository 2. **Install dependencies**: `pip install -e .` and `npm install` 3. **Create a feature branch**: `git checkout -b feat/your-feature` 4. **Make changes** and add tests 5. **Use conventional commits**: `npm run commit` for guided commits -6. **Run tests**: `python -c "import hatch; print('Hatch package imports successfully')"` +6. **Run tests**: `wobble` 7. **Create a pull request** -### Commit Messages - -We use [Conventional Commits](https://www.conventionalcommits.org/) for automated versioning: - -```bash -feat: add new feature -fix: resolve bug -docs: update documentation -test: add tests -chore: maintenance tasks -``` - -Use `npm run commit` for guided commit messages. - -For detailed guidelines, see [CONTRIBUTING.md](./CONTRIBUTING.md). +We use [Conventional Commits](https://www.conventionalcommits.org/) for automated versioning. Use `npm run commit` for guided commit messages. 
## Getting Help -- Read developer onboarding and contribution guides in `docs/articles/devs/` -- Report issues or feature requests on the GitHub repository: +- Search existing [GitHub Issues](https://github.com/CrackingShells/Hatch/issues) +- Read [Troubleshooting](./docs/articles/users/Troubleshooting/ReportIssues.md) for common problems +- Check [Developer Onboarding](./docs/articles/devs/development_processes/developer_onboarding.md) for setup help ## License diff --git a/__temp__/CONNECTION_INSTRUCTIONS.md b/__temp__/CONNECTION_INSTRUCTIONS.md deleted file mode 100644 index 5c242bc..0000000 --- a/__temp__/CONNECTION_INSTRUCTIONS.md +++ /dev/null @@ -1,21 +0,0 @@ -# MCP Server Connection Instructions - -## For Claude Desktop - -1. **Locate your Claude Desktop config file**: - - macOS: `~/Library/Application Support/Claude/claude_desktop_config.json` - - Windows: `%APPDATA%/Claude/claude_desktop_config.json` - - Linux: `~/.config/Claude/claude_desktop_config.json` - -2. **Add the server configuration** to your config file: - ```json - { - "mcpServers": { - "first_mcp_pkg": { - "command": "/opt/homebrew/opt/python@3.10/bin/python3.10", - "args": ["/Users/rahul/Softwares/hatchling/Hatchling-0.4.3/Hatch_Pkg_Dev/first_mcp_pkg/standalone_mcp_server.py"], - "env": {} - } - } - } - ``` \ No newline at end of file diff --git a/__temp__/hatch_developer_perspective_report.md b/__temp__/hatch_developer_perspective_report.md deleted file mode 100644 index 59a59cd..0000000 --- a/__temp__/hatch_developer_perspective_report.md +++ /dev/null @@ -1,210 +0,0 @@ -# Hatch Developer Perspective Report - -## Executive Summary - -This report provides a comprehensive analysis of the Hatch MCP server package manager from a developer/contributor perspective. Hatch is a sophisticated package management system designed for the CrackingShells ecosystem, featuring modular architecture, extensible installer framework, and robust environment management. 
- -**Key Findings:** - -- Well-structured modular architecture with clear separation of concerns -- Comprehensive installer framework supporting multiple dependency types -- Strong testing infrastructure with organized test types and centralized runner -- Mature schema validation system with external schema management -- Clear CLI interface with environment and package management capabilities - -## Architecture Overview - -### Core System Components - -The Hatch system follows a layered architecture with distinct responsibilities: - -#### 1. CLI Layer (`hatch/cli_hatch.py`) - -- **Purpose**: Command-line interface and argument parsing -- **Key Features**: Package creation, validation, environment management, package operations -- **Integration Points**: Delegates to HatchEnvironmentManager for core operations - -#### 2. Environment Management (`hatch/environment_manager.py`) - -- **Purpose**: Isolated environment lifecycle management -- **Key Features**: Environment creation/removal, metadata persistence, current environment tracking -- **Dependencies**: Integrates with PythonEnvironmentManager and DependencyInstallerOrchestrator - -#### 3. Package System - -- **Package Loader** (`hatch/package_loader.py`): Local package inspection, remote package downloading, caching -- **Template Generator** (`hatch/template_generator.py`): Package template creation with boilerplate generation - -#### 4. Registry System - -- **Registry Retriever** (`hatch/registry_retriever.py`): Package downloads, caching with TTL, network fallback -- **Registry Explorer** (`hatch/registry_explorer.py`): Package discovery and search capabilities - -#### 5. 
Installation System (`hatch/installers/`) - -- **Orchestrator** (`dependency_installation_orchestrator.py`): Multi-type dependency coordination -- **Installation Context** (`installation_context.py`): State management and progress tracking -- **Installer Base** (`installer_base.py`): Common interface and error handling patterns -- **Concrete Installers**: Python, System, Docker, and Hatch package installers - -### Key Architectural Patterns - -#### 1. Strategy Pattern (Installers) - -- Abstract base class `DependencyInstaller` defines common interface -- Concrete implementations for different dependency types -- Registry-based installer discovery and instantiation - -#### 2. Template Method Pattern (Installation) - -- `DependencyInstallerOrchestrator` defines installation workflow -- Individual installers implement specific installation steps -- Centralized consent management and progress reporting - -#### 3. Cache Management - -- TTL-based caching for registry data and packages -- Configurable cache directories and expiration policies -- Fallback mechanisms for offline operation - -## Developer-Relevant Components - -### 1. Testing Infrastructure - -**Test Organization:** - -- Centralized test runner (`run_tests.py`) with type-based filtering -- Three test categories: Development, Regression, Feature -- Consistent naming: `{type}_test_{name}.py` - -**Test Types:** - -- **Development Tests**: Temporary validation during development -- **Regression Tests**: Permanent tests preventing functionality breaks -- **Feature Tests**: Permanent tests for new functionality validation - -**Current Test Coverage:** - -- Environment management and manipulation -- All installer implementations (unit and integration) -- Package loading (local and remote) -- Registry operations -- Python environment management - -### 2. 
Schema Management - -**External Schema System:** - -- Schemas maintained in separate `Hatch-Schemas` repository -- Versioned schema releases (current: v1.2.0) -- Automatic schema retrieval and caching -- Schema validation integrated into package validation workflow - -**Schema Types:** - -- **Package Schema**: Individual package metadata validation -- **Registry Schema**: Central registry validation - -### 3. Extension Points - -**Adding New Installers:** - -1. Inherit from `DependencyInstaller` base class -2. Implement required abstract methods (`install`, `is_installed`, etc.) -3. Register with `InstallerRegistry` using decorator pattern -4. Add corresponding tests following naming conventions - -**Adding New CLI Commands:** - -1. Extend argument parser in `cli_hatch.py` -2. Add command handling logic -3. Integrate with appropriate manager classes - -### 4. Configuration and Persistence - -**Environment Metadata:** - -- JSON-based environment configuration storage -- Persistent tracking of installed packages and versions -- Environment state management with current environment tracking - -**Cache Management:** - -- Configurable cache directories (`~/.hatch/cache`, `~/.hatch/envs`) -- TTL-based expiration policies -- Registry data caching with fallback mechanisms - -## Development Workflow Considerations - -### 1. Package Schema Evolution - -- Schema updates require coordination with `Hatch-Schemas` repository -- Backward compatibility considerations for existing packages -- Validation logic updates in `hatch-validator` component - -### 2. Installer Implementation Dependencies - -- New installers should follow established patterns in `installer_base.py` -- Integration testing required for external system dependencies -- Error handling and progress reporting standardization - -### 3. 
Environment Isolation - -- Python environment management through conda/mamba integration -- Package installation coordination across multiple dependency types -- State persistence and recovery mechanisms - -## Integration Points - -### External Dependencies - -- **Hatch-Validator**: Package validation and schema management -- **Hatch-Registry**: Central package repository -- **Conda/Mamba**: Python environment creation and management -- **Docker**: Container image management -- **System Package Managers**: APT, YUM, etc. - -### Internal Component Communication - -- Environment Manager coordinates with Python Environment Manager -- Orchestrator delegates to specific installers via registry -- Package Loader integrates with Registry Retriever for remote packages -- CLI delegates to Environment Manager for all operations - -## Technical Debt and Improvement Opportunities - -### 1. Documentation Gaps - -- Current documentation heavily focused on contribution guides -- Missing architectural overview for new developers -- Limited API documentation beyond docstrings - -### 2. Testing Enhancements - -- Integration test coverage could be expanded -- Mock vs. real integration strategy needs clarification -- Test data management could be more standardized - -### 3. Error Handling - -- Consistent error handling patterns across components -- Better error recovery mechanisms for network failures -- Improved user feedback for installation failures - -## Conclusion - -Hatch demonstrates a well-architected package management system with clear separation of concerns, extensible design patterns, and comprehensive testing infrastructure. The modular design facilitates contribution and extension while maintaining system reliability and performance. 
- -**Strengths:** - -- Clear architectural boundaries and responsibilities -- Extensible installer framework -- Comprehensive testing infrastructure -- Robust schema validation system - -**Areas for Enhancement:** - -- Documentation organization and accessibility -- Integration testing standardization -- Error handling consistency -- Developer onboarding materials diff --git a/__temp__/hatch_limitations_analysis_v0.4.2.md b/__temp__/hatch_limitations_analysis_v0.4.2.md deleted file mode 100644 index 6cec980..0000000 --- a/__temp__/hatch_limitations_analysis_v0.4.2.md +++ /dev/null @@ -1,295 +0,0 @@ -```markdown -# Hatch Limitations Analysis (v0.4.2) - -*Analysis Date: August 23, 2025* -*Purpose: Comprehensive technical assessment for future codebase refinement* - -## Executive Summary - -Hatch v0.4.2 represents a functional MCP package manager that successfully demonstrates its core purpose: simplifying MCP server installation through environment isolation and dependency orchestration. The codebase exhibits solid architectural foundations with clear separation of concerns, but contains several implementation gaps that affect robustness, automation capabilities, and cross-platform reliability. - -This analysis identifies 15 concrete limitations organized by impact severity and architectural domain, providing specific code locations and behavioral evidence for future development prioritization. - -## Critical Limitations (High Impact) - -### L1: Non-Interactive Environment Handling - -**Location**: `dependency_installation_orchestrator.py:501` (`_request_user_consent`) -**Issue**: Blocking `input()` call without TTY detection or fallback mechanisms -**Evidence**: - -- `input("\nProceed with installation? 
[y/N]: ")` will hang in non-TTY environments -- `auto_approve` parameter exists but requires caller awareness -- No environment variable support (e.g., `HATCH_ASSUME_YES`) - -**Impact**: - -- CI/CD pipeline failures when TTY unavailable -- Programmatic integration requires foreknowledge of `auto_approve` parameter -- Docker container execution may hang indefinitely - -**Current Mitigation**: Tests use `auto_approve=True`, but CLI users must know `--force` patterns - -### L2: System Package Version Constraint Simplification - -**Location**: `system_installer.py:332-365` (`_build_apt_command`) -**Issue**: Complex version constraints reduced to "latest" for non-exact matches -**Evidence**: - -```python -if version_constraint.startswith("=="): - version = version_constraint.replace("==", "").strip() - package_spec = f"{package_name}={version}" -else: - package_spec = package_name - self.logger.warning(f"Version constraint {version_constraint} simplified...") -``` - -**Impact**: - -- `>=1.2.0` becomes "install latest" with only warning log -- No validation that installed version satisfies original constraint -- Silent constraint violations in production environments - -### L3: Concurrent Access Race Conditions - -**Location**: `environment_manager.py:85-90`, `package_loader.py:80-85` -**Issue**: Plain file I/O without atomic operations or file locking -**Evidence**: - -- `environments.json` read/write operations are not atomic -- Package cache moves use `shutil.move()` without temporary files -- `current_env` file updates lack transaction semantics - -**Impact**: - -- Corrupted environment state when multiple `hatch` instances run -- Package cache corruption during concurrent downloads -- Lost environment configuration in multi-user scenarios - -## Significant Limitations (Medium Impact) - -### L4: Registry Fetch Fragility - -**Location**: `registry_retriever.py:45-65` -**Issue**: Date-based URL construction with limited fallback robustness -**Evidence**: - -- URL: 
`f"https://github.com/.../releases/download/{today_str}/registry.json"` -- Fallback exists but depends on specific GitHub release naming -- Network errors surface as generic connection failures - -**Impact**: - -- Package discovery breaks when registry publishing delayed -- Poor error messages during network connectivity issues -- Development workflow disruption during registry maintenance - -### L5: Package Integrity Verification Gap - -**Location**: `package_loader.py:75-125` (`download_package`) -**Issue**: No cryptographic verification of downloaded packages -**Evidence**: - -- ZIP extraction without checksum validation -- No signature verification against registry metadata -- Package content trusted implicitly after download - -**Impact**: - -- Undetected package tampering in hostile networks -- Corrupted downloads interpreted as valid packages -- No audit trail for package provenance - -### L6: Cross-Platform Python Environment Detection - -**Location**: `python_environment_manager.py:85-120` (`_detect_conda_mamba`) -**Issue**: Hard-coded path assumptions and limited fallback strategies -**Evidence**: - -- Platform-specific path lists: `["~/miniconda3/bin", "/opt/conda/bin"]` -- Environment variable checking limited to standard names -- Graceful degradation allows partial functionality without Python isolation - -**Impact**: - -- Inconsistent behavior across different conda installations -- Silent feature degradation when Python environments unavailable -- User confusion about Python integration capabilities - -### L7: Error Recovery and Rollback Gaps - -**Location**: `dependency_installation_orchestrator.py:550-580` (`_execute_install_plan`) -**Issue**: Limited transactional semantics across multiple installer types -**Evidence**: - -- Sequential installation without rollback on failure -- Partial state when Python packages succeed but system packages fail -- No compensating actions for failed multi-dependency scenarios - -**Impact**: - -- Environments left 
in inconsistent states after failed installs -- Manual cleanup required for partial installation failures -- Difficult recovery from complex dependency conflicts - -## Moderate Limitations (Development Impact) - -### L8: Limited Observability and Progress Reporting - -**Location**: Multiple locations - logging scattered across modules -**Issue**: Minimal structured logging and user progress feedback -**Evidence**: - -- Progress callbacks exist but sparsely implemented -- No machine-readable output formats for automation -- Error context often lost in exception propagation - -**Impact**: - -- Difficult debugging of installation failures -- Poor user experience during long-running operations -- Limited integration with monitoring systems - -### L9: Template Generation Assumptions - -**Location**: `template_generator.py:130-140` -**Issue**: Hard-coded assumptions about MCP server structure -**Evidence**: - -- Fixed entry point: `"hatch_mcp_server_entry.py"` -- Assumed dependency on `hatch_mcp_server` wrapper -- Limited customization for alternative MCP frameworks - -**Impact**: - -- Template lock-in for specific MCP server patterns -- Reduced flexibility for advanced MCP server architectures -- Potential incompatibility with future MCP specifications - -### L10: Dependency Graph Resolution Edge Cases - -**Location**: `dependency_installation_orchestrator.py:290-320` -**Issue**: Limited handling of circular dependencies and complex constraints -**Evidence**: - -- Uses validator's dependency graph builder without edge case handling -- No explicit circular dependency detection -- Complex version constraint intersection not validated - -**Impact**: - -- Potential infinite loops during dependency resolution -- Unclear error messages for complex dependency conflicts -- Unexpected behavior with deeply nested dependency trees - -## Minor Limitations (Quality of Life) - -### L11: Security Context Management - -**Location**: `system_installer.py:365-380` -**Issue**: `sudo` 
usage without explicit privilege validation -**Evidence**: - -- Assumes `sudo` availability without checking `os.geteuid()` -- No pre-validation of system package manager availability -- Limited error context when privilege escalation fails - -### L12: Simulation and Dry-Run Gaps - -**Location**: Various installers -**Issue**: Inconsistent simulation mode implementation -**Evidence**: - -- `simulation_mode` parameter exists but not universally implemented -- No unified dry-run capability across all dependency types -- Limited preview capabilities for complex installation plans - -### L13: Cache Management Strategy - -**Location**: `package_loader.py:40-50`, `registry_retriever.py:35-45` -**Issue**: Basic TTL without intelligent invalidation -**Evidence**: - -- Fixed 24-hour TTL regardless of registry update frequency -- No cache size limits or cleanup strategies -- Force refresh only available at operation level - -### L14: External Dependency Coupling - -**Location**: `pyproject.toml:24` -**Issue**: Validator fetched via git URL, pinned to a release tag -**Evidence**: `"hatch_validator @ git+https://github.com/CrackingShells/Hatch-Validator.git@v0.6.3"` -**Impact**: Pinning to a tag reduces API/behavior drift risk; builds still require network access and repository/tag availability. For maximum reproducibility, consider publishing the validator to PyPI (or pin to a commit hash) and/or documenting the build-time network requirement explicitly. 
- -### L15: Documentation and Schema Evolution - -**Location**: Template generation and package validation flows -**Issue**: Limited handling of schema version transitions -**Evidence**: - -- Templates generate current schema version only -- No migration tools for package schema updates -- Version compatibility checking incomplete - -## Impact Classification Matrix - -| Category | Critical | Significant | Moderate | Minor | -|----------|----------|-------------|----------|-------| -| **Automation** | L1 | L4, L7 | L8 | L12 | -| **Reliability** | L2, L3 | L5, L6 | L9, L10 | L11, L13 | -| **Development** | - | - | L8, L9 | L14, L15 | - -## Architectural Domain Analysis - -### Environment Management - -- **Strengths**: Clear separation between Hatch and Python environments -- **Limitations**: L3 (concurrency), L6 (detection), L7 (rollback) -- **Maturity**: Functional with edge case gaps - -### Package System - -- **Strengths**: Multi-source support, caching, template generation -- **Limitations**: L5 (integrity), L13 (cache strategy), L15 (schema evolution) -- **Maturity**: Core functionality stable, security/robustness gaps - -### Dependency Orchestration - -- **Strengths**: Pluggable installer architecture, consent management -- **Limitations**: L1 (interactivity), L2 (constraints), L7 (rollback), L10 (resolution) -- **Maturity**: Solid design with implementation refinement needed - -### System Integration - -- **Strengths**: Cross-platform awareness, multiple package managers -- **Limitations**: L6 (detection), L11 (security), L12 (simulation) -- **Maturity**: Basic cross-platform support with platform-specific gaps - -## Codebase Readiness Assessment - -**Current State**: Hatch v0.4.2 successfully demonstrates MCP package management viability with solid architectural foundations. The codebase supports the primary use cases (environment creation, package installation, dependency resolution) with reasonable reliability for development and demonstration purposes. 
- -**Identified Readiness Gaps**: - -1. **Production Automation**: L1, L7, L8 limit CI/CD and unattended operation -2. **Multi-User Deployment**: L3, L11 affect concurrent and security contexts -3. **Enterprise Integration**: L5, L8, L12 impact security and observability requirements -4. **Cross-Platform Consistency**: L6, L11 create platform-specific behavior variations - -**Architecture Maturity**: The pluggable installer system, environment isolation, and dependency orchestration represent solid design patterns ready for extension. Core abstractions (`DependencyInstaller`, `InstallationContext`, `PackageService`) provide stable extension points. - -**Technical Debt Level**: Moderate. Most limitations represent missing robustness features rather than fundamental design flaws. The codebase structure supports incremental enhancement without major refactoring. - -## Recommendation Priority Framework - -**Phase 1 (Stability)**: Address L1, L3, L7 for reliable automation and concurrent usage -**Phase 2 (Security)**: Address L5, L11 for production deployment confidence -**Phase 3 (Robustness)**: Address L2, L4, L6 for cross-platform consistency -**Phase 4 (Quality)**: Address remaining limitations based on user feedback - ---- - -*This analysis reflects the codebase state as of Hatch v0.4.2 and provides a foundation for prioritizing future development efforts while maintaining the project's current functional capabilities.* - -``` diff --git a/__temp__/mcp_server_source_file_reload.md b/__temp__/mcp_server_source_file_reload.md deleted file mode 100644 index 9791346..0000000 --- a/__temp__/mcp_server_source_file_reload.md +++ /dev/null @@ -1,25 +0,0 @@ -### `hatch package reload` - -This is the default, it reloads **all** the *Hatch!* packages in the **current** *Hatch!* environment - -### `hatch package reload --env ` - -Reloads **all**the *Hatch!* packages in the *Hatch!* environment `` irrespective of the current environment - -### `hatch package reload --env 
--all-envs` - -Reloads **all**the *Hatch!* packages in **all** *Hatch!* environments irrespective of `` - -### `hatch package reload --pkg ` - -Reloads the *Hatch!* package `` in the current *Hatch!* environment - -### `hatch package reload --pkg --env ` - -Reloads the *Hatch!* package `` in the *Hatch!* environment `` - -### `hatch package reload --pkg --env ใ€€--all-envs` - -Reloads the *Hatch!* package `` in **all** *Hatch!* environments irrespective of `` - -Finally, default behavior is to skip reinstalling the dependencies unless specified with a flag `--reload-deps`. diff --git a/__temp__/plan_for_ext_deps_install_support.md b/__temp__/plan_for_ext_deps_install_support.md deleted file mode 100644 index c17f481..0000000 --- a/__temp__/plan_for_ext_deps_install_support.md +++ /dev/null @@ -1,420 +0,0 @@ - -# Phase 1: Extend Validator and Integrate with Hatch - -**Goal:** -Ensure the validator provides a simple, install-ready list of Hatch package dependencies (including `resolved_version`), and refactor Hatch to use this output. Address and fix the API breakage caused by recent validator changes. - ---- - -### Action 1.1: Extend the validator to output install-ready Hatch dependencies - -- **Preconditions:** - - Existing validator logic for dependency validation and graph traversal. - - Only Hatch dependencies are relevant for this step. - -- **Details:** - - Add or extend a method in `HatchPackageValidator` (or a related utility) to return a list of Hatch package dependencies for a given package, in install order (topologically sorted, acyclic). - - Each dependency object should include: - - `name` - - `version_constraint` - - `resolved_version` (mandatory, to facilitate downstream installation) - - Use or adapt logic from `version_utils.py` and `package_validator.py` to avoid duplicating dependency parsing or graph traversal. - - Do **not** include external dependency types in this output; those will be handled by their respective managers. 
- -- **Context**: - - Files: - - package_validator.py - - dependency_graph.py - - version_utils.py - - validator.py - - Symbols: - - `HatchPackageValidator` - - `DependencyGraph` - - Any new method like `get_hatch_dependencies_in_install_order` - -- **Postconditions:** - - Validator can output a simple, install-ready list of Hatch package dependencies (with `resolved_version`) for a given package. - -- **Validation:** - - **Development tests:** Integration tests using dummy packages to verify correct dependency order, content, and resolved versions. - - **Verification method:** Compare output to expected install order for known test cases. - ---- - -### Action 1.2: Refactor Hatch to delegate all dependency resolution to the validator and fix API breakage - -- **Preconditions:** - - Validator provides a method for retrieving install-ready Hatch dependencies (with `resolved_version`). - - Hatch currently has broken integration due to missing/changed attributes in the validator. - -- **Details:** - - Refactor Hatch to use the new validator method for all Hatch dependency resolution. - - Remove any direct access to `dependency_resolver` or other internals that no longer exist in `HatchPackageValidator`. - - Update all relevant code paths in Hatch (especially in `environment_manager.py`) to use the new API. - - Ensure that the new integration is robust to future validator changes by relying only on documented, stable APIs. - - Add or update error handling to provide clear messages if the validator cannot resolve dependencies. - -- **Context**: - - Files: - - environment_manager.py - - package_loader.py - - cli_hatch.py - - package_validator.py - - Symbols: - - `HatchEnvironmentManager` - - `HatchPackageValidator` - - Any new/updated method for dependency resolution - -- **Postconditions:** - - Hatch no longer relies on removed or internal attributes of the validator. - - All dependency resolution for Hatch packages is delegated to the validator via a stable, public API. 
- - The integration is robust and future-proof. - -- **Validation:** - - **Development tests:** Reuse or enhance `test_env_manip.py` to cover package installation, environment creation, and dependency resolution. - - **Verification method:** Run Hatch end-to-end and confirm no AttributeError or similar integration failures. - ---- - -### Phase 1 Completion Criteria - -- Validator provides a simple, install-ready list of Hatch dependencies (with `resolved_version`). -- Hatch uses only the validator for Hatch dependency resolution, with no broken or deprecated API usage. -- All integration and regression tests pass for both validator and Hatch. - ---- - -# Phase 2: Installer Interface, Concrete Installers, and Registry - -**Goal:** -Design a robust, extensible installer interface and registry, and implement installers for all supported types, each in its own file. - ---- - -### Action 2.1: Carefully design the `DependencyInstaller` abstract base class - -- **Preconditions:** - - Install-ready dependency objects are defined. - -- **Details:** - - Create `base_installer.py` with `DependencyInstaller` ABC. - - Define the interface: - - `install(dependency, env_context, progress_callback=None)` - - (Optional) `uninstall(dependency, env_context)` - - Document all parameters and expected behaviors. - -- **Context**: - - Files: - - `hatch/installers/base_installer.py` - - Symbols: - - `DependencyInstaller` (ABC) - -- **Postconditions:** - - Interface is stable and well-documented. - -- **Validation:** - - **Development tests:** Static type checks, interface tests. - - **Verification method:** Peer review of interface design. - ---- - -### Action 2.2: Implement and test concrete installers, each in its own file - -- **Preconditions:** - - Interface is defined. 
- -- **Details:** - - Create one file per installer: - - `hatch_installer.py` (uses `HatchPackageLoader` for file ops) - - `python_installer.py` (pip logic) - - `system_installer.py` (system package manager logic) - - `docker_installer.py` (Docker logic) - - Each installer implements the interface and handles its dependency type. - - Use dummy packages from validator tests for realistic scenarios. - -- **Context**: - - Files: - - `hatch/installers/hatch_installer.py` - - `hatch/installers/python_installer.py` - - `hatch/installers/system_installer.py` - - `hatch/installers/docker_installer.py` - - package_loader.py - - Symbols: - - `HatchInstaller` - - `PythonInstaller` - - `SystemInstaller` - - `DockerInstaller` - - `HatchPackageLoader` - -- **Postconditions:** - - Each installer can handle its dependency type. - -- **Validation:** - - **Development tests:** Use dummy packages for install simulation. - - **Verification method:** Check logs/output for correct installer invocation. - ---- - -### Action 2.3: Implement the installer registry in its own file and test with dummy packages - -- **Preconditions:** - - Installers are implemented. - -- **Details:** - - Create `registry.py` for the installer registry. - - Register each installer with the registry. - - Test registry lookup and installation using dummy packages, letting the registry orchestrate the process. - -- **Context**: - - Files: - - `hatch/installers/registry.py` - - Symbols: - - `InstallerRegistry` - -- **Postconditions:** - - Registry correctly delegates to installers. - -- **Validation:** - - **Development tests:** Integration tests using dummy packages. - - **Verification method:** Confirm correct installer is used for each dependency. - ---- - -### Phase 2 Completion Criteria - -- Stable, extensible installer interface in `base_installer.py`. -- All supported types have working installers, each in its own file. -- Registry delegates correctly, implemented in `registry.py`. 
-- All dummy package scenarios pass. - - -# Implementation Plan: Phase 3 โ€“ Orchestration, Environment Refactor, Parallelization, and Progress Reporting - -## Overview -**Objective:** -Modularize and modernize the installation orchestration, refactor environment management, enable safe parallelization, and implement robust progress reporting using the observer pattern. - -**Key constraints:** -- Maintain clear separation of concerns between environment management and installation orchestration. -- Ensure thread/process safety for parallel installs. -- Provide real-time, extensible progress reporting for UI/CLI. - ---- - -## Phase 3.1: Refactor Environment Management and Delegate Installation - -**Goal:** -Move all installation orchestration logic out of `environment_manager.py` into a dedicated orchestrator class. - -### Actions - -1. **Action 3.1.1:** Identify and extract all installation-related logic from `environment_manager.py`. - - **Preconditions:** Installer registry and concrete installers are implemented. - - **Details:** - - Move all code that resolves dependencies, selects installers, and performs installation to a new orchestrator class (e.g., `DependencyInstallerOrchestrator`). - - Keep only environment lifecycle and state management in `environment_manager.py`. - - **Context**: - - Files: - - `hatch/environment_manager.py` - - `hatch/package_loader.py` - - `hatch/installers/` (new directory for installers) - - `hatch/installers/registry.py` (installer registry) - - Symbols: - - `HatchEnvironmentManager` - - `add_package_to_environment` - - `HatchPackageLoader` - - `DependencyInstallerOrchestrator` (to be created) - - **Postconditions:** `environment_manager.py` delegates all installation to the orchestrator. - - **Validation:** - - **Development tests:** Integration tests for environment creation, deletion, and package installation. - - **Verification method:** Code review for separation of concerns. - -2. 
**Action 3.1.2:** Update all environment-related APIs to use the orchestrator for installation. - - **Preconditions:** Orchestrator class is implemented. - - **Details:** - - Refactor methods like `add_package_to_environment` to call the orchestrator. - - Ensure backward compatibility for public APIs. - - **Context**: - - Files: - - `hatch/environment_manager.py` - - `hatch/installers/dependency_installation_orchestrator.py` (or similar) - - Symbols: - - `HatchEnvironmentManager.add_package_to_environment` - - `DependencyInstallerOrchestrator.install_dependencies` - - **Postconditions:** All install flows go through the orchestrator. - - **Validation:** - - **Development tests:** Regression and integration tests for all environment operations. - -### Phase Completion Criteria -- `environment_manager.py` contains only environment lifecycle/state logic. -- All installation is delegated to the orchestrator. -- All tests for environment and install flows pass. - ---- - -## Phase 3.2: Implement Orchestration Logic with Parallelization - -**Goal:** -Enable the orchestrator to install non-overlapping dependency types in parallel, with robust error handling and rollback. - -### Actions - -1. **Action 3.2.1:** Analyze dependency types for safe parallelization. - - **Preconditions:** Installers are implemented and tested. - - **Details:** - - Identify which dependency types (e.g., hatch, python, docker, system) can be installed in parallel without conflicts. - - Document any constraints or exceptions. - - **Context**: - - Files: - - `hatch/installers/base_installer.py` - - `hatch/installers/hatch_installer.py` - - `hatch/installers/python_installer.py` - - `hatch/installers/system_installer.py` - - `hatch/installers/docker_installer.py` - - Symbols: - - `DependencyInstaller` - - `HatchInstaller` - - `PythonInstaller` - - `SystemInstaller` - - `DockerInstaller` - - **Postconditions:** Parallelization plan is documented. 
- - **Validation:** - - **Verification method:** Peer review of parallelization plan. - -2. **Action 3.2.2:** Implement parallel installation in the orchestrator. - - **Preconditions:** Parallelization plan is defined. - - **Details:** - - Use threads, async tasks, or process pools to install independent dependencies in parallel. - - Ensure thread/process safety and proper error propagation. - - Provide a configuration option to enable/disable parallelization. - - **Context**: - - Files: - - `hatch/installers/dependency_installation_orchestrator.py` - - Symbols: - - `DependencyInstallerOrchestrator.install_dependencies` - - **Postconditions:** Orchestrator can install dependencies in parallel where safe. - - **Validation:** - - **Development tests:** Simulate parallel installs with dummy packages. - - **Verification method:** Check for race conditions, correct install order, and error handling. - -3. **Action 3.2.3:** Implement robust error handling and rollback. - - **Preconditions:** Parallel installation logic is in place. - - **Details:** - - Use the Command pattern to encapsulate install/uninstall actions. - - On failure, roll back previously installed dependencies in reverse order. - - **Context**: - - Files: - - `hatch/installers/dependency_installation_orchestrator.py` - - Symbols: - - `DependencyInstallerOrchestrator.rollback` - - **Postconditions:** Partial installs are cleaned up on error. - - **Validation:** - - **Development tests:** Simulate failures and verify rollback. - - **Verification method:** Check environment state after simulated errors. - -### Phase Completion Criteria -- Orchestrator supports safe parallel installation. -- Rollback logic is robust and tested. -- All install scenarios (success, partial failure, rollback) are covered by tests. - ---- - -## Phase 3.3: Implement Observer-Based Progress Reporting - -**Goal:** -Provide real-time, extensible progress reporting using the observer (publish-subscribe) pattern. - -### Actions - -1. 
**Action 3.3.1:** Define progress event and subscriber interfaces. - - **Preconditions:** Orchestrator class is implemented. - - **Details:** - - Create a `ProgressEvent` data class (fields: dependency, status, percent, message, etc.). - - Define a `ProgressSubscriber` interface with an `update(event)` method. - - **Context**: - - Files: - - `hatch/installers/progress_events.py` (or similar) - - Symbols: - - `ProgressEvent` - - `ProgressSubscriber` - - **Postconditions:** Progress event and subscriber interfaces are available. - - **Validation:** - - **Development tests:** Unit tests for event and subscriber classes. - -2. **Action 3.3.2:** Integrate observer pattern into the orchestrator. - - **Preconditions:** Interfaces are defined. - - **Details:** - - Orchestrator maintains a list of subscribers. - - At each install step (start, progress, complete, error), orchestrator publishes a `ProgressEvent` to all subscribers. - - **Context**: - - Files: - - `hatch/installers/dependency_installation_orchestrator.py` - - `hatch/installers/progress_events.py` - - Symbols: - - `DependencyInstallerOrchestrator.subscribe` - - `DependencyInstallerOrchestrator.notify` - - **Postconditions:** Orchestrator notifies subscribers of progress in real time. - - **Validation:** - - **Development tests:** Simulate installs and verify progress events are sent. - - **Verification method:** Mock subscribers receive correct updates. - -3. **Action 3.3.3:** Implement a CLI/GUI subscriber for user feedback. - - **Preconditions:** Observer pattern is integrated. - - **Details:** - - Implement a subscriber that displays progress (percentage, current dependency, status) in the CLI or GUI. - - Ensure the subscriber can be easily replaced or extended for different UIs. - - **Context**: - - Files: - - `hatch/cli/progress_subscriber.py` (or similar) - - Symbols: - - `CLIProgressSubscriber` (example) - - **Postconditions:** Users receive real-time feedback during installation. 
- - **Validation:** - - **Development tests:** Manual and automated tests for progress display. - -### Phase Completion Criteria -- Observer pattern is fully integrated. -- Real-time progress updates are available to UI/CLI. -- Progress reporting is extensible and robust. - ---- - -## Phase 3.4: Final Integration and Regression Testing - -**Goal:** -Ensure all new and refactored components work together seamlessly and maintain backward compatibility. - -### Actions - -1. **Action 3.4.1:** Integrate all components and update documentation. - - **Preconditions:** All previous actions are complete. - - **Details:** - - Ensure all APIs, orchestrator, installers, and progress reporting are integrated. - - Update developer and user documentation to reflect new architecture. - - **Context**: - - Files: - - All files modified or created in previous actions - - `README.md`, developer docs - - Symbols: - - All public APIs and classes - - **Postconditions:** Documentation is up to date. - - **Validation:** - - **Verification method:** Peer review of documentation. - -2. **Action 3.4.2:** Run full regression and integration test suite. - - **Preconditions:** All code is integrated. - - **Details:** - - Run all existing and new tests (unit, integration, regression). - - Address any failures or regressions. - - **Context**: - - Files: - - `tests/` (all relevant test files) - - Symbols: - - All test cases and test runners - - **Postconditions:** All tests pass. - - **Validation:** - - **Development tests:** Full test suite. - -### Phase Completion Criteria -- All components are integrated and documented. -- All tests pass, ensuring stability and backward compatibility. 
\ No newline at end of file diff --git a/cracking-shells-playbook b/cracking-shells-playbook new file mode 160000 index 0000000..edb9a48 --- /dev/null +++ b/cracking-shells-playbook @@ -0,0 +1 @@ +Subproject commit edb9a48473b635a7204220b71af59f5e1f96ab89 diff --git a/docs/articles/devs/architecture/component_architecture.md b/docs/articles/devs/architecture/component_architecture.md index 8570b01..722a1d5 100644 --- a/docs/articles/devs/architecture/component_architecture.md +++ b/docs/articles/devs/architecture/component_architecture.md @@ -287,5 +287,5 @@ This article is about: ## Related Documentation - [System Overview](./system_overview.md) - High-level architecture introduction -- [Implementation Guides](../implementation_guides/) - Technical implementation guidance for specific components -- [Development Processes](../development_processes/) - Development workflow and testing standards +- [Implementation Guides](../implementation_guides/index.md) - Technical implementation guidance for specific components +- [Development Processes](../development_processes/index.md) - Development workflow and testing standards diff --git a/docs/articles/devs/architecture/index.md b/docs/articles/devs/architecture/index.md index b1a9021..516fbbd 100644 --- a/docs/articles/devs/architecture/index.md +++ b/docs/articles/devs/architecture/index.md @@ -22,7 +22,7 @@ Design patterns are covered within the main architecture documents: ## Architecture Diagram -The complete system architecture is documented in the [Architecture Diagram](../../resources/diagrams/architecture.puml), which provides a visual overview of all components and their relationships. +![Component Architecture](../../../resources/images/architecture.svg) ## Key Architectural Principles @@ -36,12 +36,11 @@ The complete system architecture is documented in the [Architecture Diagram](../ If you're new to the Hatch codebase: 1. Start with [System Overview](./system_overview.md) to understand the big picture -2. 
Review the [Architecture Diagram](../../resources/diagrams/architecture.puml) for visual context -3. Explore [Component Architecture](./component_architecture.md) for detailed component understanding -4. Check [Implementation Guides](../implementation_guides/) when ready to work on specific features +2. Explore [Component Architecture](./component_architecture.md) for detailed component understanding +3. Check [Implementation Guides](../implementation_guides/index.md) when ready to work on specific features ## Related Documentation -- [Implementation Guides](../implementation_guides/) - Technical how-to guides for specific components -- [Development Processes](../development_processes/) - Development workflow and standards -- [Contribution Guidelines](../contribution_guides/) - How to contribute to the project +- [Implementation Guides](../implementation_guides/index.md) - Technical how-to guides for specific components +- [Development Processes](../development_processes/index.md) - Development workflow and standards +- [Contribution Guidelines](../contribution_guides/index.md) - How to contribute to the project diff --git a/docs/articles/devs/architecture/mcp_backup_system.md b/docs/articles/devs/architecture/mcp_backup_system.md new file mode 100644 index 0000000..8aaafc6 --- /dev/null +++ b/docs/articles/devs/architecture/mcp_backup_system.md @@ -0,0 +1,241 @@ +# MCP Host Configuration Backup System + +This article is about: +- Core backup system architecture and components +- Atomic file operations with rollback capabilities +- Pydantic data models for validation and type safety +- Host-agnostic design patterns for MCP configuration management + +## Overview + +The MCP (Model Context Protocol) host configuration backup system provides comprehensive backup and restore functionality for MCP host configuration files. The system ensures data integrity through atomic operations and Pydantic validation while maintaining host-agnostic design principles. 
+ +## Architecture Components + +### MCPHostConfigBackupManager + +The central backup management class handles all backup operations: + +```python +from hatch.mcp.backup import MCPHostConfigBackupManager + +backup_manager = MCPHostConfigBackupManager() +result = backup_manager.create_backup(config_path, "claude-desktop") +``` + +**Core responsibilities:** +- Timestamped backup creation with microsecond precision +- Backup restoration by hostname and timestamp +- Backup listing with Pydantic model validation +- Cleanup operations (age-based and count-based) + +### AtomicFileOperations + +Provides safe file operations preventing data corruption: + +```python +from hatch.mcp.backup import AtomicFileOperations + +atomic_ops = AtomicFileOperations(backup_manager) +atomic_ops.atomic_write(target_path, new_content, "vscode", no_backup=False) +``` + +**Key features:** +- Temporary file creation with atomic moves +- Automatic backup creation before modifications +- Rollback capability on operation failure +- Cross-platform file permission handling + +### Pydantic Data Models + +Type-safe data structures with comprehensive validation: + +#### BackupInfo +```python +class BackupInfo(BaseModel): + hostname: str = Field(..., regex=r'^(claude-desktop|claude-code|vscode|cursor|lmstudio|gemini)$') + timestamp: datetime + file_path: Path + file_size: int = Field(..., ge=0) + original_config_path: Path +``` + +#### BackupResult +```python +class BackupResult(BaseModel): + success: bool + backup_path: Optional[Path] = None + error_message: Optional[str] = None + original_size: int = Field(default=0, ge=0) + backup_size: int = Field(default=0, ge=0) +``` + +### BackupAwareOperation + +Base class enforcing explicit backup acknowledgment: + +```python +class BackupAwareOperation: + def prepare_backup(self, config_path: Path, hostname: str, no_backup: bool = False) -> Optional[BackupResult] + def rollback_on_failure(self, backup_result: Optional[BackupResult], config_path: Path, 
hostname: str) -> bool +``` + +## Design Patterns + +### Host-Agnostic Architecture + +The system operates independently of specific host configuration structures: + +- **JSON Format Independence**: Works with any valid JSON configuration +- **Path Agnostic**: No assumptions about configuration file locations +- **Content Neutral**: Backup operations preserve exact file content + +### Explicit API Design + +Forces consumers to acknowledge backup creation: + +```python +# Explicit backup preparation +backup_result = operation.prepare_backup(config_path, "cursor", no_backup=False) + +# Operation with rollback capability +try: + perform_configuration_update() +except Exception: + operation.rollback_on_failure(backup_result, config_path, "cursor") + raise +``` + +### Atomic Operations Pattern + +Ensures data consistency through atomic file operations: + +1. **Temporary File Creation**: Write to temporary file first +2. **Validation**: Verify content integrity +3. **Atomic Move**: Replace original file atomically +4. 
**Cleanup**: Remove temporary files on success/failure
+
+## File Organization
+
+### Backup Directory Structure
+```
+~/.hatch/mcp_host_config_backups/
+├── claude-desktop/
+│   ├── mcp.json.claude-desktop.20250921_100000_123456
+│   └── mcp.json.claude-desktop.20250921_110000_234567
+├── vscode/
+│   └── mcp.json.vscode.20250921_100000_345678
+└── cursor/
+    └── mcp.json.cursor.20250921_100000_456789
+```
+
+### Naming Convention
+- **Format**: `mcp.json.<hostname>.<timestamp>`
+- **Timestamp**: `YYYYMMDD_HHMMSS_ffffff` (microsecond precision)
+- **Hostname**: Exact host identifier from supported types
+
+## Supported Host Types
+
+The system supports all MCP host platforms:
+
+| Host Type | Description |
+|-----------|-------------|
+| `claude-desktop` | Claude Desktop application |
+| `claude-code` | Claude for VS Code extension |
+| `vscode` | VS Code MCP extension |
+| `cursor` | Cursor IDE MCP integration |
+| `lmstudio` | LM Studio MCP support |
+| `gemini` | Google Gemini MCP integration |
+
+## Performance Characteristics
+
+### Operation Benchmarks
+- **Backup Creation**: <2ms for typical 5KB JSON files
+- **Restore Operation**: <3ms including verification
+- **List Backups**: <1ms for typical backup counts (<100)
+- **Pydantic Validation**: <0.5ms for typical models
+
+### Storage Requirements
+- **Per Backup**: 5-10KB (typical MCP configuration)
+- **Annual Storage**: <36MB per host (negligible)
+
+## Security Model
+
+### File Permissions
+- **Backup Directory**: 700 (owner read/write/execute only)
+- **Backup Files**: 600 (owner read/write only)
+
+### Access Control
+- Backup creation requires write access to backup directory
+- Backup restoration requires write access to target configuration
+- No network access or external dependencies
+
+## Integration Points
+
+### Environment Manager Integration
+The backup system integrates with Hatch's environment management:
+
+```python
+# Future integration pattern
+from hatch.env
import EnvironmentManager + +env_manager = EnvironmentManager() +backup_manager = MCPHostConfigBackupManager() + +# Backup before environment changes +backup_result = backup_manager.create_backup(env_manager.get_mcp_config_path(), "vscode") +``` + +### CLI Integration +Designed for future CLI command integration: + +```bash +# Future CLI commands +hatch mcp backup create --host vscode +hatch mcp backup restore --host vscode --timestamp 20250921_100000_123456 +hatch mcp backup list --host cursor +hatch mcp backup clean --host claude-desktop --older-than-days 30 +``` + +## Testing Architecture + +### Test Categories +- **Unit Tests**: Component isolation and validation +- **Integration Tests**: End-to-end workflow testing +- **Performance Tests**: Large file and concurrent operations + +### Test Results +- **Total Tests**: 31 +- **Success Rate**: 100% +- **Coverage**: 95% unit test coverage, 100% integration coverage + +### Host-Agnostic Testing +All tests use generic JSON configurations without host-specific dependencies, ensuring the backup system works with any valid MCP configuration format. + +## Future Extensions + +The current implementation provides core backup functionality. Future phases will add: + +1. **Host-Specific Configuration Detection**: Automatic discovery of host configuration paths +2. **Environment Manager Integration**: Deep integration with Hatch's environment management +3. **CLI Command Integration**: Full command-line interface for backup operations +4. **Backup Compression**: Optional compression for large configuration files +5. 
**Remote Backup Storage**: Cloud storage integration for backup redundancy + +## Implementation Notes + +### Error Handling Strategy +- **Comprehensive Validation**: Pydantic models ensure data integrity +- **Graceful Degradation**: Operations continue when possible +- **Detailed Error Messages**: Clear feedback for troubleshooting +- **Automatic Cleanup**: Temporary files removed on failure + +### Cross-Platform Compatibility +- **Path Handling**: Uses `pathlib.Path` for cross-platform compatibility +- **File Operations**: Platform-specific permission handling +- **Timestamp Format**: ISO 8601 compatible timestamps + +### Memory Efficiency +- **Streaming Operations**: Large files processed in chunks +- **Lazy Loading**: Backup lists generated on-demand +- **Resource Cleanup**: Automatic cleanup of temporary resources diff --git a/docs/articles/devs/architecture/mcp_host_configuration.md b/docs/articles/devs/architecture/mcp_host_configuration.md new file mode 100644 index 0000000..5c4c782 --- /dev/null +++ b/docs/articles/devs/architecture/mcp_host_configuration.md @@ -0,0 +1,251 @@ +# MCP Host Configuration Architecture + +This article is about: + +- Architecture and design patterns for MCP host configuration management +- Decorator-based strategy registration system +- Extension points for adding new host platforms +- Integration with backup and environment systems + +## Overview + +The MCP host configuration system provides centralized management of Model Context Protocol server configurations across multiple host platforms (Claude Desktop, VS Code, Cursor, etc.). It uses a decorator-based architecture with inheritance patterns for clean code organization and easy extension. 
+ +## Core Architecture + +### Strategy Pattern with Decorator Registration + +The system uses the Strategy pattern combined with automatic registration via decorators: + +```python +@register_host_strategy(MCPHostType.CLAUDE_DESKTOP) +class ClaudeDesktopHostStrategy(ClaudeHostStrategy): + def get_config_path(self) -> Optional[Path]: + return Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json" +``` + +**Benefits:** +- Automatic strategy discovery on module import +- No manual registry maintenance +- Clear separation of host-specific logic +- Easy addition of new host platforms + +### Inheritance Hierarchy + +Host strategies are organized into families for code reuse: + +#### Claude Family +- **Base**: `ClaudeHostStrategy` +- **Shared behavior**: Absolute path validation, Anthropic-specific configuration handling +- **Implementations**: Claude Desktop, Claude Code + +#### Cursor Family +- **Base**: `CursorBasedHostStrategy` +- **Shared behavior**: Flexible path handling, common configuration format +- **Implementations**: Cursor, LM Studio + +#### Independent Strategies +- **VSCode**: Nested configuration structure (`mcp.servers`) +- **Gemini**: Official configuration path (`~/.gemini/settings.json`) + +### Consolidated Data Model + +The `MCPServerConfig` model supports both local and remote server configurations: + +```python +class MCPServerConfig(BaseModel): + # Local server (command-based) + command: Optional[str] = None + args: Optional[List[str]] = None + env: Optional[Dict[str, str]] = None + + # Remote server (URL-based) + url: Optional[str] = None + headers: Optional[Dict[str, str]] = None +``` + +**Cross-field validation** ensures either command OR url is provided, not both. 
+ +## Key Components + +### MCPHostRegistry + +Central registry managing strategy instances: + +- **Singleton pattern**: One instance per strategy type +- **Automatic registration**: Triggered by decorator usage +- **Family organization**: Groups related strategies +- **Host detection**: Identifies available platforms + +### MCPHostConfigurationManager + +Core configuration operations: + +- **Server configuration**: Add/remove servers from host configurations +- **Environment synchronization**: Sync environment data to multiple hosts +- **Backup integration**: Atomic operations with rollback capability +- **Error handling**: Comprehensive result reporting + +### Host Strategy Interface + +All strategies implement the `MCPHostStrategy` abstract base class: + +```python +class MCPHostStrategy(ABC): + @abstractmethod + def get_config_path(self) -> Optional[Path]: + """Get configuration file path for this host.""" + + @abstractmethod + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """Validate server configuration for this host.""" + + @abstractmethod + def read_configuration(self) -> HostConfiguration: + """Read current host configuration.""" + + @abstractmethod + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write configuration to host.""" +``` + +## Integration Points + +### Backup System Integration + +All configuration operations integrate with the backup system: + +- **Atomic operations**: Configuration changes are backed up before modification +- **Rollback capability**: Failed operations can be reverted +- **Multi-host support**: Separate backups per host platform +- **Timestamped retention**: Backup files include timestamps for tracking + +### Environment Manager Integration + +The system integrates with environment management through corrected data structures: + +- **Single-server-per-package constraint**: Realistic model reflecting actual usage +- **Multi-host configuration**: One 
server can be configured across multiple hosts +- **Synchronization support**: Environment data can be synced to available hosts + +## Extension Points + +### Adding New Host Platforms + +To add support for a new host platform: + +1. **Define host type** in `MCPHostType` enum +2. **Create strategy class** inheriting from appropriate family base or `MCPHostStrategy` +3. **Implement required methods** for configuration path, validation, read/write operations +4. **Add decorator registration** with `@register_host_strategy(MCPHostType.NEW_HOST)` +5. **Add tests** following existing test patterns + +Example: + +```python +@register_host_strategy(MCPHostType.NEW_HOST) +class NewHostStrategy(MCPHostStrategy): + def get_config_path(self) -> Optional[Path]: + return Path.home() / ".new_host" / "config.json" + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + # Host-specific validation logic + return True +``` + +### Extending Validation Rules + +Host strategies can implement custom validation: + +- **Path requirements**: Some hosts require absolute paths +- **Configuration format**: Validate against host-specific schemas +- **Feature support**: Check if host supports specific server features + +### Custom Configuration Formats + +Each strategy handles its own configuration format: + +- **JSON structure**: Most hosts use JSON configuration files +- **Nested keys**: Some hosts use nested configuration structures +- **Key naming**: Different hosts may use different key names for the same concept + +## Design Patterns + +### Decorator Registration Pattern + +Follows established Hatchling patterns for automatic component discovery: + +```python +# Registry class with decorator method +class MCPHostRegistry: + @classmethod + def register(cls, host_type: MCPHostType): + def decorator(strategy_class): + cls._strategies[host_type] = strategy_class + return strategy_class + return decorator + +# Convenience function +def register_host_strategy(host_type: 
MCPHostType): + return MCPHostRegistry.register(host_type) +``` + +### Family-Based Inheritance + +Reduces code duplication through shared base classes: + +- **Common validation logic** in family base classes +- **Shared configuration handling** for similar platforms +- **Consistent behavior** across related host types + +### Atomic Operations Pattern + +All configuration changes use atomic operations: + +1. **Create backup** of current configuration +2. **Perform modification** to configuration file +3. **Verify success** and update state +4. **Clean up** or rollback on failure + +## Testing Strategy + +The system includes comprehensive testing: + +- **Model validation tests**: Pydantic model behavior and validation rules +- **Decorator registration tests**: Automatic registration and inheritance patterns +- **Configuration manager tests**: Core operations and error handling +- **Environment integration tests**: Data structure compatibility +- **Backup integration tests**: Atomic operations and rollback behavior + +## Implementation Notes + +### Module Organization + +``` +hatch/mcp_host_config/ +โ”œโ”€โ”€ __init__.py # Public API and registration triggering +โ”œโ”€โ”€ models.py # Pydantic models and data structures +โ”œโ”€โ”€ host_management.py # Registry and configuration manager +โ””โ”€โ”€ strategies.py # Host strategy implementations +``` + +### Import Behavior + +The `__init__.py` module imports `strategies` to trigger decorator registration: + +```python +# This import triggers @register_host_strategy decorators +from . import strategies +``` + +This ensures all strategies are automatically registered when the package is imported. 
+ +### Error Handling Philosophy + +The system uses result objects rather than exceptions for configuration operations: + +- **ConfigurationResult**: Contains success status, error messages, and operation details +- **Graceful degradation**: Operations continue when possible, reporting partial failures +- **Detailed error reporting**: Error messages include context and suggested solutions + +This approach provides better control flow for CLI operations and enables comprehensive error reporting to users. diff --git a/docs/articles/devs/architecture/system_overview.md b/docs/articles/devs/architecture/system_overview.md index 1d37ea0..a17bfda 100644 --- a/docs/articles/devs/architecture/system_overview.md +++ b/docs/articles/devs/architecture/system_overview.md @@ -142,10 +142,4 @@ Understanding this system overview provides the foundation for working with Hatc 1. **Start Here** - This overview gives you the big picture 2. **Dive Deeper** - See [Component Architecture](./component_architecture.md) for detailed component information 3. **Get Hands-On** - Follow [Developer Onboarding](../development_processes/developer_onboarding.md) for practical next steps -4. **Implementation** - Check [Implementation Guides](../implementation_guides/) when ready to implement features - -## Related Documentation - -- [Component Architecture](./component_architecture.md) - Detailed component breakdown -- [Implementation Guides](../implementation_guides/) - Technical implementation guidance -- [Development Processes](../development_processes/) - Development workflow and standards +4. 
**Implementation** - Check [Implementation Guides](../implementation_guides/index.md) when ready to implement features diff --git a/docs/articles/devs/contribution_guides/how_to_contribute.md b/docs/articles/devs/contribution_guides/how_to_contribute.md index 0f92de5..4ca38a9 100644 --- a/docs/articles/devs/contribution_guides/how_to_contribute.md +++ b/docs/articles/devs/contribution_guides/how_to_contribute.md @@ -1,229 +1,238 @@ -# How to Contribute +# Contributing to Hatch -This article is about: +Thank you for your interest in contributing to Hatch! This guide will help you get started with our development workflow and contribution standards. -- General contribution workflow and process for the Hatch project -- Branch naming conventions and submission requirements -- Community standards and expectations for contributors +## Commit Message Format -## Overview +We use [Conventional Commits](https://www.conventionalcommits.org/) for automated versioning and changelog generation. -We welcome contributions to the Hatch project! This guide outlines the process for contributing code, documentation, and other improvements to help ensure smooth collaboration and high-quality contributions. +### Format -## Before You Start - -### Prerequisites +``` +[optional scope]: -1. **Understand the System** - Review [Architecture Documentation](../architecture/) to understand Hatch's design -2. **Set Up Development Environment** - Follow [Development Environment Setup](../development_processes/development_environment_setup.md) -3. 
**Review Standards** - Familiarize yourself with [Testing Requirements](./testing_and_ci.md) and [Release Policies](./release_and_dependency_policy.md) +[optional body] -### Planning Your Contribution +[optional footer(s)] +``` -- **Check Existing Issues** - Search [GitHub Issues](https://github.com/CrackingShells/Hatch/issues) for related work -- **Discuss Major Changes** - Open an issue to discuss significant changes before implementing -- **Review Implementation Guides** - Check [Implementation Guides](../implementation_guides/) for technical guidance +### Types -## Contribution Workflow +- **feat**: New features (triggers minor version bump) +- **fix**: Bug fixes (triggers patch version bump) +- **docs**: Documentation changes +- **refactor**: Code refactoring without functional changes +- **test**: Adding or updating tests +- **chore**: Maintenance tasks, dependency updates +- **ci**: Changes to CI/CD configuration +- **perf**: Performance improvements +- **style**: Code style changes (formatting, etc.) -### 1. Fork and Clone +### Examples ```bash -# Fork the repository on GitHub -git fork https://github.com/CrackingShells/Hatch.git - -# Clone your fork locally -git clone https://github.com/YOUR_USERNAME/Hatch.git -cd Hatch - -# Add upstream remote -git remote add upstream https://github.com/CrackingShells/Hatch.git +# Good commit messages +feat: add support for new package registry +fix: resolve dependency resolution timeout +docs: update package manager documentation +refactor: simplify package installation logic +test: add integration tests for package management +chore: update dependencies to latest versions + +# Breaking changes (use sparingly until v1.0.0) +feat!: change package configuration format +fix!: remove deprecated package manager methods + +# With scope +feat(registry): add new package source support +fix(installer): resolve package conflict resolution +docs(api): update package manager API documentation ``` -### 2. 
Create Feature Branch +### Using Commitizen -Use descriptive branch names with appropriate prefixes: +For guided commit messages, use commitizen: ```bash -# Feature additions -git checkout -b feat/add-new-installer-type - -# Bug fixes -git checkout -b fix/environment-creation-error +# Install dependencies first +npm install -# Documentation updates -git checkout -b docs/update-architecture-guide +# Use commitizen for guided commits +npm run commit +# or +npx cz ``` -**Branch Naming Conventions:** +This will prompt you through creating a properly formatted commit message. -- `feat/` - New features or enhancements -- `fix/` - Bug fixes -- `docs/` - Documentation changes -- `cicd/` - CI/CD pipeline changes +## Development Workflow + +### 1. Fork and Clone -### 3. Implement Your Changes +```bash +git clone https://github.com/YOUR_USERNAME/Hatch.git +cd Hatch +``` -#### Code Changes +### 2. Set Up Development Environment -- Follow the organization's coding standards -- Write clear, self-documenting code with appropriate comments -- Include docstrings following the organization's docstring standards -- Implement comprehensive error handling and logging +```bash +# Install Python dependencies +pip install -e . -#### Testing Requirements +# Install Node.js dependencies for semantic-release +npm install +``` -- Write tests for all new functionality -- Follow the three-tier testing approach: Development, Regression, Feature -- Ensure tests pass locally before submitting -- Maintain or improve test coverage +### 3. Create Feature Branch -#### Documentation Updates +```bash +git checkout -b feat/your-feature-name +# or +git checkout -b fix/your-bug-fix +``` -- Update relevant documentation for new features -- Follow the organization's documentation guidelines -- Reference API docstrings rather than duplicating implementation details -- Maintain clear cross-references between related topics +### 4. Make Changes -### 4. 
Commit Your Changes +- Write code following existing patterns +- Add tests for new functionality +- Update documentation as needed +- Follow PEP 8 style guidelines +- Ensure package manager functionality works correctly -Write clear, descriptive commit messages: +### 5. Test Your Changes ```bash -# Good commit messages -git commit -m "[Update] Add support for custom installer types" -git commit -m "[Fix] Resolve environment creation race condition" -git commit -m "[Docs - Minor] Typos in installation orchestration guide" - -# Include more detail in commit body for complex changes -git commit -m "[Feat] Implement parallel dependency installation +# Run basic import test +python -c "import hatch; print('Hatch package imports successfully')" -- Add ThreadPoolExecutor for concurrent installations -- Implement dependency grouping for parallelization -- Add timeout handling for long-running installations -- Update tests to cover parallel execution scenarios" +# Test package manager functionality when available ``` -### 5. Keep Your Branch Updated +### 6. Commit Changes ```bash -# Fetch latest changes from upstream -git fetch upstream +# Use commitizen for guided commits +npm run commit -# Rebase your branch on latest main -git rebase upstream/main - -# Resolve any conflicts and continue -git rebase --continue +# Or commit manually with conventional format +git commit -m "feat: add your feature description" ``` -### 6. Submit Pull Request +### 7. Push and Create Pull Request -#### Pull Request Guidelines +```bash +git push origin feat/your-feature-name +``` -- **Clear Title** - Summarize the change in the title -- **Detailed Description** - Explain what changes were made and why -- **Link Related Issues** - Reference any related GitHub issues -- **Testing Information** - Describe how the changes were tested -- **Breaking Changes** - Clearly document any breaking changes +Then create a pull request on GitHub. 
-#### Pull Request Template +## Pull Request Guidelines -```markdown -## Description -Brief description of the changes made. +### Title Format -## Type of Change -- [ ] Bug fix (non-breaking change which fixes an issue) -- [ ] New feature (non-breaking change which adds functionality) -- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) -- [ ] Documentation update +Use conventional commit format for PR titles: +- `feat: add new package management functionality` +- `fix: resolve package installation issue` +- `docs: update installation guide` -## Testing -- [ ] Tests pass locally -- [ ] New tests added for new functionality -- [ ] Manual testing performed +### Description -## Related Issues -Fixes #(issue number) +Include in your PR description: +- **What**: Brief description of changes +- **Why**: Reason for the changes +- **How**: Implementation approach (if complex) +- **Testing**: How you tested the changes +- **Package Manager Impact**: Any effects on package management functionality +- **Breaking Changes**: Any breaking changes (if applicable) -## Additional Notes -Any additional information or context about the changes. 
-``` +### Checklist -## Code Review Process +- [ ] Code follows existing style and patterns +- [ ] Tests added for new functionality +- [ ] Documentation updated (if needed) +- [ ] Commit messages follow conventional format +- [ ] All tests pass +- [ ] Package manager functionality preserved +- [ ] No breaking changes (unless intentional and documented) -### What to Expect +## Code Style -- **Initial Review** - Maintainers will review your pull request within a few days -- **Feedback** - You may receive requests for changes or improvements -- **Iteration** - Work with reviewers to address feedback and refine your contribution -- **Approval** - Once approved, your changes will be merged +### Python -### Responding to Feedback +- Follow PEP 8 style guidelines +- Use type hints where appropriate +- Write docstrings for public functions and classes +- Keep functions focused and small +- Use meaningful variable and function names -- **Be Responsive** - Address feedback promptly and professionally -- **Ask Questions** - If feedback is unclear, ask for clarification -- **Make Requested Changes** - Implement suggested improvements -- **Update Tests** - Ensure tests still pass after making changes +### Package Manager Considerations -## Community Standards +- Maintain compatibility with existing package formats +- Follow package management best practices +- Ensure proper error handling for package operations +- Document any package manager-specific functionality -### Communication +### Documentation -- **Be Respectful** - Treat all community members with respect and professionalism -- **Be Constructive** - Provide helpful, actionable feedback -- **Be Patient** - Understand that reviews take time and maintainers are volunteers +- Update relevant documentation for changes +- Use clear, concise language +- Include code examples where helpful +- Keep README.md up to date -### Quality Standards +## Testing -- **Follow Conventions** - Adhere to established coding and 
documentation standards -- **Test Thoroughly** - Ensure your changes work correctly and don't break existing functionality -- **Document Changes** - Provide clear documentation for new features and changes +### Running Tests -### Contribution Types +```bash +# Basic import test +python -c "import hatch; print('Hatch package imports successfully')" -#### Code Contributions +# Add comprehensive test commands when test suite is available +``` -- New features and enhancements -- Bug fixes and improvements -- Performance optimizations -- Refactoring and code cleanup +### Writing Tests -#### Documentation Contributions +- Add tests for new features +- Test edge cases and error conditions +- Test package management operations +- Use descriptive test names +- Follow existing test patterns -- API documentation improvements -- Tutorial and guide updates -- Example code and usage patterns -- Translation and localization +## Release Process -#### Testing Contributions +Extensive details can be found in the [release policy](./release_policy.md) article. -- New test cases and scenarios -- Test infrastructure improvements -- Performance and load testing -- Integration test enhancements +Releases are fully automated using semantic-release: -## Getting Help +1. **Commits are analyzed** for conventional commit format +2. **Version is calculated** based on commit types +3. **Changelog is generated** from commit messages +4. **Version files are updated** (pyproject.toml, CHANGELOG.md) +5. **Changes are committed** back to repository using GitHub App +6. 
**GitHub release is created** with release notes and tags
 
-### Resources
 
+### Version Impact
 
-- **[Architecture Documentation](../architecture/)** - Understanding the system design
-- **[Implementation Guides](../implementation_guides/)** - Technical implementation guidance
-- **[Development Processes](../development_processes/)** - Development workflow and standards
+The semantic versioning is currently configured to keep the major number at **0**.
 
-### Support Channels
 
+- `feat:` commits → Patch version (0.6.1 → 0.6.2)
+- `fix:` commits → Patch version (0.6.1 → 0.6.2)
+- `feat!:` or `BREAKING CHANGE:` → Minor version (0.6.1 → 0.7.0)
+- Other types → No release
 
-- **GitHub Issues** - For bug reports and feature requests
-- **GitHub Discussions** - For questions and general discussion
-- **Pull Request Comments** - For specific feedback on contributions
+## Getting Help
 
-## Recognition
 
+- **Issues**: Report bugs or request features via GitHub Issues
+- **Discussions**: Ask questions in GitHub Discussions
+- **Documentation**: Check existing documentation for guidance
+- **Code**: Look at existing code for patterns and examples
 
-Contributors who make significant contributions to the Hatch project will be recognized in:
+## Code of Conduct
 
-- Project documentation and release notes
-- Contributor acknowledgments
-- Community highlights and announcements
+- Be respectful and inclusive
+- Focus on constructive feedback
+- Help others learn and grow
+- Follow GitHub's community guidelines
 
-Thank you for contributing to the Hatch project! Your contributions help make package management better for the entire CrackingShells ecosystem.
+Thank you for contributing to Hatch! 
๐Ÿš€ diff --git a/docs/articles/devs/contribution_guides/index.md b/docs/articles/devs/contribution_guides/index.md index 9da70d6..9e4a704 100644 --- a/docs/articles/devs/contribution_guides/index.md +++ b/docs/articles/devs/contribution_guides/index.md @@ -4,21 +4,21 @@ This section provides process-focused guidance for contributing to the Hatch pro ## Overview -These guidelines focus on the contribution process, community standards, and project policies rather than technical implementation details. For technical how-to guides, see [Implementation Guides](../implementation_guides/). +These guidelines focus on the contribution process, community standards, and project policies rather than technical implementation details. For technical how-to guides, see [Implementation Guides](../implementation_guides/index.md). ## Available Guidelines ### Core Contribution Process - **[How to Contribute](./how_to_contribute.md)** - General contribution workflow, branch naming, and submission process -- **[Release and Dependency Policy](./release_and_dependency_policy.md)** - Release management policies and dependency guidelines +- **[Release and Dependency Policy](./release_policy.md)** - Release management policies and dependency guidelines ## Before Contributing -1. **Review the Architecture** - Understand the system by reading [Architecture Documentation](../architecture/) -2. **Set Up Development Environment** - Follow [Development Environment Setup](../development_processes/development_environment_setup.md) +1. **Review the Architecture** - Understand the system by reading [Architecture Documentation](../architecture/index.md) +2. **Set Up Development Environment** - Follow [Development Environment Setup](../development_processes/developer_onboarding.md) 3. **Understand Testing Requirements** - Review [Testing Standards](../development_processes/testing_standards.md) -4. 
**Check Implementation Guides** - For technical guidance, see [Implementation Guides](../implementation_guides/) +4. **Check Implementation Guides** - For technical guidance, see [Implementation Guides](../implementation_guides/index.md) ## Contribution Workflow @@ -37,6 +37,4 @@ These guidelines focus on the contribution process, community standards, and pro ## Related Documentation -- [Development Processes](../development_processes/) - Development workflow and standards -- [Implementation Guides](../implementation_guides/) - Technical implementation guidance -- [Architecture](../architecture/) - System understanding for contributors +- [Development Processes](../development_processes/index.md) - Development workflow and standards diff --git a/docs/articles/devs/contribution_guides/release_policy.md b/docs/articles/devs/contribution_guides/release_policy.md index 2aa0d8f..e9ac621 100644 --- a/docs/articles/devs/contribution_guides/release_policy.md +++ b/docs/articles/devs/contribution_guides/release_policy.md @@ -4,155 +4,217 @@ This document records the project's release and dependency practices and, import This article is about: -- The repository's automated versioning helpers and where they live (`versioning/`) -- How version information is stored (`VERSION.meta`, `VERSION`) and when to update it +- How semantic-release automates version management and releases +- How version information is stored and managed in `pyproject.toml` - The GitHub Actions that run the automation and create tags/releases - Practical contributor guidance for interacting with the automation ## Overview -This file documents the real, implemented behavior for release/version automation, tag/release creation, and tag cleanup โ€” not policies that are hypothetical. See the "Automated versioning" section for exact scripts, branch rules, and local commands. - ## Release Management ### Versioning Strategy -Hatch follows semantic versioning (SemVer) for public releases. 
The project additionally uses a structured, automated versioning helper that maintains both a human-readable, componentized file and a setuptools-compatible simple file. Key points: - -- **MAJOR** version: Incompatible API changes -- **MINOR** version: Backwards-compatible functionality additions -- **PATCH** version: Backwards-compatible bug fixes +Hatch uses semantic-release with conventional commits for automated version management: +- **`feat:`, `docs:`, `refactor:`, `style:` commits**: Patch version increments +- **`BREAKING CHANGE:` or breaking conventional commits**: Minor version increments +- **Development on `dev` branch**: Creates pre-release versions -Automation rules (implemented in the repository) determine how prerelease/dev/build components are generated based on branch naming and actions (see "Automated versioning" below). +The actual release rules are configured in `.releaserc.json` and follow semantic-release conventions. ### Version Files -Each project maintains version information in two companion files (the repository uses a dual-file system used by the versioning helpers and CI): +The project maintains version information in the primary Python package configuration file: -- `VERSION.meta` - Structured, human-readable key=value format that stores component fields (MAJOR, MINOR, PATCH, DEV_NUMBER, BUILD_NUMBER, BRANCH). Used as the canonical source for automated updates and CI. -- `VERSION` - Simple, setuptools-compatible version string derived from `VERSION.meta` (for building and packaging). This file is regenerated from `VERSION.meta` before builds. 
-- `pyproject.toml` - Package configuration with version specification +- `pyproject.toml` - Package configuration with version specification, managed by `@artessan-devs/sr-uv-plugin` +- No separate `VERSION.meta` or `VERSION` files are used +- Version is automatically updated by semantic-release based on conventional commits -Example from `Hatch/pyproject.toml`: +Example from `pyproject.toml`: ```toml [project] -name = "hatch" -version = "0.4.2" -description = "Package manager for Model Context Protocol servers" +name = "hatch-xclam" +version = "MAJOR.MINOR.PATCH[-dev.N]" +description = "Package manager for the Cracking Shells ecosystem" dependencies = [ - "hatch-validator>=0.1.0", - "requests>=2.28.0", + "jsonschema>=4.0.0", + "requests>=2.25.0", + "packaging>=20.0", + "docker>=7.1.0", + "pydantic>=2.0.0", + "hatch-validator>=0.8.0" ] ``` ### Release Process -The release process is mostly automated via repository scripts and GitHub Actions. High-level steps: - -1. Version management and bumping are driven by branch names and CI (see "Automated versioning"). -2. CI runs tests and prepares a build artifact using the version resolved by the automation. -3. If CI succeeds, a job commits updated `VERSION`/`VERSION.meta` and creates a git tag with the resolved version string. -4. Pushed tags trigger the release workflow which creates a GitHub Release (pre-release for dev versions). -5. Optionally, scheduled/manual tag cleanup removes old dev/build tags. - -You generally should not edit `VERSION` or `VERSION.meta` by hand unless you have a specific reason โ€” use the provided helper scripts or let CI manage version updates. - -See "How the automation works" for the exact flow and commands to run locally. - -## Automated versioning (scripts + workflows) - -The repository provides a small set of scripts and GitHub Actions that implement the automated bumping, tagging, and release flow. 
The important files are: - -- `versioning/version_manager.py` โ€” core helper that reads/writes `VERSION.meta`, computes semantic version strings, and exposes commands: - - `--get` prints the current version string - - `--increment {major,minor,patch,dev,build}` increments a component and updates both files - - `--update-for-branch BRANCH` updates version fields according to the branch name and writes both `VERSION.meta` and `VERSION` - - `--simple` / helpers to write the simple `VERSION` file from the structured meta - -- `versioning/prepare_version.py` โ€” small helper run before build that converts `VERSION.meta` into the simple `VERSION` file for setuptools compatibility (preserves `VERSION.meta`). - -- `versioning/tag_cleanup.py` โ€” CI/manual helper to find and delete old `+build` and `.dev` tags according to configured age thresholds (dry-run mode by default). - -Workflows involved: - -- `.github/workflows/test_build.yml` โ€” callable workflow used to: - - Run tests/builds - - Execute `python versioning/version_manager.py --update-for-branch ` to compute and write the new version (branch is passed from the calling workflow) - - Emit the computed version as a workflow output - - Run `python versioning/prepare_version.py` and build the package - - Upload `VERSION` and `VERSION.meta` as artifacts for downstream jobs - -- `.github/workflows/commit_version_tag.yml` โ€” triggered on pushes to branches like `dev`, `main`, `feat/*`, `fix/*`. 
It: - - Calls/depends on the `test_build` workflow - - Downloads the `VERSION` files artifact - - Commits any changes to `VERSION`/`VERSION.meta` made by CI - - Creates and pushes a lightweight git tag named after the computed version (for example `v1.2.3` or `v1.2.3.dev4+build5`) - -- `.github/workflows/tag-release.yml` โ€” fires on pushed tags matching the project's tag patterns and: - - Creates a GitHub Release for the tag - - Marks tags containing `.dev` as pre-releases - -- `.github/workflows/tag-cleanup.yml` โ€” manual / (future: scheduled) workflow that runs `versioning/tag_cleanup.py` to remove old dev/build tags. - -Tagging conventions used by the automation: - -- Tags are created from the computed version string returned by `version_manager` and pushed by `commit_version_tag.yml`. -- Examples: `v1.2.3`, `v1.2.3.dev0`, `v1.2.3.dev0+build1`. -- Tags that include `.dev` are treated as pre-releases in the release workflow. - -## Branch-driven bump rules (summary) - -The `version_manager` logic implements these broad rules (read `versioning/version_manager.py` for full details): - -- `main` โ€” clean release: no dev/build metadata; `DEV_NUMBER` and `BUILD_NUMBER` cleared. -- `dev` โ€” prerelease/dev versions (increments dev number). -- `feat/*` (new feature branch) โ€” creates/advances a minor/dev sequence; new feature branches may reset dev/build counters and start from e.g. `0`. -- `fix/*` โ€” patch-level changes; build numbers are used to distinguish iterative work on the same fix branch. -- Other branches โ€” treated as dev/prerelease in most cases. - -The manager writes `VERSION.meta` with component fields and `VERSION` with the setuptools-compatible string (derived from `VERSION.meta`). 
- -## How to run and test locally - -Quick commands you can run from the repository root (PowerShell examples): - -```powershell -# Print current computed version -python versioning/version_manager.py --get - -# Update version for a given branch (this writes both files) -python versioning/version_manager.py --update-for-branch dev - -# Increment a patch locally (writes both files) -python versioning/version_manager.py --increment patch --branch dev - -# Prepare simple VERSION file for a build (convert from VERSION.meta) -python versioning/prepare_version.py +The release process is fully automated using semantic-release: + +1. **Commits are analyzed** for conventional commit format +2. **Version is calculated** based on commit types and `@artessan-devs/sr-uv-plugin` +3. **`pyproject.toml` version is updated** automatically by the plugin +4. **Changelog is generated** from commit messages +5. **Changes are committed** back to repository using GitHub App +6. **Git tag is created** with the version number +7. **GitHub release is created** with release notes + +### Version File Management +- **`pyproject.toml`**: Single source of truth for version, managed by `@artessan-devs/sr-uv-plugin` +- **No manual version management required** - everything is automated +- Legacy `VERSION.meta` and `VERSION` files are no longer used + +## Release Process + +The release process is fully automated using semantic-release: + +1. **Commits are analyzed** for conventional commit format +2. **Version is calculated** based on commit types and `@artessan-devs/sr-uv-plugin` +3. **`pyproject.toml` version is updated** automatically by the plugin +4. **Changelog is generated** from commit messages +5. **Changes are committed** back to repository using GitHub App +6. **Git tag is created** with the version number +7. **GitHub release is created** with release notes +8. 
**Package is published** to PyPI (when workflow is triggered on a release)
+
+### Version File Management
+- **`pyproject.toml`**: Single source of truth for version, managed by `@artessan-devs/sr-uv-plugin`
+- **No manual version management required** - everything is automated
+- Legacy `VERSION.meta` and `VERSION` files are no longer used
+
+### Current Configuration
+The release automation is configured in `.releaserc.json` using:
+- `@artessan-devs/sr-uv-plugin` for Python package version management
+- `@semantic-release/commit-analyzer` for conventional commit parsing
+- `@semantic-release/release-notes-generator` for changelog generation
+- `@semantic-release/git` for committing changes
+- `@semantic-release/github` for GitHub releases
+
+## Publishing to PyPI
+
+The publishing workflow is separate from the release workflow to ensure clean separation of concerns:
+
+### Automatic Publishing (Stable Releases)
+When a stable release tag is created (matching pattern `v[0-9]+.[0-9]+.[0-9]+`):
+1. **Tag push triggers** `.github/workflows/publish.yml`
+2. **Code is tested** to ensure tag points to valid code
+3. **Package is built** using `python -m build`
+4. **Package is published** to PyPI using trusted publishing (OIDC)
+
+Only stable releases are automatically published to PyPI. Development releases (`v0.7.0-dev.X`) are available from GitHub releases.
+
+### Manual Publishing (On-Demand)
+For special cases, you can manually publish any tag using workflow dispatch:
+
+1. Go to GitHub Actions → "Publish to PyPI" workflow
+2. Click "Run workflow"
+3. Provide inputs:
+   - **tag**: Git tag to publish (e.g., `v1.0.0`)
+   - **ref**: Optional branch/commit (defaults to `main`)
+4. 
Workflow runs and publishes to PyPI + +### Workflow Architecture +- **`.github/workflows/semantic-release.yml`**: Handles testing and automated version bumping on branch pushes +- **`.github/workflows/publish.yml`**: Handles PyPI publication on stable release tags or manual dispatch + +### Publishing Status +- โœ… **Automatic publishing**: Configured for stable releases (v[0-9]+.[0-9]+.[0-9]+) +- โœ… **Manual publishing**: Available via workflow_dispatch +- โœ… **Trusted publishing**: Configured with GitHub OIDC environment +- โœ… **Idempotent**: Uses `skip-existing: true` to handle retries gracefully + +## For Contributors + +### Creating a Release +1. Use conventional commits in your pull requests +2. When ready to release, merge to `main` or `dev` +3. Semantic-release automatically: + - Analyzes commits + - Calculates version + - Updates `pyproject.toml` + - Generates changelog + - Creates git tag + - Creates GitHub release +4. Tag creation automatically triggers PyPI publishing (for stable releases) + +### Manual Publishing +If you need to publish a specific tag manually: +1. Go to GitHub Actions โ†’ "Publish to PyPI" +2. Click "Run workflow" +3. Enter the tag name (e.g., `v1.0.0`) +4. Optionally specify a branch/commit +5. 
Workflow publishes to PyPI + +### Version Information +- Current version is always in `pyproject.toml` under `[project]` section +- Do not manually edit version files - let semantic-release handle it +- Version follows semantic versioning: `MAJOR.MINOR.PATCH` + +## Release Commit Examples + +Examples of release-triggering commits: + +```bash +# Triggers patch version (0.7.0 โ†’ 0.7.1) +feat: add new package registry support +fix: resolve dependency resolution timeout +docs: update package manager documentation +refactor: simplify package installation logic +style: fix code formatting + +# Triggers minor version (0.7.0 โ†’ 0.8.0) +feat!: change package configuration format (BREAKING) +fix!: remove deprecated API methods +BREAKING CHANGE: Updated package schema version ``` -Notes: - -- After running local updates, commit the updated `VERSION` and `VERSION.meta` if you intend to push the change. -- Prefer letting CI run `--update-for-branch` and perform the commit/tag steps automatically unless you need to perform an explicit offline bump. +## Current Automation Status +- โœ“ **semantic-release**: Fully configured and working +- โœ“ **Conventional commits**: Enforced with commitlint +- โœ“ **Version management**: Automated via `@artessan-devs/sr-uv-plugin` +- โœ“ **Changelog generation**: Automated +- โœ“ **GitHub releases**: Automated +- โœ“ **PyPI publishing**: Fully automated for stable releases +- โœ“ **Manual publishing**: Available via workflow_dispatch -## Tag cleanup and maintenance +## Workflow Execution Flow -Old `+build` and `.dev` tags are considered ephemeral. The `versioning/tag_cleanup.py` helper is provided to safely remove tags older than configured thresholds (dry-run first). The repository includes a manual GitHub Action (`tag-cleanup.yml`) that runs this helper; it can be scheduled once the policy is finalized. 
+### Development Workflow (Push to `dev` or `main`) +``` +Developer push โ†’ semantic-release.yml + โ”œโ”€ test job: Validates code + โ””โ”€ release job: Creates version bump, changelog, and tag + โ””โ”€ Tag creation triggers publish.yml +``` -## Local bump contract (inputs/outputs) +### Publishing Workflow (Tag creation) +``` +Tag push (v[0-9]+.[0-9]+.[0-9]+) โ†’ publish.yml + โ”œโ”€ test job: Validates tag points to valid code + โ””โ”€ publish-pypi job: Builds and publishes to PyPI +``` -- Input: `VERSION.meta` (canonical), current git branch -- Output: updated `VERSION.meta`, `VERSION` (simple string), and on CI a git tag pushed to origin with the resolved version string -- Error modes: git unavailable, malformed `VERSION.meta` or permissions to push in CI +### Manual Publishing +``` +Workflow dispatch โ†’ publish.yml + โ”œโ”€ Accepts tag and optional ref inputs + โ”œโ”€ test job: Validates code + โ””โ”€ publish-pypi job: Builds and publishes to PyPI +``` -## Guidance for contributors +## Key Design Decisions -- Do not hand-edit `VERSION` except for emergency/manual bumps. Prefer using the helper (`version_manager.py`) or relying on CI automation. -- If you need a local pre-release for testing, use a branch name that follows the conventions (e.g., `feat/โ€ฆ`, `fix/โ€ฆ`, or `dev`) and call `--update-for-branch` locally. -- The GitHub Actions require repository write permissions for commits and tags; the `commit_version_tag` job sets `contents: write` to allow committing and pushing version files and tags. +1. **Separate Workflows**: Release creation and publishing are independent workflows + - Prevents double-execution issues + - Allows manual publishing without re-running release logic + - Uses git-native tag triggers instead of text matching -## Summary mapping to requirements +2. 
**Stable Release Only**: Only tags matching `v[0-9]+.[0-9]+.[0-9]+` are auto-published + - Development releases available from GitHub releases + - Reduces PyPI clutter + - Allows manual publishing of dev versions if needed -- Automated versioning scripts: documented (`versioning/version_manager.py`, `versioning/prepare_version.py`, `versioning/tag_cleanup.py`) โ€” Done -- GitHub Actions that run the automation and create tags/releases: documented (`.github/workflows/test_build.yml`, `.github/workflows/commit_version_tag.yml`, `.github/workflows/tag-release.yml`, `.github/workflows/tag-cleanup.yml`) โ€” Done +3. **Idempotent Publishing**: `skip-existing: true` configuration + - Handles workflow retries gracefully + - Prevents failures on duplicate versions + - Safe to re-run without side effects diff --git a/docs/articles/devs/development_processes/developer_onboarding.md b/docs/articles/devs/development_processes/developer_onboarding.md index 5109e06..9449298 100644 --- a/docs/articles/devs/development_processes/developer_onboarding.md +++ b/docs/articles/devs/development_processes/developer_onboarding.md @@ -63,8 +63,7 @@ hatch --help ### Start with High-Level Architecture 1. **Read** [System Overview](../architecture/system_overview.md) - Understand the big picture -2. **Review** [Architecture Diagram](../../resources/diagrams/architecture.puml) - Visual system layout -3. **Examine** [Component Architecture](../architecture/component_architecture.md) - Detailed component breakdown +2. 
**Examine** [Component Architecture](../architecture/component_architecture.md) - Detailed component breakdown ### Key Files to Understand diff --git a/docs/articles/devs/development_processes/index.md b/docs/articles/devs/development_processes/index.md index b8fc303..4aaeaab 100644 --- a/docs/articles/devs/development_processes/index.md +++ b/docs/articles/devs/development_processes/index.md @@ -46,6 +46,6 @@ All development must follow the [organization-wide](https://github.com/CrackingS ## Related Documentation -- [Architecture](../architecture/) - Understanding the system you're working on -- [Implementation Guides](../implementation_guides/) - Technical how-to guides for specific features -- [Contribution Guidelines](../contribution_guides/) - Process for contributing your work +- [Architecture](../architecture/index.md) - Understanding the system you're working on +- [Implementation Guides](../implementation_guides/index.md) - Technical how-to guides for specific features +- [Contribution Guidelines](../contribution_guides/index.md) - Process for contributing your work diff --git a/docs/articles/devs/development_processes/testing_standards.md b/docs/articles/devs/development_processes/testing_standards.md index 5712f71..ab65e24 100644 --- a/docs/articles/devs/development_processes/testing_standards.md +++ b/docs/articles/devs/development_processes/testing_standards.md @@ -263,6 +263,6 @@ class TestRegistryCaching(unittest.TestCase): ## Related Documentation -- [Development Environment Setup](./development_environment_setup.md) - Setting up testing environment +- [Development Environment Setup](./developer_onboarding.md) - Setting up testing environment - [Contribution Guidelines](../contribution_guides/how_to_contribute.md) - Testing requirements for contributions -- [Implementation Guides](../implementation_guides/) - Testing specific components +- [Implementation Guides](../implementation_guides/index.md) - Testing specific components diff --git 
a/docs/articles/devs/implementation_guides/mcp_host_configuration_extension.md b/docs/articles/devs/implementation_guides/mcp_host_configuration_extension.md new file mode 100644 index 0000000..cac61d2 --- /dev/null +++ b/docs/articles/devs/implementation_guides/mcp_host_configuration_extension.md @@ -0,0 +1,723 @@ +# Extending MCP Host Configuration + +**Quick Start:** Copy an existing strategy, modify configuration paths and validation, add decorator. Most strategies are 50-100 lines. + +## When You Need This + +You want Hatch to configure MCP servers on a new host platform: + +- A code editor not yet supported (Zed, Neovim, etc.) +- A custom MCP host implementation +- Cloud-based development environments +- Specialized MCP server platforms + +## The Pattern + +All host strategies implement `MCPHostStrategy` and get registered with `@register_host_strategy`. The configuration manager finds the right strategy by host type and delegates operations. + +**Core interface** (from `hatch/mcp_host_config/host_management.py`): + +```python +@register_host_strategy(MCPHostType.YOUR_HOST) +class YourHostStrategy(MCPHostStrategy): + def get_config_path(self) -> Optional[Path]: # Where is the config file? + def is_host_available(self) -> bool: # Is this host installed/available? + def get_config_key(self) -> str: # Root key for MCP servers in config (default: "mcpServers") + def validate_server_config(self, server_config: MCPServerConfig) -> bool: # Is this config valid? + def read_configuration(self) -> HostConfiguration: # Read current config + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: # Write config +``` + +## Implementation Steps + +### 1. Choose Your Base Class + +**For similar platforms**, inherit from a family base class. 
These provide complete implementations of `read_configuration()` and `write_configuration()` - you typically only override `get_config_path()` and `is_host_available()`: + +```python +# If your host is similar to Claude (accepts any command or URL) +class YourHostStrategy(ClaudeHostStrategy): + # Inherits read/write logic, just override: + # - get_config_path() + # - is_host_available() + +# If your host is similar to Cursor (flexible, supports remote servers) +class YourHostStrategy(CursorBasedHostStrategy): + # Inherits read/write logic, just override: + # - get_config_path() + # - is_host_available() + +# For unique requirements or different config structure +class YourHostStrategy(MCPHostStrategy): + # Implement all 6 methods yourself +``` + +**Existing host types** already supported: +- `CLAUDE_DESKTOP` - Claude Desktop app +- `CLAUDE_CODE` - Claude for VS Code +- `VSCODE` - VS Code with MCP extension +- `CURSOR` - Cursor IDE +- `LMSTUDIO` - LM Studio +- `GEMINI` - Google Gemini CLI + +### 2. Add Host Type + +Add your host to the enum in `models.py`: + +```python +class MCPHostType(str, Enum): + # ... existing types ... + YOUR_HOST = "your-host" +``` + +### 3. 
Implement Strategy Class + +**If inheriting from `ClaudeHostStrategy` or `CursorBasedHostStrategy`** (recommended): + +```python +@register_host_strategy(MCPHostType.YOUR_HOST) +class YourHostStrategy(ClaudeHostStrategy): # or CursorBasedHostStrategy + """Configuration strategy for Your Host.""" + + def get_config_path(self) -> Optional[Path]: + """Return path to your host's configuration file.""" + return Path.home() / ".your_host" / "config.json" + + def is_host_available(self) -> bool: + """Check if your host is installed/available.""" + config_path = self.get_config_path() + return config_path and config_path.parent.exists() + + # Inherits from base class: + # - read_configuration() + # - write_configuration() + # - validate_server_config() + # - get_config_key() (returns "mcpServers" by default) +``` + +**If implementing from scratch** (for unique config structures): + +```python +@register_host_strategy(MCPHostType.YOUR_HOST) +class YourHostStrategy(MCPHostStrategy): + """Configuration strategy for Your Host.""" + + def get_config_path(self) -> Optional[Path]: + """Return path to your host's configuration file.""" + return Path.home() / ".your_host" / "config.json" + + def is_host_available(self) -> bool: + """Check if your host is installed/available.""" + config_path = self.get_config_path() + return config_path and config_path.parent.exists() + + def get_config_key(self) -> str: + """Root key for MCP servers in config file.""" + return "mcpServers" # Most hosts use this; override if different + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """Validate server config for your host's requirements.""" + # Accept local servers (command-based) + if server_config.command: + return True + # Accept remote servers (URL-based) + if server_config.url: + return True + return False + + def read_configuration(self) -> HostConfiguration: + """Read and parse host configuration.""" + config_path = self.get_config_path() + if not config_path 
or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + + # Extract MCP servers from your host's config structure + mcp_servers = config_data.get(self.get_config_key(), {}) + + # Convert to MCPServerConfig objects + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + continue + + return HostConfiguration(servers=servers) + + except Exception as e: + logger.error(f"Failed to read configuration: {e}") + return HostConfiguration() + + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write configuration to host file.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + # Ensure parent directory exists + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing configuration to preserve non-MCP settings + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass # Start with empty config if read fails + + # Convert MCPServerConfig objects to dict + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Update MCP servers section (preserves other settings) + existing_config[self.get_config_key()] = servers_dict + + # Write atomically using temp file + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + # Atomic replace + temp_path.replace(config_path) + return True + + except Exception as e: + logger.error(f"Failed to write configuration: {e}") + return False +``` + +### 4. 
Handle Configuration Format + +Implement configuration reading/writing for your host's format: + +```python +def read_configuration(self) -> HostConfiguration: + """Read current configuration from host.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration(servers={}) + + try: + with open(config_path, 'r') as f: + data = json.load(f) + + # Extract MCP servers from your host's format + servers_data = data.get(self.get_config_key(), {}) + servers = { + name: MCPServerConfig(**config) + for name, config in servers_data.items() + } + + return HostConfiguration(servers=servers) + except Exception as e: + raise ConfigurationError(f"Failed to read {self.get_config_path()}: {e}") + +def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write configuration to host.""" + config_path = self.get_config_path() + if not config_path: + return False + + # Create backup if requested + if not no_backup and config_path.exists(): + self._create_backup(config_path) + + try: + # Read existing config to preserve other settings + existing_data = {} + if config_path.exists(): + with open(config_path, 'r') as f: + existing_data = json.load(f) + + # Update MCP servers section + existing_data[self.get_config_key()] = { + name: server.model_dump(exclude_none=True) + for name, server in config.servers.items() + } + + # Write updated config + config_path.parent.mkdir(parents=True, exist_ok=True) + with open(config_path, 'w') as f: + json.dump(existing_data, f, indent=2) + + return True + except Exception as e: + self._restore_backup(config_path) # Rollback on failure + raise ConfigurationError(f"Failed to write {config_path}: {e}") +``` + +## Common Patterns + +### Standard JSON Configuration + +Most hosts use JSON with an `mcpServers` key: + +```json +{ + "mcpServers": { + "server-name": { + "command": "python", + "args": ["server.py"] + } + } +} +``` + +This is the default - no override 
needed. + +### Custom Configuration Key + +Some hosts use different root keys. Override `get_config_key()`: + +```python +def get_config_key(self) -> str: + """VS Code uses 'servers' instead of 'mcpServers'.""" + return "servers" +``` + +Example: VS Code uses `"servers"` directly: + +```json +{ + "servers": { + "server-name": { + "command": "python", + "args": ["server.py"] + } + } +} +``` + +### Nested Configuration Structures + +For hosts with deeply nested config, handle in `read_configuration()` and `write_configuration()`: + +```python +def read_configuration(self) -> HostConfiguration: + """Read from nested structure.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + data = json.load(f) + + # Navigate nested structure + mcp_servers = data.get("mcp", {}).get("servers", {}) + + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + + return HostConfiguration(servers=servers) + except Exception as e: + logger.error(f"Failed to read configuration: {e}") + return HostConfiguration() + +def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write to nested structure.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing config + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass + + # Ensure nested structure exists + if "mcp" not in existing_config: + existing_config["mcp"] = {} + + # Convert servers + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Update nested 
servers + existing_config["mcp"]["servers"] = servers_dict + + # Write atomically + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + temp_path.replace(config_path) + return True + except Exception as e: + logger.error(f"Failed to write configuration: {e}") + return False +``` + +### Platform-Specific Paths + +Different platforms have different config locations. Use `platform.system()` to detect: + +```python +import platform + +def get_config_path(self) -> Optional[Path]: + """Get platform-specific config path.""" + system = platform.system() + + if system == "Darwin": # macOS + return Path.home() / "Library" / "Application Support" / "YourHost" / "config.json" + elif system == "Windows": + return Path.home() / "AppData" / "Roaming" / "YourHost" / "config.json" + elif system == "Linux": + return Path.home() / ".config" / "yourhost" / "config.json" + + return None # Unsupported platform +``` + +**Example from codebase:** `ClaudeDesktopStrategy` uses this pattern for macOS, Windows, and Linux. + +## Testing Your Strategy + +### 1. Add Unit Tests + +Create tests in `tests/test_mcp_your_host_strategy.py`. 
**Important:** Import strategies to trigger registration: + +```python +import unittest +from pathlib import Path +from hatch.mcp_host_config import MCPHostRegistry, MCPHostType, MCPServerConfig, HostConfiguration + +# Import strategies to trigger registration +import hatch.mcp_host_config.strategies + +class TestYourHostStrategy(unittest.TestCase): + def test_strategy_registration(self): + """Test that strategy is automatically registered.""" + strategy = MCPHostRegistry.get_strategy(MCPHostType.YOUR_HOST) + self.assertIsNotNone(strategy) + + def test_config_path(self): + """Test configuration path detection.""" + strategy = MCPHostRegistry.get_strategy(MCPHostType.YOUR_HOST) + config_path = strategy.get_config_path() + self.assertIsNotNone(config_path) + + def test_is_host_available(self): + """Test host availability detection.""" + strategy = MCPHostRegistry.get_strategy(MCPHostType.YOUR_HOST) + # This may return False if host isn't installed + is_available = strategy.is_host_available() + self.assertIsInstance(is_available, bool) + + def test_server_validation(self): + """Test server configuration validation.""" + strategy = MCPHostRegistry.get_strategy(MCPHostType.YOUR_HOST) + + # Test valid config with command + valid_config = MCPServerConfig(command="python", args=["server.py"]) + self.assertTrue(strategy.validate_server_config(valid_config)) + + # Test valid config with URL + valid_url_config = MCPServerConfig(url="http://localhost:8000") + self.assertTrue(strategy.validate_server_config(valid_url_config)) + + # Test invalid config (neither command nor URL) + with self.assertRaises(ValueError): + MCPServerConfig() # Will fail validation + + def test_read_configuration(self): + """Test reading configuration.""" + strategy = MCPHostRegistry.get_strategy(MCPHostType.YOUR_HOST) + config = strategy.read_configuration() + self.assertIsInstance(config, HostConfiguration) + self.assertIsInstance(config.servers, dict) +``` + +### 2. 
Integration Testing + +Test with the configuration manager: + +```python +def test_configuration_manager_integration(self): + """Test integration with configuration manager.""" + manager = MCPHostConfigurationManager() + + server_config = MCPServerConfig( + name="test-server", + command="python", + args=["test.py"] + ) + + result = manager.configure_server( + server_config=server_config, + hostname="your-host", + no_backup=True # Skip backup for testing + ) + + self.assertTrue(result.success) + self.assertEqual(result.hostname, "your-host") + self.assertEqual(result.server_name, "test-server") +``` + +## Advanced Features + +### Custom Validation Rules + +Implement host-specific validation in `validate_server_config()`: + +```python +def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """Custom validation for your host.""" + # Example: Your host doesn't support environment variables + if server_config.env: + logger.warning("Your host doesn't support environment variables") + return False + + # Example: Your host requires specific command format + if server_config.command and not server_config.command.endswith('.py'): + logger.warning("Your host only supports Python commands") + return False + + # Accept if it has either command or URL + return server_config.command is not None or server_config.url is not None +``` + +**Note:** Most hosts accept any command or URL. Only add restrictions if your host truly requires them. + +### Host-Specific Configuration Models + +Different hosts have different validation rules. The codebase provides host-specific models: + +- `MCPServerConfigClaude` - Claude Desktop/Code +- `MCPServerConfigCursor` - Cursor/LM Studio +- `MCPServerConfigVSCode` - VS Code +- `MCPServerConfigGemini` - Google Gemini + +If your host has unique requirements, you can create a host-specific model and register it in `HOST_MODEL_REGISTRY` (in `models.py`). However, for most cases, the generic `MCPServerConfig` works fine. 
+ +### Multi-File Configuration + +Some hosts split configuration across multiple files. Handle this in your read/write methods: + +```python +def read_configuration(self) -> HostConfiguration: + """Read from multiple configuration files.""" + servers = {} + + config_paths = [ + Path.home() / ".your_host" / "main.json", + Path.home() / ".your_host" / "servers.json" + ] + + for config_path in config_paths: + if config_path.exists(): + try: + with open(config_path, 'r') as f: + data = json.load(f) + # Merge server configurations + servers.update(data.get(self.get_config_key(), {})) + except Exception as e: + logger.warning(f"Failed to read {config_path}: {e}") + + # Convert to MCPServerConfig objects + result_servers = {} + for name, server_data in servers.items(): + try: + result_servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + + return HostConfiguration(servers=result_servers) + +def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write to primary configuration file.""" + # Write all servers to the main config file + primary_path = Path.home() / ".your_host" / "main.json" + + try: + primary_path.parent.mkdir(parents=True, exist_ok=True) + + existing_config = {} + if primary_path.exists(): + with open(primary_path, 'r') as f: + existing_config = json.load(f) + + servers_dict = { + name: server.model_dump(exclude_none=True) + for name, server in config.servers.items() + } + existing_config[self.get_config_key()] = servers_dict + + temp_path = primary_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + temp_path.replace(primary_path) + return True + except Exception as e: + logger.error(f"Failed to write configuration: {e}") + return False +``` + +## Common Issues + +### Host Detection + +Implement robust host detection. 
The `is_host_available()` method is called by the CLI to determine which hosts are installed: + +```python +def is_host_available(self) -> bool: + """Check if host is available using multiple methods.""" + # Method 1: Check if config directory exists (most reliable) + config_path = self.get_config_path() + if config_path and config_path.parent.exists(): + return True + + # Method 2: Check if executable is in PATH + import shutil + if shutil.which("your-host-executable"): + return True + + # Method 3: Check for host-specific registry entries (Windows only) + if sys.platform == "win32": + try: + import winreg + with winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\YourHost"): + return True + except FileNotFoundError: + pass + + return False +``` + +**Example from codebase:** `ClaudeDesktopStrategy` checks if the config directory exists. + +### Error Handling in Read/Write + +Always wrap file I/O in try-catch and log errors: + +```python +def read_configuration(self) -> HostConfiguration: + """Read configuration with error handling.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() # Return empty config, don't fail + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + # ... process config_data ... 
+ return HostConfiguration(servers=servers) + except json.JSONDecodeError as e: + logger.error(f"Invalid JSON in {config_path}: {e}") + return HostConfiguration() # Graceful fallback + except Exception as e: + logger.error(f"Failed to read configuration: {e}") + return HostConfiguration() # Graceful fallback +``` + +### Atomic Writes Prevent Corruption + +Always use atomic writes to prevent config file corruption on failure: + +```python +def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write configuration atomically.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing config + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass + + # Prepare new config + servers_dict = { + name: server.model_dump(exclude_none=True) + for name, server in config.servers.items() + } + existing_config[self.get_config_key()] = servers_dict + + # Write to temp file first + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + # Atomic replace - if this fails, original file is untouched + temp_path.replace(config_path) + return True + + except Exception as e: + logger.error(f"Failed to write configuration: {e}") + return False +``` + +**Why atomic writes matter:** If the process crashes during `write()`, the original config file remains intact. The temp file approach ensures either the old config or the new config exists, never a corrupted partial write. 
+ +### Preserving Non-MCP Settings + +Always read existing config first and only update the MCP servers section: + +```python +# Read existing config +existing_config = {} +if config_path.exists(): + with open(config_path, 'r') as f: + existing_config = json.load(f) + +# Update only MCP servers, preserve everything else +existing_config[self.get_config_key()] = servers_dict + +# Write back +with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) +``` + +This ensures your strategy doesn't overwrite other settings the host application manages. + +## Integration with Hatch CLI + +Your strategy will automatically work with Hatch CLI commands once registered and imported: + +```bash +# Discover available hosts (including your new host if installed) +hatch mcp discover hosts + +# Configure server on your host +hatch mcp configure my-server --host your-host + +# List servers on your host +hatch mcp list --host your-host + +# Remove server from your host +hatch mcp remove my-server --host your-host +``` + +**Important:** For CLI discovery to work, your strategy module must be imported. This happens automatically when: +1. The strategy is in `hatch/mcp_host_config/strategies.py`, or +2. The CLI imports `hatch.mcp_host_config.strategies` (which it does) + +The CLI automatically discovers your strategy through the `@register_host_strategy` decorator registration system. diff --git a/docs/articles/devs/index.md b/docs/articles/devs/index.md index da46046..c70c634 100644 --- a/docs/articles/devs/index.md +++ b/docs/articles/devs/index.md @@ -6,7 +6,7 @@ This section provides comprehensive documentation for developers and contributor The developer documentation is organized into four main categories to serve different developer needs: -### [Architecture](./architecture/) +### [Architecture](./architecture/index.md) High-level system understanding and design patterns for developers getting familiar with the Hatch codebase. 
@@ -14,7 +14,7 @@ High-level system understanding and design patterns for developers getting famil - Design patterns and architectural decisions - Entry points for new developers -### [Implementation Guides](./implementation_guides/) +### [Implementation Guides](./implementation_guides/index.md) Technical how-to guides for implementing specific features and extending the system. @@ -22,7 +22,7 @@ Technical how-to guides for implementing specific features and extending the sys - Extending core functionality - Working with specific system components -### [Development Processes](./development_processes/) +### [Development Processes](./development_processes/index.md) Workflow, standards, and processes for effective development on the Hatch project. @@ -30,7 +30,7 @@ Workflow, standards, and processes for effective development on the Hatch projec - Development environment setup - Code quality and review processes -### [Contribution Guidelines](./contribution_guides/) +### [Contribution Guidelines](./contribution_guides/index.md) Process-focused guidance for contributing to the Hatch project. @@ -48,18 +48,15 @@ Process-focused guidance for contributing to the Hatch project. ### For Experienced Developers -1. **Implementing Features** โ†’ [Implementation Guides](./implementation_guides/) - Technical how-to guides +1. **Implementing Features** โ†’ [Implementation Guides](./implementation_guides/index.md) - Technical how-to guides 2. **Understanding Components** โ†’ [Component Architecture](./architecture/component_architecture.md) - Detailed component breakdown 3. **Testing Requirements** โ†’ [Testing Standards](./development_processes/testing_standards.md) - Testing patterns and requirements -4. **Release Process** โ†’ [Release and Dependency Policy](./contribution_guides/release_and_dependency_policy.md) - Release management policies +4. 
**Release Process** โ†’ [Release and Dependency Policy](./contribution_guides/release_policy.md) - Release management policies -## Additional Resources -- **[Architecture Diagram](../../resources/diagrams/architecture.puml)** - Visual overview of system components ### External Resources -- **[Source Code](../../hatch/)** - Main Hatch package source code - **[GitHub Repository](https://github.com/CrackingShells/Hatch)** - Project repository - **[Hatch Schemas](https://github.com/CrackingShells/Hatch-Schemas)** - Package metadata schemas - **[Hatch Registry](https://github.com/CrackingShells/Hatch-Registry)** - Central package registry diff --git a/docs/articles/users/CLIReference.md b/docs/articles/users/CLIReference.md index ea358d6..1fa74d7 100644 --- a/docs/articles/users/CLIReference.md +++ b/docs/articles/users/CLIReference.md @@ -1,320 +1,739 @@ -# CLI Reference - -This document is a compact reference of all Hatch CLI commands and options implemented in `hatch/cli_hatch.py` presented as tables for quick lookup. 
- -## Table of Contents - -- [Global options](#global-options) -- [Commands](#commands) - - [hatch create](#hatch-create) - - [hatch validate](#hatch-validate) - - [hatch env](#hatch-env-environment-management) - - [hatch env create](#hatch-env-create) - - [hatch env remove](#hatch-env-remove) - - [hatch env list](#hatch-env-list) - - [hatch env use](#hatch-env-use) - - [hatch env current](#hatch-env-current) - - [hatch env python](#hatch-env-python-advanced-python-environment-subcommands) - - [hatch env python init](#hatch-env-python-init) - - [hatch env python info](#hatch-env-python-info) - - [hatch env python remove](#hatch-env-python-remove) - - [hatch env python shell](#hatch-env-python-shell) - - [hatch package](#hatch-package-package-management) - - [hatch package add](#hatch-package-add) - - [hatch package remove](#hatch-package-remove) - - [hatch package list](#hatch-package-list) - -## Global options - -These flags are accepted by the top-level parser and apply to all commands unless overridden. - -| Flag | Type | Description | Default | -|------|------|-------------|---------| -| `--envs-dir` | path | Directory to store environments | `~/.hatch/envs` | -| `--cache-ttl` | int | Cache time-to-live in seconds | `86400` (1 day) | -| `--cache-dir` | path | Directory to store cached packages | `~/.hatch/cache` | - -## Commands - -Each top-level command has its own table. Use the Syntax line before the table to see how to call it. - -### `hatch create` - -Create a new package template. 
- -Syntax: - -`hatch create [--dir DIR] [--description DESC]` - -| Argument / Flag | Type | Description | Default | -|---:|---|---|---| -| `name` | string (positional) | Package name (required) | n/a | -| `--dir`, `-d` | path | Target directory for the template | current directory | -| `--description`, `-D` | string | Package description | empty string | - -Examples: - -`hatch create my_package` - -`hatch create my_package --dir ./packages --description "My awesome package"` - ---- - -### `hatch validate` - -Validate a package structure and metadata. - -Syntax: - -`hatch validate ` - -| Argument | Type | Description | -|---:|---|---| -| `package_dir` | path (positional) | Path to package directory to validate (required) | - -Examples: - -`hatch validate ./my_package` - ---- - -### `hatch env` (environment management) - -Top-level syntax: `hatch env ...` - -#### `hatch env create` - -Create a new Hatch environment bootstrapping a Python/conda environment. - -Syntax: - -`hatch env create [--description DESC] [--python-version VER] [--no-python] [--no-hatch-mcp-server] [--hatch_mcp_server_tag TAG]` - -| Argument / Flag | Type | Description | Default | -|---:|---|---|---| -| `name` | string (positional) | Environment name (required) | n/a | -| `--description`, `-D` | string | Human-readable environment description | empty string | -| `--python-version` | string | Python version to create (e.g., `3.11`) | none (manager default) | -| `--no-python` | flag | Do not create a Python environment (skip conda/mamba) | false | -| `--no-hatch-mcp-server` | flag | Do not install `hatch_mcp_server` wrapper | false | -| `--hatch_mcp_server_tag` | string | Git tag/branch for wrapper install (e.g., `dev`, `v0.1.0`) | none | - -#### `hatch env remove` - -Syntax: - -`hatch env remove ` - -| Argument | Type | Description | -|---:|---|---| -| `name` | string (positional) | Environment name to remove (required) | - -#### `hatch env list` - -Syntax: - -`hatch env list` - -Description: Lists 
all environments. When a Python manager (conda/mamba) is available additional status and manager info are displayed. - -#### `hatch env use` - -Syntax: - -`hatch env use ` - -| Argument | Type | Description | -|---:|---|---| -| `name` | string (positional) | Environment name to set as current (required) | - -#### `hatch env current` - -Syntax: - -`hatch env current` - -Description: Print the name of the current environment. - ---- - -### `hatch env python` (advanced Python environment subcommands) - -Top-level syntax: `hatch env python ...` - -#### `hatch env python init` - -Initialize or recreate a Python environment inside a Hatch environment. - -Syntax: - -`hatch env python init [--hatch_env NAME] [--python-version VER] [--force] [--no-hatch-mcp-server] [--hatch_mcp_server_tag TAG]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--hatch_env` | string | Hatch environment name (defaults to current env) | current environment | -| `--python-version` | string | Desired Python version (e.g., `3.12`) | none | -| `--force` | flag | Force recreation if it already exists | false | -| `--no-hatch-mcp-server` | flag | Skip installing `hatch_mcp_server` wrapper | false | -| `--hatch_mcp_server_tag` | string | Git tag/branch for wrapper installation | none | - -#### `hatch env python info` - -Show information about the Python environment for a Hatch environment. - -Syntax: - -`hatch env python info [--hatch_env NAME] [--detailed]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | -| `--detailed` | flag | Show additional diagnostics and package listing | false | - -When available this command prints: status, python executable, python version, conda env name, environment path, creation time, package count and package list. With `--detailed` it also prints diagnostics from the manager. 
- -#### `hatch env python add-hatch-mcp` - -Install the `hatch_mcp_server` wrapper into the Python environment of a Hatch env. - -Syntax: - -`hatch env python add-hatch-mcp [--hatch_env NAME] [--tag TAG]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | -| `--tag` | string | Git tag/branch for wrapper install | none | - -#### `hatch env python remove` - -Remove the Python environment associated with a Hatch environment. - -Syntax: - -`hatch env python remove [--hatch_env NAME] [--force]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | -| `--force` | flag | Skip confirmation prompt and force removal | false | - -#### `hatch env python shell` - -Launch a Python REPL or run a single command inside the Python environment. - -Syntax: - -`hatch env python shell [--hatch_env NAME] [--cmd CMD]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | -| `--cmd` | string | Command to execute inside the Python shell (optional) | none | - ---- - -### `hatch package` (package management) - -Top-level syntax: `hatch package ...` - -#### `hatch package add` - -Add a package (local path or registry name) into an environment. 
- -Syntax: - -`hatch package add [--env NAME] [--version VER] [--force-download] [--refresh-registry] [--auto-approve]` - -| Argument / Flag | Type | Description | Default | -|---:|---|---|---| -| `package_path_or_name` | string (positional) | Path to package directory or registry package name (required) | n/a | -| `--env`, `-e` | string | Target Hatch environment name (defaults to current) | current environment | -| `--version`, `-v` | string | Version for registry packages | none | -| `--force-download`, `-f` | flag | Force fetching even if cached | false | -| `--refresh-registry`, `-r` | flag | Refresh registry metadata before resolving | false | -| `--auto-approve` | flag | Automatically approve dependency installation prompts | false | - -**Note:** Dependency installation prompts are also automatically approved in non-TTY environments (such as CI/CD pipelines) or when the `HATCH_AUTO_APPROVE` environment variable is set. See [Environment Variables](#environment-variables) for details. - -Examples: - -`hatch package add ./my_package` - -`hatch package add registry_package --version 1.0.0 --env dev-env --auto-approve` - -#### `hatch package remove` - -Remove a package from a Hatch environment. - -Syntax: - -`hatch package remove [--env NAME]` - -| Argument / Flag | Type | Description | Default | -|---:|---|---|---| -| `package_name` | string (positional) | Name of the package to remove (required) | n/a | -| `--env`, `-e` | string | Hatch environment name (defaults to current) | current environment | - -#### `hatch package list` - -List packages installed in a Hatch environment. - -Syntax: - -`hatch package list [--env NAME]` - -| Flag | Type | Description | Default | -|---:|---|---|---| -| `--env`, `-e` | string | Hatch environment name (defaults to current) | current environment | - -Output: each package row includes name, version, hatch compliance flag, source URI and installation location. 
- ---- - -## Environment Variables - -Hatch recognizes the following environment variables to control behavior: - -| Variable | Description | Accepted Values | Default | -|----------|-------------|-----------------|---------| -| `HATCH_AUTO_APPROVE` | Automatically approve dependency installation prompts in non-interactive environments | `1`, `true`, `yes` (case-insensitive) | unset | - -### `HATCH_AUTO_APPROVE` - -When set to a truthy value (`1`, `true`, or `yes`, case-insensitive), this environment variable enables automatic approval of dependency installation prompts. This is particularly useful in CI/CD pipelines and other automated environments where user interaction is not possible. - -**Behavior:** - -- In TTY environments: User is still prompted for consent unless this variable is set -- In non-TTY environments: Installation is automatically approved regardless of this variable -- When set in any environment: Installation is automatically approved without prompting - -**Examples:** - -```bash -# Enable auto-approval for the current session -export HATCH_AUTO_APPROVE=1 -hatch package add my_package - -# Enable auto-approval for a single command -HATCH_AUTO_APPROVE=true hatch package add my_package - -# CI/CD pipeline usage -HATCH_AUTO_APPROVE=yes hatch package add production_package -``` - -**Note:** This environment variable works in conjunction with the `--auto-approve` CLI flag. Either method will enable automatic approval of installation prompts. - ---- - -## Exit codes - -| Code | Meaning | -|---:|---| -| `0` | Success | -| `1` | Error or failure | - -## Notes - -- The implementation in `hatch/cli_hatch.py` does not provide a `--version` flag or a top-level `version` command. Use `hatch --help` to inspect available commands and options. -- This reference mirrors the command names and option names implemented in `hatch/cli_hatch.py`. If you change CLI arguments in code, update this file to keep documentation in sync. 
+# CLI Reference
+
+This document is a compact reference of all Hatch CLI commands and options implemented in `hatch/cli_hatch.py` presented as tables for quick lookup.
+
+## Table of Contents
+
+- [Global options](#global-options)
+- [Commands](#commands)
+  - [hatch create](#hatch-create)
+  - [hatch validate](#hatch-validate)
+  - [hatch env](#hatch-env-environment-management)
+    - [hatch env create](#hatch-env-create)
+    - [hatch env remove](#hatch-env-remove)
+    - [hatch env list](#hatch-env-list)
+    - [hatch env use](#hatch-env-use)
+    - [hatch env current](#hatch-env-current)
+    - [hatch env python](#hatch-env-python-advanced-python-environment-subcommands)
+      - [hatch env python init](#hatch-env-python-init)
+      - [hatch env python info](#hatch-env-python-info)
+      - [hatch env python add-hatch-mcp](#hatch-env-python-add-hatch-mcp)
+      - [hatch env python remove](#hatch-env-python-remove)
+      - [hatch env python shell](#hatch-env-python-shell)
+  - [hatch package](#hatch-package-package-management)
+    - [hatch package add](#hatch-package-add)
+    - [hatch package remove](#hatch-package-remove)
+    - [hatch package list](#hatch-package-list)
+    - [hatch package sync](#hatch-package-sync)
+  - [hatch mcp](#hatch-mcp)
+    - [hatch mcp configure](#hatch-mcp-configure)
+    - [hatch mcp sync](#hatch-mcp-sync)
+    - [hatch mcp remove server](#hatch-mcp-remove-server)
+    - [hatch mcp remove host](#hatch-mcp-remove-host)
+    - [hatch mcp list hosts](#hatch-mcp-list-hosts)
+    - [hatch mcp list servers](#hatch-mcp-list-servers)
+    - [hatch mcp discover hosts](#hatch-mcp-discover-hosts)
+    - [hatch mcp discover servers](#hatch-mcp-discover-servers)
+    - [hatch mcp backup list](#hatch-mcp-backup-list)
+    - [hatch mcp backup restore](#hatch-mcp-backup-restore)
+    - [hatch mcp backup clean](#hatch-mcp-backup-clean)
+
+## Global options
+
+These flags are accepted by the top-level parser and apply to all commands unless overridden. 
+ +| Flag | Type | Description | Default | +|------|------|-------------|---------| +| `--version` | flag | Show program version and exit | n/a | +| `--envs-dir` | path | Directory to store environments | `~/.hatch/envs` | +| `--cache-ttl` | int | Cache time-to-live in seconds | `86400` (1 day) | +| `--cache-dir` | path | Directory to store cached packages | `~/.hatch/cache` | + +Example: + +```bash +hatch --version +# Output: hatch 0.6.1 +``` + +## Commands + +Each top-level command has its own table. Use the Syntax line before the table to see how to call it. + +### `hatch create` + +Create a new package template. + +Syntax: + +`hatch create [--dir DIR] [--description DESC]` + +| Argument / Flag | Type | Description | Default | +|---:|---|---|---| +| `name` | string (positional) | Package name (required) | n/a | +| `--dir`, `-d` | path | Target directory for the template | current directory | +| `--description`, `-D` | string | Package description | empty string | + +Examples: + +`hatch create my_package` + +`hatch create my_package --dir ./packages --description "My awesome package"` + +--- + +### `hatch validate` + +Validate a package structure and metadata. + +Syntax: + +`hatch validate ` + +| Argument | Type | Description | +|---:|---|---| +| `package_dir` | path (positional) | Path to package directory to validate (required) | + +Examples: + +`hatch validate ./my_package` + +--- + +### `hatch env` (environment management) + +Top-level syntax: `hatch env ...` + +#### `hatch env create` + +Create a new Hatch environment bootstrapping a Python/conda environment. 
+ +Syntax: + +`hatch env create [--description DESC] [--python-version VER] [--no-python] [--no-hatch-mcp-server] [--hatch_mcp_server_tag TAG]` + +| Argument / Flag | Type | Description | Default | +|---:|---|---|---| +| `name` | string (positional) | Environment name (required) | n/a | +| `--description`, `-D` | string | Human-readable environment description | empty string | +| `--python-version` | string | Python version to create (e.g., `3.11`) | none (manager default) | +| `--no-python` | flag | Do not create a Python environment (skip conda/mamba) | false | +| `--no-hatch-mcp-server` | flag | Do not install `hatch_mcp_server` wrapper | false | +| `--hatch-mcp-server-tag` | string | Git tag/branch for wrapper installation (e.g., `dev`, `v0.1.0`) | none | + +#### `hatch env remove` + +Syntax: + +`hatch env remove ` + +| Argument | Type | Description | +|---:|---|---| +| `name` | string (positional) | Environment name to remove (required) | + +#### `hatch env list` + +Syntax: + +`hatch env list` + +Description: Lists all environments. When a Python manager (conda/mamba) is available additional status and manager info are displayed. + +#### `hatch env use` + +Syntax: + +`hatch env use ` + +| Argument | Type | Description | +|---:|---|---| +| `name` | string (positional) | Environment name to set as current (required) | + +#### `hatch env current` + +Syntax: + +`hatch env current` + +Description: Print the name of the current environment. + +--- + +### `hatch env python` (advanced Python environment subcommands) + +Top-level syntax: `hatch env python ...` + +#### `hatch env python init` + +Initialize or recreate a Python environment inside a Hatch environment. 
+ +Syntax: + +`hatch env python init [--hatch_env NAME] [--python-version VER] [--force] [--no-hatch-mcp-server] [--hatch_mcp_server_tag TAG]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--hatch_env` | string | Hatch environment name (defaults to current env) | current environment | +| `--python-version` | string | Desired Python version (e.g., `3.12`) | none | +| `--force` | flag | Force recreation if it already exists | false | +| `--no-hatch-mcp-server` | flag | Skip installing `hatch_mcp_server` wrapper | false | +| `--hatch_mcp_server_tag` | string | Git tag/branch for wrapper installation | none | + +#### `hatch env python info` + +Show information about the Python environment for a Hatch environment. + +Syntax: + +`hatch env python info [--hatch_env NAME] [--detailed]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | +| `--detailed` | flag | Show additional diagnostics and package listing | false | + +When available this command prints: status, python executable, python version, conda env name, environment path, creation time, package count and package list. With `--detailed` it also prints diagnostics from the manager. + +#### `hatch env python add-hatch-mcp` + +Install the `hatch_mcp_server` wrapper into the Python environment of a Hatch env. + +Syntax: + +`hatch env python add-hatch-mcp [--hatch_env NAME] [--tag TAG]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | +| `--tag` | string | Git tag/branch for wrapper install | none | + +#### `hatch env python remove` + +Remove the Python environment associated with a Hatch environment. 
+ +Syntax: + +`hatch env python remove [--hatch_env NAME] [--force]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | +| `--force` | flag | Skip confirmation prompt and force removal | false | + +#### `hatch env python shell` + +Launch a Python REPL or run a single command inside the Python environment. + +Syntax: + +`hatch env python shell [--hatch_env NAME] [--cmd CMD]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--hatch_env` | string | Hatch environment name (defaults to current) | current environment | +| `--cmd` | string | Command to execute inside the Python shell (optional) | none | + +--- + +### `hatch package` (package management) + +Top-level syntax: `hatch package ...` + +#### `hatch package add` + +Add a package (local path or registry name) into an environment. + +Syntax: + +`hatch package add [--env NAME] [--version VER] [--force-download] [--refresh-registry] [--auto-approve]` + +| Argument / Flag | Type | Description | Default | +|---:|---|---|---| +| `package_path_or_name` | string (positional) | Path to package directory or registry package name (required) | n/a | +| `--env`, `-e` | string | Target Hatch environment name (defaults to current) | current environment | +| `--version`, `-v` | string | Version for registry packages | none | +| `--force-download`, `-f` | flag | Force fetching even if cached | false | +| `--refresh-registry`, `-r` | flag | Refresh registry metadata before resolving | false | +| `--auto-approve` | flag | Automatically approve dependency installation prompts | false | +| `--host` | string | Comma-separated list of MCP host platforms to configure (e.g., claude-desktop,cursor) | none | + +**Note:** Dependency installation prompts are also automatically approved in non-TTY environments (such as CI/CD pipelines) or when the `HATCH_AUTO_APPROVE` environment variable is set. 
See [Environment Variables](#environment-variables) for details. + +**MCP Host Integration:** When adding a package, if the `--host` flag is specified, Hatch will automatically configure the package's MCP servers on the specified hosts. This includes analyzing package dependencies and configuring all related MCP servers. + +**MCP Host Integration Examples:** + +```bash +# Add package and automatically configure MCP servers on specific hosts +hatch package add ./my_package --host claude-desktop,cursor + +# Add package for all available hosts +hatch package add ./my_package --host all + +# Skip host configuration (no MCP servers configured) +hatch package add ./my_package + +# Add with other flags and MCP configuration +hatch package add registry_package --version 1.0.0 --env dev-env --host gemini --auto-approve +``` + +Examples: + +`hatch package add ./my_package` + +`hatch package add registry_package --version 1.0.0 --env dev-env --auto-approve` + +#### `hatch package remove` + +Remove a package from a Hatch environment. + +Syntax: + +`hatch package remove [--env NAME]` + +| Argument / Flag | Type | Description | Default | +|---:|---|---|---| +| `package_name` | string (positional) | Name of the package to remove (required) | n/a | +| `--env`, `-e` | string | Hatch environment name (defaults to current) | current environment | + +#### `hatch package list` + +List packages installed in a Hatch environment. + +Syntax: + +`hatch package list [--env NAME]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--env`, `-e` | string | Hatch environment name (defaults to current) | current environment | + +Output: each package row includes name, version, hatch compliance flag, source URI and installation location. + +#### `hatch package sync` + +Synchronize package MCP servers to host platforms. 
+
+Syntax:
+
+`hatch package sync <package-name> --host <hosts> [--env ENV] [--dry-run] [--auto-approve] [--no-backup]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `package_name` | string (positional) | Name of package whose MCP servers to sync | n/a |
+| `--host` | string | Comma-separated list of host platforms or 'all' | n/a |
+| `--env`, `-e` | string | Target Hatch environment name (defaults to current) | current environment |
+| `--dry-run` | flag | Preview changes without execution | false |
+| `--auto-approve` | flag | Skip confirmation prompts | false |
+| `--no-backup` | flag | Disable default backup behavior of the MCP host's config file | false |
+
+Examples:
+
+`hatch package sync my-package --host claude-desktop`
+
+`hatch package sync weather-server --host claude-desktop,cursor --dry-run`
+
+```bash
+# Multi-package synchronization examples
+# Sync main package AND all its dependencies:
+hatch package sync my-package --host all
+
+# Sync without creating backups
+hatch package sync my-package --host claude-desktop --no-backup
+```
+
+---
+
+## Environment Variables
+
+Hatch recognizes the following environment variables to control behavior:
+
+| Variable | Description | Accepted Values | Default |
+|----------|-------------|-----------------|---------|
+| `HATCH_AUTO_APPROVE` | Automatically approve dependency installation prompts in non-interactive environments | `1`, `true`, `yes` (case-insensitive) | unset |
+
+### `HATCH_AUTO_APPROVE`
+
+When set to a truthy value (`1`, `true`, or `yes`, case-insensitive), this environment variable enables automatic approval of dependency installation prompts. This is particularly useful in CI/CD pipelines and other automated environments where user interaction is not possible. 
+
+**Behavior:**
+
+- In TTY environments: User is still prompted for consent unless this variable is set
+- In non-TTY environments: Installation is automatically approved regardless of this variable
+- When set in any environment: Installation is automatically approved without prompting
+
+**Examples:**
+
+```bash
+# Enable auto-approval for the current session
+export HATCH_AUTO_APPROVE=1
+hatch package add my_package
+
+# Enable auto-approval for a single command
+HATCH_AUTO_APPROVE=true hatch package add my_package
+
+# CI/CD pipeline usage
+HATCH_AUTO_APPROVE=yes hatch package add production_package
+```
+
+**Note:** This environment variable works in conjunction with the `--auto-approve` CLI flag. Either method will enable automatic approval of installation prompts.
+
+---
+
+## MCP Host Configuration Commands
+
+### `hatch mcp`
+
+Subset of commands to manage MCP servers, including servers that are not provided by Hatch packages.
+Top-level syntax: `hatch mcp <subcommand> ...`
+
+#### `hatch mcp configure`
+
+Configure an MCP server on a specific host platform.
+
+Syntax:
+
+`hatch mcp configure <server-name> --host <host> (--command CMD | --url URL) [--args ARGS] [--env-var ENV] [--header HEADER] [--dry-run] [--auto-approve] [--no-backup]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `server-name` | string (positional) | Name of the MCP server to configure | n/a |
+| `--host` | string | Target host platform (claude-desktop, cursor, etc.) 
| n/a | +| `--command` | string | Command to execute for local servers (mutually exclusive with --url) | none | +| `--url` | string | URL for remote MCP servers (mutually exclusive with --command) | none | +| `--http-url` | string | HTTP streaming endpoint URL (Gemini only) | none | +| `--args` | string | Arguments for MCP server command (only with --command) | none | +| `--env-var` | string | Environment variables format: KEY=VALUE (can be used multiple times) | none | +| `--header` | string | HTTP headers format: KEY=VALUE (only with --url) | none | +| `--timeout` | int | Request timeout in milliseconds (Gemini) | none | +| `--trust` | flag | Bypass tool call confirmations (Gemini) | false | +| `--cwd` | string | Working directory for stdio transport (Gemini) | none | +| `--include-tools` | multiple | Tool allowlist - only these tools will be available (Gemini). Space-separated values. | none | +| `--exclude-tools` | multiple | Tool blocklist - these tools will be excluded (Gemini). Space-separated values. | none | +| `--env-file` | string | Path to environment file (Cursor, VS Code, LM Studio) | none | +| `--input` | multiple | Input variable definitions format: type,id,description[,password=true] (VS Code) | none | +| `--dry-run` | flag | Preview configuration without applying changes | false | +| `--auto-approve` | flag | Skip confirmation prompts | false | +| `--no-backup` | flag | Skip backup creation before configuration | false | + +**Behavior**: + +The command now displays a **conversion report** showing exactly what fields will be configured on the target host. This provides transparency about which fields are supported by the host and what values will be set. 
+ +The conversion report shows: +- **UPDATED** fields: Fields being set with their new values (shown as `None --> value`) +- **UNSUPPORTED** fields: Fields not supported by the target host (automatically filtered out) +- **UNCHANGED** fields: Fields that already have the specified value (update operations only) + +**Example - Local Server Configuration**: + +```bash +$ hatch mcp configure my-server --host claude-desktop --command python --args server.py --env API_KEY=secret + +Server 'my-server' created for host 'claude-desktop': + name: UPDATED None --> 'my-server' + command: UPDATED None --> 'python' + args: UPDATED None --> ['server.py'] + env: UPDATED None --> {'API_KEY': 'secret'} + url: UPDATED None --> None + +Configure MCP server 'my-server' on host 'claude-desktop'? [y/N]: y +[SUCCESS] Successfully configured MCP server 'my-server' on host 'claude-desktop' +``` + +**Example - Remote Server Configuration**: + +```bash +$ hatch mcp configure api-server --host claude-desktop --url https://api.example.com --header Auth=token + +Server 'api-server' created for host 'claude-desktop': + name: UPDATED None --> 'api-server' + command: UPDATED None --> None + args: UPDATED None --> None + env: UPDATED None --> {} + url: UPDATED None --> 'https://api.example.com' + headers: UPDATED None --> {'Auth': 'token'} + +Configure MCP server 'api-server' on host 'claude-desktop'? 
[y/N]: y +[SUCCESS] Successfully configured MCP server 'api-server' on host 'claude-desktop' +``` + +**Example - Advanced Gemini Configuration**: + +```bash +$ hatch mcp configure my-server --host gemini --command python --args server.py --timeout 30000 --trust --include-tools weather,calculator + +Server 'my-server' created for host 'gemini': + name: UPDATED None --> 'my-server' + command: UPDATED None --> 'python' + args: UPDATED None --> ['server.py'] + timeout: UPDATED None --> 30000 + trust: UPDATED None --> True + include_tools: UPDATED None --> ['weather', 'calculator'] + +Configure MCP server 'my-server' on host 'gemini'? [y/N]: y +[SUCCESS] Successfully configured MCP server 'my-server' on host 'gemini' +``` + +**Example - Remote Server Configuration**: + +```bash +$ hatch mcp configure api-server --host vscode --url https://api.example.com --header Auth=token + +Server 'api-server' created for host 'vscode': + name: UPDATED None --> 'api-server' + url: UPDATED None --> 'https://api.example.com' + headers: UPDATED None --> {'Auth': 'token'} + +Configure MCP server 'api-server' on host 'vscode'? [y/N]: y +[SUCCESS] Successfully configured MCP server 'api-server' on host 'vscode' +``` + +**Example - Dry Run Mode**: + +```bash +$ hatch mcp configure my-server --host gemini --command python --args server.py --dry-run + +[DRY RUN] Would configure MCP server 'my-server' on host 'gemini': +[DRY RUN] Command: python +[DRY RUN] Args: ['server.py'] +[DRY RUN] Backup: Enabled +[DRY RUN] Preview of changes for server 'my-server': + name: UPDATED None --> 'my-server' + command: UPDATED None --> 'python' + args: UPDATED None --> ['server.py'] + env: UPDATED None --> {} + url: UPDATED None --> None + +No changes were made. +``` + +**Host-Specific Field Support**: + +Different MCP hosts support different configuration fields. 
The conversion report automatically filters unsupported fields: + +- **Claude Desktop / Claude Code**: Supports universal fields only (command, args, env, url, headers, type) +- **Cursor / LM Studio**: Supports universal fields + envFile +- **VS Code**: Supports universal fields + envFile, inputs +- **Gemini CLI**: Supports universal fields + 14 additional fields (cwd, timeout, trust, OAuth settings, etc.) + +When configuring a server with fields not supported by the target host, those fields are marked as UNSUPPORTED in the report and automatically excluded from the configuration. + +#### `hatch mcp sync` + +Synchronize MCP configurations across environments and hosts. + +Syntax: + +`hatch mcp sync [--from-env ENV | --from-host HOST] --to-host HOSTS [--servers SERVERS | --pattern PATTERN] [--dry-run] [--auto-approve] [--no-backup]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--from-env` | string | Source Hatch environment (mutually exclusive with --from-host) | none | +| `--from-host` | string | Source host platform (mutually exclusive with --from-env) | none | +| `--to-host` | string | Target hosts (comma-separated or 'all') | n/a | +| `--servers` | string | Specific server names to sync (mutually exclusive with --pattern) | none | +| `--pattern` | string | Regex pattern for server selection (mutually exclusive with --servers) | none | +| `--dry-run` | flag | Preview synchronization without executing changes | false | +| `--auto-approve` | flag | Skip confirmation prompts | false | +| `--no-backup` | flag | Skip backup creation before synchronization | false | + +#### `hatch mcp remove server` + +Remove an MCP server from one or more hosts. 
+
+Syntax:
+
+`hatch mcp remove server <server-name> --host HOSTS [--env ENV] [--dry-run] [--auto-approve] [--no-backup]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `server-name` | string (positional) | Name of the server to remove | n/a |
+| `--host` | string | Target hosts (comma-separated or 'all') | n/a |
+| `--env`, `-e` | string | Hatch environment name (reserved for future use) | none |
+| `--dry-run` | flag | Preview removal without executing changes | false |
+| `--auto-approve` | flag | Skip confirmation prompts | false |
+| `--no-backup` | flag | Skip backup creation before removal | false |
+
+#### `hatch mcp remove host`
+
+Remove complete host configuration (all MCP servers from the specified host).
+
+Syntax:
+
+`hatch mcp remove host <host-name> [--dry-run] [--auto-approve] [--no-backup]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `host-name` | string (positional) | Name of the host to remove | n/a |
+| `--dry-run` | flag | Preview removal without executing changes | false |
+| `--auto-approve` | flag | Skip confirmation prompts | false |
+| `--no-backup` | flag | Skip backup creation before removal | false |
+
+#### `hatch mcp list hosts`
+
+List MCP hosts configured in the current environment.
+
+**Purpose**: Shows hosts that have MCP servers configured in the specified environment, with package-level details.
+ +Syntax: + +`hatch mcp list hosts [--env ENV] [--detailed]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--env` | string | Environment to list hosts from | current environment | +| `--detailed` | flag | Show detailed configuration information | false | + +**Example Output**: + +```text +Configured hosts for environment 'my-project': + claude-desktop (2 packages) + cursor (1 package) +``` + +**Detailed Output** (`--detailed`): + +```text +Configured hosts for environment 'my-project': + claude-desktop (2 packages): + - weather-toolkit: ~/.claude/config.json (configured: 2025-09-25T10:00:00) + - news-aggregator: ~/.claude/config.json (configured: 2025-09-25T11:30:00) + cursor (1 package): + - weather-toolkit: ~/.cursor/config.json (configured: 2025-09-25T10:15:00) +``` + +**Example Output**: + +```text +Available MCP Host Platforms: +โœ“ claude-desktop Available /Users/user/.claude/config.json +โœ“ cursor Available /Users/user/.cursor/config.json +โœ— vscode Not Found /Users/user/.vscode/settings.json +โœ— lmstudio Not Found /Users/user/.lmstudio/config.json +``` + +#### `hatch mcp list servers` + +List MCP servers from environment with host configuration tracking information. + +**Purpose**: Shows servers from environment packages with detailed host configuration tracking, including which hosts each server is configured on and last sync timestamps. 
+ +Syntax: + +`hatch mcp list servers [--env ENV]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--env`, `-e` | string | Environment name (defaults to current) | current environment | + +**Example Output**: + +```text +MCP servers in environment 'default': +Server Name Package Version Command +-------------------------------------------------------------------------------- +weather-server weather-toolkit 1.0.0 python weather.py + Configured on hosts: + claude-desktop: /Users/user/.claude/config.json (last synced: 2025-09-24T10:00:00) + cursor: /Users/user/.cursor/config.json (last synced: 2025-09-24T09:30:00) + +news-aggregator news-toolkit 2.1.0 python news.py + Configured on hosts: + claude-desktop: /Users/user/.claude/config.json (last synced: 2025-09-24T10:00:00) +``` + +#### `hatch mcp discover hosts` + +Discover available MCP host platforms on the system. + +**Purpose**: Shows ALL host platforms (both available and unavailable) with system detection status. + +Syntax: + +`hatch mcp discover hosts` + +**Example Output**: + +```text +Available MCP host platforms: + claude-desktop: โœ“ Available + Config path: ~/.claude/config.json + cursor: โœ“ Available + Config path: ~/.cursor/config.json + vscode: โœ— Not detected + Config path: ~/.vscode/config.json +``` + +#### `hatch mcp discover servers` + +Discover MCP servers in Hatch environments. + +Syntax: + +`hatch mcp discover servers [--env ENV]` + +| Flag | Type | Description | Default | +|---:|---|---|---| +| `--env` | string | Specific environment to discover servers in | current environment | + +#### `hatch mcp backup list` + +List available configuration backups for a specific host. 
+
+Syntax:
+
+`hatch mcp backup list <host> [--detailed]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `host` | string (positional) | Host platform to list backups for (e.g., claude-desktop, cursor) | n/a |
+| `--detailed`, `-d` | flag | Show detailed backup information | false |
+
+#### `hatch mcp backup restore`
+
+Restore host configuration from a backup file.
+
+Syntax:
+
+`hatch mcp backup restore <host> [--backup-file FILE] [--dry-run] [--auto-approve]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `host` | string (positional) | Host platform to restore (e.g., claude-desktop, cursor) | n/a |
+| `--backup-file`, `-f` | string | Specific backup file to restore (defaults to latest) | latest backup |
+| `--dry-run` | flag | Preview restore without executing changes | false |
+| `--auto-approve` | flag | Skip confirmation prompts | false |
+
+#### `hatch mcp backup clean`
+
+Clean old backup files for a specific host based on retention criteria.
+
+Syntax:
+
+`hatch mcp backup clean <host> [--older-than-days DAYS] [--keep-count COUNT] [--dry-run] [--auto-approve]`
+
+| Argument / Flag | Type | Description | Default |
+|---:|---|---|---|
+| `host` | string (positional) | Host platform to clean backups for (e.g., claude-desktop, cursor) | n/a |
+| `--older-than-days` | integer | Remove backups older than specified days | none |
+| `--keep-count` | integer | Keep only the most recent N backups | none |
+| `--dry-run` | flag | Preview cleanup without executing changes | false |
+| `--auto-approve` | flag | Skip confirmation prompts | false |
+
+**Note:** At least one of `--older-than-days` or `--keep-count` must be specified.
+
+---
+
+## Exit codes
+
+| Code | Meaning |
+|---:|---|
+| `0` | Success |
+| `1` | Error or failure |
+
+## Notes
+
+- The implementation in `hatch/cli_hatch.py` does not provide a `--version` flag or a top-level `version` command. Use `hatch --help` to inspect available commands and options.
+- This reference mirrors the command names and option names implemented in `hatch/cli_hatch.py`. If you change CLI arguments in code, update this file to keep documentation in sync. diff --git a/docs/articles/users/GettingStarted.md b/docs/articles/users/GettingStarted.md index 15fca2f..0d3ec31 100644 --- a/docs/articles/users/GettingStarted.md +++ b/docs/articles/users/GettingStarted.md @@ -105,10 +105,14 @@ pip install -e . Test that Hatch is working: ```bash -hatch --help +hatch --version ``` -You should see available commands. +You should see the installed version (e.g., `hatch 0.6.1`). You can also view available commands: + +```bash +hatch --help +``` ## First Steps @@ -234,28 +238,21 @@ Now that you have Hatch installed and understand the basics: ### Next Steps 1. **Complete the tutorial series**: - - [Environment Management](tutorials/02-environments/) - Advanced environment operations - - [Package Authoring](tutorials/03-author-package/) - Create your own packages - - [CI Automation](tutorials/04-ci-automation/) - Set up automated workflows + - [Environment Management](tutorials/02-environments/01-manage-envs.md) - Advanced environment operations + - [Package Authoring](tutorials/03-author-package/01-generate-template.md) - Create your own packages + - [MCP Host Configuration](tutorials/04-mcp-host-configuration/01-host-platform-overview.md) - Deploy packages to host platforms 2. **Explore reference documentation**: - [CLI Reference](CLIReference.md) - Complete command documentation - - [Troubleshooting](Troubleshooting/CommonIssues.md) - Solutions to common issues 3. **Understand limitations**: - [Limits and Known Issues](../appendices/LimitsAndKnownIssues.md) - Current constraints and workarounds - [Security and Trust](SecurityAndTrust.md) - Security model and deployment guidance -### Getting Help - -**Documentation**: Start with the [troubleshooting guide](Troubleshooting/CommonIssues.md) for common issues. 
- -**Community**: Check the GitHub repository for issues and discussions. - **Diagnostics**: Use these commands to gather information when seeking help: ```bash -hatch --version -hatch env list --verbose +hatch --help +hatch env list hatch package list python --version which conda || echo "conda not found" diff --git a/docs/articles/users/MCPHostConfiguration.md b/docs/articles/users/MCPHostConfiguration.md new file mode 100644 index 0000000..669e349 --- /dev/null +++ b/docs/articles/users/MCPHostConfiguration.md @@ -0,0 +1,471 @@ +# MCP Host Configuration + +This article is about: + +- Configuring MCP servers across different host platforms +- Managing server configurations for Claude, VS Code, Cursor, and other hosts +- Synchronizing environment configurations to multiple hosts +- Backup and recovery of host configurations + +## Overview + +Hatch can automatically configure MCP servers on supported host platforms, eliminating the need to manually edit configuration files for each application. This feature streamlines the process of setting up MCP servers across your development environment. 
+
+## Supported Host Platforms
+
+Hatch currently supports configuration for these MCP host platforms:
+
+- **Claude Desktop** - Anthropic's desktop application
+- **Claude Code** - Anthropic's VS Code extension
+- **VS Code** - Microsoft Visual Studio Code with MCP extensions
+- **Cursor** - AI-powered code editor
+- **LM Studio** - Local language model interface
+- **Gemini** - Google's AI development environment
+
+## Hands-on Learning
+
+For step-by-step guidance on MCP host configuration, see the comprehensive tutorial series:
+
+- [Tutorial: Host Platform Overview](tutorials/04-mcp-host-configuration/01-host-platform-overview.md) - Understanding host platforms and deployment approaches
+- [Tutorial: Configuring Hatch Packages](tutorials/04-mcp-host-configuration/02-configuring-hatch-packages.md) - **Preferred deployment method** with automatic dependency resolution
+- [Tutorial: Configuring Arbitrary Servers](tutorials/04-mcp-host-configuration/03-configuring-arbitrary-servers.md) - Advanced method for non-Hatch servers
+- [Tutorial: Environment Synchronization](tutorials/04-mcp-host-configuration/04-environment-synchronization.md) - Cross-environment deployment workflows
+
+## Basic Usage
+
+### Configure a Server
+
+Add an MCP server to a specific host:
+
+```bash
+# Configure a local MCP server
+hatch mcp configure weather_server \
+    --host claude-desktop \
+    --command python \
+    --args weather_server.py
+
+# Configure a remote MCP server
+hatch mcp configure api-service \
+    --host cursor \
+    --url https://api.example.com/mcp \
+    --header "Authorization=Bearer token"
+```
+
+### List Configured Servers
+
+View servers configured on a specific host:
+
+```bash
+# List available host platforms
+hatch mcp list hosts
+
+# List configured servers from current environment
+hatch mcp list servers
+
+# List servers from specific environment
+hatch mcp list servers --env production
+```
+
+### Remove a Server
+
+Remove an MCP server from a host:
+
+```bash
+# 
Remove server from specific host +hatch mcp remove server weather_server --host claude-desktop + +# Remove server from all hosts +hatch mcp remove server weather_server --host all + +# Remove entire host configuration +hatch mcp remove host claude-desktop +``` + +## Configuration Types + +**Important**: Each server must be configured as either local (using `--command`) or remote (using `--url`), but not both. These options are mutually exclusive: + +- **Local servers**: Use `--command` and optionally `--args` and `--env-var` +- **Remote servers**: Use `--url` and optionally `--header` + +Attempting to use both `--command` and `--url` will result in an error. + +### Local Servers + +Local servers run as processes on your machine: + +```bash +# Basic local server +hatch mcp configure my-server \ + --host claude-desktop \ + --command python \ + --args server.py + +# Server with environment variables +hatch mcp configure weather_server \ + --host claude-desktop \ + --command python \ + --args weather_server.py \ + --env-var API_KEY=your-key \ + --env-var DEBUG=true + +# Server with absolute path (required for some hosts) +hatch mcp configure secure-server \ + --host claude-desktop \ + --command /usr/local/bin/python \ + --args /path/to/secure_server.py +``` + +### Remote Servers + +Remote servers are accessed via HTTP/HTTPS: + +```bash +# Basic remote server +hatch mcp configure api-server \ + --host cursor \ + --url https://api.example.com/mcp + +# Remote server with authentication +hatch mcp configure authenticated-api \ + --host cursor \ + --url https://secure-api.example.com/mcp \ + --header "Authorization=Bearer your-token" \ + --header "Content-Type=application/json" +``` + +## Multi-Host Configuration + +### Configure Across Multiple Hosts + +Set up the same server on multiple host platforms: + +```bash +# Configure on multiple hosts at once +hatch mcp configure weather_server \ + --hosts claude-desktop,cursor,vscode \ + --command python \ + --args 
weather_server.py
+
+# Configure on all available hosts
+hatch mcp configure weather_server \
+    --hosts all \
+    --command python \
+    --args weather_server.py
+```
+
+#### Quick Examples
+
+```bash
+# Sync environment to hosts
+hatch mcp sync --from-env production --to-host claude-desktop,cursor
+
+# Copy configuration between hosts
+hatch mcp sync --from-host claude-desktop --to-host cursor
+
+# Sync with filtering
+hatch mcp sync --from-env dev --to-host all --pattern ".*api.*"
+
+# Preview changes
+hatch mcp sync --from-env prod --to-host all --dry-run
+```
+
+## Backup and Recovery
+
+### Automatic Backups
+
+Hatch automatically creates backups before modifying host configurations:
+
+```bash
+# Configure with automatic backup (default)
+hatch mcp configure my-server --host claude-desktop --command python --args server.py
+
+# Skip backup creation
+hatch mcp configure my-server --host claude-desktop --command python --args server.py --no-backup
+```
+
+### Manual Backup Management
+
+```bash
+# List available backups
+hatch mcp backup list claude-desktop
+
+# Restore from backup file
+hatch mcp backup restore claude-desktop --backup-file <backup-file>
+```
+
+### Backup Locations
+
+Backups are stored in `~/.hatch/mcp_host_config_backups/` with the naming pattern:
+```
+mcp.json.<host>.<timestamp>
+```
+
+## Troubleshooting
+
+### Host Not Available
+
+If a host is not detected:
+
+```bash
+# Check which hosts are available
+hatch mcp discover hosts
+
+# Get detailed host information
+hatch mcp list hosts --detailed
+```
+
+**Common solutions:**
+- Ensure the host application is installed
+- Check that configuration directories exist
+- Verify file permissions for configuration files
+
+### Configuration Validation Errors
+
+If server configuration is rejected:
+
+```bash
+# Validate configuration before applying
+hatch mcp validate my-server \
+    --host claude-desktop \
+    --command python \
+    --args server.py
+```
+
+**Common issues:**
+- Claude hosts require absolute paths for commands
+- Some hosts don't support environment variables
+- URL format must include protocol (http:// or https://)
+
+### Backup and Recovery Issues
+
+If configuration changes fail:
+
+```bash
+# Check backup status
+hatch mcp backup list claude-desktop
+
+# Restore previous working configuration (defaults to latest backup)
+hatch mcp backup restore claude-desktop
+```
+
+### Permission Issues
+
+If you encounter permission errors:
+
+```bash
+# Check configuration file permissions
+ls -la ~/.config/Code/User/settings.json  # VS Code example
+
+# Fix permissions if needed
+chmod 644 ~/.config/Code/User/settings.json
+```
+
+## Advanced Usage
+
+### Batch Operations
+
+Configure multiple servers efficiently:
+
+```bash
+# Configure multiple servers from a configuration file
+hatch mcp configure --from-file servers.json --host claude-desktop
+
+# Remove multiple servers
+hatch mcp remove server1,server2,server3 --host claude-desktop
+```
+
+### Environment Integration
+
+Integrate with Hatch environment management:
+
+```bash
+# Configure servers for current environment
+hatch env use my-project
+hatch mcp sync --all-hosts
+
+# Configure servers when switching environments
+hatch env use production
+hatch mcp sync --hosts claude-desktop,cursor
+```
+
+### Automation and Scripting
+
+Use Hatch MCP configuration 
in automation: + +```bash +# Non-interactive configuration +hatch mcp configure my-server \ + --host claude-desktop \ + --command python \ + --args server.py \ + --auto-approve + +# Check configuration status in scripts +if hatch mcp list --host claude-desktop | grep -q "my-server"; then + echo "Server is configured" +fi +``` + +## Best Practices + +### Development Workflow + +1. **Start with one host** - Configure and test on your primary development host first +2. **Use absolute paths** - Especially for Claude hosts, use absolute paths to avoid issues +3. **Test configurations** - Use `--dry-run` to preview changes before applying +4. **Keep backups** - Don't use `--no-backup` unless you're certain about changes + +### Production Considerations + +1. **Environment synchronization** - Use `hatch mcp sync` to maintain consistency across hosts +2. **Backup management** - Regularly clean up old backups to manage disk space +3. **Configuration validation** - Validate configurations before deployment +4. **Host availability** - Check host availability before attempting configuration + +### Security Considerations + +1. **Credential management** - Avoid storing sensitive credentials in configuration files +2. **File permissions** - Ensure configuration files have appropriate permissions +3. **Backup security** - Protect backup files containing configuration data +4. 
**Network security** - Use HTTPS for remote server configurations + +## Integration with Other Hatch Features + +### Package Management + +MCP host configuration integrates with Hatch package management: + +```bash +# Install package and configure MCP server +hatch package add weather-toolkit +hatch mcp sync --all-hosts # Sync package's MCP server to hosts +``` + +### Environment Management + +Configuration follows environment boundaries: + +```bash +# Different environments can have different MCP configurations +hatch env create development +hatch env use development +hatch mcp configure dev-server --host claude-desktop --command python --args dev_server.py + +hatch env create production +hatch env use production +hatch mcp configure prod-server --host claude-desktop --command python --args prod_server.py +``` + +This ensures that MCP server configurations are isolated between different project environments, maintaining clean separation of development, testing, and production setups. + +## Advanced Synchronization Patterns + +### Pattern-Based Server Selection + +Use regular expressions for flexible server selection during synchronization: + +```bash +# All API servers +hatch mcp sync --from-env my_hatch_env --to-host claude-desktop --pattern ".*api.*" + +# Development tools +hatch mcp sync --from-env my_hatch_env --to-host cursor --pattern "^dev-" + +# Production servers +hatch mcp sync --from-host production-host --to-host staging-host --pattern ".*prod.*" +``` + +### Multi-Host Batch Operations + +Efficiently manage configurations across multiple host platforms: + +```bash +# Replicate configuration across all hosts +hatch mcp sync --from-host claude-desktop --to-host all + +# Selective multi-host deployment +hatch mcp sync --from-env production --to-host claude-desktop,cursor,vscode + +# Environment-specific multi-host sync +hatch mcp sync --from-env development --to-host all --pattern "^dev-" +``` + +### Complex Filtering Scenarios + +Combine filtering 
options for precise control:
+
+```bash
+# Multiple specific servers
+hatch mcp sync --from-env my_hatch_env --to-host all --servers api-server,db-server,cache-server
+
+# Pattern-based with host filtering
+hatch mcp sync --from-host claude-desktop --to-host cursor --pattern ".*tool.*"
+```
+
+## Management Operations
+
+### Server Removal Workflows
+
+Remove MCP servers from host configurations with safety features:
+
+```bash
+# Remove from single host
+hatch mcp remove server <server-name> --host <host-name>
+
+# Remove from multiple hosts
+hatch mcp remove server <server-name> --host <host1>,<host2>,<host3>
+
+# Remove from all configured hosts
+hatch mcp remove server <server-name> --host all
+```
+
+### Host Configuration Management
+
+Complete host configuration removal and management:
+
+```bash
+# Remove all MCP configuration for a host
+hatch mcp remove host <host-name>
+
+# Remove with environment specification
+hatch mcp remove server <server-name> --host <host-name> --env-var <environment>
+```
+
+### Safety and Backup Features
+
+All management operations include comprehensive safety features:
+
+**Automatic Backup Creation**:
+```bash
+# Backup created automatically
+hatch mcp remove server test-server --host claude-desktop
+# Output: Backup created: ~/.hatch/mcp_backups/claude-desktop_20231201_143022.json
+```
+
+**Dry-Run Mode**:
+```bash
+# Preview changes without executing
+hatch mcp remove server test-server --host claude-desktop --dry-run
+hatch mcp sync --from-env prod --to-host all --dry-run
+```
+
+**Skip Backup (Advanced)**:
+```bash
+# Skip backup creation (use with caution)
+hatch mcp remove server test-server --host claude-desktop --no-backup
+```
+
+### Host Validation and Error Handling
+
+The system validates host names against available MCP host types:
+- `claude-desktop`
+- `cursor`
+- `vscode`
+- `lmstudio`
+- `gemini`
+- Additional hosts as configured
+
+Invalid host names result in clear error messages with available options listed.
+
+For complete command syntax and all available options, see [CLI Reference](CLIReference.md).
diff --git a/docs/articles/users/Troubleshooting/CICDIntegration.md b/docs/articles/users/Troubleshooting/CICDIntegration.md deleted file mode 100644 index 55eafb1..0000000 --- a/docs/articles/users/Troubleshooting/CICDIntegration.md +++ /dev/null @@ -1,178 +0,0 @@ -# CI/CD Integration Troubleshooting - -This guide helps resolve common issues when using Hatch in Continuous Integration/Continuous Deployment (CI/CD) pipelines and other automated environments. - -## Common CI/CD Issues - -### Package Installation Hangs in Pipelines - -**Problem:** Hatch package installation commands hang indefinitely in CI/CD pipelines, causing builds to timeout. - -**Cause:** Hatch prompts for user consent before installing dependencies, but CI/CD environments cannot provide interactive input. - -**Solution:** Use one of the following approaches to enable automatic approval: - -#### Option 1: Environment Variable (Recommended) -Set the `HATCH_AUTO_APPROVE` environment variable in your CI/CD configuration: - -```yaml -# GitHub Actions example -env: - HATCH_AUTO_APPROVE: "1" - -# GitLab CI example -variables: - HATCH_AUTO_APPROVE: "true" - -# Jenkins pipeline example -environment { - HATCH_AUTO_APPROVE = "yes" -} -``` - -#### Option 2: CLI Flag -Add the `--auto-approve` flag to your package installation commands: - -```bash -hatch package add my_package --auto-approve -hatch package add registry_package --version 1.0.0 --auto-approve -``` - -#### Option 3: Automatic Detection -Hatch automatically detects non-TTY environments and skips user prompts. This works out-of-the-box in most CI/CD systems without additional configuration. - -### Environment Variable Values - -The `HATCH_AUTO_APPROVE` environment variable accepts the following values (case-insensitive): -- `1` -- `true` -- `yes` - -Any other value will be ignored, and normal prompting behavior will occur in TTY environments. 
- -## CI/CD Platform Examples - -### GitHub Actions - -```yaml -name: Build and Test -on: [push, pull_request] - -jobs: - test: - runs-on: ubuntu-latest - env: - HATCH_AUTO_APPROVE: "1" - - steps: - - uses: actions/checkout@v3 - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: '3.9' - - - name: Install Hatch - run: pip install hatch - - - name: Install dependencies - run: hatch package add ./my_package -``` - -### GitLab CI - -```yaml -variables: - HATCH_AUTO_APPROVE: "true" - -test: - image: python:3.9 - script: - - pip install hatch - - hatch env create test-env - - hatch package add ./my_package --env test-env -``` - -### Jenkins Pipeline - -```groovy -pipeline { - agent any - environment { - HATCH_AUTO_APPROVE = "yes" - } - stages { - stage('Install Dependencies') { - steps { - sh 'pip install hatch' - sh 'hatch package add ./my_package' - } - } - } -} -``` - -### Docker Builds - -```dockerfile -FROM python:3.9 - -# Set environment variable for non-interactive installation -ENV HATCH_AUTO_APPROVE=1 - -# Install Hatch -RUN pip install hatch - -# Copy and install your package -COPY . /app -WORKDIR /app -RUN hatch package add ./my_package -``` - -## Troubleshooting Steps - -If you're still experiencing issues: - -1. **Verify Environment Detection:** - Check if your CI/CD environment is properly detected as non-TTY: - ```bash - python -c "import sys; print('TTY:', sys.stdin.isatty())" - ``` - This should print `TTY: False` in CI/CD environments. - -2. **Test Environment Variable:** - Verify the environment variable is set correctly: - ```bash - echo "HATCH_AUTO_APPROVE: $HATCH_AUTO_APPROVE" - ``` - -3. **Enable Verbose Logging:** - Add verbose logging to see what Hatch is doing: - ```bash - hatch package add ./my_package --verbose - ``` - -4. **Check for Blocking Input:** - If the process still hangs, check for other interactive prompts in your package installation process. - -## Best Practices - -1. 
**Use Environment Variables:** Set `HATCH_AUTO_APPROVE=1` in your CI/CD environment variables for consistent behavior across all commands. - -2. **Test Locally:** Test your CI/CD configuration locally using tools like `act` (for GitHub Actions) or Docker to simulate the CI environment. - -3. **Timeout Protection:** Set reasonable timeouts in your CI/CD configuration to prevent indefinite hanging: - ```yaml - # GitHub Actions - timeout-minutes: 10 - - # GitLab CI - timeout: 10m - ``` - -4. **Explicit Dependencies:** Consider using explicit dependency lists in your CI/CD scripts to make builds more predictable and faster. - -## Related Documentation - -- [CLI Reference - Environment Variables](../CLIReference.md#environment-variables) -- [CLI Reference - Package Add Command](../CLIReference.md#hatch-package-add) -- [Getting Started Guide](../GettingStarted.md) diff --git a/docs/articles/users/tutorials/01-getting-started/01-installation.md b/docs/articles/users/tutorials/01-getting-started/01-installation.md index b6d091b..3ba2232 100644 --- a/docs/articles/users/tutorials/01-getting-started/01-installation.md +++ b/docs/articles/users/tutorials/01-getting-started/01-installation.md @@ -28,7 +28,13 @@ This article covers the installation of Hatch, a package manager for Model Conte pip install -e . ``` -3. Verify the installation by checking the available commands: +3. Verify the installation by checking the version: + + ```bash + hatch --version + ``` + + You should see output like `hatch 0.6.1`. 
You can also view available commands: ```bash hatch --help diff --git a/docs/articles/users/tutorials/01-getting-started/03-install-package.md b/docs/articles/users/tutorials/01-getting-started/03-install-package.md index 24590bf..1944163 100644 --- a/docs/articles/users/tutorials/01-getting-started/03-install-package.md +++ b/docs/articles/users/tutorials/01-getting-started/03-install-package.md @@ -78,7 +78,7 @@ Hatch!'s package registry is almost empty as of now (until you contribute your o hatch package add /path/to/my-package ``` -If you don't have a local package yet, you can create one using the `hatch create` command. This will be covered in the [Author Package](../03-author-package/01-create-package.md) tutorial. For now though, let us proceed with the next step. +If you don't have a local package yet, you can create one using the `hatch create` command. This will be covered in the [Author Package](../03-author-package/01-generate-template.md) tutorial. For now though, let us proceed with the next step. ## Step 4: Verify Installation diff --git a/docs/articles/users/tutorials/03-author-package/05-checkpoint.md b/docs/articles/users/tutorials/03-author-package/05-checkpoint.md index 6c67fbd..923e88a 100644 --- a/docs/articles/users/tutorials/03-author-package/05-checkpoint.md +++ b/docs/articles/users/tutorials/03-author-package/05-checkpoint.md @@ -11,8 +11,13 @@ **Next steps:** +- Deploy your packages to MCP host platforms with [MCP Host Configuration](../04-mcp-host-configuration/01-host-platform-overview.md) - Explore advanced CLI features and configuration options - Learn about security considerations and troubleshooting - Understand the development architecture for contributing -You now have the fundamental skills to create, validate, and install Hatch packages. For more advanced topics, explore the [CLI Reference](../../CLIReference.md) and [Security and Trust](../../SecurityAndTrust.md) guides. 
+You now have the fundamental skills to create, validate, and install Hatch packages. + +**Continue to**: [Tutorial 04: MCP Host Configuration](../04-mcp-host-configuration/01-host-platform-overview.md) to learn how to deploy your packages to host platforms like Claude Desktop, VS Code, and Cursor with automatic dependency resolution. + +For more advanced topics, explore the [CLI Reference](../../CLIReference.md) and [Security and Trust](../../SecurityAndTrust.md) guides. diff --git a/docs/articles/users/tutorials/04-mcp-host-configuration/01-host-platform-overview.md b/docs/articles/users/tutorials/04-mcp-host-configuration/01-host-platform-overview.md new file mode 100644 index 0000000..752a380 --- /dev/null +++ b/docs/articles/users/tutorials/04-mcp-host-configuration/01-host-platform-overview.md @@ -0,0 +1,248 @@ +# 01: Host Platform Overview + +--- +**Concepts covered:** + +- MCP host platforms (Claude Desktop, VS Code, Cursor, etc.) +- Hatch's role as package manager with host configuration features +- Host platform configuration files and formats +- Package-first vs. direct configuration approaches + +**Skills you will practice:** + +- Discovering available host platforms +- Understanding host-specific requirements +- Planning deployment strategy (package-first preferred) +- Exploring configuration management concepts + +--- + +This article introduces MCP host configuration concepts and Hatch's role in managing MCP server deployments across different host platforms. + +## Understanding MCP Host Configuration + +### Hatch's Primary Role + +**Hatch is primarily an MCP package manager** where packages contain MCP servers. 
The MCP host configuration management feature was added to support diverse developer preferences for deployment: + +- **Primary Role**: Package manager for MCP servers with dependency resolution (Python, apt, Docker, other Hatch packages) +- **Supporting Feature**: Configures MCP servers (from Hatch packages or arbitrary sources) on host platforms +- **Configuration Management**: Synchronizes server configurations between Hatch environments and host applications +- **Scope Boundary**: Does NOT develop MCP servers or implement MCP protocol + +### Configuration vs. Development + +**What Hatch Does**: + +- โœ… Manages MCP server packages with dependencies +- โœ… Configures existing MCP servers on host platforms +- โœ… Synchronizes configurations across environments +- โœ… Manages backups and recovery + +**What Hatch Does NOT Do**: + +- โŒ Develop MCP servers (use any tools/frameworks) +- โŒ Implement MCP protocol +- โŒ Replace MCP development frameworks + +## Supported Host Platforms + +Hatch currently supports configuration for these MCP host platforms: + +- [**Claude Desktop**](https://claude.ai/download) - Anthropic's desktop application +- [**Claude Code**](https://claude.com/product/claude-code) - Anthropic's AI Command Line Interface +- [**Cursor**](https://cursor.com/) - AI-powered code editor +- [**VS Code**](https://code.visualstudio.com/) - Microsoft Visual Studio Code +- [**LM Studio**](https://lmstudio.ai/) - Local language model interface +- [**Gemini**](https://github.com/google-gemini/gemini-cli) - Google's AI Command Line Interface + +## Configuration Management Workflow + +### Complete Development-to-Deployment Pipeline + +```text +1. Develop MCP servers (using any tools/frameworks) + โ†“ +2. Package servers with Hatch ([Previous Tutorial](../03-author-package/01-generate-template.md)) + โ†“ +3. Deploy packages to host platforms (Tutorial 04-02) โ† PREFERRED + โ†“ +4. Alternative: Configure arbitrary servers (Tutorial 04-03) โ† ADVANCED + โ†“ +5. 
Multi-host package deployment (Tutorial 04-04) +``` + +### Two Deployment Approaches + +**Package-First Deployment (Recommended)**: + +- Use `hatch package add --host` for Hatch packages +- Automatic dependency resolution +- Guaranteed compatibility +- Environment isolation + +**Direct Server Configuration (Advanced)**: + +- Use `hatch mcp configure` for arbitrary servers +- Manual dependency management +- More control but more complexity +- Suitable for third-party servers + +### Choose Your Approach + +**Use Package-First Deployment When**: + +- โœ… You have Hatch packages (from Tutorial 03) +- โœ… You want automatic dependency resolution +- โœ… You need environment isolation +- โœ… You want rollback capabilities +- โœ… You're deploying to multiple hosts + +**Use Direct Configuration When**: + +- โœ… You have third-party MCP servers +- โœ… You need maximum control over configuration +- โœ… You're working with specialized server setups + +## Discovering Your Environment + +### Check Available Hosts + +```bash +# Search all detected host platforms +hatch mcp discover hosts +``` + +**Possible Output (depending on the software you have installed)**: + +```plaintext +Available MCP host platforms: + claude-desktop: โœ“ Available + Config path: path/to/claude_desktop_config.json + claude-code: โœ— Not detected + Config path: path/to/.claude/mcp_config.json + vscode: โœ— Not detected + Config path: path/to/.vscode/settings.json + cursor: โœ“ Available + Config path: path/to/.cursor/mcp.json + lmstudio: โœ“ Available + Config path: path/toLMStudio/mcp.json + gemini: โœ“ Available + Config path: path/to/.gemini/settings.json +``` + +### Check Current Environment + +```bash +# See your current Hatch environment +hatch env current + +# List available environments +hatch env list + +# List installed packages +hatch package list +``` + +## Configuration File Formats + +Typically, MCP hosts configuration file follow very similar structures; yet differences in the name of some 
fields or the presence/absence of other fields may require some adaptation. + +**Claude Desktop Configuration**: + +```json +{ + "mcpServers": { + "my-server": { + "command": "python", // system python; + // note that in the case of Hatch packages, + // we will use the python executable of the + // Hatch environment in which the package + // is installed + "args": ["/absolute/path/to/server.py"], + "env": { + "API_KEY": "value" + } + } + } +} +``` + +**VS Code Configuration**: + +```json +{ + "servers": { // VS Code uses "servers" as the root object + "my-server": { + "command": "python", // system python - same as above + "args": ["./relative/path/to/server.py"], + "env": { + "API_KEY": "value" + } + } + } +} +``` + +**Gemini Configuration**: + +```json +{ + "mcpServers": { + "my-server": { + "command": "python", // system python - same as above + "args": ["/absolute/path/to/server.py"], + "env": { + "API_KEY": "value" + }, + "trust": false, // typically doesn't exist outside of Gemini + "timeout": 30000 // typically doesn't exist outside of Gemini + } + } +} +``` + +## Safety and Best Practices + +### Backup Strategy + +Hatch automatically creates backups before making configuration changes: + +```bash +# Backups stored in ~/.hatch/mcp_host_config_backups/ +# Format: mcp.json.. +``` + +### Testing Strategy + +```bash +# Always preview changes first +hatch package add my_package --host claude-desktop --dry-run +hatch mcp configure my_package --host cursor --dry-run + +# Test in testing environment first +hatch env use package_testing +hatch package add . 
--host claude-desktop # from within the package directory +``` + +### Environment Isolation + +```bash +# Different environments maintain separate package versions +hatch env create package_testing_v2 +hatch env create team_standard_2024q4 + +# Each environment can have different MCP package versions +``` + +## Next Steps + +You now understand the MCP host configuration landscape and Hatch's role as a package manager with configuration capabilities. You're ready to start deploying MCP servers to host platforms. + +**Continue to**: [Tutorial 04-02: Configuring Hatch Packages](02-configuring-hatch-packages.md) to learn the **preferred deployment method** using Hatch packages with automatic dependency resolution. + +**Related Documentation**: + +- [CLI Reference](../../CLIReference.md) - Complete command syntax +- [Getting Started Guide](../../GettingStarted.md) - Basic Hatch concepts +- [Package Authoring Tutorial](../03-author-package/01-generate-template.md) - Creating packages for deployment diff --git a/docs/articles/users/tutorials/04-mcp-host-configuration/02-configuring-hatch-packages.md b/docs/articles/users/tutorials/04-mcp-host-configuration/02-configuring-hatch-packages.md new file mode 100644 index 0000000..a6cb022 --- /dev/null +++ b/docs/articles/users/tutorials/04-mcp-host-configuration/02-configuring-hatch-packages.md @@ -0,0 +1,283 @@ +# 02: Configuring Hatch Packages on MCP Hosts + +--- +**Concepts covered:** + +- Hatch package deployment with automatic dependency resolution +- `hatch package add --host` and `hatch package sync` commands +- Guaranteed dependency installation (Python, apt, Docker, other Hatch packages) +- Package-first deployment advantages over direct configuration + +**Skills you will practice:** + +- Using `hatch package add --host` for direct deployment +- Using `hatch package sync` for existing packages +- Validating complete dependency resolution +- Testing package functionality across different host platforms + +--- + +This 
article covers the **preferred method** for deploying MCP servers to host platforms using Hatch packages. This approach guarantees that all dependencies (Python packages, system packages, Docker containers, and other Hatch packages) are correctly installed before MCP host deployment. + +## Why Package-First Deployment? + +### Automatic Dependency Resolution + +Hatch packages include complete dependency specifications that are automatically resolved during deployment: + +```bash +# Package deployment handles ALL dependencies automatically +hatch package add my-weather-server --host claude-desktop +# โœ… Installs Python dependencies (requests, numpy, etc.) +# โœ… Installs system dependencies (curl, git, etc.) +# โœ… Installs Docker containers if specified +# โœ… Installs other Hatch package dependencies +# โœ… Configures MCP server on Claude Desktop +``` + +### Comparison with Direct Configuration + +**Package Deployment (Recommended)**: +- โœ… Automatic dependency resolution +- โœ… Guaranteed compatibility +- โœ… Single command deployment +- โœ… Environment isolation +- โœ… Rollback capabilities + +**Direct Configuration (Advanced)**: +- โŒ Manual dependency management required +- โŒ No compatibility guarantees +- โŒ Multiple setup steps +- โŒ Potential environment conflicts +- โŒ Limited rollback options + +## Step 1: Deploy Package to Single Host + +Use the package you created in [Tutorial 03](../03-author-package/01-generate-template.md) for this exercise. + +### Basic Package Deployment + +Deploy your package directly to a host platform: + +```bash +# Navigate to your package directory from Tutorial 03 +cd my_new_package + +# Deploy to Claude Desktop with automatic dependency resolution +hatch package add . --host claude-desktop +``` + +**Expected Output**: +``` +Successfully added package: my_new_package +Configuring MCP server for package 'my_new_package' on 1 host(s)... 
+โœ“ Configured my_new_package (my_new_package) on claude-desktop +MCP configuration completed: 1/1 hosts configured +``` + +### Verify Deployment + +Check that your package is properly configured: + +```bash +# List configured servers on Claude Desktop +hatch mcp list servers --host claude-desktop + +# Verify package installation +hatch package list +``` + +You should see your package listed in both the MCP server configuration and the installed packages. + +## Step 2: Deploy to Multiple Hosts + +Deploy your package to multiple host platforms simultaneously: + +```bash +# Deploy to multiple hosts +hatch package add . --host claude-desktop,cursor,vscode + +# Deploy to all available hosts +hatch package add . --host all +``` + +**Expected Behavior**: +- Dependencies are resolved once and applied to all hosts +- Each host receives appropriate configuration format +- All hosts are updated simultaneously +- Backup files are created for each host + +## Step 3: Sync Existing Packages + +If you have packages already installed in your environment, use `hatch package sync` to deploy them to hosts: + +### List Available Packages + +```bash +# See what packages are available for synchronization +hatch package list +``` + +### Sync Specific Package + +```bash +# Sync a specific package to hosts +hatch package sync my-weather-server --host claude-desktop + +# Sync multiple packages +hatch package sync weather-server,news-api --host all +``` + +### Sync All Packages + +```bash +# Sync all packages in current environment to hosts +hatch package sync --host claude-desktop,cursor +``` +The `hatch package sync` command syncs all packages that are already installed in the current environment. 
+ +## Step 4: Validate Dependency Resolution + +### Check Dependency Installation + +Verify that all dependencies were correctly installed: + +```bash +# Check Python environment +hatch env current +python -c "import requests, numpy; print('Dependencies available')" + +# Check system dependencies (Linux/macOS) +which curl +which git + +# Verify package functionality +python -c " +import sys +sys.path.insert(0, '.') +from my_new_package.tools import get_weather +print('Package tools accessible') +" +``` + +### Test MCP Server Functionality + +Test that your MCP server works correctly with the host platform: + +1. **Open Claude Desktop** (or your target host) +2. **Check MCP server status** in the application settings +3. **Test server functionality** by using the tools you implemented +4. **Verify error handling** by testing edge cases + +## Step 5: Environment-Specific Deployment + +Deploy packages with environment-specific configurations: + +### Development Environment + +```bash +# Switch to development environment +hatch env use development + +# Deploy with development settings +hatch package add . --host claude-desktop +``` + +### Production Environment + +```bash +# Switch to production environment +hatch env use production + +# Deploy with production settings +hatch package add . --host claude-desktop,cursor +``` + +**Key Difference**: Each environment maintains separate MCP server configurations, allowing you to test different versions or configurations without conflicts. + +## Step 6: Troubleshooting Package Deployment + +### Common Issues and Solutions + +**Dependency Installation Failures**: +```bash +# Check dependency resolution +hatch package add . --host claude-desktop --dry-run + +# View detailed dependency information +hatch validate . 
+``` + +**Host Configuration Errors**: +```bash +# Verify host availability +hatch mcp list hosts + +# Check host-specific requirements +hatch mcp configure --help +``` + +**Package Validation Issues**: +```bash +# Validate package structure +hatch validate . + +# Check package metadata +cat hatch_metadata.json +``` + +### Recovery Procedures + +**Rollback Failed Deployment**: +```bash +# Remove problematic configuration +hatch mcp remove server my-new-package --host claude-desktop + +# Restore from backup if needed +# (Backups are created automatically) +``` + +**Clean Environment Reset** + +```bash +# Remove all MCP configurations for host +hatch mcp remove host claude-desktop + +# Redeploy packages from the a hatch environment +hatch mcp sync --from-env env_name --to-host claude-desktop +``` + +**Note**: The `hatch mcp sync` command only syncs packages from one environment (or one host) at a time. If you want to re-sync other packages, you must run the command several times. + +## Best Practices + +### Package Development Workflow + +1. **Develop and test locally** using Tutorial 03 methods +2. **Validate package structure** with `hatch validate .` +3. **Deploy to development host** with `hatch package add . --host claude-desktop` +4. **Test functionality** in host application +5. 
**Deploy to production hosts** when ready + +### Dependency Management + +- **Use specific version pins** for critical dependencies +- **Test dependency resolution** with `--dry-run` before deployment +- **Keep package metadata current** as dependencies change +- **Document system requirements** in package documentation + +### Environment Isolation + +- **Use separate environments** for development, testing, and production +- **Deploy environment-specific packages** to appropriate hosts +- **Maintain environment boundaries** to prevent configuration conflicts + +## Next Steps + +You now understand the preferred method for deploying MCP servers using Hatch packages with automatic dependency resolution. This approach provides the most reliable and maintainable deployment workflow. + +**Continue to**: [Tutorial 04-03: Configuring Arbitrary Servers](03-configuring-arbitrary-servers.md) to learn the alternative direct configuration method for non-Hatch MCP servers. + +**Related Documentation**: +- [Package Commands Reference](../../CLIReference.md#hatch-package-package-management) - Complete command syntax +- [Package Authoring Tutorial](../03-author-package/01-generate-template.md) - Creating packages for deployment diff --git a/docs/articles/users/tutorials/04-mcp-host-configuration/03-configuring-arbitrary-servers.md b/docs/articles/users/tutorials/04-mcp-host-configuration/03-configuring-arbitrary-servers.md new file mode 100644 index 0000000..08f28e9 --- /dev/null +++ b/docs/articles/users/tutorials/04-mcp-host-configuration/03-configuring-arbitrary-servers.md @@ -0,0 +1,362 @@ +# 03: Configuring Arbitrary MCP Servers on MCP Hosts + +--- +**Concepts covered:** + +- Direct `hatch mcp configure` for non-Hatch MCP servers +- Local vs. 
remote server configuration +- Manual dependency management requirements +- Advanced configuration for specialized use cases + +**Skills you will practice:** + +- Using `hatch mcp configure` command for arbitrary servers +- Configuring both local and remote servers +- Understanding manual dependency management limitations +- Handling third-party MCP servers + +--- + +This article covers the **advanced method** for configuring MCP servers that are not packaged with Hatch. This approach provides maximum control but requires manual dependency management and is typically used for third-party servers or specialized configurations. + +## When to Use Direct Configuration + +### Appropriate Use Cases + +**Use Direct Configuration For**: +- โœ… Third-party MCP servers not available as Hatch packages +- โœ… Existing server infrastructure you want to integrate +- โœ… Specialized configurations requiring custom setup +- โœ… Remote MCP servers hosted elsewhere +- โœ… Legacy servers that cannot be easily packaged + +**Prefer Package Deployment For**: +- โœ… Servers you developed (see [Tutorial 04-02](02-configuring-hatch-packages.md)) +- โœ… Servers available as Hatch packages +- โœ… Servers requiring complex dependencies +- โœ… Servers you want to deploy across multiple environments + +### Trade-offs Understanding + +**Direct Configuration**: +- โœ… Maximum control over configuration +- โœ… Works with any MCP server +- โœ… No packaging requirements +- โŒ Manual dependency management +- โŒ No automatic compatibility checking + +**Package Deployment**: +- โœ… Automatic dependency resolution +- โœ… Guaranteed compatibility +- โœ… Environment isolation +- โŒ Requires Hatch package format +- โŒ Less configuration flexibility + +## Step 1: Configure Local MCP Server + +### Basic Local Server Configuration + +Configure a local MCP server that you have already installed: + +```bash +# Configure a local Python MCP server +hatch mcp configure weather-api \ + --host claude-desktop \ + 
--command python \ + --args /path/to/weather_server.py + +# Configure with environment variables +hatch mcp configure news-api \ + --host claude-desktop \ + --command python \ + --args /path/to/news_server.py \ + --env API_KEY=your_api_key \ + --env DEBUG=true +``` + +**Expected Output**: +``` +Server 'weather-api' created for host 'claude-desktop': + name: UPDATED None --> 'weather-api' + command: UPDATED None --> 'python' + args: UPDATED None --> ['/path/to/weather_server.py'] + env: UPDATED None --> {'API_KEY': 'your_api_key', 'DEBUG': 'true'} + +Configuring MCP server 'weather-api' on host 'claude-desktop'? [y/N]: y +[SUCCESS] Successfully configured MCP server 'weather-api' on host 'claude-desktop' +``` + +### Verify Local Configuration + +```bash +# Check that the server is configured +hatch mcp list servers --host claude-desktop + +# Test the configuration +python /path/to/weather_server.py --help +``` + +### Manual Dependency Management + +**Important**: Unlike package deployment, you must ensure all dependencies are installed manually: + +```bash +# Install Python dependencies manually +pip install requests numpy pandas + +# Install system dependencies (Linux/macOS) +sudo apt-get install curl git # Ubuntu/Debian +brew install curl git # macOS + +# Verify dependencies +python -c "import requests, numpy, pandas; print('Dependencies available')" +``` + +## Step 2: Configure Remote MCP Server + +### Remote Server Configuration + +Configure an MCP server hosted on a remote URL: + +```bash +# Configure remote MCP server +hatch mcp configure remote-api \ + --host gemini \ + --url https://api.example.com/mcp \ + --header "Authorization=Bearer_your_token" \ + --header "Content-Type=application/json" +``` + +### Remote Server with Authentication + +```bash +# Configure with multiple headers for authentication +hatch mcp configure secure-api \ + --host gemini \ + --url https://secure-api.example.com/mcp \ + --header "Authorization=Bearer_token" \ + --header 
"X-API-Key=your_api_key" \ + --header "User-Agent=HatchMCP/1.0" +``` + +**Expected Output**: +``` +Server 'secure-api' created for host 'gemini': + name: UPDATED None --> 'secure-api' + url: UPDATED None --> 'https://secure-api.example.com/mcp' + headers: UPDATED None --> {'Authorization': 'Bearer_token', 'X-API-Key': 'your_api_key', 'User-Agent': 'HatchMCP/1.0'} + +Configuring MCP server 'secure-api' on host 'gemini'? [y/N]: y +[SUCCESS] Successfully configured MCP server 'secure-api' on host 'gemini' +``` + +### Verify Remote Configuration + +```bash +# Test remote server connectivity +curl -H "Authorization: Bearer_token" \ + -H "X-API-Key: your_api_key" \ + https://secure-api.example.com/mcp/health + +# Check configuration +hatch mcp list servers --host cursor +``` + +## Step 3: Multi-Host Configuration + +### Deploy to Multiple Hosts + +Configure the same server across multiple host platforms: + +```bash +# Configure on multiple hosts simultaneously +hatch mcp configure file-manager \ + --host claude-desktop,cursor,vscode \ + --command python \ + --args /path/to/file_manager.py \ + --env HOME_DIR=/home/user + +# Configure on all available hosts +hatch mcp configure system-tools \ + --host all \ + --command python \ + --args /path/to/system_tools.py +``` + +### Host-Specific Considerations + +**Claude Desktop Requirements**: +- Must use absolute paths for commands +- Environment variables fully supported +- JSON configuration format + +**VS Code Requirements**: +- Can use relative paths in workspace context +- Limited environment variable support +- JSONC configuration format + +**Cursor Requirements**: +- Similar to VS Code but with AI-specific features +- Custom configuration location + +## Step 4: Advanced Configuration Patterns + +### Complex Command Arguments + +```bash +# Server with multiple arguments +hatch mcp configure data-processor \ + --host claude-desktop \ + --command python \ + --args /path/to/processor.py \ + --args 
--config=/path/to/config.json \ + --args --verbose \ + --args --workers=4 +``` + +### Environment-Specific Configuration + +```bash +# Testing configuration +hatch env use package_testing +hatch mcp configure test-server \ + --host claude-desktop \ + --command python \ + --args /path/to/test_server.py \ + --env DEBUG=true \ + --env LOG_LEVEL=debug + +# Team standard configuration +hatch env use team_standard_2024q4 +hatch mcp configure team-server \ + --host claude-desktop \ + --command python \ + --args /path/to/team_server.py \ + --env DEBUG=false \ + --env LOG_LEVEL=info +``` + +## Step 5: Troubleshooting Direct Configuration + +### Common Configuration Issues + +**Path Resolution Problems**: +```bash +# Use absolute paths for Claude Desktop +hatch mcp configure my-server \ + --host claude-desktop \ + --command python \ + --args $(pwd)/server.py # Converts to absolute path + +# Check path accessibility +ls -la /path/to/server.py +python /path/to/server.py --help +``` + +**Dependency Issues**: +```bash +# Verify Python environment +which python +python --version + +# Check module availability +python -c "import required_module" + +# Install missing dependencies +pip install missing_package +``` + +**Permission Problems**: +```bash +# Check file permissions +ls -la /path/to/server.py +chmod +x /path/to/server.py + +# Check directory permissions +ls -la /path/to/ +``` + +### Configuration Validation + +```bash +# Preview configuration before applying +hatch mcp configure test-server \ + --host claude-desktop \ + --command python \ + --args /path/to/server.py \ + --dry-run + +# Validate existing configuration +hatch mcp list servers --host claude-desktop +``` + +### Recovery Procedures + +**Remove Problematic Configuration**: +```bash +# Remove specific server +hatch mcp remove server problematic-server --host claude-desktop + +# Remove all servers from host +hatch mcp remove host claude-desktop +``` + +**Restore from Backup**: +```bash +# Backups are created 
automatically +# Location: ~/.hatch/mcp_backups/ +# Format: _.json + +# Manual restoration (if needed) +cp ~/.hatch/mcp_backups/claude-desktop_20231201_143022.json \ + ~/.config/claude/claude_desktop_config.json +``` + +## Best Practices for Direct Configuration + +### Dependency Management + +1. **Document Dependencies**: Maintain clear documentation of required dependencies +2. **Version Pinning**: Use specific versions for critical dependencies +3. **Environment Testing**: Test in clean environments to verify dependencies +4. **Dependency Scripts**: Create installation scripts for complex setups + +### Configuration Management + +1. **Use Absolute Paths**: Especially for Claude Desktop configurations +2. **Environment Variables**: Use environment variables for sensitive data +3. **Configuration Validation**: Always test configurations before deployment +4. **Backup Strategy**: Rely on automatic backups, but verify they're created + +### Security Considerations + +1. **Sensitive Data**: Use environment variables, not command arguments +2. **File Permissions**: Ensure proper permissions on server files +3. **Network Security**: Use HTTPS for remote servers +4. **Authentication**: Implement proper authentication for remote servers + +## Comparison with Package Deployment + +### When Each Approach Excels + +**Direct Configuration Excels For**: +- Third-party servers you cannot modify +- Existing infrastructure integration +- Maximum configuration control +- Remote server integration + +**Package Deployment Excels For**: +- Servers you develop or control +- Complex dependency requirements +- Multi-environment deployments +- Automated deployment workflows + +## Next Steps + +You now understand how to configure arbitrary MCP servers using direct configuration. This advanced method provides maximum flexibility but requires careful dependency management. 
+ +**Continue to**: [Tutorial 04-04: Environment Synchronization](04-environment-synchronization.md) to learn how to synchronize MCP configurations across environments and hosts. + +**Related Documentation**: +- [MCP Commands Reference](../../CLIReference.md) - Complete command syntax +- [Package Deployment Tutorial](02-configuring-hatch-packages.md) - Preferred deployment method diff --git a/docs/articles/users/tutorials/04-mcp-host-configuration/04-environment-synchronization.md b/docs/articles/users/tutorials/04-mcp-host-configuration/04-environment-synchronization.md new file mode 100644 index 0000000..c2a90c4 --- /dev/null +++ b/docs/articles/users/tutorials/04-mcp-host-configuration/04-environment-synchronization.md @@ -0,0 +1,322 @@ +# 04: Multi-Host Package Deployment + +--- +**Concepts covered:** + +- Using environments as project isolation containers +- Deploying MCP servers to multiple host platforms +- Project-specific configuration management +- Selective deployment patterns + +**Skills you will practice:** + +- Creating project-isolated environments +- Synchronizing project servers to multiple hosts +- Managing project-specific host configurations +- Using selective deployment for partial rollouts + +--- + +This tutorial teaches you how to deploy MCP servers to multiple host platforms using environments as project isolation containers. You'll learn to maintain clean separation between different projects while efficiently deploying their servers to host applications like Claude Desktop, Cursor, and VS Code. + +## Understanding Project Isolation with Environments + +### Environments as Project Containers + +Hatch environments serve as isolated containers for different projects, not development lifecycle stages. This approach provides: + +1. **Project Separation**: Keep project_alpha servers separate from project-beta servers +2. **Configuration Isolation**: Avoid naming conflicts between projects +3. 
**Selective Deployment**: Deploy only relevant servers to specific hosts +4. **Clean Management**: Maintain project-specific configurations independently + +### Project Isolation vs. Direct Configuration + +**Project-Isolated Environments**: +- โœ… Clean separation between projects +- โœ… Batch deployment of project servers +- โœ… Consistent project-specific configurations +- โœ… Reduced configuration conflicts + +**Direct Configuration** (from previous tutorials): +- โœ… Immediate deployment to hosts +- โœ… Maximum control over individual servers +- โŒ No project isolation benefits +- โŒ Manual configuration management + +## Step 1: Create Project Environments + +### Create Domain-Neutral Project Environments + +Create environments using project-focused naming (not lifecycle stages): + +```bash +# Create project environments +hatch env create project_alpha +hatch env create project_beta + +# Verify environments were created +hatch env list +``` + +### Configure Project_Alpha Servers + +Add MCP servers to your first project environment: + +```bash +# Activate project_alpha environment +hatch env use project_alpha + +# Add servers via packages (recommended approach) +hatch package add weather-toolkit +hatch package add team-utilities + +# Verify project_alpha configuration +hatch mcp list servers +``` + +### Configure Project-Beta Servers + +Set up a different project with its own server set: + +```bash +# Activate project-beta environment +hatch env use project_beta + +# Add different servers for this project +hatch package add analytics-suite + +# Verify project-beta configuration +hatch mcp list servers +``` + +### Verify Project Isolation + +Confirm that environments maintain separate configurations: + +```bash +# Check project_alpha servers +hatch env use project_alpha +hatch mcp list servers +# Should show: weather-toolkit, team-utilities + +# Check project-beta servers +hatch env use project_beta +hatch mcp list servers +# Should show: analytics-suite +``` 
+ +## Step 2: Deploy Project Servers to Hosts + +### Deploy Project_Alpha to Multiple Hosts + +Deploy all servers from project_alpha to your target host platforms: + +```bash +# Deploy project_alpha servers to Claude Desktop and Cursor +hatch env use project_alpha +hatch mcp sync --from-env project_alpha --to-host claude-desktop,cursor +``` + +**Expected Output**: + +```text +Synchronize MCP configurations from host 'claude-desktop' to 1 host(s)? [y/N]: y +[SUCCESS] Synchronization completed + Servers synced: 4 + Hosts updated: 1 + โœ“ cursor (backup: path\to\.hatch\mcp_host_config_backups\cursor\mcp.json.cursor.20251124_225305_495653) +``` + +### Deploy Project-Beta to All Hosts + +Deploy project-beta servers to all detected host platforms: + +```bash +# Deploy project_beta servers to all detected hosts +hatch env use project_beta +hatch mcp sync --from-env project_beta --to-host all +``` + +**Real Behavior**: The `--to-host all` flag automatically detects and syncs to all available host platforms that Hatch can find (listed by `hatch mcp discover hosts`). This is a convenient way to ensure your project's servers are configured on every host applications are installed. 
+ +### Verify Project Deployments + +Check what was deployed to each host for each project: + +```bash +# Check project_alpha deployments +hatch env use project_alpha +hatch mcp list servers + +# Check project_beta deployments +hatch env use project_beta +hatch mcp list servers +``` + +## Step 3: Selective Deployment Patterns + +### Deploy Specific Servers + +Deploy only a subset of servers from a project environment: + +```bash +# Deploy only weather-toolkit from project_alpha to Claude Desktop +hatch env use project_alpha +hatch mcp sync --from-env project_alpha \ + --to-host claude-desktop \ + --servers weather-toolkit +``` + +### Pattern-Based Deployment + +Use regular expressions for selective deployment: + +```bash +# Deploy servers matching a pattern from project_alpha +hatch mcp sync --from-env project_alpha \ + --to-host cursor \ + --pattern ".*util.*" + +# Deploy API-related servers from project_beta +hatch env use project_beta +hatch mcp sync --from-env project_beta \ + --to-host claude-desktop \ + --pattern ".*api.*" +``` + +## Step 4: Project Maintenance Workflows + +### Remove Server from Host + +Remove a specific server from a host for the current project: + +```bash +# Remove weather-toolkit from Cursor for project_alpha +hatch env use project_alpha +hatch mcp remove server weather-toolkit --host cursor +``` + +### Remove All Project Servers from Host + +Remove all servers for the current project from a host: + +```bash +# Remove all project_alpha configurations from Claude Desktop +hatch env use project_alpha +hatch mcp remove host claude-desktop +``` + +### Restore Host Configuration + +```bash +# Restore a previous host configuration (then continue with project workflow) +hatch mcp backup restore claude-desktop +``` + +Will restore the latest backup available. For a more granular restoration, you can specific a backup file with `--backup-file BACKUP_FILE` (or `-f BACKUP_FILE`). 
Backup files can be listed with `hatch mcp backup list claude-desktop`. + +## Step 5: Validation and Troubleshooting + +### Verify Project Deployments + +Use environment-scoped commands to verify your project configurations: + +```bash +# Check project_alpha server deployments +hatch env use project_alpha +hatch mcp list servers + +# Check which hosts have project_alpha servers configured +hatch mcp list hosts +``` + +### Common Project Isolation Issues + +**Server Name Conflicts**: + +```bash +# If projects have conflicting server names, rename them +hatch env use project_alpha +hatch mcp remove server conflicting-name --host claude-desktop +hatch package add unique-server-name +``` + +**Environment Confusion**: + +```bash +# Always verify current environment before operations +hatch env list +hatch env use project_alpha # Explicitly set environment +``` + +### Backup and Recovery for Projects + +**Verify Automatic Backups**: + +Hatch creates automatic backups before any configuration changes. You don't need to create them manually. + +```bash +# List available backups (always created automatically) +hatch mcp backup list --host claude-desktop + +# Clean old backups if needed +hatch mcp backup clean claude-desktop --keep-count 10 +``` + +**Restore Project Configuration**: + +```bash +# Restore from specific backup +hatch mcp backup restore claude-desktop project_alpha-stable + +# Then re-sync current project if needed +hatch env use project_alpha +hatch mcp sync --from-env project_alpha --to-host claude-desktop +``` + +## Step 6: Best Practices for Project Isolation + +### Project Environment Organization + +1. **Clear Naming**: Use project-focused names (`project_alpha`, `project_beta`) not lifecycle stages +2. **Purpose Separation**: Keep each project's servers in separate environments +3. **Documentation**: Document what each project environment contains and its purpose + +### Deployment Strategy + +1. 
**Test First**: Always use `--dry-run` before large deployments +2. **Selective Deployment**: Use `--servers` or `--pattern` for partial rollouts +3. **Backup Verification**: Verify automatic backups were created after changes +4. **Environment Validation**: Test project configurations before deployment + +### Project Workflow Integration + +1. **Environment Switching**: Always verify current environment before operations +2. **Host Specialization**: Deploy different projects to appropriate hosts +3. **Automation**: Use `--auto-approve` for scripted project deployments +4. **Recovery Planning**: Maintain clear rollback procedures for each project + +### Safe Automation Example + +```bash +#!/usr/bin/env bash +set -euo pipefail + +project_env="project_alpha" +target_hosts="claude-desktop,cursor" + +echo "Previewing deployment of $project_env to $target_hosts" +hatch mcp sync --from-env "$project_env" --to-host "$target_hosts" --dry-run + +echo "Applying changes" +hatch mcp sync --from-env "$project_env" --to-host "$target_hosts" --auto-approve +``` + +**Related Documentation**: + +- [MCP Sync Commands Reference](../../CLIReference.md#hatch-mcp-sync) - Complete command syntax +- [Environment Management Tutorial](../02-environments/01-manage-envs.md) - Advanced environment operations + +> Previous: [Edit Metadata](03-configuring-arbitrary-servers.md) +> Next: [Checkpoint](05-checkpoint.md) diff --git a/docs/articles/users/tutorials/04-mcp-host-configuration/05-checkpoint.md b/docs/articles/users/tutorials/04-mcp-host-configuration/05-checkpoint.md new file mode 100644 index 0000000..0799ac0 --- /dev/null +++ b/docs/articles/users/tutorials/04-mcp-host-configuration/05-checkpoint.md @@ -0,0 +1,164 @@ +# Checkpoint: MCP Host Configuration + +**What you've accomplished:** + +- Understood Hatch's role as an MCP package manager with host configuration features +- Mastered package-first deployment with automatic dependency resolution +- Learned direct configuration for 
arbitrary MCP servers +- Implemented environment & MCP hosts synchronization workflows + +You now have comprehensive skills for managing MCP server deployments across different host platforms using Hatch's configuration management capabilities. For more advanced topics, explore the [CLI Reference](../../CLIReference.md) and [MCP Host Configuration Guide](../../MCPHostConfiguration.md). + +## Skills Mastery Summary + +### Package-First Deployment +โœ… **Automatic Dependency Resolution**: Deploy Hatch packages with guaranteed dependency installation +โœ… **Multi-Host Deployment**: Deploy packages to multiple host platforms simultaneously +โœ… **Environment Integration**: Use Hatch environment isolation for organized deployments +โœ… **Rollback Capabilities**: Use automatic backups for safe deployments + +### Direct Server Configuration (Advanced Method) +โœ… **Third-Party Integration**: Configure arbitrary MCP servers not packaged with Hatch +โœ… **Cross-Environment Deployment**: Synchronize MCP configurations between Hatch environments and hosts +โœ… **Host-to-Host Copying**: Replicate configurations directly between host platforms +โœ… **Pattern-Based Filtering**: Use regular expressions for precise server selection + +## Deployment Strategy Decision Framework + +### Choose Package-First Deployment When: +- โœ… You have Hatch packages (from [Tutorial 03](../03-author-package/01-generate-template.md)) +- โœ… You want automatic dependency resolution +- โœ… You need environment isolation and rollback capabilities +- โœ… You want the most reliable and maintainable deployment workflow + +### Choose Direct Configuration When: +- โœ… You have third-party MCP servers not available as Hatch packages +- โœ… You need maximum control over server configuration +- โœ… You're integrating existing server infrastructure +- โœ… You're working with remote MCP servers +- โœ… You have specialized configuration requirements + +### Choose Environment Synchronization When: +- โœ… You want 
to leverage environment isolation +- โœ… You need to deploy environment-specific server sets to MCP hosts + +### Use Advanced Synchronization When: +- โœ… You need host-to-host configuration replication +- โœ… You want pattern-based server filtering and selection + +## Integration with Hatch Ecosystem + +### Complete Development-to-Deployment Pipeline + +``` +1. Package Development (Tutorial 03) + โ†“ +2. Package-First Deployment (Tutorial 04-02) โ† PREFERRED + โ†“ +3. Environment Synchronization (Tutorial 04-04) + โ†“ +4. Advanced Patterns & Production Deployment +``` + +### Hatch Feature Integration + +**Environment Management** ([Tutorial 02](../02-environments/01-manage-envs.md)): +- Create isolated environments for different projects +- Maintain separate package sets for development/production +- Use environment synchronization for deployment + +**Package Management** ([Tutorial 03](../03-author-package/01-generate-template.md)): +- Develop MCP servers as Hatch packages +- Include complete dependency specifications +- Deploy packages with automatic dependency resolution + +**Host Configuration** (This Tutorial Series): +- Configure MCP servers on host platforms +- Synchronize configurations across environments +- Manage deployment workflows effectively + +## Practical Usage Guide + +### Working with Multiple Hosts +- Use `hatch mcp discover hosts` to see available host platforms +- Hosts must be installed and accessible for configuration +- Different hosts have different configuration requirements (paths, formats) +- Use `--dry-run` to preview changes before applying to multiple hosts + +### Understanding Automatic Backups +- Backups are created automatically before any configuration change +- Located in `~/.hatch/mcp_host_config_backups/` with timestamp naming +- Use `hatch mcp backup list ` to see available backups +- Use `hatch mcp backup restore ` to restore from backups +- No manual backup creation needed - the system handles this for safety + +### 
Environment and Package Coordination +- `hatch package add --host` installs package AND configures on hosts +- `hatch package sync` only syncs packages already installed in environment +- Use separate environments for different projects (not lifecycle stages) +- Environment names must use underscores, not hyphens (alphanumeric + underscore only) + +## Troubleshooting Quick Reference + +### Common Issues and Solutions + +**Package Deployment Failures**: +- Verify package structure with `hatch validate .` +- Check dependency resolution with `--dry-run` +- Ensure all dependencies are properly specified + +**Host Configuration Errors**: +- Verify host platform installation and configuration +- Check file permissions and path accessibility +- Use absolute paths for Claude Desktop configurations + +**Synchronization Problems**: +- Verify source environment or host exists +- Check target host availability and permissions +- Use `--dry-run` to preview synchronization changes + +**Environment Issues**: +- List available environments with `hatch env list` +- Verify current environment with `hatch env current` +- Check package installation with `hatch package list` + +**Practical Diagnostics**: +- Check host platform detection: `hatch mcp discover hosts` +- List configured servers: `hatch mcp list servers --env ` +- Check server configuration details: `hatch mcp list servers --env --host ` +- Validate package structure: `hatch validate ` +- Test configuration preview: `--dry-run` flag on any command +- Check backup status: `hatch mcp backup list ` + +### Recovery Procedures + +**Configuration Rollback**: +```bash +# Remove problematic configuration +hatch mcp remove server --host + +# Restore from automatic backup +# (Backups created automatically in ~/.hatch/mcp_backups/) +``` + +**Environment Recovery**: +```bash +# Switch to known good environment +hatch env use + +# Re-sync to hosts +hatch mcp sync --from-env --to-host +``` + +## Conclusion + +You have successfully 
mastered MCP host configuration using Hatch's comprehensive deployment and synchronization capabilities. You can now: + +- Deploy MCP servers reliably using package-first deployment +- Handle complex scenarios with direct configuration +- Manage multi-environment workflows with synchronization +- Troubleshoot and recover from deployment issues + +These skills enable you to effectively manage MCP server deployments in any environment, from individual development setups to enterprise-scale production deployments. The combination of Hatch's package management capabilities with host configuration features provides a powerful foundation for MCP server lifecycle management. + +**Welcome to advanced MCP host configuration mastery!** Continue exploring Hatch's capabilities and contributing to the MCP ecosystem. diff --git a/docs/index.md b/docs/index.md index f12bca2..295e5d8 100644 --- a/docs/index.md +++ b/docs/index.md @@ -12,34 +12,37 @@ Hatch provides powerful tools for managing MCP server packages, environments, an - **[Getting Started](./articles/users/GettingStarted.md)** - Quick start guide for using Hatch - **[Command Reference](./articles/users/CLIReference.md)** - Complete CLI command documentation +- **[MCP Host Configuration](./articles/users/MCPHostConfiguration.md)** - Configure MCP servers across different host platforms - **[Tutorials Start](./articles/users/tutorials/01-getting-started/01-installation.md)** - Step-by-step guides for your journey from installation to authoring Hatch packages for MCP server easy sharing. ### For Developers Comprehensive documentation for developers and contributors working on the Hatch codebase. -#### [Architecture](./articles/devs/architecture/) +#### [Architecture](./articles/devs/architecture/index.md) High-level system understanding and design patterns for developers getting familiar with the Hatch codebase. 
- [System Overview](./articles/devs/architecture/system_overview.md) - Introduction to Hatch's architecture - [Component Architecture](./articles/devs/architecture/component_architecture.md) - Detailed component breakdown +- [MCP Host Configuration](./articles/devs/architecture/mcp_host_configuration.md) - Architecture for MCP host configuration management -#### [Implementation Guides](./articles/devs/implementation_guides/) +#### [Implementation Guides](./articles/devs/implementation_guides/index.md) Technical how-to guides for implementing specific features and extending the system. - [Adding New Installers](./articles/devs/implementation_guides/adding_installers.md) - Implementing new dependency installer types - [Registry Integration](./articles/devs/implementation_guides/registry_integration.md) - Working with package registries +- [MCP Host Configuration Extension](./articles/devs/implementation_guides/mcp_host_configuration_extension.md) - Adding support for new MCP host platforms -#### [Development Processes](./articles/devs/development_processes/) +#### [Development Processes](./articles/devs/development_processes/index.md) Workflow, standards, and processes for effective development on the Hatch project. - [Developer Onboarding](./articles/devs/development_processes/developer_onboarding.md) - Setting up your development environment - [Testing Standards](./articles/devs/development_processes/testing_standards.md) - Testing requirements and best practices -#### [Contribution Guidelines](./articles/devs/contribution_guides/) +#### [Contribution Guidelines](./articles/devs/contribution_guides/index.md) Process-focused guidance for contributing to the Hatch project. @@ -48,8 +51,6 @@ Process-focused guidance for contributing to the Hatch project. 
## Quick Links -- **[Architecture Diagram](./resources/diagrams/architecture.puml)** - Visual overview of system components -- **[Source Code](../hatch/)** - Main Hatch package source code - **[GitHub Repository](https://github.com/CrackingShells/Hatch)** - Project repository - **[Hatchling Integration](https://github.com/CrackingShells/Hatchling)** - Primary consumer of Hatch diff --git a/hatch/__init__.py b/hatch/__init__.py index 60c0aaa..e7f401b 100644 --- a/hatch/__init__.py +++ b/hatch/__init__.py @@ -5,8 +5,6 @@ and interacting with the Hatch registry. """ -__version__ = "0.4.0" - from .cli_hatch import main from .environment_manager import HatchEnvironmentManager from .package_loader import HatchPackageLoader, PackageLoaderError diff --git a/hatch/cli_hatch.py b/hatch/cli_hatch.py index fd29a60..87a3318 100644 --- a/hatch/cli_hatch.py +++ b/hatch/cli_hatch.py @@ -8,157 +8,1846 @@ """ import argparse +import json import logging +import shlex import sys +from importlib.metadata import PackageNotFoundError, version from pathlib import Path +from typing import List, Optional -from hatch.environment_manager import HatchEnvironmentManager from hatch_validator import HatchPackageValidator +from hatch_validator.package.package_service import PackageService + +from hatch.environment_manager import HatchEnvironmentManager +from hatch.mcp_host_config import ( + MCPHostConfigurationManager, + MCPHostRegistry, + MCPHostType, + MCPServerConfig, +) +from hatch.mcp_host_config.models import HOST_MODEL_REGISTRY, MCPServerConfigOmni +from hatch.mcp_host_config.reporting import display_report, generate_conversion_report from hatch.template_generator import create_package_template + +def get_hatch_version() -> str: + """Get Hatch version from package metadata. + + Returns: + str: Version string from package metadata, or 'unknown (development mode)' + if package is not installed. 
+ """ + try: + return version("hatch") + except PackageNotFoundError: + return "unknown (development mode)" + + +def parse_host_list(host_arg: str): + """Parse comma-separated host list or 'all'.""" + if not host_arg: + return [] + + if host_arg.lower() == "all": + return MCPHostRegistry.detect_available_hosts() + + hosts = [] + for host_str in host_arg.split(","): + host_str = host_str.strip() + try: + host_type = MCPHostType(host_str) + hosts.append(host_type) + except ValueError: + available = [h.value for h in MCPHostType] + raise ValueError(f"Unknown host '{host_str}'. Available: {available}") + + return hosts + + +def request_confirmation(message: str, auto_approve: bool = False) -> bool: + """Request user confirmation with non-TTY support following Hatch patterns.""" + import os + import sys + + # Check for auto-approve first + if auto_approve or os.getenv("HATCH_AUTO_APPROVE", "").lower() in ( + "1", + "true", + "yes", + ): + return True + + # Interactive mode - request user input (works in both TTY and test environments) + try: + while True: + response = input(f"{message} [y/N]: ").strip().lower() + if response in ["y", "yes"]: + return True + elif response in ["n", "no", ""]: + return False + else: + print("Please enter 'y' for yes or 'n' for no.") + except (EOFError, KeyboardInterrupt): + # Only auto-approve on EOF/interrupt if not in TTY (non-interactive environment) + if not sys.stdin.isatty(): + return True + return False + + +def get_package_mcp_server_config( + env_manager: HatchEnvironmentManager, env_name: str, package_name: str +) -> MCPServerConfig: + """Get MCP server configuration for a package using existing APIs.""" + try: + # Get package info from environment + packages = env_manager.list_packages(env_name) + package_info = next( + (pkg for pkg in packages if pkg["name"] == package_name), None + ) + + if not package_info: + raise ValueError( + f"Package '{package_name}' not found in environment '{env_name}'" + ) + + # Load package metadata 
using existing pattern from environment_manager.py:716-727 + package_path = Path(package_info["source"]["path"]) + metadata_path = package_path / "hatch_metadata.json" + + if not metadata_path.exists(): + raise ValueError( + f"Package '{package_name}' is not a Hatch package (no hatch_metadata.json)" + ) + + with open(metadata_path, "r") as f: + metadata = json.load(f) + + # Use PackageService for schema-aware access + from hatch_validator.package.package_service import PackageService + + package_service = PackageService(metadata) + + # Get the HatchMCP entry point (this handles both v1.2.0 and v1.2.1 schemas) + mcp_entry_point = package_service.get_mcp_entry_point() + if not mcp_entry_point: + raise ValueError( + f"Package '{package_name}' does not have a HatchMCP entry point" + ) + + # Get environment-specific Python executable + python_executable = env_manager.get_current_python_executable() + if not python_executable: + # Fallback to system Python if no environment-specific Python available + python_executable = "python" + + # Create server configuration + server_path = str(package_path / mcp_entry_point) + server_config = MCPServerConfig( + name=package_name, command=python_executable, args=[server_path], env={} + ) + + return server_config + + except Exception as e: + raise ValueError( + f"Failed to get MCP server config for package '{package_name}': {e}" + ) + + +def handle_mcp_discover_hosts(): + """Handle 'hatch mcp discover hosts' command.""" + try: + # Import strategies to trigger registration + import hatch.mcp_host_config.strategies + + available_hosts = MCPHostRegistry.detect_available_hosts() + print("Available MCP host platforms:") + + for host_type in MCPHostType: + try: + strategy = MCPHostRegistry.get_strategy(host_type) + config_path = strategy.get_config_path() + is_available = host_type in available_hosts + + status = "โœ“ Available" if is_available else "โœ— Not detected" + print(f" {host_type.value}: {status}") + if config_path: + print(f" 
Config path: {config_path}") + except Exception as e: + print(f" {host_type.value}: Error - {e}") + + return 0 + except Exception as e: + print(f"Error discovering hosts: {e}") + return 1 + + +def handle_mcp_discover_servers( + env_manager: HatchEnvironmentManager, env_name: Optional[str] = None +): + """Handle 'hatch mcp discover servers' command.""" + try: + env_name = env_name or env_manager.get_current_environment() + + if not env_manager.environment_exists(env_name): + print(f"Error: Environment '{env_name}' does not exist") + return 1 + + packages = env_manager.list_packages(env_name) + mcp_packages = [] + + for package in packages: + try: + # Check if package has MCP server entry point + server_config = get_package_mcp_server_config( + env_manager, env_name, package["name"] + ) + mcp_packages.append( + {"package": package, "server_config": server_config} + ) + except ValueError: + # Package doesn't have MCP server + continue + + if not mcp_packages: + print(f"No MCP servers found in environment '{env_name}'") + return 0 + + print(f"MCP servers in environment '{env_name}':") + for item in mcp_packages: + package = item["package"] + server_config = item["server_config"] + print(f" {server_config.name}:") + print( + f" Package: {package['name']} v{package.get('version', 'unknown')}" + ) + print(f" Command: {server_config.command}") + print(f" Args: {server_config.args}") + if server_config.env: + print(f" Environment: {server_config.env}") + + return 0 + except Exception as e: + print(f"Error discovering servers: {e}") + return 1 + + +def handle_mcp_list_hosts( + env_manager: HatchEnvironmentManager, + env_name: Optional[str] = None, + detailed: bool = False, +): + """Handle 'hatch mcp list hosts' command - shows configured hosts in environment.""" + try: + from collections import defaultdict + + # Resolve environment name + target_env = env_name or env_manager.get_current_environment() + + # Validate environment exists + if not 
env_manager.environment_exists(target_env): + available_envs = env_manager.list_environments() + print(f"Error: Environment '{target_env}' does not exist.") + if available_envs: + print(f"Available environments: {', '.join(available_envs)}") + return 1 + + # Collect hosts from configured_hosts across all packages in environment + hosts = defaultdict(int) + host_details = defaultdict(list) + + try: + env_data = env_manager.get_environment_data(target_env) + packages = env_data.get("packages", []) + + for package in packages: + package_name = package.get("name", "unknown") + configured_hosts = package.get("configured_hosts", {}) + + for host_name, host_config in configured_hosts.items(): + hosts[host_name] += 1 + if detailed: + config_path = host_config.get("config_path", "N/A") + configured_at = host_config.get("configured_at", "N/A") + host_details[host_name].append( + { + "package": package_name, + "config_path": config_path, + "configured_at": configured_at, + } + ) + + except Exception as e: + print(f"Error reading environment data: {e}") + return 1 + + # Display results + if not hosts: + print(f"No configured hosts for environment '{target_env}'") + return 0 + + print(f"Configured hosts for environment '{target_env}':") + + for host_name, package_count in sorted(hosts.items()): + if detailed: + print(f"\n{host_name} ({package_count} packages):") + for detail in host_details[host_name]: + print(f" - Package: {detail['package']}") + print(f" Config path: {detail['config_path']}") + print(f" Configured at: {detail['configured_at']}") + else: + print(f" - {host_name} ({package_count} packages)") + + return 0 + except Exception as e: + print(f"Error listing hosts: {e}") + return 1 + + +def handle_mcp_list_servers( + env_manager: HatchEnvironmentManager, env_name: Optional[str] = None +): + """Handle 'hatch mcp list servers' command.""" + try: + env_name = env_name or env_manager.get_current_environment() + + if not env_manager.environment_exists(env_name): + 
print(f"Error: Environment '{env_name}' does not exist") + return 1 + + packages = env_manager.list_packages(env_name) + mcp_packages = [] + + for package in packages: + # Check if package has host configuration tracking (indicating MCP server) + configured_hosts = package.get("configured_hosts", {}) + if configured_hosts: + # Use the tracked server configuration from any host + first_host = next(iter(configured_hosts.values())) + server_config_data = first_host.get("server_config", {}) + + # Create a simple server config object + class SimpleServerConfig: + def __init__(self, data): + self.name = data.get("name", package["name"]) + self.command = data.get("command", "unknown") + self.args = data.get("args", []) + + server_config = SimpleServerConfig(server_config_data) + mcp_packages.append( + {"package": package, "server_config": server_config} + ) + else: + # Try the original method as fallback + try: + server_config = get_package_mcp_server_config( + env_manager, env_name, package["name"] + ) + mcp_packages.append( + {"package": package, "server_config": server_config} + ) + except: + # Package doesn't have MCP server or method failed + continue + + if not mcp_packages: + print(f"No MCP servers configured in environment '{env_name}'") + return 0 + + print(f"MCP servers in environment '{env_name}':") + print(f"{'Server Name':<20} {'Package':<20} {'Version':<10} {'Command'}") + print("-" * 80) + + for item in mcp_packages: + package = item["package"] + server_config = item["server_config"] + + server_name = server_config.name + package_name = package["name"] + version = package.get("version", "unknown") + command = f"{server_config.command} {' '.join(server_config.args)}" + + print(f"{server_name:<20} {package_name:<20} {version:<10} {command}") + + # Display host configuration tracking information + configured_hosts = package.get("configured_hosts", {}) + if configured_hosts: + print(f"{'':>20} Configured on hosts:") + for hostname, host_config in 
configured_hosts.items(): + config_path = host_config.get("config_path", "unknown") + last_synced = host_config.get("last_synced", "unknown") + # Format the timestamp for better readability + if last_synced != "unknown": + try: + from datetime import datetime + + dt = datetime.fromisoformat( + last_synced.replace("Z", "+00:00") + ) + last_synced = dt.strftime("%Y-%m-%d %H:%M:%S") + except: + pass # Keep original format if parsing fails + print( + f"{'':>22} - {hostname}: {config_path} (synced: {last_synced})" + ) + else: + print(f"{'':>20} No host configurations tracked") + print() # Add blank line between servers + + return 0 + except Exception as e: + print(f"Error listing servers: {e}") + return 1 + + +def handle_mcp_backup_restore( + env_manager: HatchEnvironmentManager, + host: str, + backup_file: Optional[str] = None, + dry_run: bool = False, + auto_approve: bool = False, +): + """Handle 'hatch mcp backup restore' command.""" + try: + from hatch.mcp_host_config.backup import MCPHostConfigBackupManager + + # Validate host type + try: + host_type = MCPHostType(host) + except ValueError: + print( + f"Error: Invalid host '{host}'. Supported hosts: {[h.value for h in MCPHostType]}" + ) + return 1 + + backup_manager = MCPHostConfigBackupManager() + + # Get backup file path + if backup_file: + backup_path = backup_manager.backup_root / host / backup_file + if not backup_path.exists(): + print(f"Error: Backup file '{backup_file}' not found for host '{host}'") + return 1 + else: + backup_path = backup_manager._get_latest_backup(host) + if not backup_path: + print(f"Error: No backups found for host '{host}'") + return 1 + backup_file = backup_path.name + + if dry_run: + print(f"[DRY RUN] Would restore backup for host '{host}':") + print(f"[DRY RUN] Backup file: {backup_file}") + print(f"[DRY RUN] Backup path: {backup_path}") + return 0 + + # Confirm operation unless auto-approved + if not request_confirmation( + f"Restore backup '{backup_file}' for host '{host}'? 
This will overwrite current configuration.", + auto_approve, + ): + print("Operation cancelled.") + return 0 + + # Perform restoration + success = backup_manager.restore_backup(host, backup_file) + + if success: + print( + f"[SUCCESS] Successfully restored backup '{backup_file}' for host '{host}'" + ) + + # Read restored configuration to get actual server list + try: + # Import strategies to trigger registration + import hatch.mcp_host_config.strategies + + host_type = MCPHostType(host) + strategy = MCPHostRegistry.get_strategy(host_type) + restored_config = strategy.read_configuration() + + # Update environment tracking to match restored state + updates_count = ( + env_manager.apply_restored_host_configuration_to_environments( + host, restored_config.servers + ) + ) + if updates_count > 0: + print( + f"Synchronized {updates_count} package entries with restored configuration" + ) + + except Exception as e: + print(f"Warning: Could not synchronize environment tracking: {e}") + + return 0 + else: + print(f"[ERROR] Failed to restore backup '{backup_file}' for host '{host}'") + return 1 + + except Exception as e: + print(f"Error restoring backup: {e}") + return 1 + + +def handle_mcp_backup_list(host: str, detailed: bool = False): + """Handle 'hatch mcp backup list' command.""" + try: + from hatch.mcp_host_config.backup import MCPHostConfigBackupManager + + # Validate host type + try: + host_type = MCPHostType(host) + except ValueError: + print( + f"Error: Invalid host '{host}'. 
Supported hosts: {[h.value for h in MCPHostType]}" + ) + return 1 + + backup_manager = MCPHostConfigBackupManager() + backups = backup_manager.list_backups(host) + + if not backups: + print(f"No backups found for host '{host}'") + return 0 + + print(f"Backups for host '{host}' ({len(backups)} found):") + + if detailed: + print(f"{'Backup File':<40} {'Created':<20} {'Size':<10} {'Age (days)'}") + print("-" * 80) + + for backup in backups: + created = backup.timestamp.strftime("%Y-%m-%d %H:%M:%S") + size = f"{backup.file_size:,} B" + age = backup.age_days + + print(f"{backup.file_path.name:<40} {created:<20} {size:<10} {age}") + else: + for backup in backups: + created = backup.timestamp.strftime("%Y-%m-%d %H:%M:%S") + print( + f" {backup.file_path.name} (created: {created}, {backup.age_days} days ago)" + ) + + return 0 + except Exception as e: + print(f"Error listing backups: {e}") + return 1 + + +def handle_mcp_backup_clean( + host: str, + older_than_days: Optional[int] = None, + keep_count: Optional[int] = None, + dry_run: bool = False, + auto_approve: bool = False, +): + """Handle 'hatch mcp backup clean' command.""" + try: + from hatch.mcp_host_config.backup import MCPHostConfigBackupManager + + # Validate host type + try: + host_type = MCPHostType(host) + except ValueError: + print( + f"Error: Invalid host '{host}'. 
Supported hosts: {[h.value for h in MCPHostType]}" + ) + return 1 + + # Validate cleanup criteria + if not older_than_days and not keep_count: + print("Error: Must specify either --older-than-days or --keep-count") + return 1 + + backup_manager = MCPHostConfigBackupManager() + backups = backup_manager.list_backups(host) + + if not backups: + print(f"No backups found for host '{host}'") + return 0 + + # Determine which backups would be cleaned + to_clean = [] + + if older_than_days: + for backup in backups: + if backup.age_days > older_than_days: + to_clean.append(backup) + + if keep_count and len(backups) > keep_count: + # Keep newest backups, remove oldest + to_clean.extend(backups[keep_count:]) + + # Remove duplicates while preserving order + seen = set() + unique_to_clean = [] + for backup in to_clean: + if backup.file_path not in seen: + seen.add(backup.file_path) + unique_to_clean.append(backup) + + if not unique_to_clean: + print(f"No backups match cleanup criteria for host '{host}'") + return 0 + + if dry_run: + print( + f"[DRY RUN] Would clean {len(unique_to_clean)} backup(s) for host '{host}':" + ) + for backup in unique_to_clean: + print( + f"[DRY RUN] {backup.file_path.name} (age: {backup.age_days} days)" + ) + return 0 + + # Confirm operation unless auto-approved + if not request_confirmation( + f"Clean {len(unique_to_clean)} backup(s) for host '{host}'?", auto_approve + ): + print("Operation cancelled.") + return 0 + + # Perform cleanup + filters = {} + if older_than_days: + filters["older_than_days"] = older_than_days + if keep_count: + filters["keep_count"] = keep_count + + cleaned_count = backup_manager.clean_backups(host, **filters) + + if cleaned_count > 0: + print(f"โœ“ Successfully cleaned {cleaned_count} backup(s) for host '{host}'") + return 0 + else: + print(f"No backups were cleaned for host '{host}'") + return 0 + + except Exception as e: + print(f"Error cleaning backups: {e}") + return 1 + + +def parse_env_vars(env_list: Optional[list]) -> 
def _parse_key_value_list(items: Optional[list], label: str) -> dict:
    """Parse a list of KEY=VALUE strings into a dict.

    Shared implementation for environment variables and HTTP headers
    (the two callers were previously copy-paste duplicates).

    Args:
        items: Raw strings from the command line, or None.
        label: Human-readable item name used in warning messages.

    Returns:
        Mapping of stripped keys to stripped values; {} for empty input.
        Entries without '=' are skipped with a warning rather than failing.
    """
    result = {}
    for item in items or []:
        if "=" not in item:
            print(f"Warning: Invalid {label} format '{item}'. Expected KEY=VALUE")
            continue
        # Split on the first '=' only so values may themselves contain '='.
        key, value = item.split("=", 1)
        result[key.strip()] = value.strip()
    return result


def parse_env_vars(env_list: Optional[list]) -> dict:
    """Parse environment variables from command line format."""
    return _parse_key_value_list(env_list, "environment variable")


def parse_header(header_list: Optional[list]) -> dict:
    """Parse HTTP headers from command line format."""
    return _parse_key_value_list(header_list, "header")


def parse_input(input_list: Optional[list]) -> Optional[list]:
    """Parse VS Code input variable definitions from command line format.

    Format: type,id,description[,password=true]
    Example: promptString,api-key,GitHub Personal Access Token,password=true

    Returns:
        List of input variable definition dictionaries, or None if no inputs
        provided (or none were valid).
    """
    if not input_list:
        return None

    parsed_inputs = []
    for input_str in input_list:
        parts = [p.strip() for p in input_str.split(",")]
        if len(parts) < 3:
            # Malformed entries are skipped with a warning, not fatal.
            print(
                f"Warning: Invalid input format '{input_str}'. Expected: type,id,description[,password=true]"
            )
            continue

        input_def = {"type": parts[0], "id": parts[1], "description": parts[2]}

        # Optional fourth field toggles masked input (case-insensitive flag).
        if len(parts) > 3 and parts[3].lower() == "password=true":
            input_def["password"] = True

        parsed_inputs.append(input_def)

    return parsed_inputs if parsed_inputs else None
def handle_mcp_configure(
    host: str,
    server_name: str,
    command: str,
    args: list,
    env: Optional[list] = None,
    url: Optional[str] = None,
    header: Optional[list] = None,
    timeout: Optional[int] = None,
    trust: bool = False,
    cwd: Optional[str] = None,
    env_file: Optional[str] = None,
    http_url: Optional[str] = None,
    include_tools: Optional[list] = None,
    exclude_tools: Optional[list] = None,
    input: Optional[list] = None,
    no_backup: bool = False,
    dry_run: bool = False,
    auto_approve: bool = False,
):
    """Handle 'hatch mcp configure' command with ALL host-specific arguments.

    Host-specific arguments are accepted for all hosts. The reporting system will
    show unsupported fields as "UNSUPPORTED" in the conversion report rather than
    rejecting them upfront.

    Returns:
        int: 0 on success/cancel/dry-run, 1 on any validation or execution error.
    """
    try:
        # Validate host type against the known-host enum.
        try:
            host_type = MCPHostType(host)
        except ValueError:
            print(
                f"Error: Invalid host '{host}'. Supported hosts: {[h.value for h in MCPHostType]}"
            )
            return 1

        # Claude Desktop/Code support local (stdio) servers only (Issue 2).
        if host_type in (MCPHostType.CLAUDE_DESKTOP, MCPHostType.CLAUDE_CODE):
            if url is not None:
                print(
                    f"Error: {host} does not support remote servers (--url). Only local servers with --command are supported."
                )
                return 1

        # Cross-argument dependencies: --header is remote-only, --args is local-only.
        if command and header:
            print(
                "Error: --header can only be used with --url or --http-url (remote servers), not with --command (local servers)"
            )
            return 1

        if (url or http_url) and args:
            print(
                "Error: --args can only be used with --command (local servers), not with --url or --http-url (remote servers)"
            )
            return 1

        # NOTE: We do NOT validate host-specific arguments here.
        # The conversion report flags unsupported fields as "UNSUPPORTED"
        # instead of blocking the operation.

        # Partial-update support: detect whether the server already exists.
        # (Reuse this manager for the final write as well — no need for a
        # second MCPHostConfigurationManager instance.)
        manager = MCPHostConfigurationManager()
        existing_config = manager.get_server_config(host, server_name)
        is_update = existing_config is not None

        # Create requires a transport (command OR url OR http_url); update does not.
        if not is_update:
            if not command and not url and not http_url:
                print(
                    "Error: When creating a new server, you must provide either --command (for local servers), --url (for SSE remote servers), or --http-url (for HTTP remote servers, Gemini only)"
                )
                return 1

        env_dict = parse_env_vars(env)
        headers_dict = parse_header(header)
        inputs_list = parse_input(input)

        # Build the universal (Omni) configuration. Only fields with actual
        # values are included so model_dump(exclude_unset=True) stays accurate.
        omni_config_data = {"name": server_name}

        if command is not None:
            omni_config_data["command"] = command
        if args is not None:
            # shlex.split() expands quoted strings into individual args (Issue 4).
            processed_args = []
            for arg in args:
                if arg:  # Skip empty strings
                    try:
                        split_args = shlex.split(arg)
                        processed_args.extend(split_args)
                    except ValueError as e:
                        # Unbalanced quotes: warn and keep the raw argument.
                        print(f"Warning: Invalid quote in argument '{arg}': {e}")
                        processed_args.append(arg)
            omni_config_data["args"] = processed_args if processed_args else None
        if env_dict:
            omni_config_data["env"] = env_dict
        if url is not None:
            omni_config_data["url"] = url
        if headers_dict:
            omni_config_data["headers"] = headers_dict

        # Host-specific fields (Gemini)
        if timeout is not None:
            omni_config_data["timeout"] = timeout
        if trust:
            omni_config_data["trust"] = trust
        if cwd is not None:
            omni_config_data["cwd"] = cwd
        if http_url is not None:
            omni_config_data["httpUrl"] = http_url
        if include_tools is not None:
            omni_config_data["includeTools"] = include_tools
        if exclude_tools is not None:
            omni_config_data["excludeTools"] = exclude_tools

        # Host-specific fields (Cursor/VS Code/LM Studio)
        if env_file is not None:
            omni_config_data["envFile"] = env_file

        # Host-specific fields (VS Code)
        if inputs_list is not None:
            omni_config_data["inputs"] = inputs_list

        # Partial update: merge new values over the existing configuration.
        if is_update:
            existing_data = existing_config.model_dump(
                exclude_unset=True, exclude={"name"}
            )

            # Switching from command to URL/httpUrl: clear command-based fields.
            if (
                url is not None or http_url is not None
            ) and existing_config.command is not None:
                existing_data.pop("command", None)
                existing_data.pop("args", None)
                existing_data.pop(
                    "type", None
                )  # Clear type field when switching transports (Issue 1)

            # Switching from URL/httpUrl to command: clear URL-based fields.
            if command is not None and (
                existing_config.url is not None
                or getattr(existing_config, "httpUrl", None) is not None
            ):
                existing_data.pop("url", None)
                existing_data.pop("httpUrl", None)
                existing_data.pop("headers", None)
                existing_data.pop(
                    "type", None
                )  # Clear type field when switching transports (Issue 1)

            # Merge: new values override existing values.
            omni_config_data = {**existing_data, **omni_config_data}

        # Create Omni model, then convert to the host-specific model.
        omni_config = MCPServerConfigOmni(**omni_config_data)

        host_model_class = HOST_MODEL_REGISTRY.get(host_type)
        if not host_model_class:
            print(f"Error: No model registered for host '{host}'")
            return 1

        server_config = host_model_class.from_omni(omni_config)

        # Conversion report shows per-field support for the target host.
        report = generate_conversion_report(
            operation="update" if is_update else "create",
            server_name=server_name,
            target_host=host_type,
            omni=omni_config,
            old_config=existing_config if is_update else None,
            dry_run=dry_run,
        )

        if dry_run:
            print(
                f"[DRY RUN] Would configure MCP server '{server_name}' on host '{host}':"
            )
            print(f"[DRY RUN] Command: {command}")
            if args:
                print(f"[DRY RUN] Args: {args}")
            if env_dict:
                print(f"[DRY RUN] Environment: {env_dict}")
            if url:
                print(f"[DRY RUN] URL: {url}")
            if headers_dict:
                print(f"[DRY RUN] Headers: {headers_dict}")
            print(f"[DRY RUN] Backup: {'Disabled' if no_backup else 'Enabled'}")
            display_report(report)
            return 0

        # Display report before asking for confirmation.
        display_report(report)

        if not request_confirmation(
            f"Configure MCP server '{server_name}' on host '{host}'?", auto_approve
        ):
            print("Operation cancelled.")
            return 0

        # Perform configuration with the manager created above.
        result = manager.configure_server(
            server_config=server_config, hostname=host, no_backup=no_backup
        )

        if result.success:
            print(
                f"[SUCCESS] Successfully configured MCP server '{server_name}' on host '{host}'"
            )
            if result.backup_path:
                print(f" Backup created: {result.backup_path}")
            return 0
        else:
            print(
                f"[ERROR] Failed to configure MCP server '{server_name}' on host '{host}': {result.error_message}"
            )
            return 1

    except Exception as e:
        print(f"Error configuring MCP server: {e}")
        return 1


def handle_mcp_remove(
    host: str,
    server_name: str,
    no_backup: bool = False,
    dry_run: bool = False,
    auto_approve: bool = False,
):
    """Handle 'hatch mcp remove' command.

    Returns:
        int: 0 on success/cancel/dry-run, 1 on error.
    """
    try:
        # Validate host type
        try:
            host_type = MCPHostType(host)
        except ValueError:
            print(
                f"Error: Invalid host '{host}'. Supported hosts: {[h.value for h in MCPHostType]}"
            )
            return 1

        if dry_run:
            print(
                f"[DRY RUN] Would remove MCP server '{server_name}' from host '{host}'"
            )
            print(f"[DRY RUN] Backup: {'Disabled' if no_backup else 'Enabled'}")
            return 0

        # Confirm operation unless auto-approved
        if not request_confirmation(
            f"Remove MCP server '{server_name}' from host '{host}'?", auto_approve
        ):
            print("Operation cancelled.")
            return 0

        # Perform removal
        mcp_manager = MCPHostConfigurationManager()
        result = mcp_manager.remove_server(
            server_name=server_name, hostname=host, no_backup=no_backup
        )

        if result.success:
            print(
                f"[SUCCESS] Successfully removed MCP server '{server_name}' from host '{host}'"
            )
            if result.backup_path:
                print(f" Backup created: {result.backup_path}")
            return 0
        else:
            print(
                f"[ERROR] Failed to remove MCP server '{server_name}' from host '{host}': {result.error_message}"
            )
            return 1

    except Exception as e:
        print(f"Error removing MCP server: {e}")
        return 1


def parse_host_list(host_arg: str) -> List[str]:
    """Parse comma-separated host list or 'all'.

    Raises:
        ValueError: If any entry is not a recognized MCPHostType value.
    """
    if not host_arg:
        return []

    # 'all' expands to every host actually detected on this machine.
    if host_arg.lower() == "all":
        from hatch.mcp_host_config.host_management import MCPHostRegistry

        available_hosts = MCPHostRegistry.detect_available_hosts()
        return [host.value for host in available_hosts]

    hosts = []
    for host_str in host_arg.split(","):
        host_str = host_str.strip()
        try:
            host_type = MCPHostType(host_str)
            hosts.append(host_type.value)
        except ValueError:
            available = [h.value for h in MCPHostType]
            raise ValueError(f"Unknown host '{host_str}'. Available: {available}")

    return hosts
def handle_mcp_remove_server(
    env_manager: HatchEnvironmentManager,
    server_name: str,
    hosts: Optional[str] = None,
    env: Optional[str] = None,
    no_backup: bool = False,
    dry_run: bool = False,
    auto_approve: bool = False,
):
    """Handle 'hatch mcp remove server' command."""
    try:
        # Resolve the set of target hosts from --host / --env.
        if hosts:
            target_hosts = parse_host_list(hosts)
        elif env:
            # TODO: Implement environment-based server removal
            print("Error: Environment-based removal not yet implemented")
            return 1
        else:
            print("Error: Must specify either --host or --env")
            return 1

        if not target_hosts:
            print("Error: No valid hosts specified")
            return 1

        if dry_run:
            print(
                f"[DRY RUN] Would remove MCP server '{server_name}' from hosts: {', '.join(target_hosts)}"
            )
            print(f"[DRY RUN] Backup: {'Disabled' if no_backup else 'Enabled'}")
            return 0

        # Single confirmation covers every target host.
        hosts_str = ", ".join(target_hosts)
        if not request_confirmation(
            f"Remove MCP server '{server_name}' from hosts: {hosts_str}?", auto_approve
        ):
            print("Operation cancelled.")
            return 0

        # Remove from each host in turn, tallying successes.
        mcp_manager = MCPHostConfigurationManager()
        success_count = 0
        total_count = len(target_hosts)

        for host in target_hosts:
            result = mcp_manager.remove_server(
                server_name=server_name, hostname=host, no_backup=no_backup
            )

            if result.success:
                print(f"[SUCCESS] Successfully removed '{server_name}' from '{host}'")
                if result.backup_path:
                    print(f" Backup created: {result.backup_path}")
                success_count += 1

                # Update environment tracking for current environment only
                current_env = env_manager.get_current_environment()
                if current_env:
                    env_manager.remove_package_host_configuration(
                        current_env, server_name, host
                    )
            else:
                print(
                    f"[ERROR] Failed to remove '{server_name}' from '{host}': {result.error_message}"
                )

        # Summarize: full success → 0, partial or total failure → 1.
        if success_count == total_count:
            print(f"[SUCCESS] Removed '{server_name}' from all {total_count} hosts")
            return 0
        elif success_count > 0:
            print(
                f"[PARTIAL SUCCESS] Removed '{server_name}' from {success_count}/{total_count} hosts"
            )
            return 1
        else:
            print(f"[ERROR] Failed to remove '{server_name}' from any hosts")
            return 1

    except Exception as e:
        print(f"Error removing MCP server: {e}")
        return 1


def handle_mcp_remove_host(
    env_manager: HatchEnvironmentManager,
    host_name: str,
    no_backup: bool = False,
    dry_run: bool = False,
    auto_approve: bool = False,
):
    """Handle 'hatch mcp remove host' command."""
    try:
        # Validate host type (the bound value is only used for validation).
        try:
            host_type = MCPHostType(host_name)
        except ValueError:
            print(
                f"Error: Invalid host '{host_name}'. Supported hosts: {[h.value for h in MCPHostType]}"
            )
            return 1

        if dry_run:
            print(f"[DRY RUN] Would remove entire host configuration for '{host_name}'")
            print(f"[DRY RUN] Backup: {'Disabled' if no_backup else 'Enabled'}")
            return 0

        # Confirm — this wipes every MCP server configured on the host.
        if not request_confirmation(
            f"Remove entire host configuration for '{host_name}'? This will remove ALL MCP servers from this host.",
            auto_approve,
        ):
            print("Operation cancelled.")
            return 0

        # Perform host configuration removal
        mcp_manager = MCPHostConfigurationManager()
        result = mcp_manager.remove_host_configuration(
            hostname=host_name, no_backup=no_backup
        )

        if result.success:
            print(
                f"[SUCCESS] Successfully removed host configuration for '{host_name}'"
            )
            if result.backup_path:
                print(f" Backup created: {result.backup_path}")

            # Update environment tracking across all environments
            updates_count = env_manager.clear_host_from_all_packages_all_envs(host_name)
            if updates_count > 0:
                print(f"Updated {updates_count} package entries across environments")

            return 0
        else:
            print(
                f"[ERROR] Failed to remove host configuration for '{host_name}': {result.error_message}"
            )
            return 1

    except Exception as e:
        print(f"Error removing host configuration: {e}")
        return 1
def handle_mcp_sync(
    from_env: Optional[str] = None,
    from_host: Optional[str] = None,
    to_hosts: Optional[str] = None,
    servers: Optional[str] = None,
    pattern: Optional[str] = None,
    dry_run: bool = False,
    auto_approve: bool = False,
    no_backup: bool = False,
) -> int:
    """Handle 'hatch mcp sync' command.

    Synchronizes MCP server configurations from a source (environment or host)
    to one or more target hosts, with optional server-name or regex filtering.

    Returns:
        int: 0 on success/cancel/dry-run, 1 on any failure.
    """
    try:
        # Parse target hosts (required).
        if not to_hosts:
            print("Error: Must specify --to-host")
            return 1

        target_hosts = parse_host_list(to_hosts)

        # Optional explicit server-name filter (comma-separated).
        server_list = None
        if servers:
            server_list = [s.strip() for s in servers.split(",") if s.strip()]

        if dry_run:
            source_desc = (
                f"environment '{from_env}'" if from_env else f"host '{from_host}'"
            )
            target_desc = f"hosts: {', '.join(target_hosts)}"
            print(f"[DRY RUN] Would synchronize from {source_desc} to {target_desc}")

            if server_list:
                print(f"[DRY RUN] Server filter: {', '.join(server_list)}")
            elif pattern:
                print(f"[DRY RUN] Pattern filter: {pattern}")

            print(f"[DRY RUN] Backup: {'Disabled' if no_backup else 'Enabled'}")
            return 0

        # Confirm operation unless auto-approved
        source_desc = f"environment '{from_env}'" if from_env else f"host '{from_host}'"
        target_desc = f"{len(target_hosts)} host(s)"
        if not request_confirmation(
            f"Synchronize MCP configurations from {source_desc} to {target_desc}?",
            auto_approve,
        ):
            print("Operation cancelled.")
            return 0

        # Perform synchronization
        mcp_manager = MCPHostConfigurationManager()
        result = mcp_manager.sync_configurations(
            from_env=from_env,
            from_host=from_host,
            to_hosts=target_hosts,
            servers=server_list,
            pattern=pattern,
            no_backup=no_backup,
        )

        if result.success:
            # Plain strings here — no placeholders, so no f-prefix needed.
            print("[SUCCESS] Synchronization completed")
            print(f" Servers synced: {result.servers_synced}")
            print(f" Hosts updated: {result.hosts_updated}")

            # Show per-host results, including backup paths where created.
            for res in result.results:
                if res.success:
                    backup_info = (
                        f" (backup: {res.backup_path})" if res.backup_path else ""
                    )
                    print(f" ✓ {res.hostname}{backup_info}")
                else:
                    print(f" ✗ {res.hostname}: {res.error_message}")

            return 0
        else:
            print("[ERROR] Synchronization failed")
            for res in result.results:
                if not res.success:
                    print(f" ✗ {res.hostname}: {res.error_message}")
            return 1

    except ValueError as e:
        # parse_host_list raises ValueError for unknown host names.
        print(f"Error: {e}")
        return 1
    except Exception as e:
        print(f"Error during synchronization: {e}")
        return 1
+ return 0 + + # Confirm operation unless auto-approved + source_desc = f"environment '{from_env}'" if from_env else f"host '{from_host}'" + target_desc = f"{len(target_hosts)} host(s)" + if not request_confirmation( + f"Synchronize MCP configurations from {source_desc} to {target_desc}?", + auto_approve, + ): + print("Operation cancelled.") + return 0 + + # Perform synchronization + mcp_manager = MCPHostConfigurationManager() + result = mcp_manager.sync_configurations( + from_env=from_env, + from_host=from_host, + to_hosts=target_hosts, + servers=server_list, + pattern=pattern, + no_backup=no_backup, + ) + + if result.success: + print(f"[SUCCESS] Synchronization completed") + print(f" Servers synced: {result.servers_synced}") + print(f" Hosts updated: {result.hosts_updated}") + + # Show detailed results + for res in result.results: + if res.success: + backup_info = ( + f" (backup: {res.backup_path})" if res.backup_path else "" + ) + print(f" โœ“ {res.hostname}{backup_info}") + else: + print(f" โœ— {res.hostname}: {res.error_message}") + + return 0 + else: + print(f"[ERROR] Synchronization failed") + for res in result.results: + if not res.success: + print(f" โœ— {res.hostname}: {res.error_message}") + return 1 + + except ValueError as e: + print(f"Error: {e}") + return 1 + except Exception as e: + print(f"Error during synchronization: {e}") + return 1 + + def main(): """Main entry point for Hatch CLI. 
- + Parses command-line arguments and executes the requested commands for: - Package template creation - - Package validation + - Package validation - Environment management (create, remove, list, use, current) - Package management (add, remove, list) - + Returns: int: Exit code (0 for success, 1 for errors) """ # Configure logging logging.basicConfig( level=logging.INFO, - format="%(asctime)s - %(name)s - %(levelname)s - %(message)s" + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", ) - + # Create argument parser parser = argparse.ArgumentParser(description="Hatch package manager CLI") + + # Add version argument + parser.add_argument( + "--version", action="version", version=f"%(prog)s {get_hatch_version()}" + ) + subparsers = parser.add_subparsers(dest="command", help="Command to execute") - + # Create template command - create_parser = subparsers.add_parser("create", help="Create a new package template") + create_parser = subparsers.add_parser( + "create", help="Create a new package template" + ) create_parser.add_argument("name", help="Package name") - create_parser.add_argument("--dir", "-d", default=".", help="Target directory (default: current directory)") - create_parser.add_argument("--description", "-D", default="", help="Package description") - + create_parser.add_argument( + "--dir", "-d", default=".", help="Target directory (default: current directory)" + ) + create_parser.add_argument( + "--description", "-D", default="", help="Package description" + ) + # Validate package command validate_parser = subparsers.add_parser("validate", help="Validate a package") validate_parser.add_argument("package_dir", help="Path to package directory") - + # Environment management commands - env_subparsers = subparsers.add_parser("env", help="Environment management commands").add_subparsers( - dest="env_command", help="Environment command to execute" - ) - + env_subparsers = subparsers.add_parser( + "env", help="Environment management commands" + 
).add_subparsers(dest="env_command", help="Environment command to execute") + # Create environment command - env_create_parser = env_subparsers.add_parser("create", help="Create a new environment") + env_create_parser = env_subparsers.add_parser( + "create", help="Create a new environment" + ) env_create_parser.add_argument("name", help="Environment name") - env_create_parser.add_argument("--description", "-D", default="", help="Environment description") - env_create_parser.add_argument("--python-version", help="Python version for the environment (e.g., 3.11, 3.12)") - env_create_parser.add_argument("--no-python", action="store_true", - help="Don't create a Python environment using conda/mamba") - env_create_parser.add_argument("--no-hatch-mcp-server", action="store_true", - help="Don't install hatch_mcp_server wrapper in the new environment") - env_create_parser.add_argument("--hatch_mcp_server_tag", - help="Git tag/branch reference for hatch_mcp_server wrapper installation (e.g., 'dev', 'v0.1.0')") - + env_create_parser.add_argument( + "--description", "-D", default="", help="Environment description" + ) + env_create_parser.add_argument( + "--python-version", help="Python version for the environment (e.g., 3.11, 3.12)" + ) + env_create_parser.add_argument( + "--no-python", + action="store_true", + help="Don't create a Python environment using conda/mamba", + ) + env_create_parser.add_argument( + "--no-hatch-mcp-server", + action="store_true", + help="Don't install hatch_mcp_server wrapper in the new environment", + ) + env_create_parser.add_argument( + "--hatch_mcp_server_tag", + help="Git tag/branch reference for hatch_mcp_server wrapper installation (e.g., 'dev', 'v0.1.0')", + ) + # Remove environment command - env_remove_parser = env_subparsers.add_parser("remove", help="Remove an environment") + env_remove_parser = env_subparsers.add_parser( + "remove", help="Remove an environment" + ) env_remove_parser.add_argument("name", help="Environment name") - + # List 
environments command env_subparsers.add_parser("list", help="List all available environments") - + # Set current environment command - env_use_parser = env_subparsers.add_parser("use", help="Set the current environment") + env_use_parser = env_subparsers.add_parser( + "use", help="Set the current environment" + ) env_use_parser.add_argument("name", help="Environment name") - + # Show current environment command env_subparsers.add_parser("current", help="Show the current environment") # Python environment management commands - advanced subcommands - env_python_subparsers = env_subparsers.add_parser("python", help="Manage Python environments").add_subparsers( + env_python_subparsers = env_subparsers.add_parser( + "python", help="Manage Python environments" + ).add_subparsers( dest="python_command", help="Python environment command to execute" ) - - # Initialize Python environment - python_init_parser = env_python_subparsers.add_parser("init", help="Initialize Python environment") - python_init_parser.add_argument("--hatch_env", default=None, help="Hatch environment name in which the Python environment is located (default: current environment)") - python_init_parser.add_argument("--python-version", help="Python version (e.g., 3.11, 3.12)") - python_init_parser.add_argument("--force", action="store_true", help="Force recreation if exists") - python_init_parser.add_argument("--no-hatch-mcp-server", action="store_true", - help="Don't install hatch_mcp_server wrapper in the Python environment") - python_init_parser.add_argument("--hatch_mcp_server_tag", - help="Git tag/branch reference for hatch_mcp_server wrapper installation (e.g., 'dev', 'v0.1.0')") - + + # Initialize Python environment + python_init_parser = env_python_subparsers.add_parser( + "init", help="Initialize Python environment" + ) + python_init_parser.add_argument( + "--hatch_env", + default=None, + help="Hatch environment name in which the Python environment is located (default: current environment)", + ) 
+ python_init_parser.add_argument( + "--python-version", help="Python version (e.g., 3.11, 3.12)" + ) + python_init_parser.add_argument( + "--force", action="store_true", help="Force recreation if exists" + ) + python_init_parser.add_argument( + "--no-hatch-mcp-server", + action="store_true", + help="Don't install hatch_mcp_server wrapper in the Python environment", + ) + python_init_parser.add_argument( + "--hatch_mcp_server_tag", + help="Git tag/branch reference for hatch_mcp_server wrapper installation (e.g., 'dev', 'v0.1.0')", + ) + # Show Python environment info - python_info_parser = env_python_subparsers.add_parser("info", help="Show Python environment information") - python_info_parser.add_argument("--hatch_env", default=None, help="Hatch environment name in which the Python environment is located (default: current environment)") - python_info_parser.add_argument("--detailed", action="store_true", help="Show detailed diagnostics") - + python_info_parser = env_python_subparsers.add_parser( + "info", help="Show Python environment information" + ) + python_info_parser.add_argument( + "--hatch_env", + default=None, + help="Hatch environment name in which the Python environment is located (default: current environment)", + ) + python_info_parser.add_argument( + "--detailed", action="store_true", help="Show detailed diagnostics" + ) + # Hatch MCP server wrapper management commands - hatch_mcp_parser = env_python_subparsers.add_parser("add-hatch-mcp", help="Add hatch_mcp_server wrapper to the environment") + hatch_mcp_parser = env_python_subparsers.add_parser( + "add-hatch-mcp", help="Add hatch_mcp_server wrapper to the environment" + ) ## Install MCP server command - hatch_mcp_parser.add_argument("--hatch_env", default=None, help="Hatch environment name. It must possess a valid Python environment. 
(default: current environment)") - hatch_mcp_parser.add_argument("--tag", default=None, help="Git tag/branch reference for wrapper installation (e.g., 'dev', 'v0.1.0')") - + hatch_mcp_parser.add_argument( + "--hatch_env", + default=None, + help="Hatch environment name. It must possess a valid Python environment. (default: current environment)", + ) + hatch_mcp_parser.add_argument( + "--tag", + default=None, + help="Git tag/branch reference for wrapper installation (e.g., 'dev', 'v0.1.0')", + ) + # Remove Python environment - python_remove_parser = env_python_subparsers.add_parser("remove", help="Remove Python environment") - python_remove_parser.add_argument("--hatch_env", default=None, help="Hatch environment name in which the Python environment is located (default: current environment)") - python_remove_parser.add_argument("--force", action="store_true", help="Force removal without confirmation") - + python_remove_parser = env_python_subparsers.add_parser( + "remove", help="Remove Python environment" + ) + python_remove_parser.add_argument( + "--hatch_env", + default=None, + help="Hatch environment name in which the Python environment is located (default: current environment)", + ) + python_remove_parser.add_argument( + "--force", action="store_true", help="Force removal without confirmation" + ) + # Launch Python shell - python_shell_parser = env_python_subparsers.add_parser("shell", help="Launch Python shell in environment") - python_shell_parser.add_argument("--hatch_env", default=None, help="Hatch environment name in which the Python environment is located (default: current environment)") - python_shell_parser.add_argument("--cmd", help="Command to run in the shell (optional)") - - # Package management commands - pkg_subparsers = subparsers.add_parser("package", help="Package management commands").add_subparsers( - dest="pkg_command", help="Package command to execute" + python_shell_parser = env_python_subparsers.add_parser( + "shell", help="Launch Python 
shell in environment" + ) + python_shell_parser.add_argument( + "--hatch_env", + default=None, + help="Hatch environment name in which the Python environment is located (default: current environment)", + ) + python_shell_parser.add_argument( + "--cmd", help="Command to run in the shell (optional)" + ) + + # MCP host configuration commands + mcp_subparsers = subparsers.add_parser( + "mcp", help="MCP host configuration commands" + ).add_subparsers(dest="mcp_command", help="MCP command to execute") + + # MCP discovery commands + mcp_discover_subparsers = mcp_subparsers.add_parser( + "discover", help="Discover MCP hosts and servers" + ).add_subparsers(dest="discover_command", help="Discovery command to execute") + + # Discover hosts command + mcp_discover_hosts_parser = mcp_discover_subparsers.add_parser( + "hosts", help="Discover available MCP host platforms" + ) + + # Discover servers command + mcp_discover_servers_parser = mcp_discover_subparsers.add_parser( + "servers", help="Discover configured MCP servers" + ) + mcp_discover_servers_parser.add_argument( + "--env", + "-e", + default=None, + help="Environment name (default: current environment)", + ) + + # MCP list commands + mcp_list_subparsers = mcp_subparsers.add_parser( + "list", help="List MCP hosts and servers" + ).add_subparsers(dest="list_command", help="List command to execute") + + # List hosts command + mcp_list_hosts_parser = mcp_list_subparsers.add_parser( + "hosts", help="List configured MCP hosts from environment" + ) + mcp_list_hosts_parser.add_argument( + "--env", + "-e", + default=None, + help="Environment name (default: current environment)", + ) + mcp_list_hosts_parser.add_argument( + "--detailed", + action="store_true", + help="Show detailed host configuration information", + ) + + # List servers command + mcp_list_servers_parser = mcp_list_subparsers.add_parser( + "servers", help="List configured MCP servers from environment" + ) + mcp_list_servers_parser.add_argument( + "--env", + "-e", + 
default=None, + help="Environment name (default: current environment)", + ) + + # MCP backup commands + mcp_backup_subparsers = mcp_subparsers.add_parser( + "backup", help="Backup management commands" + ).add_subparsers(dest="backup_command", help="Backup command to execute") + + # Restore backup command + mcp_backup_restore_parser = mcp_backup_subparsers.add_parser( + "restore", help="Restore MCP host configuration from backup" + ) + mcp_backup_restore_parser.add_argument( + "host", help="Host platform to restore (e.g., claude-desktop, cursor)" + ) + mcp_backup_restore_parser.add_argument( + "--backup-file", + "-f", + default=None, + help="Specific backup file to restore (default: latest)", + ) + mcp_backup_restore_parser.add_argument( + "--dry-run", + action="store_true", + help="Preview restore operation without execution", + ) + mcp_backup_restore_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + + # List backups command + mcp_backup_list_parser = mcp_backup_subparsers.add_parser( + "list", help="List available backups for MCP host" + ) + mcp_backup_list_parser.add_argument( + "host", help="Host platform to list backups for (e.g., claude-desktop, cursor)" + ) + mcp_backup_list_parser.add_argument( + "--detailed", "-d", action="store_true", help="Show detailed backup information" + ) + + # Clean backups command + mcp_backup_clean_parser = mcp_backup_subparsers.add_parser( + "clean", help="Clean old backups based on criteria" + ) + mcp_backup_clean_parser.add_argument( + "host", help="Host platform to clean backups for (e.g., claude-desktop, cursor)" + ) + mcp_backup_clean_parser.add_argument( + "--older-than-days", type=int, help="Remove backups older than specified days" + ) + mcp_backup_clean_parser.add_argument( + "--keep-count", + type=int, + help="Keep only the specified number of newest backups", + ) + mcp_backup_clean_parser.add_argument( + "--dry-run", + action="store_true", + help="Preview cleanup 
operation without execution", + ) + mcp_backup_clean_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + + # MCP direct management commands + mcp_configure_parser = mcp_subparsers.add_parser( + "configure", help="Configure MCP server directly on host" + ) + mcp_configure_parser.add_argument("server_name", help="Name for the MCP server") + mcp_configure_parser.add_argument( + "--host", + required=True, + help="Host platform to configure (e.g., claude-desktop, cursor)", + ) + + # Create mutually exclusive group for server type + server_type_group = mcp_configure_parser.add_mutually_exclusive_group() + server_type_group.add_argument( + "--command", + dest="server_command", + help="Command to execute the MCP server (for local servers)", + ) + server_type_group.add_argument( + "--url", help="Server URL for remote MCP servers (SSE transport)" + ) + server_type_group.add_argument( + "--http-url", help="HTTP streaming endpoint URL (Gemini only)" + ) + + mcp_configure_parser.add_argument( + "--args", + nargs="*", + help="Arguments for the MCP server command (only with --command)", + ) + mcp_configure_parser.add_argument( + "--env-var", action="append", help="Environment variables (format: KEY=VALUE)" + ) + mcp_configure_parser.add_argument( + "--header", + action="append", + help="HTTP headers for remote servers (format: KEY=VALUE, only with --url)", ) - + + # Host-specific arguments (Gemini) + mcp_configure_parser.add_argument( + "--timeout", type=int, help="Request timeout in milliseconds (Gemini)" + ) + mcp_configure_parser.add_argument( + "--trust", action="store_true", help="Bypass tool call confirmations (Gemini)" + ) + mcp_configure_parser.add_argument( + "--cwd", help="Working directory for stdio transport (Gemini)" + ) + mcp_configure_parser.add_argument( + "--include-tools", + nargs="*", + help="Tool allowlist - only these tools will be available (Gemini)", + ) + mcp_configure_parser.add_argument( + 
"--exclude-tools", + nargs="*", + help="Tool blocklist - these tools will be excluded (Gemini)", + ) + + # Host-specific arguments (Cursor/VS Code/LM Studio) + mcp_configure_parser.add_argument( + "--env-file", help="Path to environment file (Cursor, VS Code, LM Studio)" + ) + + # Host-specific arguments (VS Code) + mcp_configure_parser.add_argument( + "--input", + action="append", + help="Input variable definitions in format: type,id,description[,password=true] (VS Code)", + ) + + mcp_configure_parser.add_argument( + "--no-backup", + action="store_true", + help="Skip backup creation before configuration", + ) + mcp_configure_parser.add_argument( + "--dry-run", action="store_true", help="Preview configuration without execution" + ) + mcp_configure_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + + # Remove MCP commands (object-action pattern) + mcp_remove_subparsers = mcp_subparsers.add_parser( + "remove", help="Remove MCP servers or host configurations" + ).add_subparsers(dest="remove_command", help="Remove command to execute") + + # Remove server command + mcp_remove_server_parser = mcp_remove_subparsers.add_parser( + "server", help="Remove MCP server from hosts" + ) + mcp_remove_server_parser.add_argument( + "server_name", help="Name of the MCP server to remove" + ) + mcp_remove_server_parser.add_argument( + "--host", help="Target hosts (comma-separated or 'all')" + ) + mcp_remove_server_parser.add_argument( + "--env", "-e", help="Environment name (for environment-based removal)" + ) + mcp_remove_server_parser.add_argument( + "--no-backup", action="store_true", help="Skip backup creation before removal" + ) + mcp_remove_server_parser.add_argument( + "--dry-run", action="store_true", help="Preview removal without execution" + ) + mcp_remove_server_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + + # Remove host command + mcp_remove_host_parser = 
mcp_remove_subparsers.add_parser( + "host", help="Remove entire host configuration" + ) + mcp_remove_host_parser.add_argument( + "host_name", help="Host platform to remove (e.g., claude-desktop, cursor)" + ) + mcp_remove_host_parser.add_argument( + "--no-backup", action="store_true", help="Skip backup creation before removal" + ) + mcp_remove_host_parser.add_argument( + "--dry-run", action="store_true", help="Preview removal without execution" + ) + mcp_remove_host_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + + # MCP synchronization command + mcp_sync_parser = mcp_subparsers.add_parser( + "sync", help="Synchronize MCP configurations between environments and hosts" + ) + + # Source options (mutually exclusive) + sync_source_group = mcp_sync_parser.add_mutually_exclusive_group(required=True) + sync_source_group.add_argument("--from-env", help="Source environment name") + sync_source_group.add_argument("--from-host", help="Source host platform") + + # Target options + mcp_sync_parser.add_argument( + "--to-host", required=True, help="Target hosts (comma-separated or 'all')" + ) + + # Filter options (mutually exclusive) + sync_filter_group = mcp_sync_parser.add_mutually_exclusive_group() + sync_filter_group.add_argument( + "--servers", help="Specific server names to sync (comma-separated)" + ) + sync_filter_group.add_argument( + "--pattern", help="Regex pattern for server selection" + ) + + # Standard options + mcp_sync_parser.add_argument( + "--dry-run", + action="store_true", + help="Preview synchronization without execution", + ) + mcp_sync_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + mcp_sync_parser.add_argument( + "--no-backup", + action="store_true", + help="Skip backup creation before synchronization", + ) + + # Package management commands + pkg_subparsers = subparsers.add_parser( + "package", help="Package management commands" + 
).add_subparsers(dest="pkg_command", help="Package command to execute") + # Add package command - pkg_add_parser = pkg_subparsers.add_parser("add", help="Add a package to the current environment") - pkg_add_parser.add_argument("package_path_or_name", help="Path to package directory or name of the package") - pkg_add_parser.add_argument("--env", "-e", default=None, help="Environment name (default: current environment)") - pkg_add_parser.add_argument("--version", "-v", default=None, help="Version of the package (optional)") - pkg_add_parser.add_argument("--force-download", "-f", action="store_true", help="Force download even if package is in cache") - pkg_add_parser.add_argument("--refresh-registry", "-r", action="store_true", help="Force refresh of registry data") - pkg_add_parser.add_argument("--auto-approve", action="store_true", help="Automatically approve changes installation of deps for automation scenario") + pkg_add_parser = pkg_subparsers.add_parser( + "add", help="Add a package to the current environment" + ) + pkg_add_parser.add_argument( + "package_path_or_name", help="Path to package directory or name of the package" + ) + pkg_add_parser.add_argument( + "--env", + "-e", + default=None, + help="Environment name (default: current environment)", + ) + pkg_add_parser.add_argument( + "--version", "-v", default=None, help="Version of the package (optional)" + ) + pkg_add_parser.add_argument( + "--force-download", + "-f", + action="store_true", + help="Force download even if package is in cache", + ) + pkg_add_parser.add_argument( + "--refresh-registry", + "-r", + action="store_true", + help="Force refresh of registry data", + ) + pkg_add_parser.add_argument( + "--auto-approve", + action="store_true", + help="Automatically approve changes installation of deps for automation scenario", + ) + # MCP host configuration integration + pkg_add_parser.add_argument( + "--host", + help="Comma-separated list of MCP host platforms to configure (e.g., 
claude-desktop,cursor)", + ) # Remove package command - pkg_remove_parser = pkg_subparsers.add_parser("remove", help="Remove a package from the current environment") + pkg_remove_parser = pkg_subparsers.add_parser( + "remove", help="Remove a package from the current environment" + ) pkg_remove_parser.add_argument("package_name", help="Name of the package to remove") - pkg_remove_parser.add_argument("--env", "-e", default=None, help="Environment name (default: current environment)") - + pkg_remove_parser.add_argument( + "--env", + "-e", + default=None, + help="Environment name (default: current environment)", + ) + # List packages command - pkg_list_parser = pkg_subparsers.add_parser("list", help="List packages in an environment") - pkg_list_parser.add_argument("--env", "-e", help="Environment name (default: current environment)") + pkg_list_parser = pkg_subparsers.add_parser( + "list", help="List packages in an environment" + ) + pkg_list_parser.add_argument( + "--env", "-e", help="Environment name (default: current environment)" + ) + + # Sync package MCP servers command + pkg_sync_parser = pkg_subparsers.add_parser( + "sync", help="Synchronize package MCP servers to host platforms" + ) + pkg_sync_parser.add_argument( + "package_name", help="Name of the package whose MCP servers to sync" + ) + pkg_sync_parser.add_argument( + "--host", + required=True, + help="Comma-separated list of host platforms to sync to (or 'all')", + ) + pkg_sync_parser.add_argument( + "--env", + "-e", + default=None, + help="Environment name (default: current environment)", + ) + pkg_sync_parser.add_argument( + "--dry-run", action="store_true", help="Preview changes without execution" + ) + pkg_sync_parser.add_argument( + "--auto-approve", action="store_true", help="Skip confirmation prompts" + ) + pkg_sync_parser.add_argument( + "--no-backup", action="store_true", help="Disable default backup behavior" + ) # General arguments for the environment manager - parser.add_argument("--envs-dir", 
default=Path.home() / ".hatch" / "envs", help="Directory to store environments") - parser.add_argument("--cache-ttl", type=int, default=86400, help="Cache TTL in seconds (default: 86400 seconds --> 1 day)") - parser.add_argument("--cache-dir", default=Path.home() / ".hatch" / "cache", help="Directory to store cached packages") - + parser.add_argument( + "--envs-dir", + default=Path.home() / ".hatch" / "envs", + help="Directory to store environments", + ) + parser.add_argument( + "--cache-ttl", + type=int, + default=86400, + help="Cache TTL in seconds (default: 86400 seconds --> 1 day)", + ) + parser.add_argument( + "--cache-dir", + default=Path.home() / ".hatch" / "cache", + help="Directory to store cached packages", + ) + args = parser.parse_args() # Initialize environment manager env_manager = HatchEnvironmentManager( environments_dir=args.envs_dir, cache_ttl=args.cache_ttl, - cache_dir=args.cache_dir + cache_dir=args.cache_dir, ) + # Initialize MCP configuration manager + mcp_manager = MCPHostConfigurationManager() + # Execute commands if args.command == "create": target_dir = Path(args.dir).resolve() package_dir = create_package_template( - target_dir=target_dir, - package_name=args.name, - description=args.description + target_dir=target_dir, package_name=args.name, description=args.description ) print(f"Package template created at: {package_dir}") @@ -169,7 +1858,7 @@ def main(): validator = HatchPackageValidator( version="latest", allow_local_dependencies=True, - registry_data=env_manager.registry_data + registry_data=env_manager.registry_data, ) # Validate the package @@ -184,32 +1873,43 @@ def main(): # Print detailed validation results if available if validation_results and isinstance(validation_results, dict): for category, result in validation_results.items(): - if category != 'valid' and category != 'metadata' and isinstance(result, dict): - if not result.get('valid', True) and result.get('errors'): + if ( + category != "valid" + and category != 
"metadata" + and isinstance(result, dict) + ): + if not result.get("valid", True) and result.get("errors"): print(f"\n{category.replace('_', ' ').title()} errors:") - for error in result['errors']: + for error in result["errors"]: print(f" - {error}") return 1 - + elif args.command == "env": if args.env_command == "create": # Determine whether to create Python environment create_python_env = not args.no_python - python_version = getattr(args, 'python_version', None) - - if env_manager.create_environment(args.name, args.description, - python_version=python_version, - create_python_env=create_python_env, - no_hatch_mcp_server=args.no_hatch_mcp_server, - hatch_mcp_server_tag=args.hatch_mcp_server_tag): + python_version = getattr(args, "python_version", None) + + if env_manager.create_environment( + args.name, + args.description, + python_version=python_version, + create_python_env=create_python_env, + no_hatch_mcp_server=args.no_hatch_mcp_server, + hatch_mcp_server_tag=args.hatch_mcp_server_tag, + ): print(f"Environment created: {args.name}") - + # Show Python environment status if create_python_env and env_manager.is_python_environment_available(): - python_exec = env_manager.python_env_manager.get_python_executable(args.name) + python_exec = env_manager.python_env_manager.get_python_executable( + args.name + ) if python_exec: - python_version_info = env_manager.python_env_manager.get_python_version(args.name) + python_version_info = ( + env_manager.python_env_manager.get_python_version(args.name) + ) print(f"Python environment: {python_exec}") if python_version_info: print(f"Python version: {python_version_info}") @@ -217,12 +1917,12 @@ def main(): print("Python environment creation failed") elif create_python_env: print("Python environment requested but conda/mamba not available") - + return 0 else: print(f"Failed to create environment: {args.name}") return 1 - + elif args.env_command == "remove": if env_manager.remove_environment(args.name): print(f"Environment 
removed: {args.name}") @@ -230,28 +1930,32 @@ def main(): else: print(f"Failed to remove environment: {args.name}") return 1 - + elif args.env_command == "list": environments = env_manager.list_environments() print("Available environments:") - + # Check if conda/mamba is available for status info conda_available = env_manager.is_python_environment_available() - + for env in environments: current_marker = "* " if env.get("is_current") else " " - description = f" - {env.get('description')}" if env.get("description") else "" - + description = ( + f" - {env.get('description')}" if env.get("description") else "" + ) + # Show basic environment info print(f"{current_marker}{env.get('name')}{description}") - + # Show Python environment info if available python_env = env.get("python_environment", False) if python_env: - python_info = env_manager.get_python_environment_info(env.get('name')) + python_info = env_manager.get_python_environment_info( + env.get("name") + ) if python_info: - python_version = python_info.get('python_version', 'Unknown') - conda_env = python_info.get('conda_env_name', 'N/A') + python_version = python_info.get("python_version", "Unknown") + conda_env = python_info.get("conda_env_name", "N/A") print(f" Python: {python_version} (conda: {conda_env})") else: print(f" Python: Configured but unavailable") @@ -259,19 +1963,25 @@ def main(): print(f" Python: Not configured") else: print(f" Python: Conda/mamba not available") - + # Show conda/mamba status if conda_available: manager_info = env_manager.python_env_manager.get_manager_info() print(f"\nPython Environment Manager:") - print(f" Conda executable: {manager_info.get('conda_executable', 'Not found')}") - print(f" Mamba executable: {manager_info.get('mamba_executable', 'Not found')}") - print(f" Preferred manager: {manager_info.get('preferred_manager', 'N/A')}") + print( + f" Conda executable: {manager_info.get('conda_executable', 'Not found')}" + ) + print( + f" Mamba executable: 
{manager_info.get('mamba_executable', 'Not found')}" + ) + print( + f" Preferred manager: {manager_info.get('preferred_manager', 'N/A')}" + ) else: print(f"\nPython Environment Manager: Conda/mamba not available") - + return 0 - + elif args.env_command == "use": if env_manager.set_current_environment(args.name): print(f"Current environment set to: {args.name}") @@ -279,95 +1989,114 @@ def main(): else: print(f"Failed to set environment: {args.name}") return 1 - + elif args.env_command == "current": current_env = env_manager.get_current_environment() print(f"Current environment: {current_env}") return 0 - + elif args.env_command == "python": # Advanced Python environment management if args.python_command == "init": - python_version = getattr(args, 'python_version', None) - force = getattr(args, 'force', False) - no_hatch_mcp_server = getattr(args, 'no_hatch_mcp_server', False) - hatch_mcp_server_tag = getattr(args, 'hatch_mcp_server_tag', None) - + python_version = getattr(args, "python_version", None) + force = getattr(args, "force", False) + no_hatch_mcp_server = getattr(args, "no_hatch_mcp_server", False) + hatch_mcp_server_tag = getattr(args, "hatch_mcp_server_tag", None) + if env_manager.create_python_environment_only( - args.hatch_env, - python_version, + args.hatch_env, + python_version, force, no_hatch_mcp_server=no_hatch_mcp_server, - hatch_mcp_server_tag=hatch_mcp_server_tag + hatch_mcp_server_tag=hatch_mcp_server_tag, ): print(f"Python environment initialized for: {args.hatch_env}") - + # Show Python environment info - python_info = env_manager.get_python_environment_info(args.hatch_env) + python_info = env_manager.get_python_environment_info( + args.hatch_env + ) if python_info: - print(f" Python executable: {python_info['python_executable']}") - print(f" Python version: {python_info.get('python_version', 'Unknown')}") - print(f" Conda environment: {python_info.get('conda_env_name', 'N/A')}") - + print( + f" Python executable: 
{python_info['python_executable']}" + ) + print( + f" Python version: {python_info.get('python_version', 'Unknown')}" + ) + print( + f" Conda environment: {python_info.get('conda_env_name', 'N/A')}" + ) + return 0 else: env_name = args.hatch_env or env_manager.get_current_environment() print(f"Failed to initialize Python environment for: {env_name}") return 1 - + elif args.python_command == "info": - detailed = getattr(args, 'detailed', False) - + detailed = getattr(args, "detailed", False) + python_info = env_manager.get_python_environment_info(args.hatch_env) - + if python_info: env_name = args.hatch_env or env_manager.get_current_environment() print(f"Python environment info for '{env_name}':") - print(f" Status: {'Active' if python_info.get('enabled', False) else 'Inactive'}") + print( + f" Status: {'Active' if python_info.get('enabled', False) else 'Inactive'}" + ) print(f" Python executable: {python_info['python_executable']}") - print(f" Python version: {python_info.get('python_version', 'Unknown')}") - print(f" Conda environment: {python_info.get('conda_env_name', 'N/A')}") + print( + f" Python version: {python_info.get('python_version', 'Unknown')}" + ) + print( + f" Conda environment: {python_info.get('conda_env_name', 'N/A')}" + ) print(f" Environment path: {python_info['environment_path']}") print(f" Created: {python_info.get('created_at', 'Unknown')}") print(f" Package count: {python_info.get('package_count', 0)}") print(f" Packages:") - for pkg in python_info.get('packages', []): + for pkg in python_info.get("packages", []): print(f" - {pkg['name']} ({pkg['version']})") - + if detailed: print(f"\nDiagnostics:") - diagnostics = env_manager.get_python_environment_diagnostics(args.hatch_env) + diagnostics = env_manager.get_python_environment_diagnostics( + args.hatch_env + ) if diagnostics: for key, value in diagnostics.items(): print(f" {key}: {value}") else: print(" No diagnostics available") - + return 0 else: env_name = args.hatch_env or 
env_manager.get_current_environment() print(f"No Python environment found for: {env_name}") - + # Show diagnostics for missing environment if detailed: print("\nDiagnostics:") - general_diagnostics = env_manager.get_python_manager_diagnostics() + general_diagnostics = ( + env_manager.get_python_manager_diagnostics() + ) for key, value in general_diagnostics.items(): print(f" {key}: {value}") - + return 1 - + elif args.python_command == "remove": - force = getattr(args, 'force', False) - + force = getattr(args, "force", False) + if not force: - # Ask for confirmation + # Ask for confirmation using TTY-aware function env_name = args.hatch_env or env_manager.get_current_environment() - response = input(f"Remove Python environment for '{env_name}'? [y/N]: ") - if response.lower() not in ['y', 'yes']: + if not request_confirmation( + f"Remove Python environment for '{env_name}'?" + ): print("Operation cancelled") return 0 - + if env_manager.remove_python_environment_only(args.hatch_env): env_name = args.hatch_env or env_manager.get_current_environment() print(f"Python environment removed from: {env_name}") @@ -376,43 +2105,256 @@ def main(): env_name = args.hatch_env or env_manager.get_current_environment() print(f"Failed to remove Python environment from: {env_name}") return 1 - + elif args.python_command == "shell": - cmd = getattr(args, 'cmd', None) - + cmd = getattr(args, "cmd", None) + if env_manager.launch_python_shell(args.hatch_env, cmd): return 0 else: env_name = args.hatch_env or env_manager.get_current_environment() print(f"Failed to launch Python shell for: {env_name}") return 1 - + elif args.python_command == "add-hatch-mcp": env_name = args.hatch_env or env_manager.get_current_environment() tag = args.tag - + if env_manager.install_mcp_server(env_name, tag): - print(f"hatch_mcp_server wrapper installed successfully in environment: {env_name}") + print( + f"hatch_mcp_server wrapper installed successfully in environment: {env_name}" + ) return 0 else: - 
print(f"Failed to install hatch_mcp_server wrapper in environment: {env_name}") + print( + f"Failed to install hatch_mcp_server wrapper in environment: {env_name}" + ) return 1 - + else: print("Unknown Python environment command") return 1 - - + elif args.command == "package": if args.pkg_command == "add": - if env_manager.add_package_to_environment(args.package_path_or_name, args.env, args.version, - args.force_download, args.refresh_registry, args.auto_approve): + # Add package to environment + if env_manager.add_package_to_environment( + args.package_path_or_name, + args.env, + args.version, + args.force_download, + args.refresh_registry, + args.auto_approve, + ): print(f"Successfully added package: {args.package_path_or_name}") + + # Handle MCP host configuration if requested + if hasattr(args, "host") and args.host: + try: + hosts = parse_host_list(args.host) + env_name = args.env or env_manager.get_current_environment() + + package_name = args.package_path_or_name + package_service = None + + # Check if it's a local package path + pkg_path = Path(args.package_path_or_name) + if pkg_path.exists() and pkg_path.is_dir(): + # Local package - load metadata from directory + with open(pkg_path / "hatch_metadata.json", "r") as f: + metadata = json.load(f) + package_service = PackageService(metadata) + package_name = package_service.get_field("name") + else: + # Registry package - get metadata from environment manager + try: + env_data = env_manager.get_environment_data(env_name) + if env_data: + # Find the package in the environment + for pkg in env_data.packages: + if pkg.name == package_name: + # Create a minimal metadata structure for PackageService + metadata = { + "name": pkg.name, + "version": pkg.version, + "dependencies": {}, # Will be populated if needed + } + package_service = PackageService(metadata) + break + + if package_service is None: + print( + f"Warning: Could not find package '{package_name}' in environment '{env_name}'. 
Skipping dependency analysis." + ) + package_service = None + except Exception as e: + print( + f"Warning: Could not load package metadata for '{package_name}': {e}. Skipping dependency analysis." + ) + package_service = None + + # Get dependency names if we have package service + package_names = [] + if package_service: + # Get Hatch dependencies + dependencies = package_service.get_dependencies() + hatch_deps = dependencies.get("hatch", []) + package_names = [ + dep.get("name") for dep in hatch_deps if dep.get("name") + ] + + # Resolve local dependency paths to actual names + for i in range(len(package_names)): + dep_path = Path(package_names[i]) + if dep_path.exists() and dep_path.is_dir(): + try: + with open( + dep_path / "hatch_metadata.json", "r" + ) as f: + dep_metadata = json.load(f) + dep_service = PackageService(dep_metadata) + package_names[i] = dep_service.get_field("name") + except Exception as e: + print( + f"Warning: Could not resolve dependency path '{package_names[i]}': {e}" + ) + + # Add the main package to the list + package_names.append(package_name) + + # Get MCP server configuration for all packages + server_configs = [ + get_package_mcp_server_config( + env_manager, env_name, pkg_name + ) + for pkg_name in package_names + ] + + print( + f"Configuring MCP server for package '{package_name}' on {len(hosts)} host(s)..." 
+ ) + + # Configure on each host + success_count = 0 + for host in hosts: # 'host', here, is a string + try: + # Convert string to MCPHostType enum + host_type = MCPHostType(host) + host_model_class = HOST_MODEL_REGISTRY.get(host_type) + if not host_model_class: + print( + f"โœ— Error: No model registered for host '{host}'" + ) + continue + + host_success_count = 0 + for i, server_config in enumerate(server_configs): + pkg_name = package_names[i] + try: + # Convert MCPServerConfig to Omni model + # Only include fields that have actual values + omni_config_data = {"name": server_config.name} + if server_config.command is not None: + omni_config_data["command"] = ( + server_config.command + ) + if server_config.args is not None: + omni_config_data["args"] = ( + server_config.args + ) + if server_config.env: + omni_config_data["env"] = server_config.env + if server_config.url is not None: + omni_config_data["url"] = server_config.url + headers = getattr( + server_config, "headers", None + ) + if headers is not None: + omni_config_data["headers"] = headers + + omni_config = MCPServerConfigOmni( + **omni_config_data + ) + + # Convert to host-specific model + host_config = host_model_class.from_omni( + omni_config + ) + + # Generate and display conversion report + report = generate_conversion_report( + operation="create", + server_name=server_config.name, + target_host=host_type, + omni=omni_config, + dry_run=False, + ) + display_report(report) + + result = mcp_manager.configure_server( + hostname=host, + server_config=host_config, + no_backup=False, # Always backup when adding packages + ) + + if result.success: + print( + f"โœ“ Configured {server_config.name} ({pkg_name}) on {host}" + ) + host_success_count += 1 + + # Update package metadata with host configuration tracking + try: + server_config_dict = { + "name": server_config.name, + "command": server_config.command, + "args": server_config.args, + } + + env_manager.update_package_host_configuration( + 
env_name=env_name, + package_name=pkg_name, + hostname=host, + server_config=server_config_dict, + ) + except Exception as e: + # Log but don't fail the configuration operation + print( + f"[WARNING] Failed to update package metadata for {pkg_name}: {e}" + ) + else: + print( + f"โœ— Failed to configure {server_config.name} ({pkg_name}) on {host}: {result.error_message}" + ) + + except Exception as e: + print( + f"โœ— Error configuring {server_config.name} ({pkg_name}) on {host}: {e}" + ) + + if host_success_count == len(server_configs): + success_count += 1 + + except ValueError as e: + print(f"โœ— Invalid host '{host}': {e}") + continue + + if success_count > 0: + print( + f"MCP configuration completed: {success_count}/{len(hosts)} hosts configured" + ) + else: + print("Warning: MCP configuration failed on all hosts") + + except ValueError as e: + print(f"Warning: MCP host configuration failed: {e}") + # Don't fail the entire operation for MCP configuration issues + return 0 else: print(f"Failed to add package: {args.package_path_or_name}") return 1 - + elif args.pkg_command == "remove": if env_manager.remove_package(args.package_name, args.env): print(f"Successfully removed package: {args.package_name}") @@ -420,7 +2362,7 @@ def main(): else: print(f"Failed to remove package: {args.package_name}") return 1 - + elif args.pkg_command == "list": packages = env_manager.list_packages(args.env) @@ -430,17 +2372,377 @@ def main(): print(f"Packages in environment '{args.env}':") for pkg in packages: - print(f"{pkg['name']} ({pkg['version']})\tHatch compliant: {pkg['hatch_compliant']}\tsource: {pkg['source']['uri']}\tlocation: {pkg['source']['path']}") + print( + f"{pkg['name']} ({pkg['version']})\tHatch compliant: {pkg['hatch_compliant']}\tsource: {pkg['source']['uri']}\tlocation: {pkg['source']['path']}" + ) return 0 - + + elif args.pkg_command == "sync": + try: + # Parse host list + hosts = parse_host_list(args.host) + env_name = args.env or 
env_manager.get_current_environment() + + # Get all packages to sync (main package + dependencies) + package_names = [args.package_name] + + # Try to get dependencies for the main package + try: + env_data = env_manager.get_environment_data(env_name) + if env_data: + # Find the main package in the environment + main_package = None + for pkg in env_data.packages: + if pkg.name == args.package_name: + main_package = pkg + break + + if main_package: + # Create a minimal metadata structure for PackageService + metadata = { + "name": main_package.name, + "version": main_package.version, + "dependencies": {}, # Will be populated if needed + } + package_service = PackageService(metadata) + + # Get Hatch dependencies + dependencies = package_service.get_dependencies() + hatch_deps = dependencies.get("hatch", []) + dep_names = [ + dep.get("name") for dep in hatch_deps if dep.get("name") + ] + + # Add dependencies to the sync list (before main package) + package_names = dep_names + [args.package_name] + else: + print( + f"Warning: Package '{args.package_name}' not found in environment '{env_name}'. Syncing only the specified package." + ) + else: + print( + f"Warning: Could not access environment '{env_name}'. Syncing only the specified package." + ) + except Exception as e: + print( + f"Warning: Could not analyze dependencies for '{args.package_name}': {e}. Syncing only the specified package." 
+ ) + + # Get MCP server configurations for all packages + server_configs = [] + for pkg_name in package_names: + try: + config = get_package_mcp_server_config( + env_manager, env_name, pkg_name + ) + server_configs.append((pkg_name, config)) + except Exception as e: + print( + f"Warning: Could not get MCP configuration for package '{pkg_name}': {e}" + ) + + if not server_configs: + print( + f"Error: No MCP server configurations found for package '{args.package_name}' or its dependencies" + ) + return 1 + + if args.dry_run: + print( + f"[DRY RUN] Would synchronize MCP servers for {len(server_configs)} package(s) to hosts: {[h for h in hosts]}" + ) + for pkg_name, config in server_configs: + print( + f"[DRY RUN] - {pkg_name}: {config.name} -> {' '.join(config.args)}" + ) + + # Generate and display conversion reports for dry-run mode + for host in hosts: + try: + host_type = MCPHostType(host) + host_model_class = HOST_MODEL_REGISTRY.get(host_type) + if not host_model_class: + print( + f"[DRY RUN] โœ— Error: No model registered for host '{host}'" + ) + continue + + # Convert to Omni model + # Only include fields that have actual values + omni_config_data = {"name": config.name} + if config.command is not None: + omni_config_data["command"] = config.command + if config.args is not None: + omni_config_data["args"] = config.args + if config.env: + omni_config_data["env"] = config.env + if config.url is not None: + omni_config_data["url"] = config.url + headers = getattr(config, "headers", None) + if headers is not None: + omni_config_data["headers"] = headers + + omni_config = MCPServerConfigOmni(**omni_config_data) + + # Generate report + report = generate_conversion_report( + operation="create", + server_name=config.name, + target_host=host_type, + omni=omni_config, + dry_run=True, + ) + print(f"[DRY RUN] Preview for {pkg_name} on {host}:") + display_report(report) + except ValueError as e: + print(f"[DRY RUN] โœ— Invalid host '{host}': {e}") + return 0 + + # Confirm 
operation unless auto-approved + package_desc = ( + f"package '{args.package_name}'" + if len(server_configs) == 1 + else f"{len(server_configs)} packages ('{args.package_name}' + dependencies)" + ) + if not request_confirmation( + f"Synchronize MCP servers for {package_desc} to {len(hosts)} host(s)?", + args.auto_approve, + ): + print("Operation cancelled.") + return 0 + + # Perform synchronization to each host for all packages + total_operations = len(server_configs) * len(hosts) + success_count = 0 + + for host in hosts: + try: + # Convert string to MCPHostType enum + host_type = MCPHostType(host) + host_model_class = HOST_MODEL_REGISTRY.get(host_type) + if not host_model_class: + print(f"โœ— Error: No model registered for host '{host}'") + continue + + for pkg_name, server_config in server_configs: + try: + # Convert MCPServerConfig to Omni model + # Only include fields that have actual values + omni_config_data = {"name": server_config.name} + if server_config.command is not None: + omni_config_data["command"] = server_config.command + if server_config.args is not None: + omni_config_data["args"] = server_config.args + if server_config.env: + omni_config_data["env"] = server_config.env + if server_config.url is not None: + omni_config_data["url"] = server_config.url + headers = getattr(server_config, "headers", None) + if headers is not None: + omni_config_data["headers"] = headers + + omni_config = MCPServerConfigOmni(**omni_config_data) + + # Convert to host-specific model + host_config = host_model_class.from_omni(omni_config) + + # Generate and display conversion report + report = generate_conversion_report( + operation="create", + server_name=server_config.name, + target_host=host_type, + omni=omni_config, + dry_run=False, + ) + display_report(report) + + result = mcp_manager.configure_server( + hostname=host, + server_config=host_config, + no_backup=args.no_backup, + ) + + if result.success: + print( + f"[SUCCESS] Successfully configured 
{server_config.name} ({pkg_name}) on {host}" + ) + success_count += 1 + + # Update package metadata with host configuration tracking + try: + server_config_dict = { + "name": server_config.name, + "command": server_config.command, + "args": server_config.args, + } + + env_manager.update_package_host_configuration( + env_name=env_name, + package_name=pkg_name, + hostname=host, + server_config=server_config_dict, + ) + except Exception as e: + # Log but don't fail the sync operation + print( + f"[WARNING] Failed to update package metadata for {pkg_name}: {e}" + ) + else: + print( + f"[ERROR] Failed to configure {server_config.name} ({pkg_name}) on {host}: {result.error_message}" + ) + + except Exception as e: + print( + f"[ERROR] Error configuring {server_config.name} ({pkg_name}) on {host}: {e}" + ) + + except ValueError as e: + print(f"โœ— Invalid host '{host}': {e}") + continue + + # Report results + if success_count == total_operations: + package_desc = ( + f"package '{args.package_name}'" + if len(server_configs) == 1 + else f"{len(server_configs)} packages" + ) + print( + f"Successfully synchronized {package_desc} to all {len(hosts)} host(s)" + ) + return 0 + elif success_count > 0: + print( + f"Partially synchronized: {success_count}/{total_operations} operations succeeded" + ) + return 1 + else: + package_desc = ( + f"package '{args.package_name}'" + if len(server_configs) == 1 + else f"{len(server_configs)} packages" + ) + print(f"Failed to synchronize {package_desc} to any hosts") + return 1 + + except ValueError as e: + print(f"Error: {e}") + return 1 + else: parser.print_help() return 1 + + elif args.command == "mcp": + if args.mcp_command == "discover": + if args.discover_command == "hosts": + return handle_mcp_discover_hosts() + elif args.discover_command == "servers": + return handle_mcp_discover_servers(env_manager, args.env) + else: + print("Unknown discover command") + return 1 + + elif args.mcp_command == "list": + if args.list_command == "hosts": 
+ return handle_mcp_list_hosts(env_manager, args.env, args.detailed) + elif args.list_command == "servers": + return handle_mcp_list_servers(env_manager, args.env) + else: + print("Unknown list command") + return 1 + + elif args.mcp_command == "backup": + if args.backup_command == "restore": + return handle_mcp_backup_restore( + env_manager, + args.host, + args.backup_file, + args.dry_run, + args.auto_approve, + ) + elif args.backup_command == "list": + return handle_mcp_backup_list(args.host, args.detailed) + elif args.backup_command == "clean": + return handle_mcp_backup_clean( + args.host, + args.older_than_days, + args.keep_count, + args.dry_run, + args.auto_approve, + ) + else: + print("Unknown backup command") + return 1 + + elif args.mcp_command == "configure": + return handle_mcp_configure( + args.host, + args.server_name, + args.server_command, + args.args, + getattr(args, "env_var", None), + args.url, + args.header, + getattr(args, "timeout", None), + getattr(args, "trust", False), + getattr(args, "cwd", None), + getattr(args, "env_file", None), + getattr(args, "http_url", None), + getattr(args, "include_tools", None), + getattr(args, "exclude_tools", None), + getattr(args, "input", None), + args.no_backup, + args.dry_run, + args.auto_approve, + ) + + elif args.mcp_command == "remove": + if args.remove_command == "server": + return handle_mcp_remove_server( + env_manager, + args.server_name, + args.host, + args.env, + args.no_backup, + args.dry_run, + args.auto_approve, + ) + elif args.remove_command == "host": + return handle_mcp_remove_host( + env_manager, + args.host_name, + args.no_backup, + args.dry_run, + args.auto_approve, + ) + else: + print("Unknown remove command") + return 1 + + elif args.mcp_command == "sync": + return handle_mcp_sync( + from_env=getattr(args, "from_env", None), + from_host=getattr(args, "from_host", None), + to_hosts=args.to_host, + servers=getattr(args, "servers", None), + pattern=getattr(args, "pattern", None), + 
dry_run=args.dry_run, + auto_approve=args.auto_approve, + no_backup=args.no_backup, + ) + + else: + print("Unknown MCP command") + return 1 + else: parser.print_help() return 1 - + return 0 + if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file + sys.exit(main()) diff --git a/hatch/environment_manager.py b/hatch/environment_manager.py index d90275b..585bdc7 100644 --- a/hatch/environment_manager.py +++ b/hatch/environment_manager.py @@ -17,6 +17,7 @@ from hatch.installers.dependency_installation_orchestrator import DependencyInstallerOrchestrator from hatch.installers.installation_context import InstallationContext from hatch.python_environment_manager import PythonEnvironmentManager, PythonEnvironmentError +from hatch.mcp_host_config.models import MCPServerConfig class HatchEnvironmentError(Exception): """Exception raised for environment-related errors.""" @@ -170,6 +171,20 @@ def get_current_environment(self) -> str: def get_current_environment_data(self) -> Dict: """Get the data for the current environment.""" return self._environments[self._current_env_name] + + def get_environment_data(self, env_name: str) -> Dict: + """Get the data for a specific environment. + + Args: + env_name: Name of the environment + + Returns: + Dict: Environment data + + Raises: + KeyError: If environment doesn't exist + """ + return self._environments[env_name] def set_current_environment(self, env_name: str) -> bool: """ @@ -444,10 +459,10 @@ def install_mcp_server(self, env_name: Optional[str] = None, tag: Optional[str] def remove_environment(self, name: str) -> bool: """ Remove an environment. 
- + Args: name: Name of the environment to remove - + Returns: bool: True if removed successfully, False otherwise """ @@ -455,28 +470,62 @@ def remove_environment(self, name: str) -> bool: if name == "default": self.logger.error("Cannot remove default environment") return False - + # Check if environment exists if name not in self._environments: self.logger.warning(f"Environment does not exist: {name}") return False - + # If removing current environment, switch to default if name == self._current_env_name: self.set_current_environment("default") - - # Remove Python environment if it exists + + # Clean up MCP server configurations for all packages in this environment env_data = self._environments[name] + packages = env_data.get("packages", []) + if packages: + self.logger.info(f"Cleaning up MCP server configurations for {len(packages)} packages in environment {name}") + try: + from .mcp_host_config.host_management import MCPHostConfigurationManager + mcp_manager = MCPHostConfigurationManager() + + for pkg in packages: + package_name = pkg.get("name") + configured_hosts = pkg.get("configured_hosts", {}) + + if configured_hosts and package_name: + for hostname in configured_hosts.keys(): + try: + # Remove server from host configuration file + result = mcp_manager.remove_server( + server_name=package_name, # In current 1:1 design, package name = server name + hostname=hostname, + no_backup=False # Create backup for safety + ) + + if result.success: + self.logger.info(f"Removed MCP server '{package_name}' from host '{hostname}' (env removal)") + else: + self.logger.warning(f"Failed to remove MCP server '{package_name}' from host '{hostname}': {result.error_message}") + except Exception as e: + self.logger.warning(f"Error removing MCP server '{package_name}' from host '{hostname}': {e}") + + except ImportError: + self.logger.warning("MCP host configuration manager not available for cleanup") + except Exception as e: + self.logger.warning(f"Error during MCP server 
cleanup for environment removal: {e}") + + # Remove Python environment if it exists if env_data.get("python_environment", False): try: self.python_env_manager.remove_python_environment(name) self.logger.info(f"Removed Python environment for {name}") except PythonEnvironmentError as e: self.logger.warning(f"Failed to remove Python environment: {e}") - + # Remove environment del self._environments[name] - + # Save environments and update cache self._save_environments() self.logger.info(f"Removed environment: {name}") @@ -598,7 +647,273 @@ def _add_package_to_env_data(self, env_name: str, package_name: str, }] self._save_environments() - + + def update_package_host_configuration(self, env_name: str, package_name: str, + hostname: str, server_config: dict) -> bool: + """Update package metadata with host configuration tracking. + + Enforces constraint: Only one environment can control a package-host combination. + Automatically cleans up conflicting configurations from other environments. + + Args: + env_name (str): Environment name + package_name (str): Package name + hostname (str): Host identifier (e.g., 'gemini', 'claude-desktop') + server_config (dict): Server configuration data + + Returns: + bool: True if update successful, False otherwise + """ + try: + if env_name not in self._environments: + self.logger.error(f"Environment {env_name} does not exist") + return False + + # Step 1: Clean up conflicting configurations from other environments + conflicts_removed = self._cleanup_package_host_conflicts( + target_env=env_name, + package_name=package_name, + hostname=hostname + ) + + # Step 2: Update target environment configuration + success = self._update_target_environment_configuration( + env_name, package_name, hostname, server_config + ) + + # Step 3: User notification for conflict resolution + if conflicts_removed > 0 and success: + self.logger.warning( + f"Package '{package_name}' host configuration for '{hostname}' " + f"transferred from {conflicts_removed} 
other environment(s) to '{env_name}'" + ) + + return success + + except Exception as e: + self.logger.error(f"Failed to update package host configuration: {e}") + return False + + def _cleanup_package_host_conflicts(self, target_env: str, package_name: str, hostname: str) -> int: + """Remove conflicting package-host configurations from other environments. + + This method enforces the constraint that only one environment can control + a package-host combination by removing conflicting configurations from + all environments except the target environment. + + Args: + target_env (str): Environment that should control the configuration + package_name (str): Package name + hostname (str): Host identifier + + Returns: + int: Number of conflicting configurations removed + """ + conflicts_removed = 0 + + for env_name, env_data in self._environments.items(): + if env_name == target_env: + continue # Skip target environment + + packages = env_data.get("packages", []) + for i, pkg in enumerate(packages): + if pkg.get("name") == package_name: + configured_hosts = pkg.get("configured_hosts", {}) + if hostname in configured_hosts: + # Remove the conflicting host configuration + del configured_hosts[hostname] + conflicts_removed += 1 + + # Update package metadata + pkg["configured_hosts"] = configured_hosts + self._environments[env_name]["packages"][i] = pkg + + self.logger.info( + f"Removed conflicting '{hostname}' configuration for package '{package_name}' " + f"from environment '{env_name}'" + ) + + if conflicts_removed > 0: + self._save_environments() + + return conflicts_removed + + def _update_target_environment_configuration(self, env_name: str, package_name: str, + hostname: str, server_config: dict) -> bool: + """Update the target environment's package host configuration. + + This method handles the actual configuration update for the target environment + after conflicts have been cleaned up. 
+ + Args: + env_name (str): Environment name + package_name (str): Package name + hostname (str): Host identifier + server_config (dict): Server configuration data + + Returns: + bool: True if update successful, False otherwise + """ + # Find the package in the environment + packages = self._environments[env_name].get("packages", []) + for i, pkg in enumerate(packages): + if pkg.get("name") == package_name: + # Initialize configured_hosts if it doesn't exist + if "configured_hosts" not in pkg: + pkg["configured_hosts"] = {} + + # Add or update host configuration + from datetime import datetime + pkg["configured_hosts"][hostname] = { + "config_path": self._get_host_config_path(hostname), + "configured_at": datetime.now().isoformat(), + "last_synced": datetime.now().isoformat(), + "server_config": server_config + } + + # Update the package in the environment + self._environments[env_name]["packages"][i] = pkg + self._save_environments() + + self.logger.info(f"Updated host configuration for package {package_name} on {hostname}") + return True + + self.logger.error(f"Package {package_name} not found in environment {env_name}") + return False + + def remove_package_host_configuration(self, env_name: str, package_name: str, hostname: str) -> bool: + """Remove host configuration tracking for a specific package. 
+ + Args: + env_name: Environment name + package_name: Package name (maps to server name in current 1:1 design) + hostname: Host identifier to remove + + Returns: + bool: True if removal occurred, False if package/host not found + """ + try: + if env_name not in self._environments: + self.logger.warning(f"Environment {env_name} does not exist") + return False + + packages = self._environments[env_name].get("packages", []) + for pkg in packages: + if pkg.get("name") == package_name: + configured_hosts = pkg.get("configured_hosts", {}) + if hostname in configured_hosts: + del configured_hosts[hostname] + self._save_environments() + self.logger.info(f"Removed host {hostname} from package {package_name} in env {env_name}") + return True + + return False + + except Exception as e: + self.logger.error(f"Failed to remove package host configuration: {e}") + return False + + def clear_host_from_all_packages_all_envs(self, hostname: str) -> int: + """Remove host from all packages across all environments. + + Args: + hostname: Host identifier to remove globally + + Returns: + int: Number of package entries updated + """ + updates_count = 0 + + try: + for env_name, env_data in self._environments.items(): + packages = env_data.get("packages", []) + for pkg in packages: + configured_hosts = pkg.get("configured_hosts", {}) + if hostname in configured_hosts: + del configured_hosts[hostname] + updates_count += 1 + self.logger.info(f"Removed host {hostname} from package {pkg.get('name')} in env {env_name}") + + if updates_count > 0: + self._save_environments() + + return updates_count + + except Exception as e: + self.logger.error(f"Failed to clear host from all packages: {e}") + return 0 + + def apply_restored_host_configuration_to_environments(self, hostname: str, restored_servers: Dict[str, MCPServerConfig]) -> int: + """Update environment tracking to match restored host configuration. 
+ + Args: + hostname: Host that was restored + restored_servers: Dict mapping server_name -> server_config from restored host file + + Returns: + int: Number of package entries updated across all environments + """ + updates_count = 0 + + try: + from datetime import datetime + current_time = datetime.now().isoformat() + + for env_name, env_data in self._environments.items(): + packages = env_data.get("packages", []) + for pkg in packages: + package_name = pkg.get("name") + configured_hosts = pkg.get("configured_hosts", {}) + + # Check if this package corresponds to a restored server + if package_name in restored_servers: + # Server exists in restored config - ensure tracking exists and is current + server_config = restored_servers[package_name] + configured_hosts[hostname] = { + "config_path": self._get_host_config_path(hostname), + "configured_at": configured_hosts.get(hostname, {}).get("configured_at", current_time), + "last_synced": current_time, + "server_config": server_config.model_dump(exclude_none=True) + } + updates_count += 1 + self.logger.info(f"Updated host {hostname} tracking for package {package_name} in env {env_name}") + + elif hostname in configured_hosts: + # Server not in restored config but was previously tracked - remove stale tracking + del configured_hosts[hostname] + updates_count += 1 + self.logger.info(f"Removed stale host {hostname} tracking for package {package_name} in env {env_name}") + + if updates_count > 0: + self._save_environments() + + return updates_count + + except Exception as e: + self.logger.error(f"Failed to apply restored host configuration: {e}") + return 0 + + def _get_host_config_path(self, hostname: str) -> str: + """Get configuration file path for a host. 
+ + Args: + hostname (str): Host identifier + + Returns: + str: Configuration file path + """ + # Map hostnames to their typical config paths + host_config_paths = { + 'gemini': '~/.gemini/settings.json', + 'claude-desktop': '~/.claude/claude_desktop_config.json', + 'claude-code': '.claude/mcp_config.json', + 'vscode': '.vscode/settings.json', + 'cursor': '~/.cursor/mcp.json', + 'lmstudio': '~/.lmstudio/mcp.json' + } + + return host_config_paths.get(hostname, f'~/.{hostname}/config.json') + def get_environment_path(self, env_name: str) -> Path: """ Get the path to the environment directory. @@ -658,11 +973,11 @@ def list_packages(self, env_name: Optional[str] = None) -> List[Dict]: def remove_package(self, package_name: str, env_name: Optional[str] = None) -> bool: """ Remove a package from an environment. - + Args: package_name: Name of the package to remove env_name: Environment to remove from (uses current if None) - + Returns: bool: True if successful, False otherwise """ @@ -670,19 +985,50 @@ def remove_package(self, package_name: str, env_name: Optional[str] = None) -> b if not self.environment_exists(env_name): self.logger.error(f"Environment {env_name} does not exist") return False - + # Check if package exists in environment env_packages = self._environments[env_name].get("packages", []) pkg_index = None + package_to_remove = None for i, pkg in enumerate(env_packages): if pkg.get("name") == package_name: pkg_index = i + package_to_remove = pkg break - + if pkg_index is None: self.logger.warning(f"Package {package_name} not found in environment {env_name}") return False - + + # Clean up MCP server configurations from all configured hosts + configured_hosts = package_to_remove.get("configured_hosts", {}) + if configured_hosts: + self.logger.info(f"Cleaning up MCP server configurations for package {package_name}") + try: + from .mcp_host_config.host_management import MCPHostConfigurationManager + mcp_manager = MCPHostConfigurationManager() + + for hostname in 
configured_hosts.keys(): + try: + # Remove server from host configuration file + result = mcp_manager.remove_server( + server_name=package_name, # In current 1:1 design, package name = server name + hostname=hostname, + no_backup=False # Create backup for safety + ) + + if result.success: + self.logger.info(f"Removed MCP server '{package_name}' from host '{hostname}'") + else: + self.logger.warning(f"Failed to remove MCP server '{package_name}' from host '{hostname}': {result.error_message}") + except Exception as e: + self.logger.warning(f"Error removing MCP server '{package_name}' from host '{hostname}': {e}") + + except ImportError: + self.logger.warning("MCP host configuration manager not available for cleanup") + except Exception as e: + self.logger.warning(f"Error during MCP server cleanup: {e}") + # Remove package from filesystem pkg_path = self.get_environment_path(env_name) / package_name try: @@ -692,11 +1038,11 @@ def remove_package(self, package_name: str, env_name: Optional[str] = None) -> b except Exception as e: self.logger.error(f"Failed to remove package files for {package_name}: {e}") return False - + # Remove package from environment data env_packages.pop(pkg_index) self._save_environments() - + self.logger.info(f"Removed package {package_name} from environment {env_name}") return True diff --git a/hatch/mcp_host_config/__init__.py b/hatch/mcp_host_config/__init__.py new file mode 100644 index 0000000..03c8178 --- /dev/null +++ b/hatch/mcp_host_config/__init__.py @@ -0,0 +1,38 @@ +"""MCP (Model Context Protocol) support for Hatch. + +This module provides MCP host configuration management functionality, +including backup and restore capabilities for MCP server configurations, +decorator-based strategy registration, and consolidated Pydantic models. 
+""" + +from .backup import MCPHostConfigBackupManager +from .models import ( + MCPHostType, MCPServerConfig, HostConfiguration, EnvironmentData, + PackageHostConfiguration, EnvironmentPackageEntry, ConfigurationResult, SyncResult, + # Host-specific configuration models + MCPServerConfigBase, MCPServerConfigGemini, MCPServerConfigVSCode, + MCPServerConfigCursor, MCPServerConfigClaude, MCPServerConfigOmni, + HOST_MODEL_REGISTRY +) +from .host_management import ( + MCPHostRegistry, MCPHostStrategy, MCPHostConfigurationManager, register_host_strategy +) +from .reporting import ( + FieldOperation, ConversionReport, generate_conversion_report, display_report +) + +# Import strategies to trigger decorator registration +from . import strategies + +__all__ = [ + 'MCPHostConfigBackupManager', + 'MCPHostType', 'MCPServerConfig', 'HostConfiguration', 'EnvironmentData', + 'PackageHostConfiguration', 'EnvironmentPackageEntry', 'ConfigurationResult', 'SyncResult', + # Host-specific configuration models + 'MCPServerConfigBase', 'MCPServerConfigGemini', 'MCPServerConfigVSCode', + 'MCPServerConfigCursor', 'MCPServerConfigClaude', 'MCPServerConfigOmni', + 'HOST_MODEL_REGISTRY', + # User feedback reporting + 'FieldOperation', 'ConversionReport', 'generate_conversion_report', 'display_report', + 'MCPHostRegistry', 'MCPHostStrategy', 'MCPHostConfigurationManager', 'register_host_strategy' +] diff --git a/hatch/mcp_host_config/backup.py b/hatch/mcp_host_config/backup.py new file mode 100644 index 0000000..bd4f0f8 --- /dev/null +++ b/hatch/mcp_host_config/backup.py @@ -0,0 +1,458 @@ +"""MCP host configuration backup system. + +This module provides comprehensive backup and restore functionality for MCP +host configuration files with atomic operations and Pydantic data validation. 
+""" + +import json +import shutil +import tempfile +from datetime import datetime +from pathlib import Path +from typing import Dict, List, Optional, Any + +from pydantic import BaseModel, Field, validator + + +class BackupError(Exception): + """Exception raised when backup operations fail.""" + pass + + +class RestoreError(Exception): + """Exception raised when restore operations fail.""" + pass + + +class BackupInfo(BaseModel): + """Information about a backup file with validation.""" + hostname: str = Field(..., description="Host identifier") + timestamp: datetime = Field(..., description="Backup creation timestamp") + file_path: Path = Field(..., description="Path to backup file") + file_size: int = Field(..., ge=0, description="Backup file size in bytes") + original_config_path: Path = Field(..., description="Original configuration file path") + + @validator('hostname') + def validate_hostname(cls, v): + """Validate hostname is supported.""" + supported_hosts = { + 'claude-desktop', 'claude-code', 'vscode', + 'cursor', 'lmstudio', 'gemini' + } + if v not in supported_hosts: + raise ValueError(f"Unsupported hostname: {v}. 
Supported: {supported_hosts}") + return v + + @validator('file_path') + def validate_file_exists(cls, v): + """Validate backup file exists.""" + if not v.exists(): + raise ValueError(f"Backup file does not exist: {v}") + return v + + @property + def backup_name(self) -> str: + """Get backup filename.""" + return f"mcp.json.{self.hostname}.{self.timestamp.strftime('%Y%m%d_%H%M%S_%f')}" + + @property + def age_days(self) -> int: + """Get backup age in days.""" + return (datetime.now() - self.timestamp).days + + class Config: + """Pydantic configuration.""" + arbitrary_types_allowed = True + json_encoders = { + Path: str, + datetime: lambda v: v.isoformat() + } + + +class BackupResult(BaseModel): + """Result of backup operation with validation.""" + success: bool = Field(..., description="Operation success status") + backup_path: Optional[Path] = Field(None, description="Path to created backup") + error_message: Optional[str] = Field(None, description="Error message if failed") + original_size: int = Field(0, ge=0, description="Original file size in bytes") + backup_size: int = Field(0, ge=0, description="Backup file size in bytes") + + @validator('backup_path') + def validate_backup_path_on_success(cls, v, values): + """Validate backup_path is provided when success is True.""" + if values.get('success') and v is None: + raise ValueError("backup_path must be provided when success is True") + return v + + @validator('error_message') + def validate_error_message_on_failure(cls, v, values): + """Validate error_message is provided when success is False.""" + if not values.get('success') and not v: + raise ValueError("error_message must be provided when success is False") + return v + + class Config: + """Pydantic configuration.""" + arbitrary_types_allowed = True + json_encoders = { + Path: str + } + + +class AtomicFileOperations: + """Atomic file operations for safe configuration updates.""" + + def atomic_write_with_backup(self, file_path: Path, data: Dict[str, Any], + 
backup_manager: "MCPHostConfigBackupManager", + hostname: str, skip_backup: bool = False) -> bool: + """Atomic write with automatic backup creation. + + Args: + file_path (Path): Target file path for writing + data (Dict[str, Any]): Data to write as JSON + backup_manager (MCPHostConfigBackupManager): Backup manager instance + hostname (str): Host identifier for backup + skip_backup (bool, optional): Skip backup creation. Defaults to False. + + Returns: + bool: True if operation successful, False otherwise + + Raises: + BackupError: If backup creation fails and skip_backup is False + """ + # Create backup if file exists and backup not skipped + backup_result = None + if file_path.exists() and not skip_backup: + backup_result = backup_manager.create_backup(file_path, hostname) + if not backup_result.success: + raise BackupError(f"Required backup failed: {backup_result.error_message}") + + # Create temporary file for atomic write + temp_file = None + try: + # Write to temporary file first + temp_file = file_path.with_suffix(f"{file_path.suffix}.tmp") + with open(temp_file, 'w', encoding='utf-8') as f: + json.dump(data, f, indent=2, ensure_ascii=False) + + # Atomic move to target location + temp_file.replace(file_path) + return True + + except Exception as e: + # Clean up temporary file on failure + if temp_file and temp_file.exists(): + temp_file.unlink() + + # Restore from backup if available + if backup_result and backup_result.backup_path: + try: + backup_manager.restore_backup(hostname, backup_result.backup_path.name) + except Exception: + pass # Log but don't raise - original error is more important + + raise BackupError(f"Atomic write failed: {str(e)}") + + def atomic_copy(self, source: Path, target: Path) -> bool: + """Atomic file copy operation. 
+ + Args: + source (Path): Source file path + target (Path): Target file path + + Returns: + bool: True if copy successful, False otherwise + """ + try: + # Create temporary target file + temp_target = target.with_suffix(f"{target.suffix}.tmp") + + # Copy to temporary location + shutil.copy2(source, temp_target) + + # Atomic move to final location + temp_target.replace(target) + return True + + except Exception: + # Clean up temporary file on failure + temp_target = target.with_suffix(f"{target.suffix}.tmp") + if temp_target.exists(): + temp_target.unlink() + return False + + +class MCPHostConfigBackupManager: + """Manages MCP host configuration backups.""" + + def __init__(self, backup_root: Optional[Path] = None): + """Initialize backup manager. + + Args: + backup_root (Path, optional): Root directory for backups. + Defaults to ~/.hatch/mcp_host_config_backups/ + """ + self.backup_root = backup_root or Path.home() / ".hatch" / "mcp_host_config_backups" + self.backup_root.mkdir(parents=True, exist_ok=True) + self.atomic_ops = AtomicFileOperations() + + def create_backup(self, config_path: Path, hostname: str) -> BackupResult: + """Create timestamped backup of host configuration. 
+ + Args: + config_path (Path): Path to original configuration file + hostname (str): Host identifier (claude-desktop, claude-code, vscode, cursor, lmstudio, gemini) + + Returns: + BackupResult: Operation result with backup path or error message + """ + try: + # Validate inputs + if not config_path.exists(): + return BackupResult( + success=False, + error_message=f"Configuration file not found: {config_path}" + ) + + # Validate hostname using Pydantic + try: + BackupInfo.validate_hostname(hostname) + except ValueError as e: + return BackupResult( + success=False, + error_message=str(e) + ) + + # Create host-specific backup directory + host_backup_dir = self.backup_root / hostname + host_backup_dir.mkdir(exist_ok=True) + + # Generate timestamped backup filename with microseconds for uniqueness + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f") + backup_name = f"mcp.json.{hostname}.{timestamp}" + backup_path = host_backup_dir / backup_name + + # Get original file size + original_size = config_path.stat().st_size + + # Atomic copy operation + if not self.atomic_ops.atomic_copy(config_path, backup_path): + return BackupResult( + success=False, + error_message="Atomic copy operation failed" + ) + + # Verify backup integrity + backup_size = backup_path.stat().st_size + if backup_size != original_size: + backup_path.unlink() + return BackupResult( + success=False, + error_message="Backup size mismatch - backup deleted" + ) + + return BackupResult( + success=True, + backup_path=backup_path, + original_size=original_size, + backup_size=backup_size + ) + + except Exception as e: + return BackupResult( + success=False, + error_message=f"Backup creation failed: {str(e)}" + ) + + def restore_backup(self, hostname: str, backup_file: Optional[str] = None) -> bool: + """Restore configuration from backup. + + Args: + hostname (str): Host identifier + backup_file (str, optional): Specific backup file name. Defaults to latest. 
+ + Returns: + bool: True if restoration successful, False otherwise + """ + try: + # Get backup file path + if backup_file: + backup_path = self.backup_root / hostname / backup_file + else: + backup_path = self._get_latest_backup(hostname) + + if not backup_path or not backup_path.exists(): + return False + + # Get target configuration path using host registry + from .host_management import MCPHostRegistry + from .models import MCPHostType + + try: + host_type = MCPHostType(hostname) + target_path = MCPHostRegistry.get_host_config_path(host_type) + + if not target_path: + return False + + # Ensure target directory exists + target_path.parent.mkdir(parents=True, exist_ok=True) + + # Perform atomic restore operation + return self.atomic_ops.atomic_copy(backup_path, target_path) + + except ValueError: + # Invalid hostname + return False + + except Exception: + return False + + def list_backups(self, hostname: str) -> List[BackupInfo]: + """List available backups for hostname. + + Args: + hostname (str): Host identifier + + Returns: + List[BackupInfo]: List of backup information objects + """ + host_backup_dir = self.backup_root / hostname + + if not host_backup_dir.exists(): + return [] + + backups = [] + + # Search for both correct format and legacy incorrect format for backward compatibility + patterns = [ + f"mcp.json.{hostname}.*", # Correct format: mcp.json.gemini.* + f"mcp.json.MCPHostType.{hostname.upper()}.*" # Legacy incorrect format: mcp.json.MCPHostType.GEMINI.* + ] + + for pattern in patterns: + for backup_file in host_backup_dir.glob(pattern): + try: + # Parse timestamp from filename + timestamp_str = backup_file.name.split('.')[-1] + timestamp = datetime.strptime(timestamp_str, "%Y%m%d_%H%M%S_%f") + + backup_info = BackupInfo( + hostname=hostname, + timestamp=timestamp, + file_path=backup_file, + file_size=backup_file.stat().st_size, + original_config_path=Path("placeholder") # Will be implemented in host config phase + ) + backups.append(backup_info) + 
+ except (ValueError, OSError): + # Skip invalid backup files + continue + + # Sort by timestamp (newest first) + return sorted(backups, key=lambda b: b.timestamp, reverse=True) + + def clean_backups(self, hostname: str, **filters) -> int: + """Clean old backups based on filters. + + Args: + hostname (str): Host identifier + **filters: Filter criteria (e.g., older_than_days, keep_count) + + Returns: + int: Number of backups cleaned + """ + backups = self.list_backups(hostname) + cleaned_count = 0 + + # Apply filters + older_than_days = filters.get('older_than_days') + keep_count = filters.get('keep_count') + + if older_than_days: + for backup in backups: + if backup.age_days > older_than_days: + try: + backup.file_path.unlink() + cleaned_count += 1 + except OSError: + continue + + if keep_count and len(backups) > keep_count: + # Keep newest backups, remove oldest + to_remove = backups[keep_count:] + for backup in to_remove: + try: + backup.file_path.unlink() + cleaned_count += 1 + except OSError: + continue + + return cleaned_count + + def _get_latest_backup(self, hostname: str) -> Optional[Path]: + """Get path to latest backup for hostname. + + Args: + hostname (str): Host identifier + + Returns: + Optional[Path]: Path to latest backup or None if no backups exist + """ + backups = self.list_backups(hostname) + return backups[0].file_path if backups else None + + +class BackupAwareOperation: + """Base class for operations that require backup awareness.""" + + def __init__(self, backup_manager: MCPHostConfigBackupManager): + """Initialize backup-aware operation. + + Args: + backup_manager (MCPHostConfigBackupManager): Backup manager instance + """ + self.backup_manager = backup_manager + + def prepare_backup(self, config_path: Path, hostname: str, + no_backup: bool = False) -> Optional[BackupResult]: + """Prepare backup before operation if required. 
+ + Args: + config_path (Path): Path to configuration file + hostname (str): Host identifier + no_backup (bool, optional): Skip backup creation. Defaults to False. + + Returns: + Optional[BackupResult]: BackupResult if backup created, None if skipped + + Raises: + BackupError: If backup required but fails + """ + if no_backup: + return None + + backup_result = self.backup_manager.create_backup(config_path, hostname) + if not backup_result.success: + raise BackupError(f"Required backup failed: {backup_result.error_message}") + + return backup_result + + def rollback_on_failure(self, backup_result: Optional[BackupResult], + config_path: Path, hostname: str) -> bool: + """Rollback configuration on operation failure. + + Args: + backup_result (Optional[BackupResult]): Result from prepare_backup + config_path (Path): Path to configuration file + hostname (str): Host identifier + + Returns: + bool: True if rollback successful, False otherwise + """ + if backup_result and backup_result.backup_path: + return self.backup_manager.restore_backup( + hostname, backup_result.backup_path.name + ) + return False diff --git a/hatch/mcp_host_config/host_management.py b/hatch/mcp_host_config/host_management.py new file mode 100644 index 0000000..56c8b5b --- /dev/null +++ b/hatch/mcp_host_config/host_management.py @@ -0,0 +1,572 @@ +""" +MCP host configuration management with decorator-based strategy registration. + +This module provides the core host management infrastructure including +decorator-based strategy registration following Hatchling patterns, +host registry, and configuration manager with consolidated model support. 
+""" + +from typing import Dict, List, Type, Optional, Callable, Any +from pathlib import Path +import json +import logging + +from .models import ( + MCPHostType, MCPServerConfig, HostConfiguration, EnvironmentData, + ConfigurationResult, SyncResult +) + +logger = logging.getLogger(__name__) + + +class MCPHostRegistry: + """Registry for MCP host strategies with decorator-based registration.""" + + _strategies: Dict[MCPHostType, Type["MCPHostStrategy"]] = {} + _instances: Dict[MCPHostType, "MCPHostStrategy"] = {} + _family_mappings: Dict[str, List[MCPHostType]] = { + "claude": [MCPHostType.CLAUDE_DESKTOP, MCPHostType.CLAUDE_CODE], + "cursor": [MCPHostType.CURSOR, MCPHostType.LMSTUDIO] + } + + @classmethod + def register(cls, host_type: MCPHostType): + """Decorator to register a host strategy class.""" + def decorator(strategy_class: Type["MCPHostStrategy"]): + if not issubclass(strategy_class, MCPHostStrategy): + raise ValueError(f"Strategy class {strategy_class.__name__} must inherit from MCPHostStrategy") + + if host_type in cls._strategies: + logger.warning(f"Overriding existing strategy for {host_type}: {cls._strategies[host_type].__name__} -> {strategy_class.__name__}") + + cls._strategies[host_type] = strategy_class + logger.debug(f"Registered MCP host strategy '{host_type}' -> {strategy_class.__name__}") + return strategy_class + return decorator + + @classmethod + def get_strategy(cls, host_type: MCPHostType) -> "MCPHostStrategy": + """Get strategy instance for host type.""" + if host_type not in cls._strategies: + available = list(cls._strategies.keys()) + raise ValueError(f"Unknown host type: '{host_type}'. 
Available: {available}") + + if host_type not in cls._instances: + cls._instances[host_type] = cls._strategies[host_type]() + + return cls._instances[host_type] + + @classmethod + def detect_available_hosts(cls) -> List[MCPHostType]: + """Detect available hosts on the system.""" + available_hosts = [] + for host_type, strategy_class in cls._strategies.items(): + try: + strategy = cls.get_strategy(host_type) + if strategy.is_host_available(): + available_hosts.append(host_type) + except Exception: + # Host detection failed, skip + continue + return available_hosts + + @classmethod + def get_family_hosts(cls, family: str) -> List[MCPHostType]: + """Get all hosts in a strategy family.""" + return cls._family_mappings.get(family, []) + + @classmethod + def get_host_config_path(cls, host_type: MCPHostType) -> Optional[Path]: + """Get configuration path for host type.""" + strategy = cls.get_strategy(host_type) + return strategy.get_config_path() + + +def register_host_strategy(host_type: MCPHostType) -> Callable: + """Convenience decorator for registering host strategies.""" + return MCPHostRegistry.register(host_type) + + +class MCPHostStrategy: + """Abstract base class for host configuration strategies.""" + + def get_config_path(self) -> Optional[Path]: + """Get configuration file path for this host.""" + raise NotImplementedError("Subclasses must implement get_config_path") + + def is_host_available(self) -> bool: + """Check if host is available on system.""" + raise NotImplementedError("Subclasses must implement is_host_available") + + def read_configuration(self) -> HostConfiguration: + """Read and parse host configuration.""" + raise NotImplementedError("Subclasses must implement read_configuration") + + def write_configuration(self, config: HostConfiguration, + no_backup: bool = False) -> bool: + """Write configuration to host file.""" + raise NotImplementedError("Subclasses must implement write_configuration") + + def validate_server_config(self, server_config: 
MCPServerConfig) -> bool: + """Validate server configuration for this host.""" + raise NotImplementedError("Subclasses must implement validate_server_config") + + def get_config_key(self) -> str: + """Get the root configuration key for MCP servers.""" + return "mcpServers" # Default for most platforms + + +class MCPHostConfigurationManager: + """Central manager for MCP host configuration operations.""" + + def __init__(self, backup_manager: Optional[Any] = None): + self.host_registry = MCPHostRegistry + self.backup_manager = backup_manager or self._create_default_backup_manager() + + def _create_default_backup_manager(self): + """Create default backup manager.""" + try: + from .backup import MCPHostConfigBackupManager + return MCPHostConfigBackupManager() + except ImportError: + logger.warning("Backup manager not available") + return None + + def configure_server(self, server_config: MCPServerConfig, + hostname: str, no_backup: bool = False) -> ConfigurationResult: + """Configure MCP server on specified host.""" + try: + host_type = MCPHostType(hostname) + strategy = self.host_registry.get_strategy(host_type) + + # Validate server configuration for this host + if not strategy.validate_server_config(server_config): + return ConfigurationResult( + success=False, + hostname=hostname, + error_message=f"Server configuration invalid for {hostname}" + ) + + # Read current configuration + current_config = strategy.read_configuration() + + # Create backup if requested + backup_path = None + if not no_backup and self.backup_manager: + config_path = strategy.get_config_path() + if config_path and config_path.exists(): + backup_result = self.backup_manager.create_backup(config_path, hostname) + if backup_result.success: + backup_path = backup_result.backup_path + + # Add server to configuration + server_name = getattr(server_config, 'name', 'default_server') + current_config.add_server(server_name, server_config) + + # Write updated configuration + success = 
strategy.write_configuration(current_config, no_backup=no_backup) + + return ConfigurationResult( + success=success, + hostname=hostname, + server_name=server_name, + backup_created=backup_path is not None, + backup_path=backup_path + ) + + except Exception as e: + return ConfigurationResult( + success=False, + hostname=hostname, + error_message=str(e) + ) + + def get_server_config(self, hostname: str, server_name: str) -> Optional[MCPServerConfig]: + """ + Get existing server configuration from host. + + Args: + hostname: The MCP host to query (e.g., 'claude-desktop', 'cursor') + server_name: Name of the server to retrieve + + Returns: + MCPServerConfig if server exists, None otherwise + """ + try: + host_type = MCPHostType(hostname) + strategy = self.host_registry.get_strategy(host_type) + current_config = strategy.read_configuration() + + if server_name in current_config.servers: + return current_config.servers[server_name] + return None + + except Exception as e: + logger.debug(f"Failed to retrieve server config for {server_name} on {hostname}: {e}") + return None + + def remove_server(self, server_name: str, hostname: str, + no_backup: bool = False) -> ConfigurationResult: + """Remove MCP server from specified host.""" + try: + host_type = MCPHostType(hostname) + strategy = self.host_registry.get_strategy(host_type) + + # Read current configuration + current_config = strategy.read_configuration() + + # Check if server exists + if server_name not in current_config.servers: + return ConfigurationResult( + success=False, + hostname=hostname, + server_name=server_name, + error_message=f"Server '{server_name}' not found in {hostname} configuration" + ) + + # Create backup if requested + backup_path = None + if not no_backup and self.backup_manager: + config_path = strategy.get_config_path() + if config_path and config_path.exists(): + backup_result = self.backup_manager.create_backup(config_path, hostname) + if backup_result.success: + backup_path = 
backup_result.backup_path + + # Remove server from configuration + current_config.remove_server(server_name) + + # Write updated configuration + success = strategy.write_configuration(current_config, no_backup=no_backup) + + return ConfigurationResult( + success=success, + hostname=hostname, + server_name=server_name, + backup_created=backup_path is not None, + backup_path=backup_path + ) + + except Exception as e: + return ConfigurationResult( + success=False, + hostname=hostname, + server_name=server_name, + error_message=str(e) + ) + + def sync_environment_to_hosts(self, env_data: EnvironmentData, + target_hosts: Optional[List[str]] = None, + no_backup: bool = False) -> SyncResult: + """Synchronize environment MCP data to host configurations.""" + if target_hosts is None: + target_hosts = [host.value for host in self.host_registry.detect_available_hosts()] + + results = [] + servers_synced = 0 + + for hostname in target_hosts: + try: + host_type = MCPHostType(hostname) + strategy = self.host_registry.get_strategy(host_type) + + # Collect all MCP servers for this host from environment + host_servers = {} + for package in env_data.get_mcp_packages(): + if hostname in package.configured_hosts: + host_config = package.configured_hosts[hostname] + # Use package name as server name (single server per package) + host_servers[package.name] = host_config.server_config + + if not host_servers: + # No servers to sync for this host + results.append(ConfigurationResult( + success=True, + hostname=hostname, + error_message="No servers to sync" + )) + continue + + # Read current host configuration + current_config = strategy.read_configuration() + + # Create backup if requested + backup_path = None + if not no_backup and self.backup_manager: + config_path = strategy.get_config_path() + if config_path and config_path.exists(): + backup_result = self.backup_manager.create_backup(config_path, hostname) + if backup_result.success: + backup_path = backup_result.backup_path + + # 
Update configuration with environment servers + for server_name, server_config in host_servers.items(): + current_config.add_server(server_name, server_config) + servers_synced += 1 + + # Write updated configuration + success = strategy.write_configuration(current_config, no_backup=no_backup) + + results.append(ConfigurationResult( + success=success, + hostname=hostname, + backup_created=backup_path is not None, + backup_path=backup_path + )) + + except Exception as e: + results.append(ConfigurationResult( + success=False, + hostname=hostname, + error_message=str(e) + )) + + # Calculate summary statistics + successful_results = [r for r in results if r.success] + hosts_updated = len(successful_results) + + return SyncResult( + success=hosts_updated > 0, + results=results, + servers_synced=servers_synced, + hosts_updated=hosts_updated + ) + + def remove_host_configuration(self, hostname: str, no_backup: bool = False) -> ConfigurationResult: + """Remove entire host configuration (all MCP servers). + + Args: + hostname (str): Host identifier + no_backup (bool, optional): Skip backup creation. Defaults to False. 
+ + Returns: + ConfigurationResult: Result of the removal operation + """ + try: + host_type = MCPHostType(hostname) + strategy = self.host_registry.get_strategy(host_type) + config_path = strategy.get_config_path() + + if not config_path or not config_path.exists(): + return ConfigurationResult( + success=True, + hostname=hostname, + error_message="No configuration file to remove" + ) + + # Create backup if requested + backup_path = None + if not no_backup and self.backup_manager: + backup_result = self.backup_manager.create_backup(config_path, hostname) + if backup_result.success: + backup_path = backup_result.backup_path + + # Remove configuration + # Create Empty HostConfiguration + empty_config = HostConfiguration() + strategy.write_configuration(empty_config, no_backup=no_backup) + + return ConfigurationResult( + success=True, + hostname=hostname, + backup_created=backup_path is not None, + backup_path=backup_path + ) + + except Exception as e: + return ConfigurationResult( + success=False, + hostname=hostname, + error_message=str(e) + ) + + def sync_configurations(self, + from_env: Optional[str] = None, + from_host: Optional[str] = None, + to_hosts: Optional[List[str]] = None, + servers: Optional[List[str]] = None, + pattern: Optional[str] = None, + no_backup: bool = False) -> SyncResult: + """Advanced synchronization with multiple source/target options. + + Args: + from_env (str, optional): Source environment name + from_host (str, optional): Source host name + to_hosts (List[str], optional): Target host names + servers (List[str], optional): Specific server names to sync + pattern (str, optional): Regex pattern for server selection + no_backup (bool, optional): Skip backup creation. Defaults to False. 
+ + Returns: + SyncResult: Result of the synchronization operation + + Raises: + ValueError: If source specification is invalid + """ + import re + from hatch.environment_manager import HatchEnvironmentManager + + # Validate source specification + if not from_env and not from_host: + raise ValueError("Must specify either from_env or from_host as source") + if from_env and from_host: + raise ValueError("Cannot specify both from_env and from_host as source") + + # Default to all available hosts if no targets specified + if not to_hosts: + to_hosts = [host.value for host in self.host_registry.detect_available_hosts()] + + try: + # Resolve source data + if from_env: + # Get environment data + env_manager = HatchEnvironmentManager() + env_data = env_manager.get_environment_data(from_env) + if not env_data: + return SyncResult( + success=False, + results=[ConfigurationResult( + success=False, + hostname="", + error_message=f"Environment '{from_env}' not found" + )], + servers_synced=0, + hosts_updated=0 + ) + + # Extract servers from environment + source_servers = {} + for package in env_data.get_mcp_packages(): + # Use package name as server name (single server per package) + source_servers[package.name] = package.configured_hosts + + else: # from_host + # Read host configuration + try: + host_type = MCPHostType(from_host) + strategy = self.host_registry.get_strategy(host_type) + host_config = strategy.read_configuration() + + # Extract servers from host configuration + source_servers = {} + for server_name, server_config in host_config.servers.items(): + source_servers[server_name] = { + from_host: {"server_config": server_config} + } + + except ValueError: + return SyncResult( + success=False, + results=[ConfigurationResult( + success=False, + hostname="", + error_message=f"Invalid source host '{from_host}'" + )], + servers_synced=0, + hosts_updated=0 + ) + + # Apply server filtering + if servers: + # Filter by specific server names + filtered_servers = {name: config 
for name, config in source_servers.items() + if name in servers} + source_servers = filtered_servers + elif pattern: + # Filter by regex pattern + regex = re.compile(pattern) + filtered_servers = {name: config for name, config in source_servers.items() + if regex.match(name)} + source_servers = filtered_servers + + # Apply synchronization to target hosts + results = [] + servers_synced = 0 + + for target_host in to_hosts: + try: + host_type = MCPHostType(target_host) + strategy = self.host_registry.get_strategy(host_type) + + # Read current target configuration + current_config = strategy.read_configuration() + + # Create backup if requested + backup_path = None + if not no_backup and self.backup_manager: + config_path = strategy.get_config_path() + if config_path and config_path.exists(): + backup_result = self.backup_manager.create_backup(config_path, target_host) + if backup_result.success: + backup_path = backup_result.backup_path + + # Add servers to target configuration + host_servers_added = 0 + for server_name, server_hosts in source_servers.items(): + # Find appropriate server config for this target host + server_config = None + + if from_env: + # For environment source, look for host-specific config + if target_host in server_hosts: + server_config = server_hosts[target_host]["server_config"] + elif "claude-desktop" in server_hosts: + # Fallback to claude-desktop config for compatibility + server_config = server_hosts["claude-desktop"]["server_config"] + else: + # For host source, use the server config directly + if from_host in server_hosts: + server_config = server_hosts[from_host]["server_config"] + + if server_config: + current_config.add_server(server_name, server_config) + host_servers_added += 1 + + # Write updated configuration + success = strategy.write_configuration(current_config, no_backup=no_backup) + + results.append(ConfigurationResult( + success=success, + hostname=target_host, + backup_created=backup_path is not None, + 
backup_path=backup_path + )) + + if success: + servers_synced += host_servers_added + + except ValueError: + results.append(ConfigurationResult( + success=False, + hostname=target_host, + error_message=f"Invalid target host '{target_host}'" + )) + except Exception as e: + results.append(ConfigurationResult( + success=False, + hostname=target_host, + error_message=str(e) + )) + + # Calculate summary statistics + successful_results = [r for r in results if r.success] + hosts_updated = len(successful_results) + + return SyncResult( + success=hosts_updated > 0, + results=results, + servers_synced=servers_synced, + hosts_updated=hosts_updated + ) + + except Exception as e: + return SyncResult( + success=False, + results=[ConfigurationResult( + success=False, + hostname="", + error_message=f"Synchronization failed: {str(e)}" + )], + servers_synced=0, + hosts_updated=0 + ) diff --git a/hatch/mcp_host_config/models.py b/hatch/mcp_host_config/models.py new file mode 100644 index 0000000..b265370 --- /dev/null +++ b/hatch/mcp_host_config/models.py @@ -0,0 +1,602 @@ +""" +Consolidated Pydantic models for MCP host configuration management. + +This module provides the core data models for MCP server configuration, +environment data structures, and host configuration management following +the v2 design specification with consolidated MCPServerConfig model. 
+""" + +from pydantic import BaseModel, Field, field_validator, model_validator, ConfigDict +from typing import Dict, List, Optional, Union, Literal +from datetime import datetime +from pathlib import Path +from enum import Enum +import logging + +logger = logging.getLogger(__name__) + + +class MCPHostType(str, Enum): + """Enumeration of supported MCP host types.""" + CLAUDE_DESKTOP = "claude-desktop" + CLAUDE_CODE = "claude-code" + VSCODE = "vscode" + CURSOR = "cursor" + LMSTUDIO = "lmstudio" + GEMINI = "gemini" + + +class MCPServerConfig(BaseModel): + """Consolidated MCP server configuration supporting local and remote servers.""" + + model_config = ConfigDict(extra="allow") + + # Server identification + name: Optional[str] = Field(None, description="Server name for identification") + + # Transport type (PRIMARY DISCRIMINATOR) + type: Optional[Literal["stdio", "sse", "http"]] = Field( + None, + description="Transport type (stdio for local, sse/http for remote)" + ) + + # Local server configuration (Pattern A: Command-Based / stdio transport) + command: Optional[str] = Field(None, description="Executable path/name for local servers") + args: Optional[List[str]] = Field(None, description="Command arguments for local servers") + env: Optional[Dict[str, str]] = Field(None, description="Environment variables for all transports") + + # Remote server configuration (Pattern B: URL-Based / sse/http transports) + url: Optional[str] = Field(None, description="Server endpoint URL for remote servers") + headers: Optional[Dict[str, str]] = Field(None, description="HTTP headers for remote servers") + + @model_validator(mode='after') + def validate_server_type(self): + """Validate that either local or remote configuration is provided, not both.""" + command = self.command + url = self.url + + if not command and not url: + raise ValueError("Either 'command' (local server) or 'url' (remote server) must be provided") + + if command and url: + raise ValueError("Cannot specify both 
'command' and 'url' - choose local or remote server") + + return self + + @field_validator('command') + @classmethod + def validate_command_not_empty(cls, v): + """Validate command is not empty when provided.""" + if v is not None and not v.strip(): + raise ValueError("Command cannot be empty") + return v.strip() if v else v + + @field_validator('url') + @classmethod + def validate_url_format(cls, v): + """Validate URL format when provided.""" + if v is not None: + if not v.startswith(('http://', 'https://')): + raise ValueError("URL must start with http:// or https://") + return v + + @model_validator(mode='after') + def validate_field_combinations(self): + """Validate field combinations for local vs remote servers.""" + # Validate args are only provided with command + if self.args is not None and self.command is None: + raise ValueError("'args' can only be specified with 'command' for local servers") + + # Validate headers are only provided with URL + if self.headers is not None and self.url is None: + raise ValueError("'headers' can only be specified with 'url' for remote servers") + + return self + + @model_validator(mode='after') + def validate_type_field(self): + """Validate type field consistency with command/url fields.""" + # Only validate if type field is explicitly set + if self.type is not None: + if self.type == "stdio": + if not self.command: + raise ValueError("'type=stdio' requires 'command' field") + if self.url: + raise ValueError("'type=stdio' cannot be used with 'url' field") + elif self.type in ("sse", "http"): + if not self.url: + raise ValueError(f"'type={self.type}' requires 'url' field") + if self.command: + raise ValueError(f"'type={self.type}' cannot be used with 'command' field") + + return self + + @property + def is_local_server(self) -> bool: + """Check if this is a local server configuration.""" + # Prioritize type field if present + if self.type is not None: + return self.type == "stdio" + # Fall back to command detection for 
backward compatibility + return self.command is not None + + @property + def is_remote_server(self) -> bool: + """Check if this is a remote server configuration.""" + # Prioritize type field if present + if self.type is not None: + return self.type in ("sse", "http") + # Fall back to url detection for backward compatibility + return self.url is not None + + + + +class HostConfigurationMetadata(BaseModel): + """Metadata for host configuration tracking.""" + config_path: str = Field(..., description="Path to host configuration file") + configured_at: datetime = Field(..., description="Initial configuration timestamp") + last_synced: datetime = Field(..., description="Last synchronization timestamp") + + @field_validator('config_path') + @classmethod + def validate_config_path_not_empty(cls, v): + """Validate config path is not empty.""" + if not v.strip(): + raise ValueError("Config path cannot be empty") + return v.strip() + + +class PackageHostConfiguration(BaseModel): + """Host configuration for a single package (corrected structure).""" + config_path: str = Field(..., description="Path to host configuration file") + configured_at: datetime = Field(..., description="Initial configuration timestamp") + last_synced: datetime = Field(..., description="Last synchronization timestamp") + server_config: MCPServerConfig = Field(..., description="Server configuration for this host") + + @field_validator('config_path') + @classmethod + def validate_config_path_format(cls, v): + """Validate config path format.""" + if not v.strip(): + raise ValueError("Config path cannot be empty") + return v.strip() + + +class EnvironmentPackageEntry(BaseModel): + """Package entry within environment with corrected MCP structure.""" + name: str = Field(..., description="Package name") + version: str = Field(..., description="Package version") + type: str = Field(..., description="Package type (hatch, mcp_standalone, etc.)") + source: str = Field(..., description="Package source") + 
installed_at: datetime = Field(..., description="Installation timestamp") + configured_hosts: Dict[str, PackageHostConfiguration] = Field( + default_factory=dict, + description="Host configurations for this package's MCP server" + ) + + @field_validator('name') + @classmethod + def validate_package_name(cls, v): + """Validate package name format.""" + if not v.strip(): + raise ValueError("Package name cannot be empty") + # Allow standard package naming patterns + if not v.replace('-', '').replace('_', '').replace('.', '').isalnum(): + raise ValueError(f"Invalid package name format: {v}") + return v.strip() + + @field_validator('configured_hosts') + @classmethod + def validate_host_names(cls, v): + """Validate host names are supported.""" + supported_hosts = { + 'claude-desktop', 'claude-code', 'vscode', + 'cursor', 'lmstudio', 'gemini' + } + for host_name in v.keys(): + if host_name not in supported_hosts: + raise ValueError(f"Unsupported host: {host_name}. Supported: {supported_hosts}") + return v + + +class EnvironmentData(BaseModel): + """Complete environment data structure with corrected MCP integration.""" + name: str = Field(..., description="Environment name") + description: str = Field(..., description="Environment description") + created_at: datetime = Field(..., description="Environment creation timestamp") + packages: List[EnvironmentPackageEntry] = Field( + default_factory=list, + description="Packages installed in this environment" + ) + python_environment: bool = Field(True, description="Whether this is a Python environment") + python_env: Dict = Field(default_factory=dict, description="Python environment data") + + @field_validator('name') + @classmethod + def validate_environment_name(cls, v): + """Validate environment name format.""" + if not v.strip(): + raise ValueError("Environment name cannot be empty") + return v.strip() + + def get_mcp_packages(self) -> List[EnvironmentPackageEntry]: + """Get packages that have MCP server configurations.""" + 
return [pkg for pkg in self.packages if pkg.configured_hosts] + + def get_standalone_mcp_package(self) -> Optional[EnvironmentPackageEntry]: + """Get the standalone MCP servers package if it exists.""" + for pkg in self.packages: + if pkg.name == "__standalone_mcp_servers__": + return pkg + return None + + def add_standalone_mcp_server(self, server_name: str, host_config: PackageHostConfiguration): + """Add a standalone MCP server configuration.""" + standalone_pkg = self.get_standalone_mcp_package() + + if standalone_pkg is None: + # Create standalone package entry + standalone_pkg = EnvironmentPackageEntry( + name="__standalone_mcp_servers__", + version="1.0.0", + type="mcp_standalone", + source="user_configured", + installed_at=datetime.now(), + configured_hosts={} + ) + self.packages.append(standalone_pkg) + + # Add host configuration (single server per package constraint) + for host_name, config in host_config.items(): + standalone_pkg.configured_hosts[host_name] = config + + +class HostConfiguration(BaseModel): + """Host configuration file structure using consolidated MCPServerConfig.""" + servers: Dict[str, MCPServerConfig] = Field( + default_factory=dict, + description="Configured MCP servers" + ) + + @field_validator('servers') + @classmethod + def validate_servers_not_empty_when_present(cls, v): + """Validate servers dict structure.""" + for server_name, config in v.items(): + if not isinstance(config, (dict, MCPServerConfig)): + raise ValueError(f"Invalid server config for {server_name}") + return v + + def add_server(self, name: str, config: MCPServerConfig): + """Add server configuration.""" + self.servers[name] = config + + def remove_server(self, name: str) -> bool: + """Remove server configuration.""" + if name in self.servers: + del self.servers[name] + return True + return False + + class Config: + """Pydantic configuration.""" + arbitrary_types_allowed = True + extra = "allow" # Allow additional host-specific fields + + +class 
ConfigurationResult(BaseModel): + """Result of a configuration operation.""" + success: bool = Field(..., description="Whether operation succeeded") + hostname: str = Field(..., description="Target hostname") + server_name: Optional[str] = Field(None, description="Server name if applicable") + backup_created: bool = Field(False, description="Whether backup was created") + backup_path: Optional[Path] = Field(None, description="Path to backup file") + error_message: Optional[str] = Field(None, description="Error message if failed") + + @model_validator(mode='after') + def validate_result_consistency(self): + """Validate result consistency.""" + if not self.success and not self.error_message: + raise ValueError("Error message required when success=False") + + return self + + +class SyncResult(BaseModel): + """Result of environment synchronization operation.""" + success: bool = Field(..., description="Whether overall sync succeeded") + results: List[ConfigurationResult] = Field(..., description="Individual host results") + servers_synced: int = Field(..., description="Total servers synchronized") + hosts_updated: int = Field(..., description="Number of hosts updated") + + @property + def failed_hosts(self) -> List[str]: + """Get list of hosts that failed synchronization.""" + return [r.hostname for r in self.results if not r.success] + + @property + def success_rate(self) -> float: + """Calculate success rate percentage.""" + if not self.results: + return 0.0 + successful = len([r for r in self.results if r.success]) + return (successful / len(self.results)) * 100.0 + + +# ============================================================================ +# MCP Host-Specific Configuration Models +# ============================================================================ + + +class MCPServerConfigBase(BaseModel): + """Base class for MCP server configurations with universal fields. 
+ + This model contains fields supported by ALL MCP hosts and provides + transport validation logic. Host-specific models inherit from this base. + """ + + model_config = ConfigDict(extra="forbid") + + # Hatch-specific field + name: Optional[str] = Field(None, description="Server name for identification") + + # Transport type (PRIMARY DISCRIMINATOR) + type: Optional[Literal["stdio", "sse", "http"]] = Field( + None, + description="Transport type (stdio for local, sse/http for remote)" + ) + + # stdio transport fields + command: Optional[str] = Field(None, description="Server executable command") + args: Optional[List[str]] = Field(None, description="Command arguments") + + # All transports + env: Optional[Dict[str, str]] = Field(None, description="Environment variables") + + # Remote transport fields (sse/http) + url: Optional[str] = Field(None, description="Remote server endpoint") + headers: Optional[Dict[str, str]] = Field(None, description="HTTP headers") + + @model_validator(mode='after') + def validate_transport(self) -> 'MCPServerConfigBase': + """Validate transport configuration using type field. + + Note: Gemini subclass overrides this with dual-transport support. 
+ """ + # Skip validation for Gemini which has its own dual-transport validator + if self.__class__.__name__ == 'MCPServerConfigGemini': + return self + + # Check mutual exclusion - command and url cannot both be set + if self.command is not None and self.url is not None: + raise ValueError( + "Cannot specify both 'command' and 'url' - use 'type' field to specify transport" + ) + + # Validate based on type + if self.type == "stdio": + if not self.command: + raise ValueError("'command' is required for stdio transport") + elif self.type in ("sse", "http"): + if not self.url: + raise ValueError("'url' is required for sse/http transports") + elif self.type is None: + # Infer type from fields if not specified + if self.command: + self.type = "stdio" + elif self.url: + self.type = "sse" # default to sse for remote + else: + raise ValueError("Either 'command' or 'url' must be provided") + + return self + + +class MCPServerConfigGemini(MCPServerConfigBase): + """Gemini CLI-specific MCP server configuration. + + Extends base model with Gemini-specific fields including working directory, + timeout, trust mode, tool filtering, and OAuth configuration. 
+ """ + + # Gemini-specific fields + cwd: Optional[str] = Field(None, description="Working directory for stdio transport") + timeout: Optional[int] = Field(None, description="Request timeout in milliseconds") + trust: Optional[bool] = Field(None, description="Bypass tool call confirmations") + httpUrl: Optional[str] = Field(None, description="HTTP streaming endpoint URL") + includeTools: Optional[List[str]] = Field(None, description="Tools to include (allowlist)") + excludeTools: Optional[List[str]] = Field(None, description="Tools to exclude (blocklist)") + + # OAuth configuration (simplified - nested object would be better but keeping flat for now) + oauth_enabled: Optional[bool] = Field(None, description="Enable OAuth for this server") + oauth_clientId: Optional[str] = Field(None, description="OAuth client identifier") + oauth_clientSecret: Optional[str] = Field(None, description="OAuth client secret") + oauth_authorizationUrl: Optional[str] = Field(None, description="OAuth authorization endpoint") + oauth_tokenUrl: Optional[str] = Field(None, description="OAuth token endpoint") + oauth_scopes: Optional[List[str]] = Field(None, description="Required OAuth scopes") + oauth_redirectUri: Optional[str] = Field(None, description="Custom redirect URI") + oauth_tokenParamName: Optional[str] = Field(None, description="Query parameter name for tokens") + oauth_audiences: Optional[List[str]] = Field(None, description="OAuth audiences") + authProviderType: Optional[str] = Field(None, description="Authentication provider type") + + @model_validator(mode='after') + def validate_gemini_dual_transport(self): + """Override transport validation to support Gemini's dual-transport capability. + + Gemini supports both: + - SSE transport with 'url' field + - HTTP transport with 'httpUrl' field + + Validates that: + 1. Either url or httpUrl is provided (not both) + 2. 
Type field matches the transport being used + """ + # Check if both url and httpUrl are provided + if self.url is not None and self.httpUrl is not None: + raise ValueError("Cannot specify both 'url' and 'httpUrl' - choose one transport") + + # Validate based on type + if self.type == "stdio": + if not self.command: + raise ValueError("'command' is required for stdio transport") + elif self.type == "sse": + if not self.url: + raise ValueError("'url' is required for sse transport") + elif self.type == "http": + if not self.httpUrl: + raise ValueError("'httpUrl' is required for http transport") + elif self.type is None: + # Infer type from fields if not specified + if self.command: + self.type = "stdio" + elif self.url: + self.type = "sse" # default to sse for url + elif self.httpUrl: + self.type = "http" # http for httpUrl + else: + raise ValueError("Either 'command', 'url', or 'httpUrl' must be provided") + + return self + + @classmethod + def from_omni(cls, omni: 'MCPServerConfigOmni') -> 'MCPServerConfigGemini': + """Convert Omni model to Gemini-specific model using Pydantic APIs.""" + # Get supported fields dynamically from model definition + supported_fields = set(cls.model_fields.keys()) + + # Use Pydantic's model_dump with include and exclude_unset + gemini_data = omni.model_dump(include=supported_fields, exclude_unset=True) + + # Use Pydantic's model_validate for type-safe creation + return cls.model_validate(gemini_data) + + +class MCPServerConfigVSCode(MCPServerConfigBase): + """VS Code-specific MCP server configuration. + + Extends base model with VS Code-specific fields including environment file + path and input variable definitions. 
+ """ + + # VS Code-specific fields + envFile: Optional[str] = Field(None, description="Path to environment file") + inputs: Optional[List[Dict]] = Field(None, description="Input variable definitions") + + @classmethod + def from_omni(cls, omni: 'MCPServerConfigOmni') -> 'MCPServerConfigVSCode': + """Convert Omni model to VS Code-specific model.""" + # Get supported fields dynamically + supported_fields = set(cls.model_fields.keys()) + + # Single-call field filtering + vscode_data = omni.model_dump(include=supported_fields, exclude_unset=True) + + return cls.model_validate(vscode_data) + + +class MCPServerConfigCursor(MCPServerConfigBase): + """Cursor/LM Studio-specific MCP server configuration. + + Extends base model with Cursor-specific fields including environment file path. + Cursor handles config interpolation (${env:NAME}, ${userHome}, etc.) at runtime. + """ + + # Cursor-specific fields + envFile: Optional[str] = Field(None, description="Path to environment file") + + @classmethod + def from_omni(cls, omni: 'MCPServerConfigOmni') -> 'MCPServerConfigCursor': + """Convert Omni model to Cursor-specific model.""" + # Get supported fields dynamically + supported_fields = set(cls.model_fields.keys()) + + # Single-call field filtering + cursor_data = omni.model_dump(include=supported_fields, exclude_unset=True) + + return cls.model_validate(cursor_data) + + +class MCPServerConfigClaude(MCPServerConfigBase): + """Claude Desktop/Code-specific MCP server configuration. + + Uses only universal fields from base model. Supports all transport types + (stdio, sse, http). Claude handles environment variable expansion at runtime. 
+ """ + + # No host-specific fields - uses universal fields only + + @classmethod + def from_omni(cls, omni: 'MCPServerConfigOmni') -> 'MCPServerConfigClaude': + """Convert Omni model to Claude-specific model.""" + # Get supported fields dynamically + supported_fields = set(cls.model_fields.keys()) + + # Single-call field filtering + claude_data = omni.model_dump(include=supported_fields, exclude_unset=True) + + return cls.model_validate(claude_data) + + +class MCPServerConfigOmni(BaseModel): + """Omni configuration supporting all host-specific fields. + + This is the primary API interface for MCP server configuration. It contains + all possible fields from all hosts. Use host-specific models' from_omni() + methods to convert to host-specific configurations. + """ + + model_config = ConfigDict(extra="forbid") + + # Hatch-specific + name: Optional[str] = None + + # Universal fields (all hosts) + type: Optional[Literal["stdio", "sse", "http"]] = None + command: Optional[str] = None + args: Optional[List[str]] = None + env: Optional[Dict[str, str]] = None + url: Optional[str] = None + headers: Optional[Dict[str, str]] = None + + # Gemini CLI specific + cwd: Optional[str] = None + timeout: Optional[int] = None + trust: Optional[bool] = None + httpUrl: Optional[str] = None + includeTools: Optional[List[str]] = None + excludeTools: Optional[List[str]] = None + oauth_enabled: Optional[bool] = None + oauth_clientId: Optional[str] = None + oauth_clientSecret: Optional[str] = None + oauth_authorizationUrl: Optional[str] = None + oauth_tokenUrl: Optional[str] = None + oauth_scopes: Optional[List[str]] = None + oauth_redirectUri: Optional[str] = None + oauth_tokenParamName: Optional[str] = None + oauth_audiences: Optional[List[str]] = None + authProviderType: Optional[str] = None + + # VS Code specific + envFile: Optional[str] = None + inputs: Optional[List[Dict]] = None + + @field_validator('url') + @classmethod + def validate_url_format(cls, v): + """Validate URL format when 
provided.""" + if v is not None: + if not v.startswith(('http://', 'https://')): + raise ValueError("URL must start with http:// or https://") + return v + + +# HOST_MODEL_REGISTRY: Dictionary dispatch for host-specific models +HOST_MODEL_REGISTRY: Dict[MCPHostType, type[MCPServerConfigBase]] = { + MCPHostType.GEMINI: MCPServerConfigGemini, + MCPHostType.CLAUDE_DESKTOP: MCPServerConfigClaude, + MCPHostType.CLAUDE_CODE: MCPServerConfigClaude, # Same as CLAUDE_DESKTOP + MCPHostType.VSCODE: MCPServerConfigVSCode, + MCPHostType.CURSOR: MCPServerConfigCursor, + MCPHostType.LMSTUDIO: MCPServerConfigCursor, # Same as CURSOR +} diff --git a/hatch/mcp_host_config/reporting.py b/hatch/mcp_host_config/reporting.py new file mode 100644 index 0000000..2710a05 --- /dev/null +++ b/hatch/mcp_host_config/reporting.py @@ -0,0 +1,181 @@ +""" +User feedback reporting system for MCP configuration operations. + +This module provides models and functions for generating and displaying +user-friendly reports about MCP configuration changes, including field-level +operations and conversion summaries. +""" + +from typing import Literal, Optional, Any, List +from pydantic import BaseModel, ConfigDict + +from .models import MCPServerConfigOmni, MCPHostType, HOST_MODEL_REGISTRY + + +class FieldOperation(BaseModel): + """Single field operation in a conversion. + + Represents a single field-level change during MCP configuration conversion, + including the operation type (UPDATED, UNSUPPORTED, UNCHANGED) and values. + """ + + field_name: str + operation: Literal["UPDATED", "UNSUPPORTED", "UNCHANGED"] + old_value: Optional[Any] = None + new_value: Optional[Any] = None + + def __str__(self) -> str: + """Return formatted string representation for console output. + + Uses ASCII arrow (-->) for terminal compatibility instead of Unicode. 
+ """ + if self.operation == "UPDATED": + return f"{self.field_name}: UPDATED {repr(self.old_value)} --> {repr(self.new_value)}" + elif self.operation == "UNSUPPORTED": + return f"{self.field_name}: UNSUPPORTED" + elif self.operation == "UNCHANGED": + return f"{self.field_name}: UNCHANGED {repr(self.new_value)}" + return f"{self.field_name}: {self.operation}" + + +class ConversionReport(BaseModel): + """Complete conversion report for a configuration operation. + + Contains metadata about the operation (create, update, delete, migrate) + and a list of field-level operations that occurred during conversion. + """ + + model_config = ConfigDict(validate_assignment=False) + + operation: Literal["create", "update", "delete", "migrate"] + server_name: str + source_host: Optional[MCPHostType] = None + target_host: MCPHostType + success: bool = True + error_message: Optional[str] = None + field_operations: List[FieldOperation] = [] + dry_run: bool = False + + +def generate_conversion_report( + operation: Literal["create", "update", "delete", "migrate"], + server_name: str, + target_host: MCPHostType, + omni: MCPServerConfigOmni, + source_host: Optional[MCPHostType] = None, + old_config: Optional[MCPServerConfigOmni] = None, + dry_run: bool = False +) -> ConversionReport: + """Generate conversion report for a configuration operation. + + Analyzes the conversion from Omni model to host-specific configuration, + identifying which fields were updated, which are unsupported, and which + remained unchanged. 
+ + Args: + operation: Type of operation being performed + server_name: Name of the server being configured + target_host: Target host for the configuration (MCPHostType enum) + omni: New/updated configuration (Omni model) + source_host: Source host (for migrate operation, MCPHostType enum) + old_config: Existing configuration (for update operation) + dry_run: Whether this is a dry-run preview + + Returns: + ConversionReport with field-level operations + """ + # Derive supported fields dynamically from model class + model_class = HOST_MODEL_REGISTRY[target_host] + supported_fields = set(model_class.model_fields.keys()) + + field_operations = [] + set_fields = omni.model_dump(exclude_unset=True) + + for field_name, new_value in set_fields.items(): + if field_name in supported_fields: + # Field is supported by target host + if old_config: + # Update operation - check if field changed + old_fields = old_config.model_dump(exclude_unset=True) + if field_name in old_fields: + old_value = old_fields[field_name] + if old_value != new_value: + # Field was modified + field_operations.append(FieldOperation( + field_name=field_name, + operation="UPDATED", + old_value=old_value, + new_value=new_value + )) + else: + # Field unchanged + field_operations.append(FieldOperation( + field_name=field_name, + operation="UNCHANGED", + new_value=new_value + )) + else: + # Field was added + field_operations.append(FieldOperation( + field_name=field_name, + operation="UPDATED", + old_value=None, + new_value=new_value + )) + else: + # Create operation - all fields are new + field_operations.append(FieldOperation( + field_name=field_name, + operation="UPDATED", + old_value=None, + new_value=new_value + )) + else: + # Field is not supported by target host + field_operations.append(FieldOperation( + field_name=field_name, + operation="UNSUPPORTED", + new_value=new_value + )) + + return ConversionReport( + operation=operation, + server_name=server_name, + source_host=source_host, + 
target_host=target_host, + field_operations=field_operations, + dry_run=dry_run + ) + + +def display_report(report: ConversionReport) -> None: + """Display conversion report to console. + + Prints a formatted report showing the operation performed and all + field-level changes. Uses FieldOperation.__str__() for consistent + formatting. + + Args: + report: ConversionReport to display + """ + # Header + if report.dry_run: + print(f"[DRY RUN] Preview of changes for server '{report.server_name}':") + else: + if report.operation == "create": + print(f"Server '{report.server_name}' created for host '{report.target_host.value}':") + elif report.operation == "update": + print(f"Server '{report.server_name}' updated for host '{report.target_host.value}':") + elif report.operation == "migrate": + print(f"Server '{report.server_name}' migrated from '{report.source_host.value}' to '{report.target_host.value}':") + elif report.operation == "delete": + print(f"Server '{report.server_name}' deleted from host '{report.target_host.value}':") + + # Field operations + for field_op in report.field_operations: + print(f" {field_op}") + + # Footer + if report.dry_run: + print("\nNo changes were made.") + diff --git a/hatch/mcp_host_config/strategies.py b/hatch/mcp_host_config/strategies.py new file mode 100644 index 0000000..bb63035 --- /dev/null +++ b/hatch/mcp_host_config/strategies.py @@ -0,0 +1,513 @@ +""" +MCP host strategy implementations with decorator-based registration. + +This module provides concrete implementations of host strategies for all +supported MCP hosts including Claude family, Cursor family, and independent +strategies with decorator registration following Hatchling patterns. 
+""" + +import platform +import json +from pathlib import Path +from typing import Optional, Dict, Any +import logging + +from .host_management import MCPHostStrategy, register_host_strategy +from .models import MCPHostType, MCPServerConfig, HostConfiguration + +logger = logging.getLogger(__name__) + + +class ClaudeHostStrategy(MCPHostStrategy): + """Base strategy for Claude family hosts with shared patterns.""" + + def __init__(self): + self.company_origin = "Anthropic" + self.config_format = "claude_format" + + def get_config_key(self) -> str: + """Claude family uses 'mcpServers' key.""" + return "mcpServers" + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """Claude family validation - accepts any valid command or URL. + + Claude Desktop accepts both absolute and relative paths for commands. + Commands are resolved at runtime using the system PATH, similar to + how shell commands work. This validation only checks that either a + command or URL is provided, not the path format. + """ + # Accept local servers (command-based) + if server_config.command: + return True + # Accept remote servers (URL-based) + if server_config.url: + return True + # Reject if neither command nor URL is provided + return False + + def _preserve_claude_settings(self, existing_config: Dict, new_servers: Dict) -> Dict: + """Preserve Claude-specific settings when updating configuration.""" + # Preserve non-MCP settings like theme, auto_update, etc. 
+ preserved_config = existing_config.copy() + preserved_config[self.get_config_key()] = new_servers + return preserved_config + + def read_configuration(self) -> HostConfiguration: + """Read Claude configuration file.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + + # Extract MCP servers from Claude configuration + mcp_servers = config_data.get(self.get_config_key(), {}) + + # Convert to MCPServerConfig objects + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + continue + + return HostConfiguration(servers=servers) + + except Exception as e: + logger.error(f"Failed to read Claude configuration: {e}") + return HostConfiguration() + + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write Claude configuration file.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + # Ensure parent directory exists + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing configuration to preserve non-MCP settings + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass # Start with empty config if read fails + + # Convert MCPServerConfig objects to dict + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Preserve Claude-specific settings + updated_config = self._preserve_claude_settings(existing_config, servers_dict) + + # Write atomically + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(updated_config, f, indent=2) + + temp_path.replace(config_path) + 
return True + + except Exception as e: + logger.error(f"Failed to write Claude configuration: {e}") + return False + + +@register_host_strategy(MCPHostType.CLAUDE_DESKTOP) +class ClaudeDesktopStrategy(ClaudeHostStrategy): + """Configuration strategy for Claude Desktop.""" + + def get_config_path(self) -> Optional[Path]: + """Get Claude Desktop configuration path.""" + system = platform.system() + + if system == "Darwin": # macOS + return Path.home() / "Library" / "Application Support" / "Claude" / "claude_desktop_config.json" + elif system == "Windows": + return Path.home() / "AppData" / "Roaming" / "Claude" / "claude_desktop_config.json" + elif system == "Linux": + return Path.home() / ".config" / "Claude" / "claude_desktop_config.json" + return None + + def is_host_available(self) -> bool: + """Check if Claude Desktop is installed.""" + config_path = self.get_config_path() + return config_path is not None and config_path.parent.exists() + + +@register_host_strategy(MCPHostType.CLAUDE_CODE) +class ClaudeCodeStrategy(ClaudeHostStrategy): + """Configuration strategy for Claude for VS Code.""" + + def get_config_path(self) -> Optional[Path]: + """Get Claude Code configuration path (workspace-specific).""" + # Claude Code uses workspace-specific configuration + # This would be determined at runtime based on current workspace + return Path.home() / ".claude.json" + + def is_host_available(self) -> bool: + """Check if Claude Code is available.""" + # Check for Claude Code user configuration file + vscode_dir = Path.home() / ".claude.json" + return vscode_dir.exists() + + +class CursorBasedHostStrategy(MCPHostStrategy): + """Base strategy for Cursor-based hosts (Cursor and LM Studio).""" + + def __init__(self): + self.config_format = "cursor_format" + self.supports_remote_servers = True + + def get_config_key(self) -> str: + """Cursor family uses 'mcpServers' key.""" + return "mcpServers" + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + 
"""Cursor family validation - supports both local and remote servers.""" + # Cursor family is more flexible with paths and supports remote servers + if server_config.command: + return True # Local server + elif server_config.url: + return True # Remote server + return False + + def _format_cursor_server_config(self, server_config: MCPServerConfig) -> Dict: + """Format server configuration for Cursor family.""" + config = {} + + if server_config.command: + # Local server configuration + config["command"] = server_config.command + if server_config.args: + config["args"] = server_config.args + if server_config.env: + config["env"] = server_config.env + elif server_config.url: + # Remote server configuration + config["url"] = server_config.url + if server_config.headers: + config["headers"] = server_config.headers + + return config + + def read_configuration(self) -> HostConfiguration: + """Read Cursor-based configuration file.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + + # Extract MCP servers + mcp_servers = config_data.get(self.get_config_key(), {}) + + # Convert to MCPServerConfig objects + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + continue + + return HostConfiguration(servers=servers) + + except Exception as e: + logger.error(f"Failed to read Cursor configuration: {e}") + return HostConfiguration() + + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write Cursor-based configuration file.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + # Ensure parent directory exists + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing configuration + 
existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass + + # Convert MCPServerConfig objects to dict + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Update configuration + existing_config[self.get_config_key()] = servers_dict + + # Write atomically + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + temp_path.replace(config_path) + return True + + except Exception as e: + logger.error(f"Failed to write Cursor configuration: {e}") + return False + + +@register_host_strategy(MCPHostType.CURSOR) +class CursorHostStrategy(CursorBasedHostStrategy): + """Configuration strategy for Cursor IDE.""" + + def get_config_path(self) -> Optional[Path]: + """Get Cursor configuration path.""" + return Path.home() / ".cursor" / "mcp.json" + + def is_host_available(self) -> bool: + """Check if Cursor IDE is installed.""" + cursor_dir = Path.home() / ".cursor" + return cursor_dir.exists() + + +@register_host_strategy(MCPHostType.LMSTUDIO) +class LMStudioHostStrategy(CursorBasedHostStrategy): + """Configuration strategy for LM Studio (follows Cursor format).""" + + def get_config_path(self) -> Optional[Path]: + """Get LM Studio configuration path.""" + return Path.home() / ".lmstudio" / "mcp.json" + + def is_host_available(self) -> bool: + """Check if LM Studio is installed.""" + config_path = self.get_config_path() + return self.get_config_path().parent.exists() + + +@register_host_strategy(MCPHostType.VSCODE) +class VSCodeHostStrategy(MCPHostStrategy): + """Configuration strategy for VS Code MCP extension with user-wide mcp support.""" + + def get_config_path(self) -> Optional[Path]: + """Get VS Code user mcp configuration path (cross-platform).""" + try: + system = platform.system() + if system == "Windows": + # 
Windows: %APPDATA%\Code\User\mcp.json + appdata = Path.home() / "AppData" / "Roaming" + return appdata / "Code" / "User" / "mcp.json" + elif system == "Darwin": # macOS + # macOS: $HOME/Library/Application Support/Code/User/mcp.json + return Path.home() / "Library" / "Application Support" / "Code" / "User" / "mcp.json" + elif system == "Linux": + # Linux: $HOME/.config/Code/User/mcp.json + return Path.home() / ".config" / "Code" / "User" / "mcp.json" + else: + logger.warning(f"Unsupported platform for VS Code: {system}") + return None + except Exception as e: + logger.error(f"Failed to determine VS Code user mcp path: {e}") + return None + + def get_config_key(self) -> str: + """VS Code uses direct servers configuration structure.""" + return "servers" # VS Code specific direct key + + def is_host_available(self) -> bool: + """Check if VS Code is installed by checking for user directory.""" + try: + config_path = self.get_config_path() + if not config_path: + return False + + # Check if VS Code user directory exists (indicates VS Code installation) + user_dir = config_path.parent + return user_dir.exists() + except Exception: + return False + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """VS Code validation - flexible path handling.""" + return server_config.command is not None or server_config.url is not None + + def read_configuration(self) -> HostConfiguration: + """Read VS Code mcp.json configuration.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + + # Extract MCP servers from direct structure + mcp_servers = config_data.get(self.get_config_key(), {}) + + # Convert to MCPServerConfig objects + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for 
{name}: {e}") + continue + + return HostConfiguration(servers=servers) + + except Exception as e: + logger.error(f"Failed to read VS Code configuration: {e}") + return HostConfiguration() + + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write VS Code mcp.json configuration.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + # Ensure parent directory exists + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing configuration to preserve non-MCP settings + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass + + # Convert MCPServerConfig objects to dict + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Update configuration with new servers (preserves non-MCP settings) + existing_config[self.get_config_key()] = servers_dict + + # Write atomically + temp_path = config_path.with_suffix('.tmp') + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2) + + temp_path.replace(config_path) + return True + + except Exception as e: + logger.error(f"Failed to write VS Code configuration: {e}") + return False + + +@register_host_strategy(MCPHostType.GEMINI) +class GeminiHostStrategy(MCPHostStrategy): + """Configuration strategy for Google Gemini CLI MCP integration.""" + + def get_config_path(self) -> Optional[Path]: + """Get Gemini configuration path based on official documentation.""" + # Based on official Gemini CLI documentation: ~/.gemini/settings.json + return Path.home() / ".gemini" / "settings.json" + + def get_config_key(self) -> str: + """Gemini uses 'mcpServers' key in settings.json.""" + return "mcpServers" + + def is_host_available(self) -> bool: + """Check if Gemini CLI is available.""" + # Check if Gemini CLI directory exists + gemini_dir = 
Path.home() / ".gemini" + return gemini_dir.exists() + + def validate_server_config(self, server_config: MCPServerConfig) -> bool: + """Gemini validation - supports both local and remote servers.""" + # Gemini CLI supports both command-based and URL-based servers + return server_config.command is not None or server_config.url is not None + + def read_configuration(self) -> HostConfiguration: + """Read Gemini settings.json configuration.""" + config_path = self.get_config_path() + if not config_path or not config_path.exists(): + return HostConfiguration() + + try: + with open(config_path, 'r') as f: + config_data = json.load(f) + + # Extract MCP servers from Gemini configuration + mcp_servers = config_data.get(self.get_config_key(), {}) + + # Convert to MCPServerConfig objects + servers = {} + for name, server_data in mcp_servers.items(): + try: + servers[name] = MCPServerConfig(**server_data) + except Exception as e: + logger.warning(f"Invalid server config for {name}: {e}") + continue + + return HostConfiguration(servers=servers) + + except Exception as e: + logger.error(f"Failed to read Gemini configuration: {e}") + return HostConfiguration() + + def write_configuration(self, config: HostConfiguration, no_backup: bool = False) -> bool: + """Write Gemini settings.json configuration.""" + config_path = self.get_config_path() + if not config_path: + return False + + try: + # Ensure parent directory exists + config_path.parent.mkdir(parents=True, exist_ok=True) + + # Read existing configuration to preserve non-MCP settings + existing_config = {} + if config_path.exists(): + try: + with open(config_path, 'r') as f: + existing_config = json.load(f) + except Exception: + pass + + # Convert MCPServerConfig objects to dict (REPLACE, don't merge) + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Update configuration with new servers (preserves non-MCP settings) + 
existing_config[self.get_config_key()] = servers_dict + + # Write atomically with enhanced error handling + temp_path = config_path.with_suffix('.tmp') + try: + with open(temp_path, 'w') as f: + json.dump(existing_config, f, indent=2, ensure_ascii=False) + + # Verify the JSON is valid by reading it back + with open(temp_path, 'r') as f: + json.load(f) # This will raise an exception if JSON is invalid + + # Only replace if verification succeeds + temp_path.replace(config_path) + return True + except Exception as json_error: + # Clean up temp file on JSON error + if temp_path.exists(): + temp_path.unlink() + logger.error(f"JSON serialization/verification failed: {json_error}") + raise + + except Exception as e: + logger.error(f"Failed to write Gemini configuration: {e}") + return False diff --git a/hatch/template_generator.py b/hatch/template_generator.py index 5977bde..3557f31 100644 --- a/hatch/template_generator.py +++ b/hatch/template_generator.py @@ -4,26 +4,29 @@ This module contains functions to generate template files for Hatch MCP server packages. Each function generates a specific file for the package template. """ + import json import logging from pathlib import Path logger = logging.getLogger("hatch.template_generator") -def generate_init_py(): + +def generate_init_py() -> str: """Generate the __init__.py file content for a template package. - + Returns: str: Content for __init__.py file. """ return "# Hatch package initialization\n" -def generate_mcp_server_py(package_name: str): + +def generate_mcp_server_py(package_name: str) -> str: """Generate the mcp_server.py file content for a template package. - + Args: package_name (str): Name of the package. - + Returns: str: Content for mcp_server.py file. """ @@ -34,10 +37,10 @@ def generate_mcp_server_py(package_name: str): @mcp.tool() def example_tool(param: str) -> str: \"\"\"Example tool function. - + Args: param (str): Example parameter. - + Returns: str: Example result. 
\"\"\" @@ -47,12 +50,13 @@ def example_tool(param: str) -> str: mcp.run() """ -def generate_hatch_mcp_server_entry_py(package_name: str): + +def generate_hatch_mcp_server_entry_py(package_name: str) -> str: """Generate the hatch_mcp_server_entry.py file content for a template package. - + Args: package_name (str): Name of the package. - + Returns: str: Content for hatch_mcp_server_entry.py file. """ @@ -68,13 +72,14 @@ def generate_hatch_mcp_server_entry_py(package_name: str): hatch_mcp.server.run() """ + def generate_metadata_json(package_name: str, description: str = ""): """Generate the metadata JSON content for a template package. - + Args: package_name (str): Name of the package. description (str, optional): Package description. Defaults to empty string. - + Returns: dict: Metadata dictionary. """ @@ -84,37 +89,27 @@ def generate_metadata_json(package_name: str, description: str = ""): "version": "0.1.0", "description": description or f"A Hatch package for {package_name}", "tags": [], - "author": { - "name": "Hatch User", - "email": "" - }, - "license": { - "name": "MIT" - }, - "entry_point": - { + "author": {"name": "Hatch User", "email": ""}, + "license": {"name": "MIT"}, + "entry_point": { "mcp_server": "mcp_server.py", - "hatch_mcp_server": "hatch_mcp_server_entry.py" + "hatch_mcp_server": "hatch_mcp_server_entry.py", }, - "tools": [ - { - "name": "example_tool", - "description": "Example tool function" - } - ], + "tools": [{"name": "example_tool", "description": "Example tool function"}], "citations": { "origin": f"Origin citation for {package_name}", - "mcp": f"MCP citation for {package_name}" - } + "mcp": f"MCP citation for {package_name}", + }, } -def generate_readme_md(package_name: str, description: str = ""): + +def generate_readme_md(package_name: str, description: str = "") -> str: """Generate the README.md file content for a template package. - + Args: package_name (str): Name of the package. description (str, optional): Package description. 
Defaults to empty string. - + Returns: str: Content for README.md file. """ @@ -127,9 +122,12 @@ def generate_readme_md(package_name: str, description: str = ""): - **example_tool**: Example tool function """ -def create_package_template(target_dir: Path, package_name: str, description: str = "") -> Path: + +def create_package_template( + target_dir: Path, package_name: str, description: str = "" +) -> Path: """Create a package template directory with all necessary files. - + This function orchestrates the generation of a complete package structure by: 1. Creating the package directory 2. Generating and writing the __init__.py file @@ -137,45 +135,45 @@ def create_package_template(target_dir: Path, package_name: str, description: st 4. Generating and writing the hatch_mcp_server_entry.py file that wraps the MCP server 5. Creating the hatch_metadata.json with package information 6. Generating a README.md with basic documentation - + Args: target_dir (Path): Directory where the package should be created. package_name (str): Name of the package. description (str, optional): Package description. Defaults to empty string. - + Returns: Path: Path to the created package directory. 
""" logger.info(f"Creating package template for {package_name} in {target_dir}") - + # Create package directory package_dir = target_dir / package_name package_dir.mkdir(parents=True, exist_ok=True) - + # Create __init__.py init_content = generate_init_py() - with open(package_dir / "__init__.py", 'w') as f: + with open(package_dir / "__init__.py", "w") as f: f.write(init_content) - + # Create mcp_server.py mcp_server_content = generate_mcp_server_py(package_name) - with open(package_dir / "mcp_server.py", 'w') as f: + with open(package_dir / "mcp_server.py", "w") as f: f.write(mcp_server_content) - + # Create hatch_mcp_server_entry.py hatch_mcp_server_entry_content = generate_hatch_mcp_server_entry_py(package_name) - with open(package_dir / "hatch_mcp_server_entry.py", 'w') as f: + with open(package_dir / "hatch_mcp_server_entry.py", "w") as f: f.write(hatch_mcp_server_entry_content) - + # Create metadata.json metadata = generate_metadata_json(package_name, description) - with open(package_dir / "hatch_metadata.json", 'w') as f: + with open(package_dir / "hatch_metadata.json", "w") as f: json.dump(metadata, f, indent=2) - + # Create README.md readme_content = generate_readme_md(package_name, description) - with open(package_dir / "README.md", 'w') as f: + with open(package_dir / "README.md", "w") as f: f.write(readme_content) - + logger.info(f"Package template created successfully at {package_dir}") return package_dir diff --git a/mkdocs.yml b/mkdocs.yml index 63075bf..4893986 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -39,6 +39,7 @@ nav: - Users: - Getting Started: articles/users/GettingStarted.md - CLI Reference: articles/users/CLIReference.md + - MCP Host Configuration: articles/users/MCPHostConfiguration.md - Security & Trust: articles/users/SecurityAndTrust.md - Troubleshooting: - Report Issues: articles/users/Troubleshooting/ReportIssues.md @@ -58,15 +59,26 @@ nav: - 03 Edit Metadata: articles/users/tutorials/03-author-package/03-edit-metadata.md - 04 
Validate and Install: articles/users/tutorials/03-author-package/04-validate-and-install.md - 05 Checkpoint: articles/users/tutorials/03-author-package/05-checkpoint.md + - MCP Host Configuration: + - 01 Host Platform overview: articles/users/tutorials/04-mcp-host-configuration/01-host-platform-overview.md + - 02 Configuring Hatch! Packages: articles/users/tutorials/04-mcp-host-configuration/02-configuring-hatch-packages.md + - 03 Configuring Arbitrary MCP Servers: articles/users/tutorials/04-mcp-host-configuration/03-configuring-arbitrary-servers.md + - 04 Environments & Hosts Synchronization: articles/users/tutorials/04-mcp-host-configuration/04-environment-synchronization.md + - 05 Checkpoint: articles/users/tutorials/04-mcp-host-configuration/05-checkpoint.md - Developers: - Overview: articles/devs/index.md - Architecture: + - Overview: articles/devs/architecture/index.md - System Overview: articles/devs/architecture/system_overview.md - Component Architecture: articles/devs/architecture/component_architecture.md + - MCP Host Configuration: articles/devs/architecture/mcp_host_configuration.md + - MCP Host Backup System: articles/devs/architecture/mcp_backup_system.md - Contribution Guides: + - Overview: articles/devs/contribution_guides/index.md - How to Contribute: articles/devs/contribution_guides/how_to_contribute.md - Release Policy: articles/devs/contribution_guides/release_policy.md - Development Processes: + - Overview: articles/devs/development_processes/index.md - Developer Onboarding: articles/devs/development_processes/developer_onboarding.md - Testing Standards: articles/devs/development_processes/testing_standards.md - Implementation Guides: @@ -75,6 +87,7 @@ nav: - Installation Orchestration: articles/devs/implementation_guides/installation_orchestration.md - Package Loader Extensions: articles/devs/implementation_guides/package_loader_extensions.md - Registry Integration: articles/devs/implementation_guides/registry_integration.md + - MCP Host 
Configuration Extension: articles/devs/implementation_guides/mcp_host_configuration_extension.md - API Reference: - Overview: articles/api/index.md - Core Modules: diff --git a/package-lock.json b/package-lock.json index d86b2ff..0b6587e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6,14 +6,63 @@ "": { "name": "hatch", "devDependencies": { + "@artessan-devs/sr-uv-plugin": "github:LittleCoinCoin/sr-uv-plugin#fix/semantic-release-plugin-loading", "@commitlint/cli": "^18.6.1", "@commitlint/config-conventional": "^18.6.2", "@semantic-release/changelog": "^6.0.3", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^9.2.6", - "commitizen": "^4.3.0", - "cz-conventional-changelog": "^3.3.0", - "semantic-release": "^22.0.12" + "commitizen": "^4.3.1", + "cz-conventional-changelog": "^3.0.1", + "semantic-release": "^25.0.2" + } + }, + "node_modules/@actions/core": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@actions/io": "^1.0.1" + } + }, + "node_modules/@actions/http-client": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", + "dev": true, + "license": "MIT", + "dependencies": { + "tunnel": "^0.0.6", + "undici": "^5.25.4" + } + }, + "node_modules/@actions/io": { + "version": "1.1.3", 
+ "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", + "dev": true, + "license": "MIT" + }, + "node_modules/@artessan-devs/sr-uv-plugin": { + "version": "1.0.0", + "resolved": "git+ssh://git@github.com/LittleCoinCoin/sr-uv-plugin.git#2cac1907f1ac67b0d91e10f67756e16692942b9e", + "dev": true, + "license": "MIT", + "dependencies": { + "@iarna/toml": "^2.2.5" } }, "node_modules/@babel/code-frame": { @@ -312,6 +361,23 @@ "node": ">=v18" } }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + } + }, + "node_modules/@iarna/toml": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", + "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==", + "dev": true, + "license": "ISC" + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -586,6 +652,13 @@ "node": ">=12" } }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "dev": true, + "license": "MIT" + }, "node_modules/@semantic-release/changelog": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/@semantic-release/changelog/-/changelog-6.0.3.tgz", @@ -606,27 +679,70 @@ } }, "node_modules/@semantic-release/commit-analyzer": { - "version": "11.1.0", - "resolved": 
"https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-11.1.0.tgz", - "integrity": "sha512-cXNTbv3nXR2hlzHjAMgbuiQVtvWHTlwwISt60B+4NZv01y/QRY7p2HcJm8Eh2StzcTJoNnflvKjHH/cjFS7d5g==", + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/@semantic-release/commit-analyzer/-/commit-analyzer-13.0.1.tgz", + "integrity": "sha512-wdnBPHKkr9HhNhXOhZD5a2LNl91+hs8CC2vsAVYxtZH3y0dV3wKn+uZSN61rdJQZ8EGxzWB3inWocBHV9+u/CQ==", "dev": true, "license": "MIT", "dependencies": { - "conventional-changelog-angular": "^7.0.0", - "conventional-commits-filter": "^4.0.0", - "conventional-commits-parser": "^5.0.0", + "conventional-changelog-angular": "^8.0.0", + "conventional-changelog-writer": "^8.0.0", + "conventional-commits-filter": "^5.0.0", + "conventional-commits-parser": "^6.0.0", "debug": "^4.0.0", - "import-from-esm": "^1.0.3", + "import-from-esm": "^2.0.0", "lodash-es": "^4.17.21", "micromatch": "^4.0.2" }, "engines": { - "node": "^18.17 || >=20.6.1" + "node": ">=20.8.1" }, "peerDependencies": { "semantic-release": ">=20.1.0" } }, + "node_modules/@semantic-release/commit-analyzer/node_modules/conventional-changelog-angular": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-8.1.0.tgz", + "integrity": "sha512-GGf2Nipn1RUCAktxuVauVr1e3r8QrLP/B0lEUsFktmGqc3ddbQkhoJZHJctVU829U1c6mTSWftrVOCHaL85Q3w==", + "dev": true, + "license": "ISC", + "dependencies": { + "compare-func": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@semantic-release/commit-analyzer/node_modules/conventional-commits-parser": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-6.2.1.tgz", + "integrity": "sha512-20pyHgnO40rvfI0NGF/xiEoFMkXDtkF8FwHvk5BokoFoCuTQRI8vrNCNFWUOfuolKJMm1tPCHc8GgYEtr1XRNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "meow": "^13.0.0" + }, + "bin": { + 
"conventional-commits-parser": "dist/cli/index.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@semantic-release/commit-analyzer/node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@semantic-release/error": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/@semantic-release/error/-/error-3.0.0.tgz", @@ -719,9 +835,9 @@ } }, "node_modules/@semantic-release/github/node_modules/clean-stack": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", - "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.3.0.tgz", + "integrity": "sha512-9ngPTOhYGQqNVSfeJkYXHmF7AGWp4/nN5D/QqNQs3Dvxd1Kk/WpjHfNujKHYUQ/5CoGyOyFNoWSPk5afzP0QVg==", "dev": true, "license": "MIT", "dependencies": { @@ -761,28 +877,30 @@ } }, "node_modules/@semantic-release/npm": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-11.0.3.tgz", - "integrity": "sha512-KUsozQGhRBAnoVg4UMZj9ep436VEGwT536/jwSqB7vcEfA6oncCUU7UIYTRdLx7GvTtqn0kBjnkfLVkcnBa2YQ==", + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/@semantic-release/npm/-/npm-13.1.2.tgz", + "integrity": "sha512-9rtshDTNlzYrC7uSBtB1vHqFzFZaNHigqkkCH5Ls4N/BSlVOenN5vtwHYxjAR4jf1hNvWSVwL4eIFTHONYckkw==", "dev": true, "license": "MIT", "dependencies": { + "@actions/core": "^1.11.1", "@semantic-release/error": "^4.0.0", "aggregate-error": "^5.0.0", - "execa": "^8.0.0", + "env-ci": "^11.2.0", + "execa": "^9.0.0", "fs-extra": "^11.0.0", "lodash-es": 
"^4.17.21", "nerf-dart": "^1.0.0", "normalize-url": "^8.0.0", - "npm": "^10.5.0", + "npm": "^11.6.2", "rc": "^1.2.8", - "read-pkg": "^9.0.0", + "read-pkg": "^10.0.0", "registry-auth-token": "^5.0.0", "semver": "^7.1.2", "tempy": "^3.0.0" }, "engines": { - "node": "^18.17 || >=20" + "node": "^22.14.0 || >= 24.10.0" }, "peerDependencies": { "semantic-release": ">=20.1.0" @@ -798,6 +916,19 @@ "node": ">=18" } }, + "node_modules/@semantic-release/npm/node_modules/@sindresorhus/merge-streams": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@semantic-release/npm/node_modules/aggregate-error": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", @@ -816,9 +947,9 @@ } }, "node_modules/@semantic-release/npm/node_modules/clean-stack": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", - "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.3.0.tgz", + "integrity": "sha512-9ngPTOhYGQqNVSfeJkYXHmF7AGWp4/nN5D/QqNQs3Dvxd1Kk/WpjHfNujKHYUQ/5CoGyOyFNoWSPk5afzP0QVg==", "dev": true, "license": "MIT", "dependencies": { @@ -845,50 +976,73 @@ } }, "node_modules/@semantic-release/npm/node_modules/execa": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", + "version": "9.6.1", + "resolved": 
"https://registry.npmjs.org/execa/-/execa-9.6.1.tgz", + "integrity": "sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA==", "dev": true, "license": "MIT", "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", + "@sindresorhus/merge-streams": "^4.0.0", + "cross-spawn": "^7.0.6", + "figures": "^6.1.0", + "get-stream": "^9.0.0", + "human-signals": "^8.0.1", + "is-plain-obj": "^4.1.0", + "is-stream": "^4.0.1", + "npm-run-path": "^6.0.0", + "pretty-ms": "^9.2.0", "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" + "strip-final-newline": "^4.0.0", + "yoctocolors": "^2.1.1" }, "engines": { - "node": ">=16.17" + "node": "^18.19.0 || >=20.5.0" }, "funding": { "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, + "node_modules/@semantic-release/npm/node_modules/figures": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-6.1.0.tgz", + "integrity": "sha512-d+l3qxjSesT4V7v2fh+QnmFnUWv9lSpjarhShNTgBOfA0ttejbQUAlHLitbjkoRiDulW0OPoQPYIGhIC8ohejg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-unicode-supported": "^2.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@semantic-release/npm/node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", "dev": true, "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + 
"is-stream": "^4.0.1" + }, "engines": { - "node": ">=16" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@semantic-release/npm/node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", "dev": true, "license": "Apache-2.0", "engines": { - "node": ">=16.17.0" + "node": ">=18.18.0" } }, "node_modules/@semantic-release/npm/node_modules/indent-string": { @@ -904,23 +1058,10 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@semantic-release/npm/node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@semantic-release/npm/node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", + "node_modules/@semantic-release/npm/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", "dev": true, "license": "MIT", "engines": { @@ -930,33 +1071,31 @@ "url": 
"https://github.com/sponsors/sindresorhus" } }, - "node_modules/@semantic-release/npm/node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "node_modules/@semantic-release/npm/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", "dev": true, "license": "MIT", - "dependencies": { - "path-key": "^4.0.0" - }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@semantic-release/npm/node_modules/onetime": { + "node_modules/@semantic-release/npm/node_modules/npm-run-path": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", + "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", "dev": true, "license": "MIT", "dependencies": { - "mimic-fn": "^4.0.0" + "path-key": "^4.0.0", + "unicorn-magic": "^0.3.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -989,43 +1128,72 @@ } }, "node_modules/@semantic-release/npm/node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", + "integrity": "sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==", "dev": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/@semantic-release/release-notes-generator": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/@semantic-release/release-notes-generator/-/release-notes-generator-12.1.0.tgz", - "integrity": "sha512-g6M9AjUKAZUZnxaJZnouNBeDNTCUrJ5Ltj+VJ60gJeDaRRahcHsry9HW8yKrnKkKNkx5lbWiEP1FPMqVNQz8Kg==", + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/@semantic-release/release-notes-generator/-/release-notes-generator-14.1.0.tgz", + "integrity": "sha512-CcyDRk7xq+ON/20YNR+1I/jP7BYKICr1uKd1HHpROSnnTdGqOTburi4jcRiTYz0cpfhxSloQO3cGhnoot7IEkA==", "dev": true, "license": "MIT", "dependencies": { - "conventional-changelog-angular": "^7.0.0", - "conventional-changelog-writer": "^7.0.0", - "conventional-commits-filter": "^4.0.0", - "conventional-commits-parser": "^5.0.0", + "conventional-changelog-angular": "^8.0.0", + "conventional-changelog-writer": "^8.0.0", + "conventional-commits-filter": "^5.0.0", + "conventional-commits-parser": "^6.0.0", "debug": "^4.0.0", "get-stream": "^7.0.0", - "import-from-esm": "^1.0.3", + "import-from-esm": "^2.0.0", "into-stream": "^7.0.0", "lodash-es": "^4.17.21", - "read-pkg-up": "^11.0.0" + "read-package-up": "^11.0.0" }, "engines": { - "node": "^18.17 || >=20.6.1" + "node": ">=20.8.1" }, "peerDependencies": { "semantic-release": ">=20.1.0" } }, + "node_modules/@semantic-release/release-notes-generator/node_modules/conventional-changelog-angular": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-8.1.0.tgz", + "integrity": 
"sha512-GGf2Nipn1RUCAktxuVauVr1e3r8QrLP/B0lEUsFktmGqc3ddbQkhoJZHJctVU829U1c6mTSWftrVOCHaL85Q3w==", + "dev": true, + "license": "ISC", + "dependencies": { + "compare-func": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@semantic-release/release-notes-generator/node_modules/conventional-commits-parser": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-6.2.1.tgz", + "integrity": "sha512-20pyHgnO40rvfI0NGF/xiEoFMkXDtkF8FwHvk5BokoFoCuTQRI8vrNCNFWUOfuolKJMm1tPCHc8GgYEtr1XRNA==", + "dev": true, + "license": "MIT", + "dependencies": { + "meow": "^13.0.0" + }, + "bin": { + "conventional-commits-parser": "dist/cli/index.js" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@semantic-release/release-notes-generator/node_modules/get-stream": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-7.0.1.tgz", @@ -1039,23 +1207,30 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "node_modules/@semantic-release/release-notes-generator/node_modules/hosted-git-info": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", + "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" }, - "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" + "engines": { + "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/@sindresorhus/merge-streams": { - "version": "2.3.0", - "resolved": 
"https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", - "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "node_modules/@semantic-release/release-notes-generator/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/@semantic-release/release-notes-generator/node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", "dev": true, "license": "MIT", "engines": { @@ -1065,25 +1240,148 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@types/minimist": { - "version": "1.2.5", - "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", - "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", + "node_modules/@semantic-release/release-notes-generator/node_modules/normalize-package-data": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", + "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", "dev": true, - "license": "MIT" + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^7.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } }, - "node_modules/@types/node": { - "version": "24.3.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.3.1.tgz", - "integrity": 
"sha512-3vXmQDXy+woz+gnrTvuvNrPzekOi+Ds0ReMxw0LzBiK3a+1k0kQn9f2NWk+lgD4rJehFUmYy2gMhJ2ZI+7YP9g==", + "node_modules/@semantic-release/release-notes-generator/node_modules/parse-json": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-8.3.0.tgz", + "integrity": "sha512-ybiGyvspI+fAoRQbIPRddCcSTV9/LsJbf0e/S85VLowVGzRmokfneg2kwVW/KU5rOXrPSbF1qAKPMgNTqqROQQ==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { - "undici-types": "~7.10.0" + "@babel/code-frame": "^7.26.2", + "index-to-position": "^1.1.0", + "type-fest": "^4.39.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@types/normalize-package-data": { + "node_modules/@semantic-release/release-notes-generator/node_modules/read-package-up": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/read-package-up/-/read-package-up-11.0.0.tgz", + "integrity": "sha512-MbgfoNPANMdb4oRBNg5eqLbB2t2r+o5Ua1pNt8BqGp4I0FJZhuVSOj3PaBPni4azWuSzEdNn2evevzVmEk1ohQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up-simple": "^1.0.0", + "read-pkg": "^9.0.0", + "type-fest": "^4.6.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/release-notes-generator/node_modules/read-pkg": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-9.0.1.tgz", + "integrity": "sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/normalize-package-data": "^2.4.3", + "normalize-package-data": "^6.0.0", + "parse-json": "^8.0.0", + "type-fest": "^4.6.0", + "unicorn-magic": "^0.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + 
"node_modules/@semantic-release/release-notes-generator/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@semantic-release/release-notes-generator/node_modules/unicorn-magic": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", + "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@sindresorhus/is": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", + "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@sindresorhus/merge-streams": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-2.3.0.tgz", + "integrity": "sha512-LtoMMhxAlorcGhmFYI+LhPgbPZCkgP6ra1YL604EeF6U98pLlQ3iWIGMdWSC+vWmPBWBNgmDBAhnAobLROJmwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@types/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", 
+ "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.5.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz", + "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~7.12.0" + } + }, + "node_modules/@types/normalize-package-data": { "version": "2.4.4", "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz", "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==", @@ -1147,6 +1445,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ansi-escapes/node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", @@ -1173,10 +1484,10 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/ansicolors": { - "version": "0.3.2", - "resolved": "https://registry.npmjs.org/ansicolors/-/ansicolors-0.3.2.tgz", - "integrity": "sha512-QXu7BPrP29VllRxH8GwB7x5iX5qWKAAMLqKQGWTeLWVlNHNOpVMJ91dsxQAIWXpjuW5wqvxu3Jd/nRjrJ+0pqg==", + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", "dev": true, "license": "MIT" }, @@ -1372,20 +1683,6 @@ "url": "https://github.com/sponsors/sindresorhus" } 
}, - "node_modules/cardinal": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/cardinal/-/cardinal-2.1.1.tgz", - "integrity": "sha512-JSr5eOgoEymtYHBjNWyjrMqet9Am2miJhlfKNdqLp6zoeAh0KN5dRAcxlecj5mAJrmQomgiOBj35xHLrFjqBpw==", - "dev": true, - "license": "MIT", - "dependencies": { - "ansicolors": "~0.3.2", - "redeyed": "~2.1.0" - }, - "bin": { - "cdl": "bin/cdl.js" - } - }, "node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -1443,6 +1740,69 @@ "node": ">=8" } }, + "node_modules/cli-highlight": { + "version": "2.1.11", + "resolved": "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.11.tgz", + "integrity": "sha512-9KDcoEVwyUXrjcJNvHD0NFc/hiwe/WPVYIleQh2O1N2Zro5gWJZ/K+3DGn8w8P/F6FxOgzyC5bxDyHIgCSPhGg==", + "dev": true, + "license": "ISC", + "dependencies": { + "chalk": "^4.0.0", + "highlight.js": "^10.7.1", + "mz": "^2.4.0", + "parse5": "^5.1.1", + "parse5-htmlparser2-tree-adapter": "^6.0.0", + "yargs": "^16.0.0" + }, + "bin": { + "highlight": "bin/highlight" + }, + "engines": { + "node": ">=8.0.0", + "npm": ">=5.0.0" + } + }, + "node_modules/cli-highlight/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/cli-highlight/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", 
+ "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cli-highlight/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, "node_modules/cli-spinners": { "version": "2.9.2", "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", @@ -1584,6 +1944,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/commitizen/node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/compare-func": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz", @@ -1640,24 +2013,35 @@ } }, "node_modules/conventional-changelog-writer": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-7.0.1.tgz", - "integrity": "sha512-Uo+R9neH3r/foIvQ0MKcsXkX642hdm9odUp7TqgFS7BsalTcjzRlIfWZrZR1gbxOozKucaKt5KAbjW8J8xRSmA==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-8.2.0.tgz", + "integrity": "sha512-Y2aW4596l9AEvFJRwFGJGiQjt2sBYTjPD18DdvxX9Vpz0Z7HQ+g1Z+6iYDAm1vR3QOJrDBkRHixHK/+FhkR6Pw==", "dev": true, "license": "MIT", "dependencies": { - "conventional-commits-filter": "^4.0.0", + "conventional-commits-filter": "^5.0.0", "handlebars": "^4.7.7", - "json-stringify-safe": "^5.0.1", - "meow": 
"^12.0.1", - "semver": "^7.5.2", - "split2": "^4.0.0" + "meow": "^13.0.0", + "semver": "^7.5.2" }, "bin": { - "conventional-changelog-writer": "cli.mjs" + "conventional-changelog-writer": "dist/cli/index.js" }, "engines": { - "node": ">=16" + "node": ">=18" + } + }, + "node_modules/conventional-changelog-writer/node_modules/meow": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/meow/-/meow-13.2.0.tgz", + "integrity": "sha512-pxQJQzB6djGPXh08dacEloMFopsOqGVRKFPYvPOt9XDZ1HasbgDZA74CJGreSU4G3Ak7EFJGoiH2auq+yXISgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/conventional-commit-types": { @@ -1668,13 +2052,13 @@ "license": "ISC" }, "node_modules/conventional-commits-filter": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-4.0.0.tgz", - "integrity": "sha512-rnpnibcSOdFcdclpFwWa+pPlZJhXE7l+XK04zxhbWrhgpR96h33QLz8hITTXbcYICxVr3HZFtbtUAQ+4LdBo9A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-5.0.0.tgz", + "integrity": "sha512-tQMagCOC59EVgNZcC5zl7XqO30Wki9i9J3acbUvkaosCT6JX3EeFwJD7Qqp4MCikRnzS18WXV3BLIQ66ytu6+Q==", "dev": true, "license": "MIT", "engines": { - "node": ">=16" + "node": ">=18" } }, "node_modules/conventional-commits-parser": { @@ -1696,6 +2080,19 @@ "node": ">=16" } }, + "node_modules/convert-hrtime": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/convert-hrtime/-/convert-hrtime-5.0.0.tgz", + "integrity": "sha512-lOETlkIeYSJWcbbcvjRKGxVMXJR+8+OQb/mTPbA4ObPMytYIsUbuOE0Jzy60hjARYszq1id0j8KgVhC+WGZVTg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": 
"https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", @@ -1892,9 +2289,9 @@ } }, "node_modules/debug": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", - "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", "dev": true, "license": "MIT", "dependencies": { @@ -2087,9 +2484,9 @@ "license": "MIT" }, "node_modules/env-ci": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/env-ci/-/env-ci-10.0.0.tgz", - "integrity": "sha512-U4xcd/utDYFgMh0yWj07R1H6L5fwhVbmxBCpnL0DbVSDZVnsC82HONw0wxtxNkIAcua3KtbomQvIk5xFZGAQJw==", + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/env-ci/-/env-ci-11.2.0.tgz", + "integrity": "sha512-D5kWfzkmaOQDioPmiviWAVtKmpPT4/iJmMVQxWxMPJTFyTkdc5JQUfc5iXEeWxcOdsYTKSAiA/Age4NUOqKsRA==", "dev": true, "license": "MIT", "dependencies": { @@ -2244,10 +2641,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/env-paths": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", + "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/error-ex": { - "version": "1.3.2", - "resolved": 
"https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz", + "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2274,20 +2694,6 @@ "node": ">=0.8.0" } }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "license": "BSD-2-Clause", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -2340,6 +2746,23 @@ "node": ">=4" } }, + "node_modules/fast-content-type-parse": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-3.0.0.tgz", + "integrity": "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -2469,16 +2892,17 @@ } }, "node_modules/find-versions": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", - "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/find-versions/-/find-versions-6.0.0.tgz", + "integrity": "sha512-2kCCtc+JvcZ86IGAz3Z2Y0A1baIz9fL31pH/0S1IqZr9Iwnjq8izfPtrCyQKO6TLMPELLsQMre7VDqeIKCsHkA==", "dev": true, "license": "MIT", "dependencies": { - "semver-regex": "^4.0.5" + "semver-regex": "^4.0.5", + "super-regex": "^1.0.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -2545,9 +2969,9 @@ } }, "node_modules/fs-extra": { - "version": "11.3.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.1.tgz", - "integrity": "sha512-eXvGGwZ5CL17ZSwHWd3bbgk7UUpF6IFHtP57NYYakPvHOs8GDgDe5KJI36jIJzDkJ6eJjuzRA8eBQb6SkKue0g==", + "version": "11.3.2", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", + "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", "dev": true, "license": "MIT", "dependencies": { @@ -2576,15 +3000,41 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "node_modules/function-timeout": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/function-timeout/-/function-timeout-1.0.2.tgz", + "integrity": "sha512-939eZS4gJ3htTHAldmyyuzlrD58P03fHG49v2JfFXbV6OhvZKRC9j2yAtdHw/zrp2zXHuv05zMIy40F0ge7spA==", "dev": true, - "license": "ISC", + "license": "MIT", "engines": { - "node": "6.* || 8.* || >= 10.*" - } + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": 
"sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-east-asian-width": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz", + "integrity": "sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/get-stream": { "version": "6.0.1", @@ -3118,6 +3568,16 @@ "node": ">= 0.4" } }, + "node_modules/highlight.js": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", + "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": "*" + } + }, "node_modules/homedir-polyfill": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", @@ -3132,29 +3592,29 @@ } }, "node_modules/hook-std": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hook-std/-/hook-std-3.0.0.tgz", - "integrity": "sha512-jHRQzjSDzMtFy34AGj1DN+vq54WVuhSvKgrHf0OMiFQTwDD4L/qqofVEWjLOBMTn5+lCD3fPg32W9yOfnEJTTw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hook-std/-/hook-std-4.0.0.tgz", + "integrity": "sha512-IHI4bEVOt3vRUDJ+bFA9VUJlo7SzvFARPNLw75pqSmAOP2HmTWfFJtPvLBrDrlgjEYXY9zs7SFdHPQaJShkSCQ==", "dev": true, "license": "MIT", "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/hosted-git-info": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz", - "integrity": 
"sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==", + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.2.tgz", + "integrity": "sha512-M422h7o/BR3rmCQ8UHi7cyyMqKltdP9Uo+J2fXK+RSAY+wTcKOIRyhTuKv4qn+DJf3g+PL890AzId5KZpX+CBg==", "dev": true, "license": "ISC", "dependencies": { - "lru-cache": "^10.0.1" + "lru-cache": "^11.1.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/http-proxy-agent": { @@ -3267,9 +3727,9 @@ } }, "node_modules/import-from-esm": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/import-from-esm/-/import-from-esm-1.3.4.tgz", - "integrity": "sha512-7EyUlPFC0HOlBDpUFGfYstsU7XHxZJKAAMzCT8wZ0hMW7b+hG51LIKTDcsgtz8Pu6YC0HqRVbX+rVUtsGMUKvg==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/import-from-esm/-/import-from-esm-2.0.0.tgz", + "integrity": "sha512-YVt14UZCgsX1vZQ3gKjkWVdBdHQ6eu3MPU1TBgL1H5orXe2+jWD006WCPPtOuwlQm10NuzOW5WawiF1Q9veW8g==", "dev": true, "license": "MIT", "dependencies": { @@ -3277,7 +3737,7 @@ "import-meta-resolve": "^4.0.0" }, "engines": { - "node": ">=16.20" + "node": ">=18.20" } }, "node_modules/import-meta-resolve": { @@ -3302,9 +3762,9 @@ } }, "node_modules/index-to-position": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/index-to-position/-/index-to-position-1.1.0.tgz", - "integrity": "sha512-XPdx9Dq4t9Qk1mTMbWONJqU7boCoumEH7fRET37HX5+khDUl3J2W6PdALxhILYlIYx2amlwYcRPp28p0tSiojg==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/index-to-position/-/index-to-position-1.2.0.tgz", + "integrity": "sha512-Yg7+ztRkqslMAS2iFaU+Oa4KTSidr63OsFGlOrJoW981kIYO3CGCS3wA95P1mUi/IVSJkn0D479KTJpVpvFNuw==", "dev": true, "license": "MIT", "engines": { @@ -3507,13 +3967,13 @@ } }, "node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": 
"https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", + "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -3595,9 +4055,9 @@ "license": "MIT" }, "node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", + "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "dev": true, "license": "MIT", "dependencies": { @@ -3628,13 +4088,6 @@ "dev": true, "license": "MIT" }, - "node_modules/json-stringify-safe": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", - "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==", - "dev": true, - "license": "ISC" - }, "node_modules/jsonfile": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", @@ -3884,6 +4337,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/log-symbols/node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + 
"license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/longest": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/longest/-/longest-2.0.1.tgz", @@ -3895,11 +4361,45 @@ } }, "node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "version": "11.2.4", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", + "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", "dev": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/make-asynchronous": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/make-asynchronous/-/make-asynchronous-1.0.1.tgz", + "integrity": "sha512-T9BPOmEOhp6SmV25SwLVcHK4E6JyG/coH3C6F1NjNXSziv/fd4GmsqMk8YR6qpPOswfaOCApSNkZv6fxoaYFcQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-event": "^6.0.0", + "type-fest": "^4.6.0", + "web-worker": "1.2.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-asynchronous/node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } }, "node_modules/map-obj": { "version": "4.3.0", @@ -3915,56 +4415,73 @@ } }, "node_modules/marked": { - "version": "9.1.6", - "resolved": "https://registry.npmjs.org/marked/-/marked-9.1.6.tgz", - "integrity": 
"sha512-jcByLnIFkd5gSXZmjNvS1TlmRhCXZjIzHYlaGkPlLIekG55JDR2Z4va9tZwCiP+/RDERiNhMOFu01xd6O5ct1Q==", + "version": "15.0.12", + "resolved": "https://registry.npmjs.org/marked/-/marked-15.0.12.tgz", + "integrity": "sha512-8dD6FusOQSrpv9Z1rdNMdlSgQOIP880DHqnohobOmYLElGEqAL/JvxvuxZO16r4HtjTlfPRDC1hbvxC9dPN2nA==", "dev": true, "license": "MIT", "bin": { "marked": "bin/marked.js" }, "engines": { - "node": ">= 16" + "node": ">= 18" } }, "node_modules/marked-terminal": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-6.2.0.tgz", - "integrity": "sha512-ubWhwcBFHnXsjYNsu+Wndpg0zhY4CahSpPlA70PlO0rR9r2sZpkyU+rkCsOWH+KMEkx847UpALON+HWgxowFtw==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/marked-terminal/-/marked-terminal-7.3.0.tgz", + "integrity": "sha512-t4rBvPsHc57uE/2nJOLmMbZCQ4tgAccAED3ngXQqW6g+TxA488JzJ+FK3lQkzBQOI1mRV/r/Kq+1ZlJ4D0owQw==", "dev": true, "license": "MIT", "dependencies": { - "ansi-escapes": "^6.2.0", - "cardinal": "^2.1.1", - "chalk": "^5.3.0", - "cli-table3": "^0.6.3", - "node-emoji": "^2.1.3", - "supports-hyperlinks": "^3.0.0" + "ansi-escapes": "^7.0.0", + "ansi-regex": "^6.1.0", + "chalk": "^5.4.1", + "cli-highlight": "^2.1.11", + "cli-table3": "^0.6.5", + "node-emoji": "^2.2.0", + "supports-hyperlinks": "^3.1.0" }, "engines": { "node": ">=16.0.0" }, "peerDependencies": { - "marked": ">=1 <12" + "marked": ">=1 <16" } }, "node_modules/marked-terminal/node_modules/ansi-escapes": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.1.tgz", - "integrity": "sha512-4nJ3yixlEthEJ9Rk4vPcdBRkZvQZlYyu8j4/Mqz5sgIkddmEnH2Yj2ZrnP9S3tQOvSNRUIgVNF/1yPpRAGNRig==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz", + "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==", "dev": true, "license": "MIT", + "dependencies": { + "environment": "^1.0.0" + }, 
"engines": { - "node": ">=14.16" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/marked-terminal/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, "node_modules/marked-terminal/node_modules/chalk": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.0.tgz", - "integrity": "sha512-46QrSQFyVSEyYAgQ22hQ+zDa60YHA4fBstHmtSApj1Y5vKtG27fWowW03jCk5KcbXEWPZUIR894aARCA/G1kfQ==", + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", + "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", "dev": true, "license": "MIT", "engines": { @@ -4026,9 +4543,9 @@ } }, "node_modules/mime": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/mime/-/mime-4.0.7.tgz", - "integrity": "sha512-2OfDPL+e03E0LrXaGYOtTFIYhiuzep94NSsuhrNULq+stylcJedcHdzHtz0atMUuGwJfFYs0YL5xeC/Ca2x0eQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-4.1.0.tgz", + "integrity": "sha512-X5ju04+cAzsojXKes0B/S4tcYtFAJ6tTMuSPBEn9CPGlrWr8Fiw7qYeLT0XyH80HSoAoqWCaz+MWKh22P7G1cw==", "dev": true, "funding": [ "https://github.com/sponsors/broofa" @@ -4113,6 +4630,18 @@ "dev": true, "license": "ISC" }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, 
"node_modules/neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", @@ -4144,24 +4673,24 @@ } }, "node_modules/normalize-package-data": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz", - "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-8.0.0.tgz", + "integrity": "sha512-RWk+PI433eESQ7ounYxIp67CYuVsS1uYSonX3kA6ps/3LWfjVQa/ptEg6Y3T6uAMq1mWpX9PQ+qx+QaHpsc7gQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "hosted-git-info": "^7.0.0", + "hosted-git-info": "^9.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/normalize-url": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.0.2.tgz", - "integrity": "sha512-Ee/R3SyN4BuynXcnTaekmaVdbDAEiNrHqjQIA37mHU8G9pf7aaAD4ZX3XjBLo6rsdcxA/gtkcNYZLt30ACgynw==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.1.0.tgz", + "integrity": "sha512-X06Mfd/5aKsRHc0O0J5CUedwnPmnDtLF2+nq+KN9KSDlJHkPuh0JUviWjEWMe0SW/9TDdSLVPuk7L5gGTIA1/w==", "dev": true, "license": "MIT", "engines": { @@ -4172,15 +4701,16 @@ } }, "node_modules/npm": { - "version": "10.9.3", - "resolved": "https://registry.npmjs.org/npm/-/npm-10.9.3.tgz", - "integrity": "sha512-6Eh1u5Q+kIVXeA8e7l2c/HpnFFcwrkt37xDMujD5be1gloWa9p6j3Fsv3mByXXmqJHy+2cElRMML8opNT7xIJQ==", + "version": "11.7.0", + "resolved": "https://registry.npmjs.org/npm/-/npm-11.7.0.tgz", + "integrity": "sha512-wiCZpv/41bIobCoJ31NStIWKfAxxYyD1iYnWCtiyns8s5v3+l8y0HCP/sScuH6B5+GhIfda4HQKiqeGZwJWhFw==", "bundleDependencies": [ "@isaacs/string-locale-compare", "@npmcli/arborist", 
"@npmcli/config", "@npmcli/fs", "@npmcli/map-workspaces", + "@npmcli/metavuln-calculator", "@npmcli/package-json", "@npmcli/promise-spawn", "@npmcli/redact", @@ -4205,7 +4735,6 @@ "libnpmdiff", "libnpmexec", "libnpmfund", - "libnpmhook", "libnpmorg", "libnpmpack", "libnpmpublish", @@ -4219,7 +4748,6 @@ "ms", "node-gyp", "nopt", - "normalize-package-data", "npm-audit-report", "npm-install-checks", "npm-package-arg", @@ -4242,8 +4770,7 @@ "tiny-relative-date", "treeverse", "validate-npm-package-name", - "which", - "write-file-atomic" + "which" ], "dev": true, "license": "Artistic-2.0", @@ -4256,80 +4783,78 @@ ], "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^8.0.1", - "@npmcli/config": "^9.0.0", - "@npmcli/fs": "^4.0.0", - "@npmcli/map-workspaces": "^4.0.2", - "@npmcli/package-json": "^6.2.0", - "@npmcli/promise-spawn": "^8.0.2", - "@npmcli/redact": "^3.2.2", - "@npmcli/run-script": "^9.1.0", - "@sigstore/tuf": "^3.1.1", - "abbrev": "^3.0.1", + "@npmcli/arborist": "^9.1.9", + "@npmcli/config": "^10.4.5", + "@npmcli/fs": "^5.0.0", + "@npmcli/map-workspaces": "^5.0.3", + "@npmcli/metavuln-calculator": "^9.0.3", + "@npmcli/package-json": "^7.0.4", + "@npmcli/promise-spawn": "^9.0.1", + "@npmcli/redact": "^4.0.0", + "@npmcli/run-script": "^10.0.3", + "@sigstore/tuf": "^4.0.0", + "abbrev": "^4.0.0", "archy": "~1.0.0", - "cacache": "^19.0.1", - "chalk": "^5.4.1", - "ci-info": "^4.2.0", + "cacache": "^20.0.3", + "chalk": "^5.6.2", + "ci-info": "^4.3.1", "cli-columns": "^4.0.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.3", - "glob": "^10.4.5", + "glob": "^13.0.0", "graceful-fs": "^4.2.11", - "hosted-git-info": "^8.1.0", - "ini": "^5.0.0", - "init-package-json": "^7.0.2", - "is-cidr": "^5.1.1", - "json-parse-even-better-errors": "^4.0.0", - "libnpmaccess": "^9.0.0", - "libnpmdiff": "^7.0.1", - "libnpmexec": "^9.0.1", - "libnpmfund": "^6.0.1", - "libnpmhook": "^11.0.0", - "libnpmorg": "^7.0.0", - "libnpmpack": "^8.0.1", - 
"libnpmpublish": "^10.0.1", - "libnpmsearch": "^8.0.0", - "libnpmteam": "^7.0.0", - "libnpmversion": "^7.0.0", - "make-fetch-happen": "^14.0.3", - "minimatch": "^9.0.5", + "hosted-git-info": "^9.0.2", + "ini": "^6.0.0", + "init-package-json": "^8.2.4", + "is-cidr": "^6.0.1", + "json-parse-even-better-errors": "^5.0.0", + "libnpmaccess": "^10.0.3", + "libnpmdiff": "^8.0.12", + "libnpmexec": "^10.1.11", + "libnpmfund": "^7.0.12", + "libnpmorg": "^8.0.1", + "libnpmpack": "^9.0.12", + "libnpmpublish": "^11.1.3", + "libnpmsearch": "^9.0.1", + "libnpmteam": "^8.0.2", + "libnpmversion": "^8.0.3", + "make-fetch-happen": "^15.0.3", + "minimatch": "^10.1.1", "minipass": "^7.1.1", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", - "node-gyp": "^11.2.0", - "nopt": "^8.1.0", - "normalize-package-data": "^7.0.0", - "npm-audit-report": "^6.0.0", - "npm-install-checks": "^7.1.1", - "npm-package-arg": "^12.0.2", - "npm-pick-manifest": "^10.0.0", - "npm-profile": "^11.0.1", - "npm-registry-fetch": "^18.0.2", - "npm-user-validate": "^3.0.0", - "p-map": "^7.0.3", - "pacote": "^19.0.1", - "parse-conflict-json": "^4.0.0", - "proc-log": "^5.0.0", + "node-gyp": "^12.1.0", + "nopt": "^9.0.0", + "npm-audit-report": "^7.0.0", + "npm-install-checks": "^8.0.0", + "npm-package-arg": "^13.0.2", + "npm-pick-manifest": "^11.0.3", + "npm-profile": "^12.0.1", + "npm-registry-fetch": "^19.1.1", + "npm-user-validate": "^4.0.0", + "p-map": "^7.0.4", + "pacote": "^21.0.4", + "parse-conflict-json": "^5.0.1", + "proc-log": "^6.1.0", "qrcode-terminal": "^0.12.0", - "read": "^4.1.0", - "semver": "^7.7.2", + "read": "^5.0.1", + "semver": "^7.7.3", "spdx-expression-parse": "^4.0.0", - "ssri": "^12.0.0", - "supports-color": "^9.4.0", - "tar": "^6.2.1", + "ssri": "^13.0.0", + "supports-color": "^10.2.2", + "tar": "^7.5.2", "text-table": "~0.2.0", - "tiny-relative-date": "^1.3.0", + "tiny-relative-date": "^2.0.2", "treeverse": "^3.0.0", - "validate-npm-package-name": "^6.0.1", - "which": "^5.0.0", - 
"write-file-atomic": "^6.0.0" + "validate-npm-package-name": "^7.0.0", + "which": "^6.0.0" }, "bin": { "npm": "bin/npm-cli.js", "npx": "bin/npx-cli.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-run-path": { @@ -4345,71 +4870,25 @@ "node": ">=8" } }, - "node_modules/npm/node_modules/@isaacs/cliui": { - "version": "8.0.2", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/npm/node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.1.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/npm/node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "dev": true, - "inBundle": true, - "license": "MIT" - }, - "node_modules/npm/node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", + "node_modules/npm/node_modules/@isaacs/balanced-match": { + "version": "4.0.1", "dev": true, "inBundle": true, "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": "20 || >=22" } }, - "node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.0", + "node_modules/npm/node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "ansi-regex": "^6.0.1" + "@isaacs/balanced-match": "^4.0.1" }, "engines": { - "node": ">=12" - }, - "funding": { - "url": 
"https://github.com/chalk/strip-ansi?sponsor=1" + "node": "20 || >=22" } }, "node_modules/npm/node_modules/@isaacs/fs-minipass": { @@ -4431,7 +4910,7 @@ "license": "ISC" }, "node_modules/npm/node_modules/@npmcli/agent": { - "version": "3.0.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -4439,83 +4918,81 @@ "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", + "lru-cache": "^11.2.1", "socks-proxy-agent": "^8.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/arborist": { - "version": "8.0.1", + "version": "9.1.9", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/fs": "^4.0.0", - "@npmcli/installed-package-contents": "^3.0.0", - "@npmcli/map-workspaces": "^4.0.1", - "@npmcli/metavuln-calculator": "^8.0.0", - "@npmcli/name-from-folder": "^3.0.0", - "@npmcli/node-gyp": "^4.0.0", - "@npmcli/package-json": "^6.0.1", - "@npmcli/query": "^4.0.0", - "@npmcli/redact": "^3.0.0", - "@npmcli/run-script": "^9.0.1", - "bin-links": "^5.0.0", - "cacache": "^19.0.1", + "@npmcli/fs": "^5.0.0", + "@npmcli/installed-package-contents": "^4.0.0", + "@npmcli/map-workspaces": "^5.0.0", + "@npmcli/metavuln-calculator": "^9.0.2", + "@npmcli/name-from-folder": "^4.0.0", + "@npmcli/node-gyp": "^5.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/query": "^5.0.0", + "@npmcli/redact": "^4.0.0", + "@npmcli/run-script": "^10.0.0", + "bin-links": "^6.0.0", + "cacache": "^20.0.1", "common-ancestor-path": "^1.0.1", - "hosted-git-info": "^8.0.0", - "json-parse-even-better-errors": "^4.0.0", + "hosted-git-info": "^9.0.0", "json-stringify-nice": "^1.1.4", - "lru-cache": "^10.2.2", - "minimatch": "^9.0.4", - "nopt": "^8.0.0", - "npm-install-checks": "^7.1.0", - "npm-package-arg": "^12.0.0", - "npm-pick-manifest": "^10.0.0", - "npm-registry-fetch": "^18.0.1", - 
"pacote": "^19.0.0", - "parse-conflict-json": "^4.0.0", - "proc-log": "^5.0.0", - "proggy": "^3.0.0", + "lru-cache": "^11.2.1", + "minimatch": "^10.0.3", + "nopt": "^9.0.0", + "npm-install-checks": "^8.0.0", + "npm-package-arg": "^13.0.0", + "npm-pick-manifest": "^11.0.1", + "npm-registry-fetch": "^19.0.0", + "pacote": "^21.0.2", + "parse-conflict-json": "^5.0.1", + "proc-log": "^6.0.0", + "proggy": "^4.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", - "ssri": "^12.0.0", + "ssri": "^13.0.0", "treeverse": "^3.0.0", - "walk-up-path": "^3.0.1" + "walk-up-path": "^4.0.0" }, "bin": { "arborist": "bin/index.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/config": { - "version": "9.0.0", + "version": "10.4.5", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/map-workspaces": "^4.0.1", - "@npmcli/package-json": "^6.0.1", + "@npmcli/map-workspaces": "^5.0.0", + "@npmcli/package-json": "^7.0.0", "ci-info": "^4.0.0", - "ini": "^5.0.0", - "nopt": "^8.0.0", - "proc-log": "^5.0.0", + "ini": "^6.0.0", + "nopt": "^9.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5", - "walk-up-path": "^3.0.1" + "walk-up-path": "^4.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/fs": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -4523,156 +5000,125 @@ "semver": "^7.3.5" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/git": { - "version": "6.0.3", + "version": "7.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/promise-spawn": "^8.0.0", - "ini": "^5.0.0", - "lru-cache": "^10.0.1", - "npm-pick-manifest": "^10.0.0", - "proc-log": "^5.0.0", + "@npmcli/promise-spawn": 
"^9.0.0", + "ini": "^6.0.0", + "lru-cache": "^11.2.1", + "npm-pick-manifest": "^11.0.1", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^5.0.0" + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/installed-package-contents": { - "version": "3.0.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-bundled": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" + "npm-bundled": "^5.0.0", + "npm-normalize-package-bin": "^5.0.0" }, "bin": { "installed-package-contents": "bin/index.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/map-workspaces": { - "version": "4.0.2", + "version": "5.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/name-from-folder": "^3.0.0", - "@npmcli/package-json": "^6.0.0", - "glob": "^10.2.2", - "minimatch": "^9.0.0" + "@npmcli/name-from-folder": "^4.0.0", + "@npmcli/package-json": "^7.0.0", + "glob": "^13.0.0", + "minimatch": "^10.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/metavuln-calculator": { - "version": "8.0.1", + "version": "9.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "cacache": "^19.0.0", - "json-parse-even-better-errors": "^4.0.0", - "pacote": "^20.0.0", - "proc-log": "^5.0.0", + "cacache": "^20.0.0", + "json-parse-even-better-errors": "^5.0.0", + "pacote": "^21.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/@npmcli/metavuln-calculator/node_modules/pacote": { - "version": "20.0.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^6.0.0", - "@npmcli/installed-package-contents": "^3.0.0", - 
"@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "@npmcli/run-script": "^9.0.0", - "cacache": "^19.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^7.0.2", - "npm-package-arg": "^12.0.0", - "npm-packlist": "^9.0.0", - "npm-pick-manifest": "^10.0.0", - "npm-registry-fetch": "^18.0.0", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1", - "sigstore": "^3.0.0", - "ssri": "^12.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "bin/index.js" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/name-from-folder": { - "version": "3.0.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/node-gyp": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/package-json": { - "version": "6.2.0", + "version": "7.0.4", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^6.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^8.0.0", - "json-parse-even-better-errors": "^4.0.0", - "proc-log": "^5.0.0", + "@npmcli/git": "^7.0.0", + "glob": "^13.0.0", + "hosted-git-info": "^9.0.0", + "json-parse-even-better-errors": "^5.0.0", + "proc-log": "^6.0.0", "semver": "^7.5.3", "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/promise-spawn": { - "version": "8.0.2", + "version": "9.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "which": "^5.0.0" + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/query": { - "version": 
"4.0.1", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -4680,67 +5126,118 @@ "postcss-selector-parser": "^7.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/redact": { - "version": "3.2.2", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/@npmcli/run-script": { - "version": "9.1.0", + "version": "10.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/node-gyp": "^4.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "node-gyp": "^11.0.0", - "proc-log": "^5.0.0", - "which": "^5.0.0" + "@npmcli/node-gyp": "^5.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "node-gyp": "^12.1.0", + "proc-log": "^6.0.0", + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/@pkgjs/parseargs": { - "version": "0.11.0", + "node_modules/npm/node_modules/@sigstore/bundle": { + "version": "4.0.0", "dev": true, "inBundle": true, - "license": "MIT", - "optional": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.5.0" + }, "engines": { - "node": ">=14" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/@sigstore/protobuf-specs": { - "version": "0.4.3", + "node_modules/npm/node_modules/@sigstore/core": { + "version": "3.0.0", "dev": true, "inBundle": true, "license": "Apache-2.0", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/@sigstore/tuf": { - "version": "3.1.1", + "node_modules/npm/node_modules/@sigstore/protobuf-specs": { + "version": "0.5.0", "dev": true, "inBundle": true, "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": 
"^0.4.1", - "tuf-js": "^3.0.1" + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign": { + "version": "4.0.1", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.0.0", + "@sigstore/protobuf-specs": "^0.5.0", + "make-fetch-happen": "^15.0.2", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1" }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/npm/node_modules/@sigstore/sign/node_modules/proc-log": { + "version": "5.0.0", + "dev": true, + "inBundle": true, + "license": "ISC", "engines": { "node": "^18.17.0 || >=20.5.0" } }, + "node_modules/npm/node_modules/@sigstore/tuf": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.5.0", + "tuf-js": "^4.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/npm/node_modules/@sigstore/verify": { + "version": "3.0.0", + "dev": true, + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.0.0", + "@sigstore/protobuf-specs": "^0.5.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, "node_modules/npm/node_modules/@tufjs/canonical-json": { "version": "2.0.0", "dev": true, @@ -4750,17 +5247,45 @@ "node": "^16.14.0 || >=18.0.0" } }, + "node_modules/npm/node_modules/@tufjs/models": { + "version": "4.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.5" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/npm/node_modules/@tufjs/models/node_modules/minimatch": { + "version": "9.0.5", + "dev": true, + "inBundle": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": 
"https://github.com/sponsors/isaacs" + } + }, "node_modules/npm/node_modules/abbrev": { - "version": "3.0.1", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/agent-base": { - "version": "7.1.3", + "version": "7.1.4", "dev": true, "inBundle": true, "license": "MIT", @@ -4777,20 +5302,8 @@ "node": ">=8" } }, - "node_modules/npm/node_modules/ansi-styles": { - "version": "6.2.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, "node_modules/npm/node_modules/aproba": { - "version": "2.0.0", + "version": "2.1.0", "dev": true, "inBundle": true, "license": "ISC" @@ -4808,28 +5321,28 @@ "license": "MIT" }, "node_modules/npm/node_modules/bin-links": { - "version": "5.0.0", + "version": "6.0.0", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "cmd-shim": "^7.0.0", - "npm-normalize-package-bin": "^4.0.0", - "proc-log": "^5.0.0", - "read-cmd-shim": "^5.0.0", - "write-file-atomic": "^6.0.0" + "cmd-shim": "^8.0.0", + "npm-normalize-package-bin": "^5.0.0", + "proc-log": "^6.0.0", + "read-cmd-shim": "^6.0.0", + "write-file-atomic": "^7.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/binary-extensions": { - "version": "2.3.0", + "version": "3.1.0", "dev": true, "inBundle": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=18.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -4845,80 +5358,29 @@ } }, "node_modules/npm/node_modules/cacache": { - "version": "19.0.1", + "version": "20.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/fs": "^4.0.0", + "@npmcli/fs": "^5.0.0", "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", + "glob": 
"^13.0.0", + "lru-cache": "^11.1.0", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", - "ssri": "^12.0.0", - "tar": "^7.4.3", - "unique-filename": "^4.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/cacache/node_modules/chownr": { - "version": "3.0.0", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/npm/node_modules/cacache/node_modules/mkdirp": { - "version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm/node_modules/cacache/node_modules/tar": { - "version": "7.4.3", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" + "ssri": "^13.0.0", + "unique-filename": "^5.0.0" }, "engines": { - "node": ">=18" - } - }, - "node_modules/npm/node_modules/cacache/node_modules/yallist": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/chalk": { - "version": "5.4.1", + "version": "5.6.2", "dev": true, "inBundle": true, "license": "MIT", @@ -4930,16 +5392,16 @@ } }, "node_modules/npm/node_modules/chownr": { - "version": "2.0.0", + "version": "3.0.0", "dev": true, "inBundle": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=10" + "node": ">=18" } }, "node_modules/npm/node_modules/ci-info": { - "version": "4.2.0", + "version": "4.3.1", "dev": true, "funding": [ { @@ -4954,15 +5416,15 @@ } }, "node_modules/npm/node_modules/cidr-regex": { - "version": 
"4.1.3", + "version": "5.0.1", "dev": true, "inBundle": true, "license": "BSD-2-Clause", "dependencies": { - "ip-regex": "^5.0.0" + "ip-regex": "5.0.0" }, "engines": { - "node": ">=14" + "node": ">=20" } }, "node_modules/npm/node_modules/cli-columns": { @@ -4979,67 +5441,20 @@ } }, "node_modules/npm/node_modules/cmd-shim": { - "version": "7.0.0", + "version": "8.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/color-convert": { - "version": "2.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/color-name": { - "version": "1.1.4", - "dev": true, - "inBundle": true, - "license": "MIT" - }, "node_modules/npm/node_modules/common-ancestor-path": { "version": "1.0.1", "dev": true, "inBundle": true, "license": "ISC" }, - "node_modules/npm/node_modules/cross-spawn": { - "version": "7.0.6", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/npm/node_modules/cross-spawn/node_modules/which": { - "version": "2.0.2", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/npm/node_modules/cssesc": { "version": "3.0.0", "dev": true, @@ -5053,7 +5468,7 @@ } }, "node_modules/npm/node_modules/debug": { - "version": "4.4.1", + "version": "4.4.3", "dev": true, "inBundle": true, "license": "MIT", @@ -5070,7 +5485,7 @@ } }, "node_modules/npm/node_modules/diff": { - "version": "5.2.0", + "version": "8.0.2", "dev": true, "inBundle": true, "license": "BSD-3-Clause", @@ -5078,12 +5493,6 @@ "node": ">=0.3.1" } }, - 
"node_modules/npm/node_modules/eastasianwidth": { - "version": "0.2.0", - "dev": true, - "inBundle": true, - "license": "MIT" - }, "node_modules/npm/node_modules/emoji-regex": { "version": "8.0.0", "dev": true, @@ -5116,7 +5525,7 @@ "license": "MIT" }, "node_modules/npm/node_modules/exponential-backoff": { - "version": "3.1.2", + "version": "3.1.3", "dev": true, "inBundle": true, "license": "Apache-2.0" @@ -5130,22 +5539,6 @@ "node": ">= 4.9.1" } }, - "node_modules/npm/node_modules/foreground-child": { - "version": "3.3.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/npm/node_modules/fs-minipass": { "version": "3.0.3", "dev": true, @@ -5159,20 +5552,17 @@ } }, "node_modules/npm/node_modules/glob": { - "version": "10.4.5", + "version": "13.0.0", "dev": true, "inBundle": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", + "minimatch": "^10.1.1", "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" + "path-scurry": "^2.0.0" }, - "bin": { - "glob": "dist/esm/bin.mjs" + "engines": { + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -5185,15 +5575,15 @@ "license": "ISC" }, "node_modules/npm/node_modules/hosted-git-info": { - "version": "8.1.0", + "version": "9.0.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "lru-cache": "^10.0.1" + "lru-cache": "^11.1.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/http-cache-semantics": { @@ -5242,15 +5632,15 @@ } }, "node_modules/npm/node_modules/ignore-walk": { - "version": "7.0.0", + "version": "8.0.0", "dev": true, "inBundle": true, "license": "ISC", 
"dependencies": { - "minimatch": "^9.0.0" + "minimatch": "^10.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/imurmurhash": { @@ -5263,41 +5653,37 @@ } }, "node_modules/npm/node_modules/ini": { - "version": "5.0.0", + "version": "6.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/init-package-json": { - "version": "7.0.2", + "version": "8.2.4", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/package-json": "^6.0.0", - "npm-package-arg": "^12.0.0", - "promzard": "^2.0.0", - "read": "^4.0.0", - "semver": "^7.3.5", + "@npmcli/package-json": "^7.0.0", + "npm-package-arg": "^13.0.0", + "promzard": "^3.0.1", + "read": "^5.0.1", + "semver": "^7.7.2", "validate-npm-package-license": "^3.0.4", - "validate-npm-package-name": "^6.0.0" + "validate-npm-package-name": "^7.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/ip-address": { - "version": "9.0.5", + "version": "10.0.1", "dev": true, "inBundle": true, "license": "MIT", - "dependencies": { - "jsbn": "1.1.0", - "sprintf-js": "^1.1.3" - }, "engines": { "node": ">= 12" } @@ -5315,15 +5701,15 @@ } }, "node_modules/npm/node_modules/is-cidr": { - "version": "5.1.1", + "version": "6.0.1", "dev": true, "inBundle": true, "license": "BSD-2-Clause", "dependencies": { - "cidr-regex": "^4.1.1" + "cidr-regex": "5.0.1" }, "engines": { - "node": ">=14" + "node": ">=20" } }, "node_modules/npm/node_modules/is-fullwidth-code-point": { @@ -5336,39 +5722,21 @@ } }, "node_modules/npm/node_modules/isexe": { - "version": "2.0.0", + "version": "3.1.1", "dev": true, "inBundle": true, - "license": "ISC" + "license": "ISC", + "engines": { + "node": ">=16" + } }, - "node_modules/npm/node_modules/jackspeak": { - "version": "3.4.3", + 
"node_modules/npm/node_modules/json-parse-even-better-errors": { + "version": "5.0.0", "dev": true, "inBundle": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/npm/node_modules/jsbn": { - "version": "1.1.0", - "dev": true, - "inBundle": true, - "license": "MIT" - }, - "node_modules/npm/node_modules/json-parse-even-better-errors": { - "version": "4.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": "^18.17.0 || >=20.5.0" + "license": "MIT", + "engines": { + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/json-stringify-nice": { @@ -5402,218 +5770,201 @@ "license": "MIT" }, "node_modules/npm/node_modules/libnpmaccess": { - "version": "9.0.0", + "version": "10.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-package-arg": "^12.0.0", - "npm-registry-fetch": "^18.0.1" + "npm-package-arg": "^13.0.0", + "npm-registry-fetch": "^19.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmdiff": { - "version": "7.0.1", + "version": "8.0.12", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^8.0.1", - "@npmcli/installed-package-contents": "^3.0.0", - "binary-extensions": "^2.3.0", - "diff": "^5.1.0", - "minimatch": "^9.0.4", - "npm-package-arg": "^12.0.0", - "pacote": "^19.0.0", - "tar": "^6.2.1" + "@npmcli/arborist": "^9.1.9", + "@npmcli/installed-package-contents": "^4.0.0", + "binary-extensions": "^3.0.0", + "diff": "^8.0.2", + "minimatch": "^10.0.3", + "npm-package-arg": "^13.0.0", + "pacote": "^21.0.2", + "tar": "^7.5.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmexec": { - "version": "9.0.1", + "version": 
"10.1.11", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^8.0.1", - "@npmcli/run-script": "^9.0.1", + "@npmcli/arborist": "^9.1.9", + "@npmcli/package-json": "^7.0.0", + "@npmcli/run-script": "^10.0.0", "ci-info": "^4.0.0", - "npm-package-arg": "^12.0.0", - "pacote": "^19.0.0", - "proc-log": "^5.0.0", - "read": "^4.0.0", - "read-package-json-fast": "^4.0.0", + "npm-package-arg": "^13.0.0", + "pacote": "^21.0.2", + "proc-log": "^6.0.0", + "promise-retry": "^2.0.1", + "read": "^5.0.1", "semver": "^7.3.7", - "walk-up-path": "^3.0.1" + "signal-exit": "^4.1.0", + "walk-up-path": "^4.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmfund": { - "version": "6.0.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "@npmcli/arborist": "^8.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/libnpmhook": { - "version": "11.0.0", + "version": "7.0.12", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "aproba": "^2.0.0", - "npm-registry-fetch": "^18.0.1" + "@npmcli/arborist": "^9.1.9" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmorg": { - "version": "7.0.0", + "version": "8.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^18.0.1" + "npm-registry-fetch": "^19.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmpack": { - "version": "8.0.1", + "version": "9.0.12", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^8.0.1", - "@npmcli/run-script": "^9.0.1", - "npm-package-arg": "^12.0.0", - "pacote": "^19.0.0" + "@npmcli/arborist": "^9.1.9", + "@npmcli/run-script": "^10.0.0", + "npm-package-arg": "^13.0.0", 
+ "pacote": "^21.0.2" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmpublish": { - "version": "10.0.1", + "version": "11.1.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { + "@npmcli/package-json": "^7.0.0", "ci-info": "^4.0.0", - "normalize-package-data": "^7.0.0", - "npm-package-arg": "^12.0.0", - "npm-registry-fetch": "^18.0.1", - "proc-log": "^5.0.0", + "npm-package-arg": "^13.0.0", + "npm-registry-fetch": "^19.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.7", - "sigstore": "^3.0.0", - "ssri": "^12.0.0" + "sigstore": "^4.0.0", + "ssri": "^13.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmsearch": { - "version": "8.0.0", + "version": "9.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-registry-fetch": "^18.0.1" + "npm-registry-fetch": "^19.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmteam": { - "version": "7.0.0", + "version": "8.0.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^18.0.1" + "npm-registry-fetch": "^19.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/libnpmversion": { - "version": "7.0.0", + "version": "8.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^6.0.1", - "@npmcli/run-script": "^9.0.1", - "json-parse-even-better-errors": "^4.0.0", - "proc-log": "^5.0.0", + "@npmcli/git": "^7.0.0", + "@npmcli/run-script": "^10.0.0", + "json-parse-even-better-errors": "^5.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.7" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/lru-cache": { - "version": "10.4.3", + "version": 
"11.2.2", "dev": true, "inBundle": true, - "license": "ISC" + "license": "ISC", + "engines": { + "node": "20 || >=22" + } }, "node_modules/npm/node_modules/make-fetch-happen": { - "version": "14.0.3", + "version": "15.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/agent": "^3.0.0", - "cacache": "^19.0.1", + "@npmcli/agent": "^4.0.0", + "cacache": "^20.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", + "minipass-fetch": "^5.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", - "proc-log": "^5.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", - "ssri": "^12.0.0" + "ssri": "^13.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/make-fetch-happen/node_modules/negotiator": { - "version": "1.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/minimatch": { - "version": "9.0.5", + "version": "10.1.1", "dev": true, "inBundle": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "@isaacs/brace-expansion": "^5.0.0" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -5641,7 +5992,7 @@ } }, "node_modules/npm/node_modules/minipass-fetch": { - "version": "4.0.1", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "MIT", @@ -5651,7 +6002,7 @@ "minizlib": "^3.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "optionalDependencies": { "encoding": "^0.1.13" @@ -5730,7 +6081,7 @@ } }, "node_modules/npm/node_modules/minizlib": { - "version": "3.0.2", + "version": "3.1.0", "dev": true, "inBundle": true, "license": "MIT", @@ -5741,18 +6092,6 @@ "node": ">= 18" } }, - "node_modules/npm/node_modules/mkdirp": { - "version": 
"1.0.4", - "dev": true, - "inBundle": true, - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/npm/node_modules/ms": { "version": "2.1.3", "dev": true, @@ -5760,16 +6099,25 @@ "license": "MIT" }, "node_modules/npm/node_modules/mute-stream": { - "version": "2.0.0", + "version": "3.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/npm/node_modules/negotiator": { + "version": "1.0.0", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" } }, "node_modules/npm/node_modules/node-gyp": { - "version": "11.2.0", + "version": "12.1.0", "dev": true, "inBundle": true, "license": "MIT", @@ -5777,123 +6125,59 @@ "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^14.0.3", - "nopt": "^8.0.0", - "proc-log": "^5.0.0", + "make-fetch-happen": "^15.0.0", + "nopt": "^9.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5", - "tar": "^7.4.3", + "tar": "^7.5.2", "tinyglobby": "^0.2.12", - "which": "^5.0.0" + "which": "^6.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/chownr": { - "version": "3.0.0", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/mkdirp": { - "version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/tar": { - "version": "7.4.3", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - 
"minipass": "^7.1.2", - "minizlib": "^3.0.1", - "mkdirp": "^3.0.1", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/yallist": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/nopt": { - "version": "8.1.0", + "version": "9.0.0", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "abbrev": "^3.0.0" + "abbrev": "^4.0.0" }, "bin": { "nopt": "bin/nopt.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/normalize-package-data": { - "version": "7.0.0", - "dev": true, - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^8.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-audit-report": { - "version": "6.0.0", + "version": "7.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-bundled": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-normalize-package-bin": "^4.0.0" + "npm-normalize-package-bin": "^5.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-install-checks": { - "version": "7.1.1", + "version": "8.0.0", "dev": true, "inBundle": true, "license": "BSD-2-Clause", @@ -5901,103 +6185,104 @@ "semver": "^7.1.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-normalize-package-bin": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", 
"engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-package-arg": { - "version": "12.0.2", + "version": "13.0.2", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", + "hosted-git-info": "^9.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" + "validate-npm-package-name": "^7.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-packlist": { - "version": "9.0.0", + "version": "10.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "ignore-walk": "^7.0.0" + "ignore-walk": "^8.0.0", + "proc-log": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-pick-manifest": { - "version": "10.0.0", + "version": "11.0.3", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-install-checks": "^7.1.0", - "npm-normalize-package-bin": "^4.0.0", - "npm-package-arg": "^12.0.0", + "npm-install-checks": "^8.0.0", + "npm-normalize-package-bin": "^5.0.0", + "npm-package-arg": "^13.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-profile": { - "version": "11.0.1", + "version": "12.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "npm-registry-fetch": "^18.0.0", - "proc-log": "^5.0.0" + "npm-registry-fetch": "^19.0.0", + "proc-log": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-registry-fetch": { - "version": "18.0.2", + "version": "19.1.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/redact": "^3.0.0", + "@npmcli/redact": "^4.0.0", "jsonparse": "^1.3.1", - "make-fetch-happen": 
"^14.0.0", + "make-fetch-happen": "^15.0.0", "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", + "minipass-fetch": "^5.0.0", "minizlib": "^3.0.1", - "npm-package-arg": "^12.0.0", - "proc-log": "^5.0.0" + "npm-package-arg": "^13.0.0", + "proc-log": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/npm-user-validate": { - "version": "3.0.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "BSD-2-Clause", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/p-map": { - "version": "7.0.3", + "version": "7.0.4", "dev": true, "inBundle": true, "license": "MIT", @@ -6008,77 +6293,62 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/npm/node_modules/package-json-from-dist": { - "version": "1.0.1", - "dev": true, - "inBundle": true, - "license": "BlueOak-1.0.0" - }, "node_modules/npm/node_modules/pacote": { - "version": "19.0.1", + "version": "21.0.4", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^6.0.0", - "@npmcli/installed-package-contents": "^3.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "@npmcli/run-script": "^9.0.0", - "cacache": "^19.0.0", + "@npmcli/git": "^7.0.0", + "@npmcli/installed-package-contents": "^4.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "@npmcli/run-script": "^10.0.0", + "cacache": "^20.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^12.0.0", - "npm-packlist": "^9.0.0", - "npm-pick-manifest": "^10.0.0", - "npm-registry-fetch": "^18.0.0", - "proc-log": "^5.0.0", + "npm-package-arg": "^13.0.0", + "npm-packlist": "^10.0.1", + "npm-pick-manifest": "^11.0.1", + "npm-registry-fetch": "^19.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", - "sigstore": "^3.0.0", - "ssri": "^12.0.0", - "tar": "^6.1.11" + "sigstore": "^4.0.0", + "ssri": 
"^13.0.0", + "tar": "^7.4.3" }, "bin": { "pacote": "bin/index.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/parse-conflict-json": { - "version": "4.0.0", + "version": "5.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "json-parse-even-better-errors": "^4.0.0", + "json-parse-even-better-errors": "^5.0.0", "just-diff": "^6.0.0", "just-diff-apply": "^5.2.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/path-key": { - "version": "3.1.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=8" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/path-scurry": { - "version": "1.11.1", + "version": "2.0.0", "dev": true, "inBundle": true, "license": "BlueOak-1.0.0", "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" }, "engines": { - "node": ">=16 || 14 >=14.18" + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -6098,21 +6368,21 @@ } }, "node_modules/npm/node_modules/proc-log": { - "version": "5.0.0", + "version": "6.1.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/proggy": { - "version": "3.0.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/promise-all-reject-late": { @@ -6147,15 +6417,15 @@ } }, "node_modules/npm/node_modules/promzard": { - "version": "2.0.0", + "version": "3.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "read": "^4.0.0" + "read": "^5.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, 
"node_modules/npm/node_modules/qrcode-terminal": { @@ -6167,37 +6437,24 @@ } }, "node_modules/npm/node_modules/read": { - "version": "4.1.0", + "version": "5.0.1", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "mute-stream": "^2.0.0" + "mute-stream": "^3.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/read-cmd-shim": { - "version": "5.0.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/read-package-json-fast": { - "version": "4.0.0", + "version": "6.0.0", "dev": true, "inBundle": true, "license": "ISC", - "dependencies": { - "json-parse-even-better-errors": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" - }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/retry": { @@ -6217,7 +6474,7 @@ "optional": true }, "node_modules/npm/node_modules/semver": { - "version": "7.7.2", + "version": "7.7.3", "dev": true, "inBundle": true, "license": "ISC", @@ -6228,27 +6485,6 @@ "node": ">=10" } }, - "node_modules/npm/node_modules/shebang-command": { - "version": "2.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/npm/node_modules/shebang-regex": { - "version": "3.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/signal-exit": { "version": "4.1.0", "dev": true, @@ -6262,76 +6498,24 @@ } }, "node_modules/npm/node_modules/sigstore": { - "version": "3.1.0", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "@sigstore/sign": "^3.1.0", - "@sigstore/tuf": "^3.1.0", - 
"@sigstore/verify": "^2.1.0" + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.0.0", + "@sigstore/protobuf-specs": "^0.5.0", + "@sigstore/sign": "^4.0.0", + "@sigstore/tuf": "^4.0.0", + "@sigstore/verify": "^3.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm/node_modules/sigstore/node_modules/@sigstore/bundle": { - "version": "3.1.0", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.4.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/sigstore/node_modules/@sigstore/core": { - "version": "2.0.0", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/sigstore/node_modules/@sigstore/sign": { - "version": "3.1.0", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "make-fetch-happen": "^14.0.2", - "proc-log": "^5.0.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/sigstore/node_modules/@sigstore/verify": { - "version": "2.1.1", - "dev": true, - "inBundle": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/smart-buffer": { - "version": "4.2.0", + "node_modules/npm/node_modules/smart-buffer": { + "version": "4.2.0", "dev": true, "inBundle": true, "license": "MIT", @@ -6341,12 +6525,12 @@ } }, "node_modules/npm/node_modules/socks": { - "version": "2.8.5", + "version": "2.8.7", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "ip-address": "^9.0.5", + "ip-address": 
"^10.0.1", "smart-buffer": "^4.2.0" }, "engines": { @@ -6405,19 +6589,13 @@ } }, "node_modules/npm/node_modules/spdx-license-ids": { - "version": "3.0.21", + "version": "3.0.22", "dev": true, "inBundle": true, "license": "CC0-1.0" }, - "node_modules/npm/node_modules/sprintf-js": { - "version": "1.1.3", - "dev": true, - "inBundle": true, - "license": "BSD-3-Clause" - }, "node_modules/npm/node_modules/ssri": { - "version": "12.0.0", + "version": "13.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -6425,7 +6603,7 @@ "minipass": "^7.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/string-width": { @@ -6442,21 +6620,6 @@ "node": ">=8" } }, - "node_modules/npm/node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/strip-ansi": { "version": "6.0.1", "dev": true, @@ -6469,104 +6632,41 @@ "node": ">=8" } }, - "node_modules/npm/node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/npm/node_modules/supports-color": { - "version": "9.4.0", + "version": "10.2.2", "dev": true, "inBundle": true, "license": "MIT", "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/chalk/supports-color?sponsor=1" } }, "node_modules/npm/node_modules/tar": { - "version": "6.2.1", + "version": "7.5.2", "dev": true, "inBundle": true, - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - 
"node": ">=10" - } - }, - "node_modules/npm/node_modules/tar/node_modules/fs-minipass": { - "version": "2.1.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/npm/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "inBundle": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" }, "engines": { - "node": ">=8" + "node": ">=18" } }, - "node_modules/npm/node_modules/tar/node_modules/minipass": { + "node_modules/npm/node_modules/tar/node_modules/yallist": { "version": "5.0.0", "dev": true, "inBundle": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/npm/node_modules/tar/node_modules/minizlib": { - "version": "2.1.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/npm/node_modules/tar/node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, + "license": "BlueOak-1.0.0", "engines": { - "node": ">=8" + "node": ">=18" } }, "node_modules/npm/node_modules/text-table": { @@ -6576,19 +6676,19 @@ "license": "MIT" }, "node_modules/npm/node_modules/tiny-relative-date": { - "version": "1.3.0", + "version": "2.0.2", "dev": true, "inBundle": true, "license": "MIT" }, "node_modules/npm/node_modules/tinyglobby": { - "version": "0.2.14", + "version": "0.2.15", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "fdir": "^6.4.4", - "picomatch": "^4.0.2" + "fdir": "^6.5.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">=12.0.0" @@ -6598,10 +6698,13 @@ } }, 
"node_modules/npm/node_modules/tinyglobby/node_modules/fdir": { - "version": "6.4.6", + "version": "6.5.0", "dev": true, "inBundle": true, "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, "peerDependencies": { "picomatch": "^3 || ^4" }, @@ -6612,7 +6715,7 @@ } }, "node_modules/npm/node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.2", + "version": "4.0.3", "dev": true, "inBundle": true, "license": "MIT", @@ -6633,46 +6736,33 @@ } }, "node_modules/npm/node_modules/tuf-js": { - "version": "3.0.1", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.3.6", - "make-fetch-happen": "^14.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/tuf-js/node_modules/@tufjs/models": { - "version": "3.0.1", + "version": "4.0.0", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" + "@tufjs/models": "4.0.0", + "debug": "^4.4.1", + "make-fetch-happen": "^15.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/unique-filename": { - "version": "4.0.0", + "version": "5.0.0", "dev": true, "inBundle": true, "license": "ISC", "dependencies": { - "unique-slug": "^5.0.0" + "unique-slug": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/unique-slug": { - "version": "5.0.0", + "version": "6.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -6680,7 +6770,7 @@ "imurmurhash": "^0.1.4" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/util-deprecate": { @@ -6710,22 +6800,25 @@ } }, "node_modules/npm/node_modules/validate-npm-package-name": { - "version": "6.0.1", + "version": "7.0.0", "dev": true, "inBundle": true, "license": "ISC", "engines": { - "node": "^18.17.0 
|| >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/walk-up-path": { - "version": "3.0.1", + "version": "4.0.0", "dev": true, "inBundle": true, - "license": "ISC" + "license": "ISC", + "engines": { + "node": "20 || >=22" + } }, "node_modules/npm/node_modules/which": { - "version": "5.0.0", + "version": "6.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -6736,120 +6829,11 @@ "node-which": "bin/which.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm/node_modules/which/node_modules/isexe": { - "version": "3.1.1", - "dev": true, - "inBundle": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/npm/node_modules/wrap-ansi": { - "version": "8.1.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/npm/node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/npm/node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/npm/node_modules/wrap-ansi/node_modules/ansi-regex": { - "version": "6.1.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - 
"node_modules/npm/node_modules/wrap-ansi/node_modules/emoji-regex": { - "version": "9.2.2", - "dev": true, - "inBundle": true, - "license": "MIT" - }, - "node_modules/npm/node_modules/wrap-ansi/node_modules/string-width": { - "version": "5.1.2", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.1.0", - "dev": true, - "inBundle": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/write-file-atomic": { - "version": "6.0.0", + "version": "7.0.0", "dev": true, "inBundle": true, "license": "ISC", @@ -6858,7 +6842,7 @@ "signal-exit": "^4.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm/node_modules/yallist": { @@ -6867,6 +6851,16 @@ "inBundle": true, "license": "ISC" }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -6917,6 +6911,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/ora/node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": 
"sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", @@ -6940,6 +6947,22 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-event": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/p-event/-/p-event-6.0.1.tgz", + "integrity": "sha512-Q6Bekk5wpzW5qIyUP4gdMEujObYstZl6DMMOSenwBvV0BlE5LkDwkjs5yHbZmdCEq2o4RJx4tE1vwxFVf2FG1w==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-timeout": "^6.1.2" + }, + "engines": { + "node": ">=16.17" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-filter": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/p-filter/-/p-filter-4.1.0.tgz", @@ -7021,6 +7044,19 @@ "node": ">=8" } }, + "node_modules/p-timeout": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-6.1.4.tgz", + "integrity": "sha512-MyIV3ZA/PmyBN/ud8vV9XzwTrNtR4jFrObymZYnZqMmW0zA8Z17vnT0rBgFE/TlohB+YCHqXMgZzb3Csp49vqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -7063,6 +7099,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/parse-ms": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz", + "integrity": "sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parse-passwd": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", @@ -7073,6 +7122,30 @@ "node": ">=0.10.0" } }, + "node_modules/parse5": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", + "dev": true, + "license": "MIT" + }, + "node_modules/parse5-htmlparser2-tree-adapter": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz", + "integrity": "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse5": "^6.0.1" + } + }, + "node_modules/parse5-htmlparser2-tree-adapter/node_modules/parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true, + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -7237,6 +7310,22 @@ "node": ">=4" } }, + "node_modules/pretty-ms": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.3.0.tgz", + "integrity": "sha512-gjVS5hOP+M3wMm5nmNOucbIrqudzs9v/57bWRHQWLYklXqoXKrVfYW2W9+glfGsqtPgpiz5WwyEEB+ksXIx3gQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "parse-ms": "^4.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -7298,63 +7387,39 @@ 
"rc": "cli.js" } }, - "node_modules/rc/node_modules/strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/read-pkg": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-9.0.1.tgz", - "integrity": "sha512-9viLL4/n1BJUCT1NXVTdS1jtm80yDEgR5T4yCelII49Mbj0v1rZdKqj7zCiYdbB0CuCgdrvHcNogAKTFPBocFA==", + "node_modules/read-package-up": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/read-package-up/-/read-package-up-12.0.0.tgz", + "integrity": "sha512-Q5hMVBYur/eQNWDdbF4/Wqqr9Bjvtrw2kjGxxBbKLbx8bVCL8gcArjTy8zDUuLGQicftpMuU0riQNcAsbtOVsw==", "dev": true, "license": "MIT", "dependencies": { - "@types/normalize-package-data": "^2.4.3", - "normalize-package-data": "^6.0.0", - "parse-json": "^8.0.0", - "type-fest": "^4.6.0", - "unicorn-magic": "^0.1.0" + "find-up-simple": "^1.0.1", + "read-pkg": "^10.0.0", + "type-fest": "^5.2.0" }, "engines": { - "node": ">=18" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/read-pkg-up": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-11.0.0.tgz", - "integrity": "sha512-LOVbvF1Q0SZdjClSefZ0Nz5z8u+tIE7mV5NibzmE9VYmDe9CaBbAVtz1veOSZbofrdsilxuDAYnFenukZVp8/Q==", - "deprecated": "Renamed to read-package-up", + "node_modules/read-pkg": { + "version": "10.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-10.0.0.tgz", + "integrity": "sha512-A70UlgfNdKI5NSvTTfHzLQj7NJRpJ4mT5tGafkllJ4wh71oYuGm/pzphHcmW4s35iox56KSK721AihodoXSc/A==", "dev": true, "license": "MIT", "dependencies": { - "find-up-simple": "^1.0.0", - "read-pkg": "^9.0.0", - "type-fest": "^4.6.0" - }, - "engines": { - "node": 
">=18" + "@types/normalize-package-data": "^2.4.4", + "normalize-package-data": "^8.0.0", + "parse-json": "^8.3.0", + "type-fest": "^5.2.0", + "unicorn-magic": "^0.3.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/read-pkg-up/node_modules/type-fest": { - "version": "4.41.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", - "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", - "dev": true, - "license": "(MIT OR CC0-1.0)", "engines": { - "node": ">=16" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -7378,7 +7443,7 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/read-pkg/node_modules/type-fest": { + "node_modules/read-pkg/node_modules/parse-json/node_modules/type-fest": { "version": "4.41.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", @@ -7391,19 +7456,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/read-pkg/node_modules/unicorn-magic": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz", - "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", @@ -7433,16 +7485,6 @@ "node": ">=8" } }, - "node_modules/redeyed": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/redeyed/-/redeyed-2.1.1.tgz", - "integrity": 
"sha512-FNpGGo1DycYAdnrKFxCMmKYgo/mILAqtRYbkdQD8Ep/Hk2PQ5+aEAEx+IU713RTDmuBaH0c8P5ZozurNu5ObRQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "esprima": "~4.0.0" - } - }, "node_modules/registry-auth-token": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-5.1.0.tgz", @@ -7632,47 +7674,203 @@ "license": "MIT" }, "node_modules/semantic-release": { - "version": "22.0.12", - "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-22.0.12.tgz", - "integrity": "sha512-0mhiCR/4sZb00RVFJIUlMuiBkW3NMpVIW2Gse7noqEMoFGkvfPPAImEQbkBV8xga4KOPP4FdTRYuLLy32R1fPw==", + "version": "25.0.2", + "resolved": "https://registry.npmjs.org/semantic-release/-/semantic-release-25.0.2.tgz", + "integrity": "sha512-6qGjWccl5yoyugHt3jTgztJ9Y0JVzyH8/Voc/D8PlLat9pwxQYXz7W1Dpnq5h0/G5GCYGUaDSlYcyk3AMh5A6g==", "dev": true, "license": "MIT", "dependencies": { - "@semantic-release/commit-analyzer": "^11.0.0", + "@semantic-release/commit-analyzer": "^13.0.1", "@semantic-release/error": "^4.0.0", - "@semantic-release/github": "^9.0.0", - "@semantic-release/npm": "^11.0.0", - "@semantic-release/release-notes-generator": "^12.0.0", + "@semantic-release/github": "^12.0.0", + "@semantic-release/npm": "^13.1.1", + "@semantic-release/release-notes-generator": "^14.1.0", "aggregate-error": "^5.0.0", - "cosmiconfig": "^8.0.0", + "cosmiconfig": "^9.0.0", "debug": "^4.0.0", - "env-ci": "^10.0.0", - "execa": "^8.0.0", + "env-ci": "^11.0.0", + "execa": "^9.0.0", "figures": "^6.0.0", - "find-versions": "^5.1.0", + "find-versions": "^6.0.0", "get-stream": "^6.0.0", "git-log-parser": "^1.2.0", - "hook-std": "^3.0.0", - "hosted-git-info": "^7.0.0", - "import-from-esm": "^1.3.1", + "hook-std": "^4.0.0", + "hosted-git-info": "^9.0.0", + "import-from-esm": "^2.0.0", "lodash-es": "^4.17.21", - "marked": "^9.0.0", - "marked-terminal": "^6.0.0", + "marked": "^15.0.0", + "marked-terminal": "^7.3.0", "micromatch": "^4.0.2", "p-each-series": 
"^3.0.0", "p-reduce": "^3.0.0", - "read-pkg-up": "^11.0.0", + "read-package-up": "^12.0.0", "resolve-from": "^5.0.0", "semver": "^7.3.2", - "semver-diff": "^4.0.0", + "semver-diff": "^5.0.0", "signale": "^1.2.1", - "yargs": "^17.5.1" + "yargs": "^18.0.0" }, "bin": { "semantic-release": "bin/semantic-release.js" }, "engines": { - "node": "^18.17 || >=20.6.1" + "node": "^22.14.0 || >= 24.10.0" + } + }, + "node_modules/semantic-release/node_modules/@octokit/auth-token": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz", + "integrity": "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/core": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.6.tgz", + "integrity": "sha512-DhGl4xMVFGVIyMwswXeyzdL4uXD5OGILGX5N8Y+f6W7LhC1Ze2poSNrkF/fedpVDHEEZ+PHFW0vL14I+mm8K3Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^6.0.0", + "@octokit/graphql": "^9.0.3", + "@octokit/request": "^10.0.6", + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0", + "before-after-hook": "^4.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/endpoint": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.2.tgz", + "integrity": "sha512-4zCpzP1fWc7QlqunZ5bSEjxc6yLAlRTnDwKtgXfcI/FxxGoqedDG8V2+xJ60bV2kODqcGB+nATdtap/XYq2NZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/graphql": { + "version": "9.0.3", + "resolved": 
"https://registry.npmjs.org/@octokit/graphql/-/graphql-9.0.3.tgz", + "integrity": "sha512-grAEuupr/C1rALFnXTv6ZQhFuL1D8G5y8CN04RgrO4FIPMrtm+mcZzFG7dcBm+nq+1ppNixu+Jd78aeJOYxlGA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/request": "^10.0.6", + "@octokit/types": "^16.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/openapi-types": { + "version": "27.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-27.0.0.tgz", + "integrity": "sha512-whrdktVs1h6gtR+09+QsNk2+FO+49j6ga1c55YZudfEG+oKJVvJLQi3zkOm5JjiUXAagWK2tI2kTGKJ2Ys7MGA==", + "dev": true, + "license": "MIT" + }, + "node_modules/semantic-release/node_modules/@octokit/plugin-paginate-rest": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-14.0.0.tgz", + "integrity": "sha512-fNVRE7ufJiAA3XUrha2omTA39M6IXIc6GIZLvlbsm8QOQCYvpq/LkMNGyFlB1d8hTDzsAXa3OKtybdMAYsV/fw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/semantic-release/node_modules/@octokit/plugin-retry": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-8.0.3.tgz", + "integrity": "sha512-vKGx1i3MC0za53IzYBSBXcrhmd+daQDzuZfYDd52X5S0M2otf3kVZTVP8bLA3EkU0lTvd1WEC2OlNNa4G+dohA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": ">=7" + } + }, + "node_modules/semantic-release/node_modules/@octokit/plugin-throttling": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-11.0.3.tgz", + 
"integrity": "sha512-34eE0RkFCKycLl2D2kq7W+LovheM/ex3AwZCYN8udpi6bxsyjZidb2McXs69hZhLmJlDqTSP8cH+jSRpiaijBg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 20" + }, + "peerDependencies": { + "@octokit/core": "^7.0.0" + } + }, + "node_modules/semantic-release/node_modules/@octokit/request": { + "version": "10.0.7", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.7.tgz", + "integrity": "sha512-v93h0i1yu4idj8qFPZwjehoJx4j3Ntn+JhXsdJrG9pYaX6j/XRz2RmasMUHtNgQD39nrv/VwTWSqK0RNXR8upA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^11.0.2", + "@octokit/request-error": "^7.0.2", + "@octokit/types": "^16.0.0", + "fast-content-type-parse": "^3.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/request-error": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.1.0.tgz", + "integrity": "sha512-KMQIfq5sOPpkQYajXHwnhjCC0slzCNScLHs9JafXc4RAJI+9f+jNDlBNaIMTvazOPLgb4BnlhGJOTbnN0wIjPw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/types": "^16.0.0" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/semantic-release/node_modules/@octokit/types": { + "version": "16.0.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-16.0.0.tgz", + "integrity": "sha512-sKq+9r1Mm4efXW1FCk7hFSeJo4QKreL/tTbR0rz/qx/r1Oa2VV83LTA/H/MuCOX7uCIJmQVRKBcbmWoySjAnSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^27.0.0" } }, "node_modules/semantic-release/node_modules/@semantic-release/error": { @@ -7685,6 +7883,51 @@ "node": ">=18" } }, + "node_modules/semantic-release/node_modules/@semantic-release/github": { + "version": "12.0.2", + "resolved": 
"https://registry.npmjs.org/@semantic-release/github/-/github-12.0.2.tgz", + "integrity": "sha512-qyqLS+aSGH1SfXIooBKjs7mvrv0deg8v+jemegfJg1kq6ji+GJV8CO08VJDEsvjp3O8XJmTTIAjjZbMzagzsdw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@octokit/core": "^7.0.0", + "@octokit/plugin-paginate-rest": "^14.0.0", + "@octokit/plugin-retry": "^8.0.0", + "@octokit/plugin-throttling": "^11.0.0", + "@semantic-release/error": "^4.0.0", + "aggregate-error": "^5.0.0", + "debug": "^4.3.4", + "dir-glob": "^3.0.1", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "issue-parser": "^7.0.0", + "lodash-es": "^4.17.21", + "mime": "^4.0.0", + "p-filter": "^4.0.0", + "tinyglobby": "^0.2.14", + "undici": "^7.0.0", + "url-join": "^5.0.0" + }, + "engines": { + "node": "^22.14.0 || >= 24.10.0" + }, + "peerDependencies": { + "semantic-release": ">=24.1.0" + } + }, + "node_modules/semantic-release/node_modules/@sindresorhus/merge-streams": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/merge-streams/-/merge-streams-4.0.0.tgz", + "integrity": "sha512-tlqY9xq5ukxTUZBmoOp+m61cqwQD5pHJtFY3Mn8CA8ps6yghLH/Hw8UPdqg4OLmFW3IFlcXnQNmo/dh8HzXYIQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/semantic-release/node_modules/aggregate-error": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-5.0.0.tgz", @@ -7702,10 +7945,43 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/semantic-release/node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": 
"https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/semantic-release/node_modules/ansi-styles": { + "version": "6.2.3", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz", + "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/semantic-release/node_modules/before-after-hook": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-4.0.0.tgz", + "integrity": "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/semantic-release/node_modules/clean-stack": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.2.0.tgz", - "integrity": "sha512-TyUIUJgdFnCISzG5zu3291TAsE77ddchd0bepon1VVQrKLGKFED4iXFEDQ24mIPdPBbyE16PK3F8MYE1CmcBEQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-5.3.0.tgz", + "integrity": "sha512-9ngPTOhYGQqNVSfeJkYXHmF7AGWp4/nN5D/QqNQs3Dvxd1Kk/WpjHfNujKHYUQ/5CoGyOyFNoWSPk5afzP0QVg==", "dev": true, "license": "MIT", "dependencies": { @@ -7718,6 +7994,55 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/semantic-release/node_modules/cliui": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-9.0.1.tgz", + "integrity": "sha512-k7ndgKhwoQveBL+/1tqGJYNz097I7WOvwbmmU2AR5+magtbjPWQTS1C5vzGkBC8Ym8UWRzfKUzUUqFLypY4Q+w==", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^7.2.0", + "strip-ansi": "^7.1.0", + "wrap-ansi": "^9.0.0" + }, + "engines": { + "node": ">=20" + } + }, + "node_modules/semantic-release/node_modules/cosmiconfig": { + "version": "9.0.0", + "resolved": 
"https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz", + "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/semantic-release/node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "dev": true, + "license": "MIT" + }, "node_modules/semantic-release/node_modules/escape-string-regexp": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", @@ -7730,39 +8055,46 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } - }, - "node_modules/semantic-release/node_modules/execa": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", + }, + "node_modules/semantic-release/node_modules/execa": { + "version": "9.6.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-9.6.1.tgz", + "integrity": "sha512-9Be3ZoN4LmYR90tUoVu2te2BsbzHfhJyfEiAVfz7N5/zv+jduIfLrV2xdQXOHbaD6KgpGdO9PRPM1Y4Q9QkPkA==", + "dev": true, + "license": "MIT", + "dependencies": { 
+ "@sindresorhus/merge-streams": "^4.0.0", + "cross-spawn": "^7.0.6", + "figures": "^6.1.0", + "get-stream": "^9.0.0", + "human-signals": "^8.0.1", + "is-plain-obj": "^4.1.0", + "is-stream": "^4.0.1", + "npm-run-path": "^6.0.0", + "pretty-ms": "^9.2.0", "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" + "strip-final-newline": "^4.0.0", + "yoctocolors": "^2.1.1" }, "engines": { - "node": ">=16.17" + "node": "^18.19.0 || >=20.5.0" }, "funding": { "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, "node_modules/semantic-release/node_modules/execa/node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", "dev": true, "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" + }, "engines": { - "node": ">=16" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -7785,13 +8117,13 @@ } }, "node_modules/semantic-release/node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": "sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-8.0.1.tgz", + "integrity": "sha512-eKCa6bwnJhvxj14kZk5NCPc6Hb6BdsU9DZcOnmQKSnO1VKrfV0zCvtttPZUsBvjmNDn8rpcJfpwSYnHBjc95MQ==", "dev": true, "license": "Apache-2.0", "engines": { - "node": ">=16.17.0" + "node": ">=18.18.0" } }, "node_modules/semantic-release/node_modules/indent-string": { @@ -7807,23 +8139,23 @@ "url": 
"https://github.com/sponsors/sindresorhus" } }, - "node_modules/semantic-release/node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", + "node_modules/semantic-release/node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", "dev": true, "license": "MIT", "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/semantic-release/node_modules/is-unicode-supported": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz", - "integrity": "sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ==", + "node_modules/semantic-release/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", "dev": true, "license": "MIT", "engines": { @@ -7833,46 +8165,35 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/semantic-release/node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/semantic-release/node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": 
"https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", + "node_modules/semantic-release/node_modules/issue-parser": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/issue-parser/-/issue-parser-7.0.1.tgz", + "integrity": "sha512-3YZcUUR2Wt1WsapF+S/WiA2WmlW0cWAoPccMqne7AxEBhCdFeTPjfv/Axb8V2gyCgY3nRw+ksZ3xSUX+R47iAg==", "dev": true, "license": "MIT", "dependencies": { - "path-key": "^4.0.0" + "lodash.capitalize": "^4.2.1", + "lodash.escaperegexp": "^4.1.2", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.uniqby": "^4.7.0" }, "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": "^18.17 || >=20.6.1" } }, - "node_modules/semantic-release/node_modules/onetime": { + "node_modules/semantic-release/node_modules/npm-run-path": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-6.0.0.tgz", + "integrity": "sha512-9qny7Z9DsQU8Ou39ERsPU4OZQlSTP47ShQzuKZ6PRXpYLtIFgl/DEBYEXKlvcEa+9tHVcK8CF81Y2V72qaZhWA==", "dev": true, "license": "MIT", "dependencies": { - "mimic-fn": "^4.0.0" + "path-key": "^4.0.0", + "unicorn-magic": "^0.3.0" }, "engines": { - "node": ">=12" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -7917,19 +8238,116 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/semantic-release/node_modules/strip-final-newline": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": 
"sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", + "node_modules/semantic-release/node_modules/string-width": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^10.3.0", + "get-east-asian-width": "^1.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semantic-release/node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", "dev": true, "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, "engines": { "node": ">=12" }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/semantic-release/node_modules/strip-final-newline": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-4.0.0.tgz", + "integrity": "sha512-aulFJcD6YK8V1G7iRB5tigAP4TsHBZZrOV8pjV++zdUwmeV8uzbY7yn6h9MswN62adStNZFuCIx4haBnRuMDaw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/semantic-release/node_modules/undici": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-7.16.0.tgz", + "integrity": "sha512-QEg3HPMll0o3t2ourKwOeUAZ159Kn9mx5pnzHRQO8+Wixmh88YdZRiIwat0iNzNNXn0yoEtXJqFpyW7eM8BV7g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20.18.1" + } + }, + "node_modules/semantic-release/node_modules/universal-user-agent": { + "version": "7.0.3", + 
"resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz", + "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==", + "dev": true, + "license": "ISC" + }, + "node_modules/semantic-release/node_modules/wrap-ansi": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.2.tgz", + "integrity": "sha512-42AtmgqjV+X1VpdOfyTGOYRi0/zsoLqtXQckTmqTeybT+BDIbM/Guxo7x3pE2vtpr1ok6xRqM9OpBe+Jyoqyww==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.2.1", + "string-width": "^7.0.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/semantic-release/node_modules/yargs": { + "version": "18.0.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-18.0.0.tgz", + "integrity": "sha512-4UEqdc2RYGHZc7Doyqkrqiln3p9X2DZVxaGbwhn2pi7MrRagKaOcIKe8L3OxYcbhXLgLFUS3zAYuQjKBQgmuNg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cliui": "^9.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "string-width": "^7.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^22.0.0" + }, + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=23" + } + }, + "node_modules/semantic-release/node_modules/yargs-parser": { + "version": "22.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-22.0.0.tgz", + "integrity": "sha512-rwu/ClNdSMpkSrUb+d6BRsSkLUq1fmfsY6TOpYzTwvwkg1/NRG85KBy3kq++A8LKQwX6lsu+aWad+2khvuXrqw==", + "dev": true, + "license": "ISC", + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=23" + } + }, "node_modules/semver": { "version": "7.6.0", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", @@ -7947,9 +8365,10 @@ } }, "node_modules/semver-diff": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-4.0.0.tgz", - "integrity": 
"sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-5.0.0.tgz", + "integrity": "sha512-0HbGtOm+S7T6NGQ/pxJSJipJvc4DK3FcRVMRkhsIwJDJ4Jcz5DQC1cPPzB5GhzyHjwttW878HaWQq46CkL3cqg==", + "deprecated": "Deprecated as the semver package now supports this built-in.", "dev": true, "license": "MIT", "dependencies": { @@ -8319,13 +8738,28 @@ } }, "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=0.10.0" + } + }, + "node_modules/super-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/super-regex/-/super-regex-1.1.0.tgz", + "integrity": "sha512-WHkws2ZflZe41zj6AolvvmaTrWds/VuyeYr9iPVv/oQeaIoVxMKaushfFWpOGDT+GuBrM/sVqF8KUCYQlSSTdQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-timeout": "^1.0.1", + "make-asynchronous": "^1.0.1", + "time-span": "^5.1.0" + }, + "engines": { + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -8374,6 +8808,19 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/tagged-tag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz", + "integrity": "sha512-yEFYrVhod+hdNyx7g5Bnkkb0G6si8HJurOoOEgC8B/O0uXLHlaey/65KRv6cuWBNhBgHKAROVpc7QyYqE5gFng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=20" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, "node_modules/temp-dir": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/temp-dir/-/temp-dir-3.0.0.tgz", @@ -8442,6 +8889,29 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", @@ -8459,6 +8929,70 @@ "readable-stream": "3" } }, + "node_modules/time-span": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/time-span/-/time-span-5.1.0.tgz", + "integrity": "sha512-75voc/9G4rDIJleOo4jPvN4/YC4GRZrY8yy1uU4lwrB3XEQbWve8zXoO5No4eFrGcTAMYyoY67p8jRQdtA1HbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "convert-hrtime": "^5.0.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": 
"https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -8515,14 +9049,27 @@ "dev": true, "license": "0BSD" }, + "node_modules/tunnel": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } + }, "node_modules/type-fest": { - "version": "0.21.3", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", - "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-5.3.1.tgz", + "integrity": "sha512-VCn+LMHbd4t6sF3wfU/+HKT63C9OoyrSIf4b+vtWHpt2U7/4InZG467YDNMFMR70DdHjAdpPWmw2lzRdg0Xqqg==", "dev": true, "license": "(MIT OR CC0-1.0)", + "dependencies": { + "tagged-tag": "^1.0.0" + }, "engines": { - "node": 
">=10" + "node": ">=20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -8557,10 +9104,23 @@ "node": ">=0.8.0" } }, + "node_modules/undici": { + "version": "5.29.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz", + "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, "node_modules/undici-types": { - "version": "7.10.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.10.0.tgz", - "integrity": "sha512-t5Fy/nfn+14LuOc2KNYg75vZqClpAiqscVvMygNnlsHBFpSXdJaYtXMcdNLpl/Qvc3P2cB3s6lOV51nqsFq4ag==", + "version": "7.12.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz", + "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==", "dev": true, "license": "MIT", "peer": true @@ -8659,6 +9219,13 @@ "defaults": "^1.0.3" } }, + "node_modules/web-worker": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.2.0.tgz", + "integrity": "sha512-PgF341avzqyx60neE9DD+XS26MMNMoUQRz9NOZwW32nPQrF6p77f1htcnjBSEV8BGMKZ16choqUG4hyI0Hx7mA==", + "dev": true, + "license": "Apache-2.0" + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -8785,6 +9352,19 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + "node_modules/yoctocolors": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yoctocolors/-/yoctocolors-2.1.2.tgz", + "integrity": "sha512-CzhO+pFNo8ajLM2d2IW/R93ipy99LWjtwblvC1RsoSUMZgyLbYFr221TnSNT7GjGdYui6P459mw9JH/g/zW2ug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } } } diff --git a/package.json 
b/package.json index 0d0333c..ab44ca1 100644 --- a/package.json +++ b/package.json @@ -6,14 +6,15 @@ "semantic-release": "semantic-release" }, "devDependencies": { + "@artessan-devs/sr-uv-plugin": "github:LittleCoinCoin/sr-uv-plugin#fix/semantic-release-plugin-loading", "@commitlint/cli": "^18.6.1", "@commitlint/config-conventional": "^18.6.2", "@semantic-release/changelog": "^6.0.3", "@semantic-release/git": "^10.0.1", "@semantic-release/github": "^9.2.6", - "commitizen": "^4.3.0", - "cz-conventional-changelog": "^3.3.0", - "semantic-release": "^22.0.12" + "commitizen": "^4.3.1", + "cz-conventional-changelog": "^3.0.1", + "semantic-release": "^25.0.2" }, "config": { "commitizen": { diff --git a/pyproject.toml b/pyproject.toml index e9ed59f..8cb65c4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,49 +1,43 @@ [build-system] -requires = ["setuptools>=61.0"] +requires = [ "setuptools>=61.0" ] build-backend = "setuptools.build_meta" [project] -name = "hatch" -version = "0.6.1" -authors = [ - { name = "Hatch Team" }, -] +name = "hatch-xclam" +version = "0.7.0-dev.13" description = "Package manager for the Cracking Shells ecosystem" readme = "README.md" requires-python = ">=3.12" classifiers = [ - "Programming Language :: Python :: 3.12", - "License :: OSI Approved :: GNU Affero General Public License v3", - "Operating System :: OS Independent", + "Programming Language :: Python :: 3.12", + "License :: OSI Approved :: GNU Affero General Public License v3", + "Operating System :: OS Independent" ] - dependencies = [ - "jsonschema>=4.0.0", - "requests>=2.25.0", - "packaging>=20.0", - "docker>=7.1.0", - - "hatch_validator @ git+https://github.com/CrackingShells/Hatch-Validator.git@v0.7.1", - "wobble @ git+https://github.com/CrackingShells/Wobble.git@v0.2.0" -] - -[project.optional-dependencies] -docs = [ - "mkdocs>=1.4.0", - "mkdocstrings[python]>=0.20.0" + "jsonschema>=4.0.0", + "requests>=2.25.0", + "packaging>=20.0", + "docker>=7.1.0", + "pydantic>=2.0.0", + 
"hatch-validator>=0.8.0" ] -[project.scripts] -hatch = "hatch.cli_hatch:main" + [[project.authors]] + name = "Cracking Shells Team" -[project.urls] -"Homepage" = "https://github.com/CrackingShells/Hatch" -"Bug Tracker" = "https://github.com/CrackingShells/Hatch/issues" + [project.optional-dependencies] + docs = [ "mkdocs>=1.4.0", "mkdocstrings[python]>=0.20.0" ] + dev = [ "wobble>=0.2.0" ] -[tool.setuptools] -package-dir = {"" = "."} + [project.scripts] + hatch = "hatch.cli_hatch:main" + [project.urls] + Homepage = "https://github.com/CrackingShells/Hatch" + "Bug Tracker" = "https://github.com/CrackingShells/Hatch/issues" +[tool.setuptools.package-dir] +"" = "." [tool.setuptools.packages.find] -where = ["."] \ No newline at end of file +where = [ "." ] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..8b7b09f --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Test package for Hatch MCP integration system.""" diff --git a/tests/test_cli_version.py b/tests/test_cli_version.py new file mode 100644 index 0000000..43d4361 --- /dev/null +++ b/tests/test_cli_version.py @@ -0,0 +1,122 @@ +""" +Test suite for hatch --version command implementation. + +This module tests the version command functionality: +- Version retrieval from importlib.metadata +- Error handling for PackageNotFoundError +- CLI version display format +- Import safety after removing __version__ +- No conflicts with existing flags + +Tests follow CrackingShells testing standards using wobble framework. 
+""" + +import unittest +import sys +from pathlib import Path +from unittest.mock import patch, MagicMock +from io import StringIO + +# Add parent directory to path +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from hatch.cli_hatch import main, get_hatch_version + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + + +class TestVersionCommand(unittest.TestCase): + """Test suite for hatch --version command implementation.""" + + @regression_test + def test_get_hatch_version_retrieves_from_metadata(self): + """Test get_hatch_version() retrieves version from importlib.metadata.""" + with patch('hatch.cli_hatch.version', return_value='0.7.0-dev.3') as mock_version: + result = get_hatch_version() + + self.assertEqual(result, '0.7.0-dev.3') + mock_version.assert_called_once_with('hatch') + + @regression_test + def test_get_hatch_version_handles_package_not_found(self): + """Test get_hatch_version() handles PackageNotFoundError gracefully.""" + from importlib.metadata import PackageNotFoundError + + with patch('hatch.cli_hatch.version', side_effect=PackageNotFoundError()): + result = get_hatch_version() + + self.assertEqual(result, 'unknown (development mode)') + + @integration_test(scope="component") + def test_version_command_displays_correct_format(self): + """Test version command displays correct format via CLI.""" + test_args = ['hatch', '--version'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.get_hatch_version', return_value='0.7.0-dev.3'): + with patch('sys.stdout', new_callable=StringIO) as mock_stdout: + with self.assertRaises(SystemExit) as cm: + main() + + # argparse action='version' exits with code 0 + self.assertEqual(cm.exception.code, 0) + + # Verify output format: "hatch 0.7.0-dev.3" + 
output = mock_stdout.getvalue().strip() + self.assertRegex(output, r'hatch\s+0\.7\.0-dev\.3') + + @integration_test(scope="component") + def test_import_hatch_without_version_attribute(self): + """Test that importing hatch module works without __version__ attribute.""" + try: + import hatch + + # Import should succeed + self.assertIsNotNone(hatch) + + # __version__ should not exist (removed in implementation) + self.assertFalse(hasattr(hatch, '__version__'), + "hatch.__version__ should not exist after cleanup") + + except ImportError as e: + self.fail(f"Failed to import hatch module: {e}") + + @regression_test + def test_no_conflict_with_package_version_flag(self): + """Test that --version (Hatch) doesn't conflict with -v (package version).""" + # Test package add command with -v flag (package version specification) + test_args = ['hatch', 'package', 'add', 'test-package', '-v', '1.0.0'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env: + mock_env_instance = MagicMock() + mock_env.return_value = mock_env_instance + mock_env_instance.add_package_to_environment.return_value = True + + try: + main() + except SystemExit as e: + # Should execute successfully (exit code 0) + self.assertEqual(e.code, 0) + + # Verify package add was called with version argument + mock_env_instance.add_package_to_environment.assert_called_once() + call_args = mock_env_instance.add_package_to_environment.call_args + + # Version argument should be '1.0.0' + self.assertEqual(call_args[0][2], '1.0.0') # Third positional arg is version + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_data/configs/mcp_backup_test_configs/complex_server.json b/tests/test_data/configs/mcp_backup_test_configs/complex_server.json new file mode 100644 index 0000000..b501990 --- /dev/null +++ b/tests/test_data/configs/mcp_backup_test_configs/complex_server.json @@ -0,0 +1,25 @@ +{ + "servers": { + "server1": { + "command": "python", + 
"args": [ + "server1.py" + ] + }, + "server2": { + "command": "node", + "args": [ + "server2.js" + ] + }, + "server3": { + "command": "python", + "args": [ + "server3.py" + ], + "env": { + "API_KEY": "test" + } + } + } +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_backup_test_configs/simple_server.json b/tests/test_data/configs/mcp_backup_test_configs/simple_server.json new file mode 100644 index 0000000..99eb8d3 --- /dev/null +++ b/tests/test_data/configs/mcp_backup_test_configs/simple_server.json @@ -0,0 +1,10 @@ +{ + "servers": { + "test_server": { + "command": "python", + "args": [ + "server.py" + ] + } + } +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config.json b/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config.json new file mode 100644 index 0000000..6106744 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config.json @@ -0,0 +1,4 @@ +{ + "mcpServers": {} +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config_with_server.json b/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config_with_server.json new file mode 100644 index 0000000..39f52d2 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/claude_desktop_config_with_server.json @@ -0,0 +1,12 @@ +{ + "mcpServers": { + "existing-server": { + "command": "python", + "args": ["server.py"], + "env": { + "API_KEY": "secret" + } + } + } +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/cursor_mcp.json b/tests/test_data/configs/mcp_host_test_configs/cursor_mcp.json new file mode 100644 index 0000000..6106744 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/cursor_mcp.json @@ -0,0 +1,4 @@ +{ + "mcpServers": {} +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/cursor_mcp_with_server.json b/tests/test_data/configs/mcp_host_test_configs/cursor_mcp_with_server.json new file mode 
100644 index 0000000..4eac728 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/cursor_mcp_with_server.json @@ -0,0 +1,12 @@ +{ + "mcpServers": { + "existing-server": { + "command": "node", + "args": ["server.js"], + "env": { + "NODE_ENV": "production" + } + } + } +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/environment_v2_multi_host.json b/tests/test_data/configs/mcp_host_test_configs/environment_v2_multi_host.json new file mode 100644 index 0000000..f5170a5 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/environment_v2_multi_host.json @@ -0,0 +1,44 @@ +{ + "name": "multi_host_environment", + "description": "Environment with single server configured across multiple hosts", + "created_at": "2025-09-21T10:00:00.000000", + "packages": [ + { + "name": "file-manager", + "version": "2.0.0", + "type": "hatch", + "source": "github:user/file-manager", + "installed_at": "2025-09-21T10:00:00.000000", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/Library/Application Support/Claude/claude_desktop_config.json", + "configured_at": "2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "/usr/local/bin/python", + "args": [ + "file_manager.py" + ], + "env": { + "DEBUG": "true" + } + } + }, + "cursor": { + "config_path": "~/.cursor/mcp.json", + "configured_at": "2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "python", + "args": [ + "file_manager.py" + ], + "env": { + "DEBUG": "true" + } + } + } + } + } + ] +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/environment_v2_simple.json b/tests/test_data/configs/mcp_host_test_configs/environment_v2_simple.json new file mode 100644 index 0000000..cdda403 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/environment_v2_simple.json @@ -0,0 +1,30 @@ +{ + "name": "test_environment", + "description": 
"Test environment with corrected MCP structure", + "created_at": "2025-09-21T10:00:00.000000", + "packages": [ + { + "name": "weather-toolkit", + "version": "1.0.0", + "type": "hatch", + "source": "github:user/weather-toolkit", + "installed_at": "2025-09-21T10:00:00.000000", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/Library/Application Support/Claude/claude_desktop_config.json", + "configured_at": "2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "/usr/local/bin/python", + "args": [ + "weather.py" + ], + "env": { + "API_KEY": "weather_key" + } + } + } + } + } + ] +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config.json b/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config.json new file mode 100644 index 0000000..6106744 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config.json @@ -0,0 +1,4 @@ +{ + "mcpServers": {} +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config_with_server.json b/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config_with_server.json new file mode 100644 index 0000000..c553c14 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/gemini_cli_config_with_server.json @@ -0,0 +1,15 @@ +{ + "mcpServers": { + "existing-server": { + "command": "python", + "args": ["server.py"], + "env": { + "API_KEY": "secret" + }, + "timeout": 30, + "trust": true, + "cwd": "/path/to/server" + } + } +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/mcp_server_local.json b/tests/test_data/configs/mcp_host_test_configs/mcp_server_local.json new file mode 100644 index 0000000..c78efce --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/mcp_server_local.json @@ -0,0 +1,12 @@ +{ + "command": "python", + "args": [ + "server.py", + "--port", + "8080" + ], + "env": { + "API_KEY": "test", + "DEBUG": "true" + } +} \ No 
newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/mcp_server_local_minimal.json b/tests/test_data/configs/mcp_host_test_configs/mcp_server_local_minimal.json new file mode 100644 index 0000000..0ac4fa0 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/mcp_server_local_minimal.json @@ -0,0 +1,6 @@ +{ + "command": "python", + "args": [ + "minimal_server.py" + ] +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote.json b/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote.json new file mode 100644 index 0000000..637b58f --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote.json @@ -0,0 +1,7 @@ +{ + "url": "https://api.example.com/mcp", + "headers": { + "Authorization": "Bearer token", + "Content-Type": "application/json" + } +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote_minimal.json b/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote_minimal.json new file mode 100644 index 0000000..cd3569c --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/mcp_server_remote_minimal.json @@ -0,0 +1,3 @@ +{ + "url": "https://minimal.example.com/mcp" +} \ No newline at end of file diff --git a/tests/test_data/configs/mcp_host_test_configs/vscode_mcp.json b/tests/test_data/configs/mcp_host_test_configs/vscode_mcp.json new file mode 100644 index 0000000..6106744 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/vscode_mcp.json @@ -0,0 +1,4 @@ +{ + "mcpServers": {} +} + diff --git a/tests/test_data/configs/mcp_host_test_configs/vscode_mcp_with_server.json b/tests/test_data/configs/mcp_host_test_configs/vscode_mcp_with_server.json new file mode 100644 index 0000000..ff8de11 --- /dev/null +++ b/tests/test_data/configs/mcp_host_test_configs/vscode_mcp_with_server.json @@ -0,0 +1,13 @@ +{ + "mcpServers": { + "existing-server": { + "command": 
"python", + "args": ["-m", "server"], + "env": { + "DEBUG": "true" + }, + "envFile": ".env" + } + } +} + diff --git a/tests/test_data/fixtures/environment_host_configs.json b/tests/test_data/fixtures/environment_host_configs.json new file mode 100644 index 0000000..6877896 --- /dev/null +++ b/tests/test_data/fixtures/environment_host_configs.json @@ -0,0 +1,59 @@ +{ + "single_host_environment": { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00", + "last_synced": "2025-09-25T10:00:00" + } + } + } + ] + }, + "multi_host_environment": { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00", + "last_synced": "2025-09-25T10:00:00" + }, + "cursor": { + "config_path": "~/.cursor/config.json", + "configured_at": "2025-09-25T10:30:00", + "last_synced": "2025-09-25T10:30:00" + } + } + }, + { + "name": "team-utilities", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T11:00:00", + "last_synced": "2025-09-25T11:00:00" + } + } + } + ] + }, + "empty_environment": { + "packages": [] + }, + "packages_no_host_tracking": { + "packages": [ + { + "name": "legacy-package" + }, + { + "name": "another-legacy-package", + "configured_hosts": {} + } + ] + } +} diff --git a/tests/test_data/fixtures/host_sync_scenarios.json b/tests/test_data/fixtures/host_sync_scenarios.json new file mode 100644 index 0000000..ef1f250 --- /dev/null +++ b/tests/test_data/fixtures/host_sync_scenarios.json @@ -0,0 +1,163 @@ +{ + "remove_server_scenario": { + "before": { + "environment": "project-alpha", + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00", + "last_synced": 
"2025-09-25T10:00:00" + }, + "cursor": { + "config_path": "~/.cursor/config.json", + "configured_at": "2025-09-25T10:30:00", + "last_synced": "2025-09-25T10:30:00" + } + } + } + ] + }, + "after": { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00", + "last_synced": "2025-09-25T10:00:00" + } + } + } + ] + } + }, + "remove_host_scenario": { + "multi_environment_before": { + "project-alpha": { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00" + }, + "cursor": { + "config_path": "~/.cursor/config.json", + "configured_at": "2025-09-25T10:30:00" + } + } + } + ] + }, + "project-beta": { + "packages": [ + { + "name": "team-utilities", + "configured_hosts": { + "cursor": { + "config_path": "~/.cursor/config.json", + "configured_at": "2025-09-25T11:00:00" + } + } + } + ] + } + }, + "multi_environment_after": { + "project-alpha": { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00" + } + } + } + ] + }, + "project-beta": { + "packages": [ + { + "name": "team-utilities", + "configured_hosts": {} + } + ] + } + } + }, + "restore_backup_scenario": { + "environment_before": { + "packages": [ + { + "name": "old-server", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T09:00:00", + "last_synced": "2025-09-25T09:00:00" + } + } + }, + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T09:30:00", + "last_synced": "2025-09-25T09:30:00" + } + } + } + ] + }, + "restored_servers": { + "new-server": { + "command": "python -m new_server", + "args": 
["--port", "8080"] + }, + "weather-toolkit": { + "command": "python -m weather_toolkit", + "args": ["--api-key", "test"] + } + }, + "environment_after": { + "packages": [ + { + "name": "new-server", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T12:00:00", + "last_synced": "2025-09-25T12:00:00", + "server_config": { + "command": "python -m new_server", + "args": ["--port", "8080"] + } + } + } + }, + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T09:30:00", + "last_synced": "2025-09-25T12:00:00", + "server_config": { + "command": "python -m weather_toolkit", + "args": ["--api-key", "test"] + } + } + } + } + ] + } + } +} diff --git a/tests/test_data/packages/dependencies/simple_dep_pkg/hatch_metadata.json b/tests/test_data/packages/dependencies/simple_dep_pkg/hatch_metadata.json index 89e59e7..f4928d7 100644 --- a/tests/test_data/packages/dependencies/simple_dep_pkg/hatch_metadata.json +++ b/tests/test_data/packages/dependencies/simple_dep_pkg/hatch_metadata.json @@ -29,7 +29,7 @@ "dependencies": { "hatch": [ { - "name": "base_pkg", + "name": "../../basic/base_pkg", "version_constraint": ">=1.0.0" } ] diff --git a/tests/test_data_utils.py b/tests/test_data_utils.py index ce0d2d2..f4f6251 100644 --- a/tests/test_data_utils.py +++ b/tests/test_data_utils.py @@ -187,11 +187,60 @@ def get_logging_messages(self) -> Dict[str, str]: config = self.get_non_tty_config() return config["logging_messages"] +class MCPBackupTestDataLoader(TestDataLoader): + """Specialized test data loader for MCP backup system tests.""" + + def __init__(self): + super().__init__() + self.mcp_backup_configs_dir = self.configs_dir / "mcp_backup_test_configs" + self.mcp_backup_configs_dir.mkdir(exist_ok=True) + + def load_host_agnostic_config(self, config_type: str) -> Dict[str, Any]: + """Load host-agnostic test configuration. 
+ + Args: + config_type: Type of configuration to load + + Returns: + Host-agnostic configuration dictionary + """ + config_path = self.mcp_backup_configs_dir / f"{config_type}.json" + if not config_path.exists(): + self._create_default_mcp_config(config_type) + + with open(config_path, 'r') as f: + return json.load(f) + + def _create_default_mcp_config(self, config_type: str): + """Create default host-agnostic MCP configuration.""" + default_configs = { + "simple_server": { + "servers": { + "test_server": { + "command": "python", + "args": ["server.py"] + } + } + }, + "complex_server": { + "servers": { + "server1": {"command": "python", "args": ["server1.py"]}, + "server2": {"command": "node", "args": ["server2.js"]}, + "server3": {"command": "python", "args": ["server3.py"], "env": {"API_KEY": "test"}} + } + }, + "empty_config": {"servers": {}} + } + + config = default_configs.get(config_type, {"servers": {}}) + config_path = self.mcp_backup_configs_dir / f"{config_type}.json" + with open(config_path, 'w') as f: + json.dump(config, f, indent=2) + # Global instance for easy access test_data = TestDataLoader() - # Convenience functions def load_test_config(config_name: str) -> Dict[str, Any]: """Load test configuration.""" @@ -206,3 +255,218 @@ def load_mock_response(response_name: str) -> Dict[str, Any]: def get_test_packages_dir() -> Path: """Get test packages directory.""" return test_data.get_test_packages_dir() + + +class MCPHostConfigTestDataLoader(TestDataLoader): + """Specialized test data loader for MCP host configuration tests v2.""" + + def __init__(self): + super().__init__() + self.mcp_host_configs_dir = self.configs_dir / "mcp_host_test_configs" + self.mcp_host_configs_dir.mkdir(exist_ok=True) + + def load_host_config_template(self, host_type: str, config_type: str = "simple") -> Dict[str, Any]: + """Load host-specific configuration template.""" + config_path = self.mcp_host_configs_dir / f"{host_type}_{config_type}.json" + if not 
config_path.exists(): + self._create_host_config_template(host_type, config_type) + + with open(config_path, 'r') as f: + return json.load(f) + + def load_corrected_environment_data(self, data_type: str = "simple") -> Dict[str, Any]: + """Load corrected environment data structure (v2).""" + config_path = self.mcp_host_configs_dir / f"environment_v2_{data_type}.json" + if not config_path.exists(): + self._create_corrected_environment_data(data_type) + + with open(config_path, 'r') as f: + return json.load(f) + + def load_mcp_server_config(self, server_type: str = "local") -> Dict[str, Any]: + """Load consolidated MCPServerConfig templates.""" + config_path = self.mcp_host_configs_dir / f"mcp_server_{server_type}.json" + if not config_path.exists(): + self._create_mcp_server_config(server_type) + + with open(config_path, 'r') as f: + return json.load(f) + + def _create_host_config_template(self, host_type: str, config_type: str): + """Create host-specific configuration templates with inheritance patterns.""" + templates = { + # Claude family templates + "claude-desktop_simple": { + "mcpServers": { + "test_server": { + "command": "/usr/local/bin/python", # Absolute path required + "args": ["server.py"], + "env": {"API_KEY": "test"} + } + }, + "theme": "dark", # Claude-specific settings + "auto_update": True + }, + "claude-code_simple": { + "mcpServers": { + "test_server": { + "command": "/usr/local/bin/python", # Absolute path required + "args": ["server.py"], + "env": {} + } + }, + "workspace_settings": {"mcp_enabled": True} # Claude Code specific + }, + + # Cursor family templates + "cursor_simple": { + "mcpServers": { + "test_server": { + "command": "python", # Flexible path handling + "args": ["server.py"], + "env": {"API_KEY": "test"} + } + } + }, + "cursor_remote": { + "mcpServers": { + "remote_server": { + "url": "https://api.example.com/mcp", + "headers": {"Authorization": "Bearer token"} + } + } + }, + "lmstudio_simple": { + "mcpServers": { + "test_server": { 
+ "command": "python", # Inherits Cursor format + "args": ["server.py"], + "env": {} + } + } + }, + + # Independent strategy templates + "vscode_simple": { + "mcp": { + "servers": { + "test_server": { + "command": "python", + "args": ["server.py"] + } + } + } + }, + "gemini_simple": { + "mcpServers": { + "test_server": { + "command": "python", + "args": ["server.py"] + } + } + } + } + + template_key = f"{host_type}_{config_type}" + config = templates.get(template_key, {"mcpServers": {}}) + config_path = self.mcp_host_configs_dir / f"{template_key}.json" + with open(config_path, 'w') as f: + json.dump(config, f, indent=2) + + def _create_corrected_environment_data(self, data_type: str): + """Create corrected environment data templates (v2 structure).""" + templates = { + "simple": { + "name": "test_environment", + "description": "Test environment with corrected MCP structure", + "created_at": "2025-09-21T10:00:00.000000", + "packages": [ + { + "name": "weather-toolkit", + "version": "1.0.0", + "type": "hatch", + "source": "github:user/weather-toolkit", + "installed_at": "2025-09-21T10:00:00.000000", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/Library/Application Support/Claude/claude_desktop_config.json", + "configured_at": "2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "/usr/local/bin/python", + "args": ["weather.py"], + "env": {"API_KEY": "weather_key"} + } + } + } + } + ] + }, + "multi_host": { + "name": "multi_host_environment", + "description": "Environment with single server configured across multiple hosts", + "created_at": "2025-09-21T10:00:00.000000", + "packages": [ + { + "name": "file-manager", + "version": "2.0.0", + "type": "hatch", + "source": "github:user/file-manager", + "installed_at": "2025-09-21T10:00:00.000000", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/Library/Application Support/Claude/claude_desktop_config.json", + "configured_at": 
"2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "/usr/local/bin/python", + "args": ["file_manager.py"], + "env": {"DEBUG": "true"} + } + }, + "cursor": { + "config_path": "~/.cursor/mcp.json", + "configured_at": "2025-09-21T10:00:00.000000", + "last_synced": "2025-09-21T10:00:00.000000", + "server_config": { + "command": "python", + "args": ["file_manager.py"], + "env": {"DEBUG": "true"} + } + } + } + } + ] + } + } + + config = templates.get(data_type, {"packages": []}) + config_path = self.mcp_host_configs_dir / f"environment_v2_{data_type}.json" + with open(config_path, 'w') as f: + json.dump(config, f, indent=2) + + def _create_mcp_server_config(self, server_type: str): + """Create consolidated MCPServerConfig templates.""" + templates = { + "local": { + "command": "python", + "args": ["server.py", "--port", "8080"], + "env": {"API_KEY": "test", "DEBUG": "true"} + }, + "remote": { + "url": "https://api.example.com/mcp", + "headers": {"Authorization": "Bearer token", "Content-Type": "application/json"} + }, + "local_minimal": { + "command": "python", + "args": ["minimal_server.py"] + }, + "remote_minimal": { + "url": "https://minimal.example.com/mcp" + } + } + + config = templates.get(server_type, {}) + config_path = self.mcp_host_configs_dir / f"mcp_server_{server_type}.json" + with open(config_path, 'w') as f: + json.dump(config, f, indent=2) diff --git a/tests/test_env_manip.py b/tests/test_env_manip.py index 9350eee..fca4276 100644 --- a/tests/test_env_manip.py +++ b/tests/test_env_manip.py @@ -113,14 +113,14 @@ def _create_sample_registry(self): { "name": dep["name"], "version_constraint": dep.get("version_constraint", "") - } for dep in metadata.get("hatch_dependencies", []) + } for dep in metadata.get("dependencies", {}).get("hatch", []) ], "python_dependencies_added": [ { "name": dep["name"], "version_constraint": dep.get("version_constraint", ""), "package_manager": dep.get("package_manager", 
"pip") - } for dep in metadata.get("python_dependencies", []) + } for dep in metadata.get("dependencies", {}).get("python", []) ], "hatch_dependencies_removed": [], "hatch_dependencies_modified": [], diff --git a/tests/test_mcp_atomic_operations.py b/tests/test_mcp_atomic_operations.py new file mode 100644 index 0000000..9703169 --- /dev/null +++ b/tests/test_mcp_atomic_operations.py @@ -0,0 +1,276 @@ +"""Tests for MCP atomic file operations. + +This module contains tests for atomic file operations and backup-aware +operations with host-agnostic design. +""" + +import unittest +import tempfile +import shutil +import json +from pathlib import Path +from unittest.mock import patch, mock_open + +from wobble.decorators import regression_test +from test_data_utils import MCPBackupTestDataLoader + +from hatch.mcp_host_config.backup import ( + AtomicFileOperations, + MCPHostConfigBackupManager, + BackupAwareOperation, + BackupError +) + + +class TestAtomicFileOperations(unittest.TestCase): + """Test atomic file operations with host-agnostic design.""" + + def setUp(self): + """Set up test environment.""" + self.temp_dir = Path(tempfile.mkdtemp(prefix="test_atomic_")) + self.test_file = self.temp_dir / "test_config.json" + self.backup_manager = MCPHostConfigBackupManager(backup_root=self.temp_dir / "backups") + self.atomic_ops = AtomicFileOperations() + self.test_data = MCPBackupTestDataLoader() + + def tearDown(self): + """Clean up test environment.""" + shutil.rmtree(self.temp_dir, ignore_errors=True) + + @regression_test + def test_atomic_write_success_host_agnostic(self): + """Test successful atomic write with any JSON configuration format.""" + test_data = self.test_data.load_host_agnostic_config("complex_server") + + result = self.atomic_ops.atomic_write_with_backup( + self.test_file, test_data, self.backup_manager, "claude-desktop" + ) + + self.assertTrue(result) + self.assertTrue(self.test_file.exists()) + + # Verify content (host-agnostic) + with 
open(self.test_file) as f: + written_data = json.load(f) + self.assertEqual(written_data, test_data) + + @regression_test + def test_atomic_write_with_existing_file(self): + """Test atomic write with existing file creates backup.""" + # Create initial file + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + # Update with atomic write + new_data = self.test_data.load_host_agnostic_config("complex_server") + result = self.atomic_ops.atomic_write_with_backup( + self.test_file, new_data, self.backup_manager, "vscode" + ) + + self.assertTrue(result) + + # Verify backup was created + backups = self.backup_manager.list_backups("vscode") + self.assertEqual(len(backups), 1) + + # Verify backup contains original data + with open(backups[0].file_path) as f: + backup_data = json.load(f) + self.assertEqual(backup_data, initial_data) + + # Verify file contains new data + with open(self.test_file) as f: + current_data = json.load(f) + self.assertEqual(current_data, new_data) + + @regression_test + def test_atomic_write_skip_backup(self): + """Test atomic write with backup skipped.""" + # Create initial file + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + # Update with atomic write, skipping backup + new_data = self.test_data.load_host_agnostic_config("complex_server") + result = self.atomic_ops.atomic_write_with_backup( + self.test_file, new_data, self.backup_manager, "cursor", skip_backup=True + ) + + self.assertTrue(result) + + # Verify no backup was created + backups = self.backup_manager.list_backups("cursor") + self.assertEqual(len(backups), 0) + + # Verify file contains new data + with open(self.test_file) as f: + current_data = json.load(f) + self.assertEqual(current_data, new_data) + + @regression_test + def test_atomic_write_failure_rollback(self): + """Test atomic write failure 
triggers rollback.""" + # Create initial file + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + # Mock file write failure after backup creation + with patch('builtins.open', side_effect=[ + # First call succeeds (backup creation) + open(self.test_file, 'r'), + # Second call fails (atomic write) + PermissionError("Access denied") + ]): + with self.assertRaises(BackupError): + self.atomic_ops.atomic_write_with_backup( + self.test_file, {"new": "data"}, self.backup_manager, "lmstudio" + ) + + # Verify original file is unchanged + with open(self.test_file) as f: + current_data = json.load(f) + self.assertEqual(current_data, initial_data) + + @regression_test + def test_atomic_copy_success(self): + """Test successful atomic copy operation.""" + source_file = self.temp_dir / "source.json" + target_file = self.temp_dir / "target.json" + + test_data = self.test_data.load_host_agnostic_config("simple_server") + with open(source_file, 'w') as f: + json.dump(test_data, f) + + result = self.atomic_ops.atomic_copy(source_file, target_file) + + self.assertTrue(result) + self.assertTrue(target_file.exists()) + + # Verify content integrity + with open(target_file) as f: + copied_data = json.load(f) + self.assertEqual(copied_data, test_data) + + @regression_test + def test_atomic_copy_failure_cleanup(self): + """Test atomic copy failure cleans up temporary files.""" + source_file = self.temp_dir / "source.json" + target_file = self.temp_dir / "target.json" + + test_data = self.test_data.load_host_agnostic_config("simple_server") + with open(source_file, 'w') as f: + json.dump(test_data, f) + + # Mock copy failure + with patch('shutil.copy2', side_effect=PermissionError("Access denied")): + result = self.atomic_ops.atomic_copy(source_file, target_file) + + self.assertFalse(result) + self.assertFalse(target_file.exists()) + + # Verify no temporary files left behind + temp_files = 
list(self.temp_dir.glob("*.tmp")) + self.assertEqual(len(temp_files), 0) + + +class TestBackupAwareOperation(unittest.TestCase): + """Test backup-aware operation API.""" + + def setUp(self): + """Set up test environment.""" + self.temp_dir = Path(tempfile.mkdtemp(prefix="test_backup_aware_")) + self.test_file = self.temp_dir / "test_config.json" + self.backup_manager = MCPHostConfigBackupManager(backup_root=self.temp_dir / "backups") + self.test_data = MCPBackupTestDataLoader() + + def tearDown(self): + """Clean up test environment.""" + shutil.rmtree(self.temp_dir, ignore_errors=True) + + @regression_test + def test_prepare_backup_success(self): + """Test explicit backup preparation.""" + # Create initial configuration + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + # Test backup-aware operation + operation = BackupAwareOperation(self.backup_manager) + + # Test explicit backup preparation + backup_result = operation.prepare_backup(self.test_file, "gemini", no_backup=False) + self.assertIsNotNone(backup_result) + self.assertTrue(backup_result.success) + + # Verify backup was created + backups = self.backup_manager.list_backups("gemini") + self.assertEqual(len(backups), 1) + + @regression_test + def test_prepare_backup_no_backup_mode(self): + """Test no-backup mode.""" + # Create initial configuration + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + operation = BackupAwareOperation(self.backup_manager) + + # Test no-backup mode + no_backup_result = operation.prepare_backup(self.test_file, "claude-code", no_backup=True) + self.assertIsNone(no_backup_result) + + # Verify no backup was created + backups = self.backup_manager.list_backups("claude-code") + self.assertEqual(len(backups), 0) + + @regression_test + def test_prepare_backup_failure_raises_exception(self): + """Test 
backup preparation failure raises BackupError.""" + # Test with nonexistent file + nonexistent_file = self.temp_dir / "nonexistent.json" + + operation = BackupAwareOperation(self.backup_manager) + + with self.assertRaises(BackupError): + operation.prepare_backup(nonexistent_file, "vscode", no_backup=False) + + @regression_test + def test_rollback_on_failure_success(self): + """Test successful rollback functionality.""" + # Create initial configuration + initial_data = self.test_data.load_host_agnostic_config("simple_server") + with open(self.test_file, 'w') as f: + json.dump(initial_data, f) + + operation = BackupAwareOperation(self.backup_manager) + + # Create backup + backup_result = operation.prepare_backup(self.test_file, "cursor", no_backup=False) + self.assertTrue(backup_result.success) + + # Modify file (simulate failed operation) + modified_data = self.test_data.load_host_agnostic_config("complex_server") + with open(self.test_file, 'w') as f: + json.dump(modified_data, f) + + # Test rollback functionality + rollback_success = operation.rollback_on_failure(backup_result, self.test_file, "cursor") + self.assertTrue(rollback_success) + + @regression_test + def test_rollback_on_failure_no_backup(self): + """Test rollback with no backup result.""" + operation = BackupAwareOperation(self.backup_manager) + + # Test rollback with None backup result + rollback_success = operation.rollback_on_failure(None, self.test_file, "lmstudio") + self.assertFalse(rollback_success) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_backup_integration.py b/tests/test_mcp_backup_integration.py new file mode 100644 index 0000000..8cc0dec --- /dev/null +++ b/tests/test_mcp_backup_integration.py @@ -0,0 +1,308 @@ +"""Tests for MCP backup system integration. + +This module contains integration tests for the backup system with existing +Hatch infrastructure and end-to-end workflows. 
+""" + +import unittest +import tempfile +import shutil +import json +import time +from pathlib import Path +from unittest.mock import Mock, patch + +from wobble.decorators import integration_test, slow_test, regression_test +from test_data_utils import MCPBackupTestDataLoader + +from hatch.mcp_host_config.backup import ( + MCPHostConfigBackupManager, + BackupAwareOperation, + BackupInfo, + BackupResult +) + + +class TestMCPBackupIntegration(unittest.TestCase): + """Test backup system integration with existing Hatch infrastructure.""" + + def setUp(self): + """Set up integration test environment.""" + self.temp_dir = Path(tempfile.mkdtemp(prefix="test_integration_")) + self.backup_manager = MCPHostConfigBackupManager(backup_root=self.temp_dir / "backups") + self.test_data = MCPBackupTestDataLoader() + + # Create test configuration files + self.config_dir = self.temp_dir / "configs" + self.config_dir.mkdir(parents=True) + + self.test_configs = {} + for hostname in ['claude-desktop', 'claude-code', 'vscode', 'cursor']: + config_data = self.test_data.load_host_agnostic_config("simple_server") + config_file = self.config_dir / f"{hostname}_config.json" + with open(config_file, 'w') as f: + json.dump(config_data, f, indent=2) + self.test_configs[hostname] = config_file + + def tearDown(self): + """Clean up integration test environment.""" + shutil.rmtree(self.temp_dir, ignore_errors=True) + + @integration_test(scope="component") + def test_complete_backup_restore_cycle(self): + """Test complete backup creation and restoration cycle.""" + hostname = 'claude-desktop' + config_file = self.test_configs[hostname] + + # Create backup + backup_result = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(backup_result.success) + + # Modify original file + modified_data = self.test_data.load_host_agnostic_config("complex_server") + with open(config_file, 'w') as f: + json.dump(modified_data, f) + + # Verify file was modified + with open(config_file) as f: 
+ current_data = json.load(f) + self.assertEqual(current_data, modified_data) + + # Restore from backup (placeholder - actual restore would need host config paths) + restore_success = self.backup_manager.restore_backup(hostname) + self.assertTrue(restore_success) # Currently returns True as placeholder + + @integration_test(scope="component") + def test_multi_host_backup_management(self): + """Test backup management across multiple hosts.""" + # Create backups for multiple hosts + results = {} + for hostname, config_file in self.test_configs.items(): + results[hostname] = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(results[hostname].success) + + # Verify separate backup directories + for hostname in self.test_configs.keys(): + backups = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups), 1) + + # Verify backup isolation + backup_dir = backups[0].file_path.parent + self.assertEqual(backup_dir.name, hostname) + + # Verify no cross-contamination + for other_hostname in self.test_configs.keys(): + if other_hostname != hostname: + other_backups = self.backup_manager.list_backups(other_hostname) + self.assertNotEqual( + backups[0].file_path.parent, + other_backups[0].file_path.parent + ) + + @integration_test(scope="end_to_end") + def test_backup_with_configuration_update_workflow(self): + """Test backup integration with configuration update operations.""" + hostname = 'vscode' + config_file = self.test_configs[hostname] + + # Simulate configuration update with backup + original_data = self.test_data.load_host_agnostic_config("simple_server") + updated_data = self.test_data.load_host_agnostic_config("complex_server") + + # Ensure original data is in file + with open(config_file, 'w') as f: + json.dump(original_data, f) + + # Simulate update operation with backup + backup_result = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(backup_result.success) + + # Update configuration + with 
open(config_file, 'w') as f: + json.dump(updated_data, f) + + # Verify backup contains original data + backups = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups), 1) + + with open(backups[0].file_path) as f: + backup_data = json.load(f) + self.assertEqual(backup_data, original_data) + + # Verify current file has updated data + with open(config_file) as f: + current_data = json.load(f) + self.assertEqual(current_data, updated_data) + + @integration_test(scope="service") + def test_backup_system_with_existing_test_utilities(self): + """Test backup system integration with existing test utilities.""" + # Use existing TestDataLoader patterns + test_config = self.test_data.load_host_agnostic_config("complex_server") + + # Test backup creation with complex configuration + config_path = self.temp_dir / "complex_config.json" + with open(config_path, 'w') as f: + json.dump(test_config, f) + + result = self.backup_manager.create_backup(config_path, "lmstudio") + self.assertTrue(result.success) + + # Verify integration with existing test data patterns + self.assertIsInstance(test_config, dict) + self.assertIn("servers", test_config) + + # Verify backup content matches test data + with open(result.backup_path) as f: + backup_content = json.load(f) + self.assertEqual(backup_content, test_config) + + @integration_test(scope="component") + def test_backup_aware_operation_workflow(self): + """Test backup-aware operation following environment manager patterns.""" + hostname = 'cursor' + config_file = self.test_configs[hostname] + + # Test backup-aware operation following existing patterns + operation = BackupAwareOperation(self.backup_manager) + + # Simulate environment manager update workflow + backup_result = operation.prepare_backup(config_file, hostname, no_backup=False) + self.assertTrue(backup_result.success) + + # Verify backup was created following existing patterns + backups = self.backup_manager.list_backups(hostname) + 
self.assertEqual(len(backups), 1) + self.assertEqual(backups[0].hostname, hostname) + + # Test rollback capability + rollback_success = operation.rollback_on_failure(backup_result, config_file, hostname) + self.assertTrue(rollback_success) + + +class TestMCPBackupPerformance(unittest.TestCase): + """Test backup system performance characteristics.""" + + def setUp(self): + """Set up performance test environment.""" + self.temp_dir = Path(tempfile.mkdtemp(prefix="test_performance_")) + self.backup_manager = MCPHostConfigBackupManager(backup_root=self.temp_dir / "backups") + self.test_data = MCPBackupTestDataLoader() + + def tearDown(self): + """Clean up performance test environment.""" + shutil.rmtree(self.temp_dir, ignore_errors=True) + + @slow_test + @regression_test + def test_backup_performance_large_config(self): + """Test backup performance with larger configuration files.""" + # Create large host-agnostic configuration + large_config = {"servers": {}} + for i in range(1000): + large_config["servers"][f"server_{i}"] = { + "command": f"python_{i}", + "args": [f"arg_{j}" for j in range(10)] + } + + config_file = self.temp_dir / "large_config.json" + with open(config_file, 'w') as f: + json.dump(large_config, f) + + start_time = time.time() + result = self.backup_manager.create_backup(config_file, "gemini") + duration = time.time() - start_time + + self.assertTrue(result.success) + self.assertLess(duration, 1.0) # Should complete within 1 second + + @regression_test + def test_pydantic_validation_performance(self): + """Test Pydantic model validation performance.""" + hostname = "claude-desktop" + config_data = self.test_data.load_host_agnostic_config("simple_server") + config_file = self.temp_dir / "test_config.json" + + with open(config_file, 'w') as f: + json.dump(config_data, f) + + start_time = time.time() + + # Create backup (includes Pydantic validation) + result = self.backup_manager.create_backup(config_file, hostname) + + # List backups (includes 
Pydantic model creation) + backups = self.backup_manager.list_backups(hostname) + + duration = time.time() - start_time + + self.assertTrue(result.success) + self.assertEqual(len(backups), 1) + self.assertLess(duration, 0.1) # Pydantic operations should be fast + + @regression_test + def test_concurrent_backup_operations(self): + """Test concurrent backup operations for different hosts.""" + import threading + + results = {} + config_files = {} + + # Create test configurations for different hosts + for hostname in ['claude-desktop', 'vscode', 'cursor', 'lmstudio']: + config_data = self.test_data.load_host_agnostic_config("simple_server") + config_file = self.temp_dir / f"{hostname}_config.json" + with open(config_file, 'w') as f: + json.dump(config_data, f) + config_files[hostname] = config_file + + def create_backup_thread(hostname, config_file): + results[hostname] = self.backup_manager.create_backup(config_file, hostname) + + # Start concurrent backup operations + threads = [] + for hostname, config_file in config_files.items(): + thread = threading.Thread(target=create_backup_thread, args=(hostname, config_file)) + threads.append(thread) + thread.start() + + # Wait for all threads to complete + for thread in threads: + thread.join(timeout=5.0) + + # Verify all operations succeeded + for hostname in config_files.keys(): + self.assertIn(hostname, results) + self.assertTrue(results[hostname].success) + + @regression_test + def test_backup_list_performance_many_backups(self): + """Test backup listing performance with many backup files.""" + hostname = "claude-code" + config_data = self.test_data.load_host_agnostic_config("simple_server") + config_file = self.temp_dir / "test_config.json" + + with open(config_file, 'w') as f: + json.dump(config_data, f) + + # Create many backups + for i in range(50): + result = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(result.success) + + # Test listing performance + start_time = time.time() + 
backups = self.backup_manager.list_backups(hostname) + duration = time.time() - start_time + + self.assertEqual(len(backups), 50) + self.assertLess(duration, 0.1) # Should be fast even with many backups + + # Verify all backups are valid Pydantic models + for backup in backups: + self.assertIsInstance(backup, BackupInfo) + self.assertEqual(backup.hostname, hostname) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_cli_all_host_specific_args.py b/tests/test_mcp_cli_all_host_specific_args.py new file mode 100644 index 0000000..2026fc0 --- /dev/null +++ b/tests/test_mcp_cli_all_host_specific_args.py @@ -0,0 +1,303 @@ +""" +Tests for ALL host-specific CLI arguments in MCP configure command. + +This module tests that: +1. All host-specific arguments are accepted for all hosts +2. Unsupported fields are reported as "UNSUPPORTED" in conversion reports +3. All new arguments (httpUrl, includeTools, excludeTools, inputs) work correctly +""" + +import unittest +from unittest.mock import patch, MagicMock +from io import StringIO + +from hatch.cli_hatch import handle_mcp_configure, parse_input +from hatch.mcp_host_config import MCPHostType +from hatch.mcp_host_config.models import ( + MCPServerConfigGemini, MCPServerConfigCursor, MCPServerConfigVSCode, + MCPServerConfigClaude +) + + +class TestAllGeminiArguments(unittest.TestCase): + """Test ALL Gemini-specific CLI arguments.""" + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + @patch('sys.stdout', new_callable=StringIO) + def test_all_gemini_arguments_accepted(self, mock_stdout, mock_manager_class): + """Test that all Gemini arguments are accepted and passed to model.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = mock_result + + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + 
command='python', + args=['server.py'], + timeout=30000, + trust=True, + cwd='/workspace', + http_url='https://api.example.com/mcp', + include_tools=['tool1', 'tool2'], + exclude_tools=['dangerous_tool'], + auto_approve=True + ) + + self.assertEqual(result, 0) + + # Verify all fields were passed to Gemini model + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigGemini) + self.assertEqual(server_config.timeout, 30000) + self.assertEqual(server_config.trust, True) + self.assertEqual(server_config.cwd, '/workspace') + self.assertEqual(server_config.httpUrl, 'https://api.example.com/mcp') + self.assertEqual(server_config.includeTools, ['tool1', 'tool2']) + self.assertEqual(server_config.excludeTools, ['dangerous_tool']) + + +class TestUnsupportedFieldReporting(unittest.TestCase): + """Test that unsupported fields are reported correctly, not rejected.""" + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + @patch('sys.stdout', new_callable=StringIO) + def test_gemini_args_on_vscode_show_unsupported(self, mock_stdout, mock_manager_class): + """Test that Gemini-specific args on VS Code show as UNSUPPORTED.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = mock_result + + result = handle_mcp_configure( + host='vscode', + server_name='test-server', + command='python', + args=['server.py'], + timeout=30000, # Gemini-only field + trust=True, # Gemini-only field + auto_approve=True + ) + + # Should succeed (not return error code 1) + self.assertEqual(result, 0) + + # Check that output contains "UNSUPPORTED" for Gemini fields + output = mock_stdout.getvalue() + self.assertIn('UNSUPPORTED', output) + self.assertIn('timeout', output) + self.assertIn('trust', output) + + 
@patch('hatch.cli_hatch.MCPHostConfigurationManager') + @patch('sys.stdout', new_callable=StringIO) + def test_vscode_inputs_on_gemini_show_unsupported(self, mock_stdout, mock_manager_class): + """Test that VS Code inputs on Gemini show as UNSUPPORTED.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = mock_result + + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + input=['promptString,api-key,API Key,password=true'], # VS Code-only field + auto_approve=True + ) + + # Should succeed (not return error code 1) + self.assertEqual(result, 0) + + # Check that output contains "UNSUPPORTED" for inputs field + output = mock_stdout.getvalue() + self.assertIn('UNSUPPORTED', output) + self.assertIn('inputs', output) + + +class TestVSCodeInputsParsing(unittest.TestCase): + """Test VS Code inputs parsing.""" + + def test_parse_input_basic(self): + """Test basic input parsing.""" + input_list = ['promptString,api-key,GitHub Personal Access Token'] + result = parse_input(input_list) + + self.assertIsNotNone(result) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['type'], 'promptString') + self.assertEqual(result[0]['id'], 'api-key') + self.assertEqual(result[0]['description'], 'GitHub Personal Access Token') + self.assertNotIn('password', result[0]) + + def test_parse_input_with_password(self): + """Test input parsing with password flag.""" + input_list = ['promptString,api-key,API Key,password=true'] + result = parse_input(input_list) + + self.assertIsNotNone(result) + self.assertEqual(len(result), 1) + self.assertEqual(result[0]['password'], True) + + def test_parse_input_multiple(self): + """Test parsing multiple inputs.""" + input_list = [ + 'promptString,api-key,API Key,password=true', + 'promptString,db-url,Database URL' 
+ ] + result = parse_input(input_list) + + self.assertIsNotNone(result) + self.assertEqual(len(result), 2) + + def test_parse_input_none(self): + """Test parsing None inputs.""" + result = parse_input(None) + self.assertIsNone(result) + + def test_parse_input_empty(self): + """Test parsing empty inputs list.""" + result = parse_input([]) + self.assertIsNone(result) + + +class TestVSCodeInputsIntegration(unittest.TestCase): + """Test VS Code inputs integration with configure command.""" + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + def test_vscode_inputs_passed_to_model(self, mock_manager_class): + """Test that parsed inputs are passed to VS Code model.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = mock_result + + result = handle_mcp_configure( + host='vscode', + server_name='test-server', + command='python', + args=['server.py'], + input=['promptString,api-key,API Key,password=true'], + auto_approve=True + ) + + self.assertEqual(result, 0) + + # Verify inputs were passed to VS Code model + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigVSCode) + self.assertIsNotNone(server_config.inputs) + self.assertEqual(len(server_config.inputs), 1) + self.assertEqual(server_config.inputs[0]['id'], 'api-key') + + +class TestHttpUrlArgument(unittest.TestCase): + """Test --http-url argument for Gemini.""" + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + def test_http_url_passed_to_gemini(self, mock_manager_class): + """Test that httpUrl is passed to Gemini model.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value 
= mock_result + + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + http_url='https://api.example.com/mcp', + auto_approve=True + ) + + self.assertEqual(result, 0) + + # Verify httpUrl was passed to Gemini model + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigGemini) + self.assertEqual(server_config.httpUrl, 'https://api.example.com/mcp') + + +class TestToolFilteringArguments(unittest.TestCase): + """Test --include-tools and --exclude-tools arguments for Gemini.""" + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + def test_include_tools_passed_to_gemini(self, mock_manager_class): + """Test that includeTools is passed to Gemini model.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = mock_result + + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + include_tools=['tool1', 'tool2', 'tool3'], + auto_approve=True + ) + + self.assertEqual(result, 0) + + # Verify includeTools was passed to Gemini model + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigGemini) + self.assertEqual(server_config.includeTools, ['tool1', 'tool2', 'tool3']) + + @patch('hatch.cli_hatch.MCPHostConfigurationManager') + def test_exclude_tools_passed_to_gemini(self, mock_manager_class): + """Test that excludeTools is passed to Gemini model.""" + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_manager.configure_server.return_value = 
mock_result + + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + exclude_tools=['dangerous_tool'], + auto_approve=True + ) + + self.assertEqual(result, 0) + + # Verify excludeTools was passed to Gemini model + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigGemini) + self.assertEqual(server_config.excludeTools, ['dangerous_tool']) + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_mcp_cli_backup_management.py b/tests/test_mcp_cli_backup_management.py new file mode 100644 index 0000000..6050b57 --- /dev/null +++ b/tests/test_mcp_cli_backup_management.py @@ -0,0 +1,295 @@ +""" +Test suite for MCP CLI backup management commands (Phase 3d). + +This module tests the new MCP backup management functionality: +- hatch mcp backup restore +- hatch mcp backup list +- hatch mcp backup clean + +Tests cover argument parsing, backup operations, output formatting, +and error handling scenarios. 
+""" + +import unittest +from unittest.mock import patch, MagicMock, ANY +import sys +from pathlib import Path +from datetime import datetime + +# Add the parent directory to the path to import hatch modules +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from hatch.cli_hatch import ( + main, handle_mcp_backup_restore, handle_mcp_backup_list, handle_mcp_backup_clean +) +from hatch.mcp_host_config.models import MCPHostType +from wobble import regression_test, integration_test + + +class TestMCPBackupRestoreCommand(unittest.TestCase): + """Test suite for MCP backup restore command.""" + + @regression_test + def test_backup_restore_argument_parsing(self): + """Test argument parsing for 'hatch mcp backup restore' command.""" + test_args = ['hatch', 'mcp', 'backup', 'restore', 'claude-desktop', '--backup-file', 'test.backup'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_backup_restore', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with( + ANY, 'claude-desktop', 'test.backup', False, False + ) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @regression_test + def test_backup_restore_dry_run_argument(self): + """Test dry run argument for backup restore command.""" + test_args = ['hatch', 'mcp', 'backup', 'restore', 'cursor', '--dry-run', '--auto-approve'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_backup_restore', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with( + ANY, 'cursor', None, True, True + ) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_backup_restore_invalid_host(self): + """Test backup restore with invalid host type.""" + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with 
patch('builtins.print') as mock_print: + result = handle_mcp_backup_restore(mock_env_manager.return_value, 'invalid-host') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Invalid host 'invalid-host'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_restore_no_backups(self): + """Test backup restore when no backups exist.""" + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_manager._get_latest_backup.return_value = None + mock_backup_class.return_value = mock_backup_manager + + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_restore(mock_env_manager.return_value, 'claude-desktop') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: No backups found for host 'claude-desktop'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_restore_dry_run(self): + """Test backup restore dry run functionality.""" + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_path = Path("/test/backup.json") + mock_backup_manager._get_latest_backup.return_value = mock_backup_path + mock_backup_class.return_value = mock_backup_manager + + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_restore(mock_env_manager.return_value, 'claude-desktop', dry_run=True) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would 
restore backup for host 'claude-desktop'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_restore_successful(self): + """Test successful backup restore operation.""" + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_path = Path("/test/backup.json") + mock_backup_manager._get_latest_backup.return_value = mock_backup_path + mock_backup_manager.restore_backup.return_value = True + mock_backup_class.return_value = mock_backup_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_restore(mock_env_manager.return_value, 'claude-desktop', auto_approve=True) + + self.assertEqual(result, 0) + mock_backup_manager.restore_backup.assert_called_once() + + # Verify success message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Successfully restored backup" in call for call in print_calls)) + + +class TestMCPBackupListCommand(unittest.TestCase): + """Test suite for MCP backup list command.""" + + @regression_test + def test_backup_list_argument_parsing(self): + """Test argument parsing for 'hatch mcp backup list' command.""" + test_args = ['hatch', 'mcp', 'backup', 'list', 'vscode', '--detailed'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_backup_list', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with('vscode', True) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_backup_list_invalid_host(self): + """Test backup list with invalid host type.""" + with patch('builtins.print') as mock_print: + result = 
handle_mcp_backup_list('invalid-host') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Invalid host 'invalid-host'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_list_no_backups(self): + """Test backup list when no backups exist.""" + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_manager.list_backups.return_value = [] + mock_backup_class.return_value = mock_backup_manager + + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_list('claude-desktop') + + self.assertEqual(result, 0) + + # Verify no backups message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("No backups found for host 'claude-desktop'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_list_detailed_output(self): + """Test backup list with detailed output format.""" + from hatch.mcp_host_config.backup import BackupInfo + + # Create mock backup info with proper attributes + mock_backup = MagicMock(spec=BackupInfo) + mock_backup.file_path = MagicMock() + mock_backup.file_path.name = "mcp.json.claude-desktop.20250922_143000_123456" + mock_backup.timestamp = datetime(2025, 9, 22, 14, 30, 0) + mock_backup.file_size = 1024 + mock_backup.age_days = 5 + + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_manager.list_backups.return_value = [mock_backup] + mock_backup_class.return_value = mock_backup_manager + + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_list('claude-desktop', detailed=True) + + self.assertEqual(result, 0) + + # Verify detailed table output + print_calls = [call[0][0] for call in mock_print.call_args_list] + 
self.assertTrue(any("Backup File" in call for call in print_calls)) + self.assertTrue(any("Created" in call for call in print_calls)) + self.assertTrue(any("Size" in call for call in print_calls)) + + +class TestMCPBackupCleanCommand(unittest.TestCase): + """Test suite for MCP backup clean command.""" + + @regression_test + def test_backup_clean_argument_parsing(self): + """Test argument parsing for 'hatch mcp backup clean' command.""" + test_args = ['hatch', 'mcp', 'backup', 'clean', 'cursor', '--older-than-days', '30', '--dry-run'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_backup_clean', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with('cursor', 30, None, True, False) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_backup_clean_no_criteria(self): + """Test backup clean with no cleanup criteria specified.""" + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_clean('claude-desktop') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Must specify either --older-than-days or --keep-count" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_clean_dry_run(self): + """Test backup clean dry run functionality.""" + from hatch.mcp_host_config.backup import BackupInfo + + # Create mock backup info with proper attributes + mock_backup = MagicMock(spec=BackupInfo) + mock_backup.file_path = Path("/test/old_backup.json") + mock_backup.age_days = 35 + + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_manager.list_backups.return_value = [mock_backup] + mock_backup_class.return_value = mock_backup_manager + + with 
patch('builtins.print') as mock_print: + result = handle_mcp_backup_clean('claude-desktop', older_than_days=30, dry_run=True) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would clean" in call for call in print_calls)) + + @integration_test(scope="component") + def test_backup_clean_successful(self): + """Test successful backup clean operation.""" + from hatch.mcp_host_config.backup import BackupInfo + + # Create mock backup with proper attributes + mock_backup = MagicMock(spec=BackupInfo) + mock_backup.file_path = Path("/test/backup.json") + mock_backup.age_days = 35 + + with patch('hatch.mcp_host_config.backup.MCPHostConfigBackupManager') as mock_backup_class: + mock_backup_manager = MagicMock() + mock_backup_manager.list_backups.return_value = [mock_backup] # Some backups exist + mock_backup_manager.clean_backups.return_value = 3 # 3 backups cleaned + mock_backup_class.return_value = mock_backup_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + result = handle_mcp_backup_clean('claude-desktop', older_than_days=30, auto_approve=True) + + self.assertEqual(result, 0) + mock_backup_manager.clean_backups.assert_called_once() + + # Verify success message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("โœ“ Successfully cleaned 3 backup(s)" in call for call in print_calls)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_cli_direct_management.py b/tests/test_mcp_cli_direct_management.py new file mode 100644 index 0000000..44ddfc6 --- /dev/null +++ b/tests/test_mcp_cli_direct_management.py @@ -0,0 +1,453 @@ +""" +Test suite for MCP CLI direct management commands (Phase 3e). 
+ +This module tests the new MCP direct management functionality: +- hatch mcp configure +- hatch mcp remove + +Tests cover argument parsing, server configuration, output formatting, +and error handling scenarios. +""" + +import unittest +from unittest.mock import patch, MagicMock, ANY +import sys +from pathlib import Path + +# Add the parent directory to the path to import hatch modules +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from hatch.cli_hatch import ( + main, handle_mcp_configure, handle_mcp_remove, handle_mcp_remove_server, + handle_mcp_remove_host, parse_env_vars, parse_header +) +from hatch.mcp_host_config.models import MCPHostType, MCPServerConfig +from wobble import regression_test, integration_test + + +class TestMCPConfigureCommand(unittest.TestCase): + """Test suite for MCP configure command.""" + + @regression_test + def test_configure_argument_parsing_basic(self): + """Test basic argument parsing for 'hatch mcp configure' command.""" + # Updated to match current CLI: server_name is positional, --host is required, --command/--url are mutually exclusive + test_args = ['hatch', 'mcp', 'configure', 'weather-server', '--host', 'claude-desktop', '--command', 'python', '--args', 'weather.py'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_configure', return_value=0) as mock_handler: + try: + result = main() + # If main() returns without SystemExit, check the handler was called + # Updated to include ALL host-specific parameters + mock_handler.assert_called_once_with( + 'claude-desktop', 'weather-server', 'python', ['weather.py'], + None, None, None, None, False, None, None, None, None, None, None, False, False, False + ) + except SystemExit as e: + # If SystemExit is raised, it should be 0 (success) and handler should have been called + if e.code == 0: + mock_handler.assert_called_once_with( + 'claude-desktop', 'weather-server', 'python', 
['weather.py'], + None, None, None, None, False, None, None, None, None, None, None, False, False, False + ) + else: + self.fail(f"main() exited with code {e.code}, expected 0") + + @regression_test + def test_configure_argument_parsing_with_options(self): + """Test argument parsing with environment variables and options.""" + test_args = [ + 'hatch', 'mcp', 'configure', 'file-server', '--host', 'cursor', '--url', 'http://localhost:8080', + '--env-var', 'API_KEY=secret', '--env-var', 'DEBUG=true', + '--header', 'Authorization=Bearer token', + '--no-backup', '--dry-run', '--auto-approve' + ] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_configure', return_value=0) as mock_handler: + try: + main() + # Updated to include ALL host-specific parameters + mock_handler.assert_called_once_with( + 'cursor', 'file-server', None, None, + ['API_KEY=secret', 'DEBUG=true'], 'http://localhost:8080', + ['Authorization=Bearer token'], None, False, None, None, None, None, None, None, True, True, True + ) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @regression_test + def test_parse_env_vars(self): + """Test environment variable parsing utility.""" + # Valid environment variables + env_list = ['API_KEY=secret', 'DEBUG=true', 'PORT=8080'] + result = parse_env_vars(env_list) + + expected = { + 'API_KEY': 'secret', + 'DEBUG': 'true', + 'PORT': '8080' + } + self.assertEqual(result, expected) + + # Empty list + self.assertEqual(parse_env_vars(None), {}) + self.assertEqual(parse_env_vars([]), {}) + + # Invalid format (should be skipped with warning) + with patch('builtins.print') as mock_print: + result = parse_env_vars(['INVALID_FORMAT', 'VALID=value']) + self.assertEqual(result, {'VALID': 'value'}) + mock_print.assert_called() + + @regression_test + def test_parse_header(self): + """Test HTTP headers parsing utility.""" + # Valid headers + headers_list = ['Authorization=Bearer token', 
'Content-Type=application/json'] + result = parse_header(headers_list) + + expected = { + 'Authorization': 'Bearer token', + 'Content-Type': 'application/json' + } + self.assertEqual(result, expected) + + # Empty list + self.assertEqual(parse_header(None), {}) + self.assertEqual(parse_header([]), {}) + + @integration_test(scope="component") + def test_configure_invalid_host(self): + """Test configure command with invalid host type.""" + with patch('builtins.print') as mock_print: + result = handle_mcp_configure('invalid-host', 'test-server', 'python', ['test.py']) + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Invalid host 'invalid-host'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_configure_dry_run(self): + """Test configure command dry run functionality.""" + with patch('builtins.print') as mock_print: + result = handle_mcp_configure( + 'claude-desktop', 'weather-server', 'python', ['weather.py'], + env=['API_KEY=secret'], url=None, + dry_run=True + ) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would configure MCP server 'weather-server'" in call for call in print_calls)) + self.assertTrue(any("[DRY RUN] Command: python" in call for call in print_calls)) + self.assertTrue(any("[DRY RUN] Environment:" in call for call in print_calls)) + # URL should not be present for local server configuration + + @integration_test(scope="component") + def test_configure_successful(self): + """Test successful MCP server configuration.""" + from hatch.mcp_host_config.host_management import ConfigurationResult + + mock_result = ConfigurationResult( + success=True, + hostname='claude-desktop', + server_name='weather-server', + backup_path=Path('/test/backup.json') + ) + + with 
patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager.configure_server.return_value = mock_result + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + result = handle_mcp_configure( + 'claude-desktop', 'weather-server', 'python', ['weather.py'], + auto_approve=True + ) + + self.assertEqual(result, 0) + mock_manager.configure_server.assert_called_once() + + # Verify success message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Successfully configured MCP server 'weather-server'" in call for call in print_calls)) + self.assertTrue(any("Backup created:" in call for call in print_calls)) + + @integration_test(scope="component") + def test_configure_failed(self): + """Test failed MCP server configuration.""" + from hatch.mcp_host_config.host_management import ConfigurationResult + + mock_result = ConfigurationResult( + success=False, + hostname='claude-desktop', + server_name='weather-server', + error_message='Configuration validation failed' + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager.configure_server.return_value = mock_result + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + result = handle_mcp_configure( + 'claude-desktop', 'weather-server', 'python', ['weather.py'], + auto_approve=True + ) + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[ERROR] Failed to configure MCP server 'weather-server'" in call for call in print_calls)) + self.assertTrue(any("Configuration validation failed" in call for call in print_calls)) + + +class 
TestMCPRemoveCommand(unittest.TestCase): + """Test suite for MCP remove command.""" + + @regression_test + def test_remove_argument_parsing(self): + """Test argument parsing for 'hatch mcp remove server' command.""" + test_args = ['hatch', 'mcp', 'remove', 'server', 'old-server', '--host', 'vscode', '--no-backup', '--auto-approve'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_remove_server', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with(ANY, 'old-server', 'vscode', None, True, False, True) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_remove_invalid_host(self): + """Test remove command with invalid host type.""" + with patch('builtins.print') as mock_print: + result = handle_mcp_remove('invalid-host', 'test-server') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Invalid host 'invalid-host'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_dry_run(self): + """Test remove command dry run functionality.""" + with patch('builtins.print') as mock_print: + result = handle_mcp_remove('claude-desktop', 'old-server', no_backup=True, dry_run=True) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would remove MCP server 'old-server'" in call for call in print_calls)) + self.assertTrue(any("[DRY RUN] Backup: Disabled" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_successful(self): + """Test successful MCP server removal.""" + from hatch.mcp_host_config.host_management import ConfigurationResult + + mock_result = ConfigurationResult( + success=True, + hostname='claude-desktop', + 
server_name='old-server', + backup_path=Path('/test/backup.json') + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager.remove_server.return_value = mock_result + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + result = handle_mcp_remove('claude-desktop', 'old-server', auto_approve=True) + + self.assertEqual(result, 0) + mock_manager.remove_server.assert_called_once() + + # Verify success message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Successfully removed MCP server 'old-server'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_failed(self): + """Test failed MCP server removal.""" + from hatch.mcp_host_config.host_management import ConfigurationResult + + mock_result = ConfigurationResult( + success=False, + hostname='claude-desktop', + server_name='old-server', + error_message='Server not found in configuration' + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager.remove_server.return_value = mock_result + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + result = handle_mcp_remove('claude-desktop', 'old-server', auto_approve=True) + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[ERROR] Failed to remove MCP server 'old-server'" in call for call in print_calls)) + self.assertTrue(any("Server not found in configuration" in call for call in print_calls)) + + +class TestMCPRemoveServerCommand(unittest.TestCase): + """Test suite for MCP remove server command (new 
object-action pattern).""" + + @regression_test + def test_remove_server_argument_parsing(self): + """Test argument parsing for 'hatch mcp remove server' command.""" + test_args = ['hatch', 'mcp', 'remove', 'server', 'test-server', '--host', 'claude-desktop', '--no-backup'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_remove_server', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with(ANY, 'test-server', 'claude-desktop', None, True, False, False) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_remove_server_multi_host(self): + """Test remove server from multiple hosts.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager.remove_server.return_value = MagicMock(success=True, backup_path=None) + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_remove_server(mock_env_manager.return_value, 'test-server', 'claude-desktop,cursor', auto_approve=True) + + self.assertEqual(result, 0) + self.assertEqual(mock_manager.remove_server.call_count, 2) + + # Verify success messages + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Successfully removed 'test-server' from 'claude-desktop'" in call for call in print_calls)) + self.assertTrue(any("[SUCCESS] Successfully removed 'test-server' from 'cursor'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_server_no_host_specified(self): + """Test remove server with no host specified.""" + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = 
handle_mcp_remove_server(mock_env_manager.return_value, 'test-server') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Must specify either --host or --env" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_server_dry_run(self): + """Test remove server dry run functionality.""" + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_remove_server(mock_env_manager.return_value, 'test-server', 'claude-desktop', dry_run=True) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would remove MCP server 'test-server' from hosts: claude-desktop" in call for call in print_calls)) + + +class TestMCPRemoveHostCommand(unittest.TestCase): + """Test suite for MCP remove host command.""" + + @regression_test + def test_remove_host_argument_parsing(self): + """Test argument parsing for 'hatch mcp remove host' command.""" + test_args = ['hatch', 'mcp', 'remove', 'host', 'claude-desktop', '--auto-approve'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_remove_host', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with(ANY, 'claude-desktop', False, False, True) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_remove_host_successful(self): + """Test successful host configuration removal.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = Path("/test/backup.json") + mock_manager.remove_host_configuration.return_value 
= mock_result + mock_manager_class.return_value = mock_manager + + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + # Mock the clear_host_from_all_packages_all_envs method + mock_env_manager.return_value.clear_host_from_all_packages_all_envs.return_value = 2 + + with patch('builtins.print') as mock_print: + result = handle_mcp_remove_host(mock_env_manager.return_value, 'claude-desktop', auto_approve=True) + + self.assertEqual(result, 0) + mock_manager.remove_host_configuration.assert_called_once_with( + hostname='claude-desktop', no_backup=False + ) + + # Verify success message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Successfully removed host configuration for 'claude-desktop'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_host_invalid_host(self): + """Test remove host with invalid host type.""" + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_remove_host(mock_env_manager.return_value, 'invalid-host') + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Invalid host 'invalid-host'" in call for call in print_calls)) + + @integration_test(scope="component") + def test_remove_host_dry_run(self): + """Test remove host dry run functionality.""" + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_manager: + with patch('builtins.print') as mock_print: + result = handle_mcp_remove_host(mock_env_manager.return_value, 'claude-desktop', dry_run=True) + + self.assertEqual(result, 0) + + # Verify dry run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would remove entire host configuration for 'claude-desktop'" in call for call in print_calls)) + + +if __name__ == '__main__': 
+ unittest.main() diff --git a/tests/test_mcp_cli_discovery_listing.py b/tests/test_mcp_cli_discovery_listing.py new file mode 100644 index 0000000..778a2a4 --- /dev/null +++ b/tests/test_mcp_cli_discovery_listing.py @@ -0,0 +1,582 @@ +""" +Test suite for MCP CLI discovery and listing commands (Phase 3c). + +This module tests the new MCP discovery and listing functionality: +- hatch mcp discover hosts +- hatch mcp discover servers +- hatch mcp list hosts +- hatch mcp list servers + +Tests cover argument parsing, backend integration, output formatting, +and error handling scenarios. +""" + +import unittest +from unittest.mock import patch, MagicMock +import sys +from pathlib import Path + +# Add the parent directory to the path to import hatch modules +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from hatch.cli_hatch import ( + main, handle_mcp_discover_hosts, handle_mcp_discover_servers, + handle_mcp_list_hosts, handle_mcp_list_servers +) +from hatch.mcp_host_config.models import MCPHostType, MCPServerConfig +from hatch.environment_manager import HatchEnvironmentManager +from wobble import regression_test, integration_test +import json + + +class TestMCPDiscoveryCommands(unittest.TestCase): + """Test suite for MCP discovery commands.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + self.mock_env_manager.get_current_environment.return_value = "test-env" + self.mock_env_manager.environment_exists.return_value = True + + @regression_test + def test_discover_hosts_argument_parsing(self): + """Test argument parsing for 'hatch mcp discover hosts' command.""" + test_args = ['hatch', 'mcp', 'discover', 'hosts'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_discover_hosts', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once() + except SystemExit as e: + 
self.assertEqual(e.code, 0) + + @regression_test + def test_discover_servers_argument_parsing(self): + """Test argument parsing for 'hatch mcp discover servers' command.""" + test_args = ['hatch', 'mcp', 'discover', 'servers', '--env', 'test-env'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_discover_servers', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once() + except SystemExit as e: + self.assertEqual(e.code, 0) + + @regression_test + def test_discover_servers_default_environment(self): + """Test discover servers uses current environment when --env not specified.""" + test_args = ['hatch', 'mcp', 'discover', 'servers'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager') as mock_env_class: + mock_env_manager = MagicMock() + mock_env_class.return_value = mock_env_manager + + with patch('hatch.cli_hatch.handle_mcp_discover_servers', return_value=0) as mock_handler: + try: + main() + # Should be called with env_manager and None (default env) + mock_handler.assert_called_once() + args = mock_handler.call_args[0] + self.assertEqual(len(args), 2) # env_manager, env_name + self.assertIsNone(args[1]) # env_name should be None + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_discover_hosts_backend_integration(self): + """Test discover hosts integration with MCPHostRegistry.""" + with patch('hatch.mcp_host_config.strategies'): # Import strategies + with patch('hatch.cli_hatch.MCPHostRegistry') as mock_registry: + mock_registry.detect_available_hosts.return_value = [ + MCPHostType.CLAUDE_DESKTOP, + MCPHostType.CURSOR + ] + + # Mock strategy for each host type + mock_strategy = MagicMock() + mock_strategy.get_config_path.return_value = Path("/test/config.json") + mock_registry.get_strategy.return_value = mock_strategy + + with patch('builtins.print') 
as mock_print: + result = handle_mcp_discover_hosts() + + self.assertEqual(result, 0) + mock_registry.detect_available_hosts.assert_called_once() + + # Verify output contains expected information + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Available MCP host platforms:" in call for call in print_calls)) + + @integration_test(scope="component") + def test_discover_servers_backend_integration(self): + """Test discover servers integration with environment manager.""" + # Mock packages with MCP servers + mock_packages = [ + {'name': 'weather-toolkit', 'version': '1.0.0'}, + {'name': 'file-manager', 'version': '2.0.0'}, + {'name': 'regular-package', 'version': '1.5.0'} # No MCP server + ] + + self.mock_env_manager.list_packages.return_value = mock_packages + + # Mock get_package_mcp_server_config to return config for some packages + def mock_get_config(env_manager, env_name, package_name): + if package_name in ['weather-toolkit', 'file-manager']: + return MCPServerConfig( + name=f"{package_name}-server", + command="python", + args=[f"{package_name}.py"], + env={} + ) + else: + raise ValueError(f"Package '{package_name}' has no MCP server") + + with patch('hatch.cli_hatch.get_package_mcp_server_config', side_effect=mock_get_config): + with patch('builtins.print') as mock_print: + result = handle_mcp_discover_servers(self.mock_env_manager, "test-env") + + self.assertEqual(result, 0) + self.mock_env_manager.list_packages.assert_called_once_with("test-env") + + # Verify output contains MCP servers + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("MCP servers in environment 'test-env':" in call for call in print_calls)) + self.assertTrue(any("weather-toolkit-server:" in call for call in print_calls)) + self.assertTrue(any("file-manager-server:" in call for call in print_calls)) + + @regression_test + def test_discover_servers_no_mcp_packages(self): + """Test discover servers when no 
packages have MCP servers.""" + mock_packages = [ + {'name': 'regular-package-1', 'version': '1.0.0'}, + {'name': 'regular-package-2', 'version': '2.0.0'} + ] + + self.mock_env_manager.list_packages.return_value = mock_packages + + # Mock get_package_mcp_server_config to always raise ValueError + def mock_get_config(env_manager, env_name, package_name): + raise ValueError(f"Package '{package_name}' has no MCP server") + + with patch('hatch.cli_hatch.get_package_mcp_server_config', side_effect=mock_get_config): + with patch('builtins.print') as mock_print: + result = handle_mcp_discover_servers(self.mock_env_manager, "test-env") + + self.assertEqual(result, 0) + + # Verify appropriate message is shown + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("No MCP servers found in environment 'test-env'" in call for call in print_calls)) + + @regression_test + def test_discover_servers_nonexistent_environment(self): + """Test discover servers with nonexistent environment.""" + self.mock_env_manager.environment_exists.return_value = False + + with patch('builtins.print') as mock_print: + result = handle_mcp_discover_servers(self.mock_env_manager, "nonexistent-env") + + self.assertEqual(result, 1) + + # Verify error message + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("Error: Environment 'nonexistent-env' does not exist" in call for call in print_calls)) + + +class TestMCPListCommands(unittest.TestCase): + """Test suite for MCP list commands.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + self.mock_env_manager.get_current_environment.return_value = "test-env" + self.mock_env_manager.environment_exists.return_value = True + + @regression_test + def test_list_hosts_argument_parsing(self): + """Test argument parsing for 'hatch mcp list hosts' command.""" + test_args = ['hatch', 'mcp', 'list', 'hosts'] + + with 
patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_list_hosts', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once() + except SystemExit as e: + self.assertEqual(e.code, 0) + + @regression_test + def test_list_servers_argument_parsing(self): + """Test argument parsing for 'hatch mcp list servers' command.""" + test_args = ['hatch', 'mcp', 'list', 'servers', '--env', 'production'] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_list_servers', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once() + except SystemExit as e: + self.assertEqual(e.code, 0) + + @integration_test(scope="component") + def test_list_hosts_formatted_output(self): + """Test list hosts produces properly formatted output for environment-scoped listing.""" + # Setup mock environment manager with test data + mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + mock_env_manager.get_current_environment.return_value = "test-env" + mock_env_manager.environment_exists.return_value = True + mock_env_manager.get_environment_data.return_value = { + "packages": [ + { + "name": "weather-toolkit", + "configured_hosts": { + "claude-desktop": { + "config_path": "~/.claude/config.json", + "configured_at": "2025-09-25T10:00:00" + } + } + } + ] + } + + with patch('builtins.print') as mock_print: + result = handle_mcp_list_hosts(mock_env_manager, None, False) + + self.assertEqual(result, 0) + + # Verify environment-scoped output format + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("Configured hosts for environment 'test-env':", output) + self.assertIn("claude-desktop (1 packages)", output) + + @integration_test(scope="component") + def test_list_servers_formatted_output(self): + """Test list servers produces 
properly formatted table output.""" + # Mock packages with MCP servers + mock_packages = [ + {'name': 'weather-toolkit', 'version': '1.0.0'}, + {'name': 'file-manager', 'version': '2.1.0'} + ] + + self.mock_env_manager.list_packages.return_value = mock_packages + + # Mock get_package_mcp_server_config + def mock_get_config(env_manager, env_name, package_name): + return MCPServerConfig( + name=f"{package_name}-server", + command="python", + args=[f"{package_name}.py", "--port", "8080"], + env={} + ) + + with patch('hatch.cli_hatch.get_package_mcp_server_config', side_effect=mock_get_config): + with patch('builtins.print') as mock_print: + result = handle_mcp_list_servers(self.mock_env_manager, "test-env") + + self.assertEqual(result, 0) + + # Verify formatted table output + print_calls = [] + for call in mock_print.call_args_list: + if call[0]: # Check if args exist + print_calls.append(call[0][0]) + + self.assertTrue(any("MCP servers in environment 'test-env':" in call for call in print_calls)) + self.assertTrue(any("Server Name" in call for call in print_calls)) + self.assertTrue(any("weather-toolkit-server" in call for call in print_calls)) + self.assertTrue(any("file-manager-server" in call for call in print_calls)) + + +class TestMCPListHostsEnvironmentScoped(unittest.TestCase): + """Test suite for environment-scoped list hosts functionality.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + self.mock_env_manager.get_current_environment.return_value = "test-env" + self.mock_env_manager.environment_exists.return_value = True + # Configure the mock to have the get_environment_data method + self.mock_env_manager.get_environment_data = MagicMock() + + # Load test fixture data + fixture_path = Path(__file__).parent / "test_data" / "fixtures" / "environment_host_configs.json" + with open(fixture_path, 'r') as f: + self.test_data = json.load(f) + + @regression_test + def 
test_list_hosts_environment_scoped_basic(self): + """Test list hosts shows only hosts configured in specified environment. + + Validates: + - Reads from environment data (not system detection) + - Shows only hosts with configured packages in target environment + - Displays host count information correctly + - Uses environment manager for data source + """ + # Setup: Mock environment with 2 packages using different hosts + self.mock_env_manager.get_environment_data.return_value = self.test_data["multi_host_environment"] + + with patch('builtins.print') as mock_print: + # Action: Call handle_mcp_list_hosts with env_manager and env_name + result = handle_mcp_list_hosts(self.mock_env_manager, "test-env", False) + + # Assert: Success exit code + self.assertEqual(result, 0) + + # Assert: Environment manager methods called correctly + self.mock_env_manager.environment_exists.assert_called_with("test-env") + self.mock_env_manager.get_environment_data.assert_called_with("test-env") + + # Assert: Output contains both hosts with correct package counts + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + + self.assertIn("Configured hosts for environment 'test-env':", output) + self.assertIn("claude-desktop (2 packages)", output) + self.assertIn("cursor (1 packages)", output) + + @regression_test + def test_list_hosts_empty_environment(self): + """Test list hosts with environment containing no packages. 
+ + Validates: + - Handles empty environment gracefully + - Displays appropriate message for no configured hosts + - Returns success exit code (0) + - Does not attempt system detection + """ + # Setup: Mock environment with no packages + self.mock_env_manager.get_environment_data.return_value = self.test_data["empty_environment"] + + with patch('builtins.print') as mock_print: + # Action: Call handle_mcp_list_hosts + result = handle_mcp_list_hosts(self.mock_env_manager, "empty-env", False) + + # Assert: Success exit code + self.assertEqual(result, 0) + + # Assert: Appropriate message displayed + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("No configured hosts for environment 'empty-env'", output) + + @regression_test + def test_list_hosts_packages_no_host_tracking(self): + """Test list hosts with packages that have no configured_hosts data. + + Validates: + - Handles packages without configured_hosts gracefully + - Displays appropriate message for no host configurations + - Maintains backward compatibility with older environment data + """ + # Setup: Mock environment with packages lacking configured_hosts + self.mock_env_manager.get_environment_data.return_value = self.test_data["packages_no_host_tracking"] + + with patch('builtins.print') as mock_print: + # Action: Call handle_mcp_list_hosts + result = handle_mcp_list_hosts(self.mock_env_manager, "legacy-env", False) + + # Assert: Success exit code + self.assertEqual(result, 0) + + # Assert: Handles missing configured_hosts keys without error + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("No configured hosts for environment 'legacy-env'", output) + + +class TestMCPListHostsCLIIntegration(unittest.TestCase): + """Test suite for CLI argument processing.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + 
self.mock_env_manager.get_current_environment.return_value = "current-env" + self.mock_env_manager.environment_exists.return_value = True + # Configure the mock to have the get_environment_data method + self.mock_env_manager.get_environment_data = MagicMock(return_value={"packages": []}) + + @regression_test + def test_list_hosts_env_argument_parsing(self): + """Test --env argument processing for list hosts command. + + Validates: + - Accepts --env argument correctly + - Passes environment name to handler function + - Uses current environment when --env not specified + - Validates environment exists before processing + """ + # Test case 1: hatch mcp list hosts --env project-alpha + with patch('builtins.print'): + result = handle_mcp_list_hosts(self.mock_env_manager, "project-alpha", False) + self.assertEqual(result, 0) + self.mock_env_manager.environment_exists.assert_called_with("project-alpha") + self.mock_env_manager.get_environment_data.assert_called_with("project-alpha") + + # Reset mocks + self.mock_env_manager.reset_mock() + + # Test case 2: hatch mcp list hosts (uses current environment) + with patch('builtins.print'): + result = handle_mcp_list_hosts(self.mock_env_manager, None, False) + self.assertEqual(result, 0) + self.mock_env_manager.get_current_environment.assert_called_once() + self.mock_env_manager.environment_exists.assert_called_with("current-env") + + @regression_test + def test_list_hosts_detailed_flag_parsing(self): + """Test --detailed flag processing for list hosts command. 
+ + Validates: + - Accepts --detailed flag correctly + - Passes detailed flag to handler function + - Default behavior when flag not specified + """ + # Load test data with detailed information + fixture_path = Path(__file__).parent / "test_data" / "fixtures" / "environment_host_configs.json" + with open(fixture_path, 'r') as f: + test_data = json.load(f) + + self.mock_env_manager.get_environment_data.return_value = test_data["single_host_environment"] + + with patch('builtins.print') as mock_print: + # Test: hatch mcp list hosts --detailed + result = handle_mcp_list_hosts(self.mock_env_manager, "test-env", True) + + # Assert: detailed=True passed to handler + self.assertEqual(result, 0) + + # Assert: Detailed output includes config paths and timestamps + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("Config path:", output) + self.assertIn("Configured at:", output) + + +class TestMCPListHostsEnvironmentManagerIntegration(unittest.TestCase): + """Test suite for environment manager integration.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + # Configure the mock to have the get_environment_data method + self.mock_env_manager.get_environment_data = MagicMock() + + @integration_test(scope="component") + def test_list_hosts_reads_environment_data(self): + """Test list hosts reads actual environment data via environment manager. 
+ + Validates: + - Calls environment manager methods correctly + - Processes configured_hosts data from packages + - Aggregates hosts across multiple packages + - Handles environment resolution (current vs specified) + """ + # Setup: Real environment manager with test data + fixture_path = Path(__file__).parent / "test_data" / "fixtures" / "environment_host_configs.json" + with open(fixture_path, 'r') as f: + test_data = json.load(f) + + self.mock_env_manager.get_current_environment.return_value = "test-env" + self.mock_env_manager.environment_exists.return_value = True + self.mock_env_manager.get_environment_data.return_value = test_data["multi_host_environment"] + + with patch('builtins.print'): + # Action: Call list hosts functionality + result = handle_mcp_list_hosts(self.mock_env_manager, None, False) + + # Assert: Correct environment manager method calls + self.mock_env_manager.get_current_environment.assert_called_once() + self.mock_env_manager.environment_exists.assert_called_with("test-env") + self.mock_env_manager.get_environment_data.assert_called_with("test-env") + + # Assert: Success result + self.assertEqual(result, 0) + + @integration_test(scope="component") + def test_list_hosts_environment_validation(self): + """Test list hosts validates environment existence. 
+ + Validates: + - Checks environment exists before processing + - Returns appropriate error for non-existent environment + - Provides helpful error message with available environments + """ + # Setup: Environment manager with known environments + self.mock_env_manager.environment_exists.return_value = False + self.mock_env_manager.list_environments.return_value = ["env1", "env2", "env3"] + + with patch('builtins.print') as mock_print: + # Action: Call list hosts with non-existent environment + result = handle_mcp_list_hosts(self.mock_env_manager, "non-existent", False) + + # Assert: Error message includes available environments + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("Environment 'non-existent' does not exist", output) + self.assertIn("Available environments: env1, env2, env3", output) + + # Assert: Non-zero exit code + self.assertEqual(result, 1) + + +class TestMCPDiscoverHostsUnchanged(unittest.TestCase): + """Test suite for discover hosts unchanged behavior.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + + @regression_test + def test_discover_hosts_system_detection_unchanged(self): + """Test discover hosts continues to use system detection. 
+ + Validates: + - Uses host strategy detection (not environment data) + - Shows availability status for detected hosts + - Behavior unchanged from previous implementation + - No environment dependency + """ + # Setup: Mock host strategies with available hosts + with patch('hatch.mcp_host_config.strategies'): # Import strategies + with patch('hatch.cli_hatch.MCPHostRegistry') as mock_registry: + mock_registry.detect_available_hosts.return_value = [ + MCPHostType.CLAUDE_DESKTOP, + MCPHostType.CURSOR + ] + + # Mock strategy for each host type + mock_strategy = MagicMock() + mock_strategy.get_config_path.return_value = Path("~/.claude/config.json") + mock_registry.get_strategy.return_value = mock_strategy + + with patch('builtins.print') as mock_print: + # Action: Call handle_mcp_discover_hosts + result = handle_mcp_discover_hosts() + + # Assert: Host strategy detection called + mock_registry.detect_available_hosts.assert_called_once() + + # Assert: No environment manager calls (discover hosts is environment-independent) + # Note: discover hosts doesn't use environment manager at all + + # Assert: Availability-focused output format + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("Available MCP host platforms:", output) + self.assertIn("Available", output) + + # Assert: Success result + self.assertEqual(result, 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_cli_host_config_integration.py b/tests/test_mcp_cli_host_config_integration.py new file mode 100644 index 0000000..468c074 --- /dev/null +++ b/tests/test_mcp_cli_host_config_integration.py @@ -0,0 +1,823 @@ +""" +Test suite for MCP CLI host configuration integration. + +This module tests the integration of the Pydantic model hierarchy (Phase 3B) +and user feedback reporting system (Phase 3C) into Hatch's CLI commands. 
+ +Tests focus on CLI-specific integration logic while leveraging existing test +infrastructure from Phases 3A-3C. +""" + +import unittest +import sys +from pathlib import Path +from unittest.mock import patch, MagicMock, call, ANY + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + +from hatch.cli_hatch import ( + handle_mcp_configure, + parse_env_vars, + parse_header, + parse_host_list, +) +from hatch.mcp_host_config.models import ( + MCPServerConfig, + MCPServerConfigOmni, + HOST_MODEL_REGISTRY, + MCPHostType, + MCPServerConfigGemini, + MCPServerConfigVSCode, + MCPServerConfigCursor, + MCPServerConfigClaude, +) +from hatch.mcp_host_config.reporting import ( + generate_conversion_report, + display_report, + FieldOperation, + ConversionReport, +) + + +class TestCLIArgumentParsingToOmniCreation(unittest.TestCase): + """Test suite for CLI argument parsing to MCPServerConfigOmni creation.""" + + @regression_test + def test_configure_creates_omni_model_basic(self): + """Test that configure command creates MCPServerConfigOmni from CLI arguments.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call handle_mcp_configure with basic arguments + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + @regression_test + def 
test_configure_creates_omni_with_env_vars(self): + """Test that environment variables are parsed correctly into Omni model.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call with environment variables + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=['API_KEY=secret', 'DEBUG=true'], + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + @regression_test + def test_configure_creates_omni_with_headers(self): + """Test that headers are parsed correctly into Omni model.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + result = handle_mcp_configure( + host='gemini', # Use gemini which supports remote servers + server_name='test-server', + command=None, + args=None, + env=None, + url='https://api.example.com', + header=['Authorization=Bearer token', 'Content-Type=application/json'], + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors (bug fixed in Phase 4) + self.assertEqual(result, 0) + + @regression_test + def test_configure_creates_omni_remote_server(self): + """Test that remote server arguments create correct Omni model.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + result = handle_mcp_configure( + host='gemini', # Use gemini which supports remote servers + server_name='remote-server', + command=None, + args=None, + env=None, + url='https://api.example.com', + header=['Auth=token'], + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without 
errors (bug fixed in Phase 4) + self.assertEqual(result, 0) + + @regression_test + def test_configure_omni_with_all_universal_fields(self): + """Test that all universal fields are supported in Omni creation.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call with all universal fields + result = handle_mcp_configure( + host='claude-desktop', + server_name='full-server', + command='python', + args=['server.py', '--port', '8080'], + env=['API_KEY=secret', 'DEBUG=true', 'LOG_LEVEL=info'], + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + @regression_test + def test_configure_omni_with_optional_fields_none(self): + """Test that optional fields are handled correctly (None values).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call with only required fields + result = handle_mcp_configure( + host='claude-desktop', + server_name='minimal-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + +class TestModelIntegration(unittest.TestCase): + """Test suite for model integration in CLI handlers.""" + + @regression_test + def test_configure_uses_host_model_registry(self): + """Test that configure command uses HOST_MODEL_REGISTRY for host selection.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Test with Gemini host + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + 
url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + @regression_test + def test_configure_calls_from_omni_conversion(self): + """Test that from_omni() is called to convert Omni to host-specific model.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call configure command + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + @integration_test(scope="component") + def test_configure_passes_host_specific_model_to_manager(self): + """Test that host-specific model is passed to MCPHostConfigurationManager.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.configure_server.return_value = MagicMock(success=True, backup_path=None) + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + # Call configure command + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify configure_server was called + self.assertEqual(result, 0) + mock_manager.configure_server.assert_called_once() + + # Verify the server_config argument is a host-specific model instance + # (MCPServerConfigClaude for claude-desktop host) + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertIsInstance(server_config, MCPServerConfigClaude) + + 
+class TestReportingIntegration(unittest.TestCase): + """Test suite for reporting integration in CLI commands.""" + + @regression_test + def test_configure_dry_run_displays_report_only(self): + """Test that dry-run mode displays report without configuration.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + # Call with dry-run + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=True, + auto_approve=False + ) + + # Verify the function executed without errors + self.assertEqual(result, 0) + + # Verify MCPHostConfigurationManager.create_server was NOT called (dry-run doesn't persist) + # Note: get_server_config is called to check if server exists, but create_server is not called + mock_manager.return_value.create_server.assert_not_called() + + +class TestHostSpecificArguments(unittest.TestCase): + """Test suite for host-specific CLI arguments (Phase 3 - Mandatory).""" + + @regression_test + def test_configure_accepts_all_universal_fields(self): + """Test that all universal fields are accepted by CLI.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Call with all universal fields + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py', '--port', '8080'], + env=['API_KEY=secret', 'DEBUG=true'], + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify success + self.assertEqual(result, 0) + + @regression_test + def test_configure_multiple_env_vars(self): + """Test that multiple environment variables are handled correctly.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # 
Call with multiple env vars + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + env=['VAR1=value1', 'VAR2=value2', 'VAR3=value3'], + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify success + self.assertEqual(result, 0) + + @regression_test + def test_configure_different_hosts(self): + """Test that different host types are handled correctly.""" + hosts_to_test = ['claude-desktop', 'cursor', 'vscode', 'gemini'] + + for host in hosts_to_test: + with self.subTest(host=host): + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + result = handle_mcp_configure( + host=host, + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify success for each host + self.assertEqual(result, 0) + + +class TestErrorHandling(unittest.TestCase): + """Test suite for error handling in CLI commands.""" + + @regression_test + def test_configure_invalid_host_type_error(self): + """Test that clear error is shown for invalid host type.""" + # Call with invalid host + result = handle_mcp_configure( + host='invalid-host', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify error return code + self.assertEqual(result, 1) + + @regression_test + def test_configure_invalid_field_value_error(self): + """Test that clear error is shown for invalid field values.""" + # Test with invalid URL format - this will be caught by Pydantic validation + # when creating MCPServerConfig + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command=None, + args=None, # Must be None for remote server + env=None, + 
url='not-a-url', # Invalid URL format + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify error return code (validation error caught in exception handler) + self.assertEqual(result, 1) + + @regression_test + def test_configure_pydantic_validation_error_handling(self): + """Test that Pydantic ValidationErrors are caught and handled.""" + # Test with conflicting arguments (command with headers) + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=['Auth=token'], # Headers not allowed with command + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify error return code (caught by validation in handle_mcp_configure) + self.assertEqual(result, 1) + + @regression_test + def test_configure_missing_command_url_error(self): + """Test error handling when neither command nor URL provided.""" + # This test verifies the argparse validation (required=True for mutually exclusive group) + # In actual CLI usage, argparse would catch this before handle_mcp_configure is called + # For unit testing, we test that the function handles None values appropriately + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command=None, + args=None, + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify error return code (validation error) + self.assertEqual(result, 1) + + +class TestBackwardCompatibility(unittest.TestCase): + """Test suite for backward compatibility.""" + + @regression_test + def test_existing_configure_command_still_works(self): + """Test that existing configure command usage still works.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.configure_server.return_value = MagicMock(success=True, 
backup_path=None) + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + # Call with existing command pattern + result = handle_mcp_configure( + host='claude-desktop', + server_name='my-server', + command='python', + args=['-m', 'my_package.server'], + env=['API_KEY=secret'], + url=None, + header=None, + no_backup=False, + dry_run=False, + auto_approve=False + ) + + # Verify success + self.assertEqual(result, 0) + mock_manager.configure_server.assert_called_once() + + +class TestParseUtilities(unittest.TestCase): + """Test suite for CLI parsing utilities.""" + + @regression_test + def test_parse_env_vars_basic(self): + """Test parsing environment variables from KEY=VALUE format.""" + env_list = ['API_KEY=secret', 'DEBUG=true'] + result = parse_env_vars(env_list) + + expected = {'API_KEY': 'secret', 'DEBUG': 'true'} + self.assertEqual(result, expected) + + @regression_test + def test_parse_env_vars_empty(self): + """Test parsing empty environment variables list.""" + result = parse_env_vars(None) + self.assertEqual(result, {}) + + result = parse_env_vars([]) + self.assertEqual(result, {}) + + @regression_test + def test_parse_header_basic(self): + """Test parsing headers from KEY=VALUE format.""" + headers_list = ['Authorization=Bearer token', 'Content-Type=application/json'] + result = parse_header(headers_list) + + expected = {'Authorization': 'Bearer token', 'Content-Type': 'application/json'} + self.assertEqual(result, expected) + + @regression_test + def test_parse_header_empty(self): + """Test parsing empty headers list.""" + result = parse_header(None) + self.assertEqual(result, {}) + + result = parse_header([]) + self.assertEqual(result, {}) + + +class TestCLIIntegrationReadiness(unittest.TestCase): + """Test suite to verify readiness for Phase 4 CLI integration implementation.""" + + @regression_test + def test_host_model_registry_available(self): + """Test that HOST_MODEL_REGISTRY is available for CLI integration.""" + from 
hatch.mcp_host_config.models import HOST_MODEL_REGISTRY, MCPHostType + + # Verify registry contains all expected hosts + expected_hosts = [ + MCPHostType.GEMINI, + MCPHostType.CLAUDE_DESKTOP, + MCPHostType.CLAUDE_CODE, + MCPHostType.VSCODE, + MCPHostType.CURSOR, + MCPHostType.LMSTUDIO, + ] + + for host in expected_hosts: + self.assertIn(host, HOST_MODEL_REGISTRY) + + @regression_test + def test_omni_model_available(self): + """Test that MCPServerConfigOmni is available for CLI integration.""" + from hatch.mcp_host_config.models import MCPServerConfigOmni + + # Create a basic Omni model + omni = MCPServerConfigOmni( + name='test-server', + command='python', + args=['server.py'], + env={'API_KEY': 'secret'}, + ) + + # Verify model was created successfully + self.assertEqual(omni.name, 'test-server') + self.assertEqual(omni.command, 'python') + self.assertEqual(omni.args, ['server.py']) + self.assertEqual(omni.env, {'API_KEY': 'secret'}) + + @regression_test + def test_from_omni_conversion_available(self): + """Test that from_omni() conversion is available for all host models.""" + from hatch.mcp_host_config.models import ( + MCPServerConfigOmni, + MCPServerConfigGemini, + MCPServerConfigClaude, + MCPServerConfigVSCode, + MCPServerConfigCursor, + ) + + # Create Omni model + omni = MCPServerConfigOmni( + name='test-server', + command='python', + args=['server.py'], + ) + + # Test conversion to each host-specific model + gemini = MCPServerConfigGemini.from_omni(omni) + self.assertEqual(gemini.name, 'test-server') + + claude = MCPServerConfigClaude.from_omni(omni) + self.assertEqual(claude.name, 'test-server') + + vscode = MCPServerConfigVSCode.from_omni(omni) + self.assertEqual(vscode.name, 'test-server') + + cursor = MCPServerConfigCursor.from_omni(omni) + self.assertEqual(cursor.name, 'test-server') + + @regression_test + def test_reporting_functions_available(self): + """Test that reporting functions are available for CLI integration.""" + from 
hatch.mcp_host_config.reporting import ( + generate_conversion_report, + display_report, + ) + from hatch.mcp_host_config.models import MCPServerConfigOmni, MCPHostType + + # Create Omni model + omni = MCPServerConfigOmni( + name='test-server', + command='python', + args=['server.py'], + ) + + # Generate report + report = generate_conversion_report( + operation='create', + server_name='test-server', + target_host=MCPHostType.CLAUDE_DESKTOP, + omni=omni, + dry_run=True + ) + + # Verify report was created + self.assertIsNotNone(report) + self.assertEqual(report.operation, 'create') + + @regression_test + def test_claude_desktop_rejects_url_configuration(self): + """Test Claude Desktop rejects remote server (--url) configurations (Issue 2).""" + with patch('hatch.cli_hatch.print') as mock_print: + result = handle_mcp_configure( + host='claude-desktop', + server_name='remote-server', + command=None, + args=None, + env=None, + url='http://localhost:8080', # Should be rejected + header=None, + no_backup=True, + dry_run=False, + auto_approve=True + ) + + # Validate: Should return error code 1 + self.assertEqual(result, 1) + + # Validate: Error message displayed + error_calls = [call for call in mock_print.call_args_list + if 'Error' in str(call) or 'error' in str(call)] + self.assertTrue(len(error_calls) > 0, "Expected error message to be printed") + + @regression_test + def test_claude_code_rejects_url_configuration(self): + """Test Claude Code (same family) also rejects remote servers (Issue 2).""" + with patch('hatch.cli_hatch.print') as mock_print: + result = handle_mcp_configure( + host='claude-code', + server_name='remote-server', + command=None, + args=None, + env=None, + url='http://localhost:8080', + header=None, + no_backup=True, + dry_run=False, + auto_approve=True + ) + + # Validate: Should return error code 1 + self.assertEqual(result, 1) + + # Validate: Error message displayed + error_calls = [call for call in mock_print.call_args_list + if 'Error' in 
str(call) or 'error' in str(call)] + self.assertTrue(len(error_calls) > 0, "Expected error message to be printed") + + @regression_test + def test_args_quoted_string_splitting(self): + """Test that quoted strings in --args are properly split (Issue 4).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Simulate user providing: --args "-r --name aName" + # This arrives as a single string element in the args list + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['-r --name aName'], # Single string with quoted content + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify: Should succeed (return 0) + self.assertEqual(result, 0) + + # Verify: MCPServerConfigOmni was created with split args + call_args = mock_manager.return_value.create_server.call_args + if call_args: + omni_config = call_args[1]['omni'] + # Args should be split into 3 elements: ['-r', '--name', 'aName'] + self.assertEqual(omni_config.args, ['-r', '--name', 'aName']) + + @regression_test + def test_args_multiple_quoted_strings(self): + """Test multiple quoted strings in --args are all split correctly (Issue 4).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Simulate: --args "-r" "--name aName" + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['-r', '--name aName'], # Two separate args + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify: Should succeed + self.assertEqual(result, 0) + + # Verify: All args are properly split + call_args = mock_manager.return_value.create_server.call_args + if call_args: + omni_config = call_args[1]['omni'] + # 
Should be split into: ['-r', '--name', 'aName'] + self.assertEqual(omni_config.args, ['-r', '--name', 'aName']) + + @regression_test + def test_args_empty_string_handling(self): + """Test that empty strings in --args are filtered out (Issue 4).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + # Simulate: --args "" "server.py" + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['', 'server.py'], # Empty string should be filtered + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify: Should succeed + self.assertEqual(result, 0) + + # Verify: Empty strings are filtered out + call_args = mock_manager.return_value.create_server.call_args + if call_args: + omni_config = call_args[1]['omni'] + # Should only contain 'server.py' + self.assertEqual(omni_config.args, ['server.py']) + + @regression_test + def test_args_invalid_quote_handling(self): + """Test that invalid quotes in --args are handled gracefully (Issue 4).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager: + with patch('hatch.cli_hatch.request_confirmation', return_value=False): + with patch('hatch.cli_hatch.print') as mock_print: + # Simulate: --args 'unclosed "quote' + result = handle_mcp_configure( + host='claude-desktop', + server_name='test-server', + command='python', + args=['unclosed "quote'], # Invalid quote + env=None, + url=None, + header=None, + no_backup=True, + dry_run=False, + auto_approve=False + ) + + # Verify: Should succeed (graceful fallback) + self.assertEqual(result, 0) + + # Verify: Warning was printed + warning_calls = [call for call in mock_print.call_args_list + if 'Warning' in str(call)] + self.assertTrue(len(warning_calls) > 0, "Expected warning for invalid quote") + + # Verify: Original arg is used as fallback + call_args = 
mock_manager.return_value.create_server.call_args + if call_args: + omni_config = call_args[1]['omni'] + self.assertIn('unclosed "quote', omni_config.args) + + @regression_test + def test_cli_handler_signature_compatible(self): + """Test that handle_mcp_configure signature is compatible with integration.""" + import inspect + from hatch.cli_hatch import handle_mcp_configure + + # Get function signature + sig = inspect.signature(handle_mcp_configure) + + # Verify expected parameters exist + expected_params = [ + 'host', 'server_name', 'command', 'args', + 'env', 'url', 'header', 'no_backup', 'dry_run', 'auto_approve' + ] + + for param in expected_params: + self.assertIn(param, sig.parameters) + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_mcp_cli_package_management.py b/tests/test_mcp_cli_package_management.py new file mode 100644 index 0000000..75fb8e1 --- /dev/null +++ b/tests/test_mcp_cli_package_management.py @@ -0,0 +1,360 @@ +""" +Test suite for MCP CLI package management enhancements. + +This module tests the enhanced package management commands with MCP host +configuration integration following CrackingShells testing standards. 
+""" + +import sys +import unittest +from pathlib import Path +from unittest.mock import MagicMock, mock_open, patch + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import integration_test, regression_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + + return decorator + + +from hatch.cli_hatch import ( + get_package_mcp_server_config, + parse_host_list, + request_confirmation, +) +from hatch.mcp_host_config import MCPHostType, MCPServerConfig + + +class TestMCPCLIPackageManagement(unittest.TestCase): + """Test suite for MCP CLI package management enhancements.""" + + @regression_test + def test_parse_host_list_comma_separated(self): + """Test parsing comma-separated host list.""" + hosts = parse_host_list("claude-desktop,cursor,vscode") + expected = [MCPHostType.CLAUDE_DESKTOP, MCPHostType.CURSOR, MCPHostType.VSCODE] + self.assertEqual(hosts, expected) + + @regression_test + def test_parse_host_list_single_host(self): + """Test parsing single host.""" + hosts = parse_host_list("claude-desktop") + expected = [MCPHostType.CLAUDE_DESKTOP] + self.assertEqual(hosts, expected) + + @regression_test + def test_parse_host_list_empty(self): + """Test parsing empty host list.""" + hosts = parse_host_list("") + self.assertEqual(hosts, []) + + @regression_test + def test_parse_host_list_none(self): + """Test parsing None host list.""" + hosts = parse_host_list(None) + self.assertEqual(hosts, []) + + @regression_test + def test_parse_host_list_all(self): + """Test parsing 'all' host list.""" + with patch( + "hatch.cli_hatch.MCPHostRegistry.detect_available_hosts" + ) as mock_detect: + mock_detect.return_value = [MCPHostType.CLAUDE_DESKTOP, MCPHostType.CURSOR] + hosts = parse_host_list("all") + expected = 
[MCPHostType.CLAUDE_DESKTOP, MCPHostType.CURSOR] + self.assertEqual(hosts, expected) + mock_detect.assert_called_once() + + @regression_test + def test_parse_host_list_invalid_host(self): + """Test parsing invalid host raises ValueError.""" + with self.assertRaises(ValueError) as context: + parse_host_list("invalid-host") + + self.assertIn("Unknown host 'invalid-host'", str(context.exception)) + self.assertIn("Available:", str(context.exception)) + + @regression_test + def test_parse_host_list_mixed_valid_invalid(self): + """Test parsing mixed valid and invalid hosts.""" + with self.assertRaises(ValueError) as context: + parse_host_list("claude-desktop,invalid-host,cursor") + + self.assertIn("Unknown host 'invalid-host'", str(context.exception)) + + @regression_test + def test_parse_host_list_whitespace_handling(self): + """Test parsing host list with whitespace.""" + hosts = parse_host_list(" claude-desktop , cursor , vscode ") + expected = [MCPHostType.CLAUDE_DESKTOP, MCPHostType.CURSOR, MCPHostType.VSCODE] + self.assertEqual(hosts, expected) + + @regression_test + def test_request_confirmation_auto_approve(self): + """Test confirmation with auto-approve flag.""" + result = request_confirmation("Test message?", auto_approve=True) + self.assertTrue(result) + + @regression_test + def test_request_confirmation_user_yes(self): + """Test confirmation with user saying yes.""" + with patch("builtins.input", return_value="y"): + result = request_confirmation("Test message?", auto_approve=False) + self.assertTrue(result) + + @regression_test + def test_request_confirmation_user_yes_full(self): + """Test confirmation with user saying 'yes'.""" + with patch("builtins.input", return_value="yes"): + result = request_confirmation("Test message?", auto_approve=False) + self.assertTrue(result) + + @regression_test + def test_request_confirmation_user_no(self): + """Test confirmation with user saying no.""" + with patch.dict("os.environ", {"HATCH_AUTO_APPROVE": ""}, clear=False): 
+ with patch("builtins.input", return_value="n"): + result = request_confirmation("Test message?", auto_approve=False) + self.assertFalse(result) + + @regression_test + def test_request_confirmation_user_no_full(self): + """Test confirmation with user saying 'no'.""" + with patch.dict("os.environ", {"HATCH_AUTO_APPROVE": ""}, clear=False): + with patch("builtins.input", return_value="no"): + result = request_confirmation("Test message?", auto_approve=False) + self.assertFalse(result) + + @regression_test + def test_request_confirmation_user_empty(self): + """Test confirmation with user pressing enter (default no).""" + with patch.dict("os.environ", {"HATCH_AUTO_APPROVE": ""}, clear=False): + with patch("builtins.input", return_value=""): + result = request_confirmation("Test message?", auto_approve=False) + self.assertFalse(result) + + @integration_test(scope="component") + def test_package_add_argument_parsing(self): + """Test package add command argument parsing with MCP flags.""" + import argparse + + from hatch.cli_hatch import main + + # Mock argparse to capture parsed arguments + with patch("argparse.ArgumentParser.parse_args") as mock_parse: + mock_args = MagicMock() + mock_args.command = "package" + mock_args.pkg_command = "add" + mock_args.package_path_or_name = "test-package" + mock_args.host = "claude-desktop,cursor" + mock_args.env = None + mock_args.version = None + mock_args.force_download = False + mock_args.refresh_registry = False + mock_args.auto_approve = False + mock_parse.return_value = mock_args + + # Mock environment manager to avoid actual operations + with patch("hatch.cli_hatch.HatchEnvironmentManager") as mock_env_manager: + mock_env_manager.return_value.add_package_to_environment.return_value = True + mock_env_manager.return_value.get_current_environment.return_value = ( + "default" + ) + + # Mock MCP manager + with patch("hatch.cli_hatch.MCPHostConfigurationManager"): + with patch("builtins.print") as mock_print: + result = main() + + # 
Should succeed + self.assertEqual(result, 0) + + # Should print success message + mock_print.assert_any_call( + "Successfully added package: test-package" + ) + + @integration_test(scope="component") + def test_package_sync_argument_parsing(self): + """Test package sync command argument parsing.""" + import argparse + + from hatch.cli_hatch import main + + # Mock argparse to capture parsed arguments + with patch("argparse.ArgumentParser.parse_args") as mock_parse: + mock_args = MagicMock() + mock_args.command = "package" + mock_args.pkg_command = "sync" + mock_args.package_name = "test-package" + mock_args.host = "claude-desktop,cursor" + mock_args.env = None + mock_args.dry_run = True # Use dry run to avoid actual configuration + mock_args.auto_approve = False + mock_args.no_backup = False + mock_parse.return_value = mock_args + + # Mock the get_package_mcp_server_config function + with patch( + "hatch.cli_hatch.get_package_mcp_server_config" + ) as mock_get_config: + mock_server_config = MagicMock() + mock_server_config.name = "test-package" + mock_server_config.args = ["/path/to/server.py"] + mock_get_config.return_value = mock_server_config + + # Mock environment manager + with patch( + "hatch.cli_hatch.HatchEnvironmentManager" + ) as mock_env_manager: + mock_env_manager.return_value.get_current_environment.return_value = "default" + + # Mock MCP manager + with patch("hatch.cli_hatch.MCPHostConfigurationManager"): + with patch("builtins.print") as mock_print: + result = main() + + # Should succeed + self.assertEqual(result, 0) + + # Should print dry run message (new format includes dependency info) + mock_print.assert_any_call( + "[DRY RUN] Would synchronize MCP servers for 1 package(s) to hosts: ['claude-desktop', 'cursor']" + ) + + @integration_test(scope="component") + def test_package_sync_package_not_found(self): + """Test package sync when package doesn't exist.""" + import argparse + + from hatch.cli_hatch import main + + # Mock argparse to capture 
parsed arguments + with patch("argparse.ArgumentParser.parse_args") as mock_parse: + mock_args = MagicMock() + mock_args.command = "package" + mock_args.pkg_command = "sync" + mock_args.package_name = "nonexistent-package" + mock_args.host = "claude-desktop" + mock_args.env = None + mock_args.dry_run = False + mock_args.auto_approve = False + mock_args.no_backup = False + mock_parse.return_value = mock_args + + # Mock the get_package_mcp_server_config function to raise ValueError + with patch( + "hatch.cli_hatch.get_package_mcp_server_config" + ) as mock_get_config: + mock_get_config.side_effect = ValueError( + "Package 'nonexistent-package' not found in environment 'default'" + ) + + # Mock environment manager + with patch( + "hatch.cli_hatch.HatchEnvironmentManager" + ) as mock_env_manager: + mock_env_manager.return_value.get_current_environment.return_value = "default" + + with patch("builtins.print") as mock_print: + result = main() + + # Should fail + self.assertEqual(result, 1) + + # Should print error message (new format) + mock_print.assert_any_call( + "Error: No MCP server configurations found for package 'nonexistent-package' or its dependencies" + ) + + @regression_test + def test_get_package_mcp_server_config_success(self): + """Test successful MCP server config retrieval.""" + # Mock environment manager + mock_env_manager = MagicMock() + mock_env_manager.list_packages.return_value = [ + { + "name": "test-package", + "version": "1.0.0", + "source": {"path": "/path/to/package"}, + } + ] + # Mock the Python executable method to return a proper string + mock_env_manager.get_current_python_executable.return_value = "/path/to/python" + + # Mock file system and metadata + with patch("pathlib.Path.exists", return_value=True): + with patch( + "builtins.open", + mock_open( + read_data='{"package_schema_version": "1.2.1", "name": "test-package"}' + ), + ): + with patch( + "hatch_validator.package.package_service.PackageService" + ) as mock_service_class: + 
mock_service = MagicMock() + mock_service.get_mcp_entry_point.return_value = "mcp_server.py" + mock_service_class.return_value = mock_service + + config = get_package_mcp_server_config( + mock_env_manager, "test-env", "test-package" + ) + + self.assertIsInstance(config, MCPServerConfig) + self.assertEqual(config.name, "test-package") + self.assertEqual( + config.command, "/path/to/python" + ) # Now uses environment-specific Python + self.assertTrue(config.args[0].endswith("mcp_server.py")) + + @regression_test + def test_get_package_mcp_server_config_package_not_found(self): + """Test MCP server config retrieval when package not found.""" + # Mock environment manager with empty package list + mock_env_manager = MagicMock() + mock_env_manager.list_packages.return_value = [] + + with self.assertRaises(ValueError) as context: + get_package_mcp_server_config( + mock_env_manager, "test-env", "nonexistent-package" + ) + + self.assertIn("Package 'nonexistent-package' not found", str(context.exception)) + + @regression_test + def test_get_package_mcp_server_config_no_metadata(self): + """Test MCP server config retrieval when package has no metadata.""" + # Mock environment manager + mock_env_manager = MagicMock() + mock_env_manager.list_packages.return_value = [ + { + "name": "test-package", + "version": "1.0.0", + "source": {"path": "/path/to/package"}, + } + ] + + # Mock file system - metadata file doesn't exist + with patch("pathlib.Path.exists", return_value=False): + with self.assertRaises(ValueError) as context: + get_package_mcp_server_config( + mock_env_manager, "test-env", "test-package" + ) + + self.assertIn("not a Hatch package", str(context.exception)) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_mcp_cli_partial_updates.py b/tests/test_mcp_cli_partial_updates.py new file mode 100644 index 0000000..d20e9a5 --- /dev/null +++ b/tests/test_mcp_cli_partial_updates.py @@ -0,0 +1,859 @@ +""" +Test suite for MCP CLI partial configuration 
update functionality. + +This module tests the partial configuration update feature that allows users to modify +specific fields without re-specifying entire server configurations. + +Tests cover: +- Server existence detection (get_server_config method) +- Partial update validation (create vs. update logic) +- Field preservation (merge logic) +- Command/URL switching behavior +- End-to-end integration workflows +- Backward compatibility +""" + +import unittest +from unittest.mock import patch, MagicMock, call +import sys +from pathlib import Path + +# Add the parent directory to the path to import hatch modules +sys.path.insert(0, str(Path(__file__).parent.parent)) + +from hatch.mcp_host_config.host_management import MCPHostConfigurationManager +from hatch.mcp_host_config.models import MCPHostType, MCPServerConfig, MCPServerConfigOmni +from hatch.cli_hatch import handle_mcp_configure +from wobble import regression_test, integration_test + + +class TestServerExistenceDetection(unittest.TestCase): + """Test suite for server existence detection (Category A).""" + + @regression_test + def test_get_server_config_exists(self): + """Test A1: get_server_config returns existing server configuration.""" + # Setup: Create a test server configuration + manager = MCPHostConfigurationManager() + + # Mock the strategy to return a configuration with our test server + mock_strategy = MagicMock() + mock_config = MagicMock() + test_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "test_key"} + ) + mock_config.servers = {"test-server": test_server} + mock_strategy.read_configuration.return_value = mock_config + + with patch.object(manager.host_registry, 'get_strategy', return_value=mock_strategy): + # Execute + result = manager.get_server_config("claude-desktop", "test-server") + + # Validate + self.assertIsNotNone(result) + self.assertEqual(result.name, "test-server") + self.assertEqual(result.command, "python") + + 
@regression_test + def test_get_server_config_not_exists(self): + """Test A2: get_server_config returns None for non-existent server.""" + # Setup: Empty registry + manager = MCPHostConfigurationManager() + + mock_strategy = MagicMock() + mock_config = MagicMock() + mock_config.servers = {} # No servers + mock_strategy.read_configuration.return_value = mock_config + + with patch.object(manager.host_registry, 'get_strategy', return_value=mock_strategy): + # Execute + result = manager.get_server_config("claude-desktop", "non-existent-server") + + # Validate + self.assertIsNone(result) + + @regression_test + def test_get_server_config_invalid_host(self): + """Test A3: get_server_config handles invalid host gracefully.""" + # Setup + manager = MCPHostConfigurationManager() + + # Execute: Invalid host should be handled gracefully + result = manager.get_server_config("invalid-host", "test-server") + + # Validate: Should return None, not raise exception + self.assertIsNone(result) + + +class TestPartialUpdateValidation(unittest.TestCase): + """Test suite for partial update validation (Category B).""" + + @regression_test + def test_configure_update_single_field_timeout(self): + """Test B1: Update single field (timeout) preserves other fields.""" + # Setup: Existing server with timeout=30 + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "test_key"}, + timeout=30 + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update only timeout (use Gemini which supports timeout) + result = handle_mcp_configure( + host="gemini", + server_name="test-server", + command=None, + args=None, + env=None, + 
url=None, + header=None, + timeout=60, # Only timeout provided + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: configure_server was called with merged config + mock_manager.configure_server.assert_called_once() + call_args = mock_manager.configure_server.call_args + host_config = call_args[1]['server_config'] + + # Timeout should be updated (Gemini supports timeout) + self.assertEqual(host_config.timeout, 60) + # Other fields should be preserved + self.assertEqual(host_config.command, "python") + self.assertEqual(host_config.args, ["server.py"]) + + @regression_test + def test_configure_update_env_vars_only(self): + """Test B2: Update environment variables only preserves other fields.""" + # Setup: Existing server with env vars + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "old_key"} + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update only env vars + result = handle_mcp_configure( + host="claude-desktop", + server_name="test-server", + command=None, + args=None, + env=["NEW_KEY=new_value"], # Only env provided + url=None, + header=None, + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: configure_server was called with merged config + 
mock_manager.configure_server.assert_called_once() + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + + # Env should be updated + self.assertEqual(omni_config.env, {"NEW_KEY": "new_value"}) + # Other fields should be preserved + self.assertEqual(omni_config.command, "python") + self.assertEqual(omni_config.args, ["server.py"]) + + @regression_test + def test_configure_create_requires_command_or_url(self): + """Test B4: Create operation requires command or url.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = None # Server doesn't exist + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Create without command or url + result = handle_mcp_configure( + host="claude-desktop", + server_name="new-server", + command=None, # No command + args=None, + env=None, + url=None, # No url + header=None, + timeout=60, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should fail with error + self.assertEqual(result, 1) + + # Validate: Error message mentions command or url + mock_print.assert_called() + error_message = str(mock_print.call_args[0][0]) + self.assertIn("command", error_message.lower()) + self.assertIn("url", error_message.lower()) + + @regression_test + def test_configure_update_allows_no_command_url(self): + """Test B5: Update operation allows omitting command/url.""" + # Setup: Existing server with command + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"] + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + 
mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update without command or url + result = handle_mcp_configure( + host="claude-desktop", + server_name="test-server", + command=None, # No command + args=None, + env=None, + url=None, # No url + header=None, + timeout=60, # Only timeout + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Command should be preserved + mock_manager.configure_server.assert_called_once() + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + self.assertEqual(omni_config.command, "python") + + +class TestFieldPreservation(unittest.TestCase): + """Test suite for field preservation verification (Category C).""" + + @regression_test + def test_configure_update_preserves_unspecified_fields(self): + """Test C1: Unspecified fields remain unchanged during update.""" + # Setup: Existing server with multiple fields + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "test_key"}, + timeout=30 + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update only timeout (use Gemini which supports timeout) + result = handle_mcp_configure( + host="gemini", + server_name="test-server", + command=None, + args=None, + env=None, + url=None, + header=None, + timeout=60, # Only timeout 
updated + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate + self.assertEqual(result, 0) + call_args = mock_manager.configure_server.call_args + host_config = call_args[1]['server_config'] + + # Timeout updated (Gemini supports timeout) + self.assertEqual(host_config.timeout, 60) + # All other fields preserved + self.assertEqual(host_config.command, "python") + self.assertEqual(host_config.args, ["server.py"]) + self.assertEqual(host_config.env, {"API_KEY": "test_key"}) + + @regression_test + def test_configure_update_dependent_fields(self): + """Test C3+C4: Update dependent fields without parent field.""" + # Scenario 1: Update args without command + existing_cmd_server = MCPServerConfig( + name="cmd-server", + command="python", + args=["old.py"] + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_cmd_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update args without command + result = handle_mcp_configure( + host="claude-desktop", + server_name="cmd-server", + command=None, # Command not provided + args=["new.py"], # Args updated + env=None, + url=None, + header=None, + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + + # Args updated, command preserved + self.assertEqual(omni_config.args, ["new.py"]) + self.assertEqual(omni_config.command, "python") + + # 
Scenario 2: Update headers without url + existing_url_server = MCPServerConfig( + name="url-server", + url="http://localhost:8080", + headers={"Authorization": "Bearer old_token"} + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_url_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Update headers without url + result = handle_mcp_configure( + host="claude-desktop", + server_name="url-server", + command=None, + args=None, + env=None, + url=None, # URL not provided + header=["Authorization=Bearer new_token"], # Headers updated + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + + # Headers updated, url preserved + self.assertEqual(omni_config.headers, {"Authorization": "Bearer new_token"}) + self.assertEqual(omni_config.url, "http://localhost:8080") + + +class TestCommandUrlSwitching(unittest.TestCase): + """Test suite for command/URL switching behavior (Category E) [CRITICAL].""" + + @regression_test + def test_configure_switch_command_to_url(self): + """Test E1: Switch from command-based to URL-based server [CRITICAL].""" + # Setup: Existing command-based server + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "test_key"} + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value 
= existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Switch to URL-based (use gemini which supports URL) + result = handle_mcp_configure( + host="gemini", + server_name="test-server", + command=None, + args=None, + env=None, + url="http://localhost:8080", # Provide URL + header=["Authorization=Bearer token"], # Provide headers + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + + # URL-based fields set + self.assertEqual(omni_config.url, "http://localhost:8080") + self.assertEqual(omni_config.headers, {"Authorization": "Bearer token"}) + # Command-based fields cleared + self.assertIsNone(omni_config.command) + self.assertIsNone(omni_config.args) + # Type field updated to 'sse' (Issue 1) + self.assertEqual(omni_config.type, "sse") + + @regression_test + def test_configure_switch_url_to_command(self): + """Test E2: Switch from URL-based to command-based server [CRITICAL].""" + # Setup: Existing URL-based server + existing_server = MCPServerConfig( + name="test-server", + url="http://localhost:8080", + headers={"Authorization": "Bearer token"} + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Switch to command-based (use gemini which supports both) + result = handle_mcp_configure( + host="gemini", + server_name="test-server", + command="node", # 
Provide command + args=["server.js"], # Provide args + env=None, + url=None, + header=None, + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + + # Command-based fields set + self.assertEqual(omni_config.command, "node") + self.assertEqual(omni_config.args, ["server.js"]) + # URL-based fields cleared + self.assertIsNone(omni_config.url) + self.assertIsNone(omni_config.headers) + # Type field updated to 'stdio' (Issue 1) + self.assertEqual(omni_config.type, "stdio") + + +class TestPartialUpdateIntegration(unittest.TestCase): + """Test suite for end-to-end partial update workflows (Integration Tests).""" + + @integration_test(scope="component") + def test_partial_update_end_to_end_timeout(self): + """Test I1: End-to-end partial update workflow for timeout field.""" + # Setup: Existing server + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"], + timeout=30 + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + with patch('hatch.cli_hatch.generate_conversion_report') as mock_report: + # Mock report to verify UNCHANGED detection + mock_report.return_value = MagicMock() + + # Execute: Full CLI workflow + result = handle_mcp_configure( + host="claude-desktop", + server_name="test-server", + command=None, + args=None, + env=None, + url=None, + header=None, + timeout=60, # Update timeout only + trust=False, + cwd=None, + 
env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Report was generated with old_config for UNCHANGED detection + mock_report.assert_called_once() + call_kwargs = mock_report.call_args[1] + self.assertEqual(call_kwargs['operation'], 'update') + self.assertIsNotNone(call_kwargs.get('old_config')) + + @integration_test(scope="component") + def test_partial_update_end_to_end_switch_type(self): + """Test I2: End-to-end workflow for command/URL switching.""" + # Setup: Existing command-based server + existing_server = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"] + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + with patch('hatch.cli_hatch.generate_conversion_report') as mock_report: + mock_report.return_value = MagicMock() + + # Execute: Switch to URL-based (use gemini which supports URL) + result = handle_mcp_configure( + host="gemini", + server_name="test-server", + command=None, + args=None, + env=None, + url="http://localhost:8080", + header=["Authorization=Bearer token"], + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Server type switched + call_args = mock_manager.configure_server.call_args + omni_config = call_args[1]['server_config'] + self.assertEqual(omni_config.url, "http://localhost:8080") + 
self.assertIsNone(omni_config.command) + + +class TestBackwardCompatibility(unittest.TestCase): + """Test suite for backward compatibility (Regression Tests).""" + + @regression_test + def test_existing_create_operation_unchanged(self): + """Test R1: Existing create operations work identically.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = None # Server doesn't exist + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Create operation with full configuration (use Gemini for timeout support) + result = handle_mcp_configure( + host="gemini", + server_name="new-server", + command="python", + args=["server.py"], + env=["API_KEY=secret"], + url=None, + header=None, + timeout=30, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Server created with all fields + mock_manager.configure_server.assert_called_once() + call_args = mock_manager.configure_server.call_args + host_config = call_args[1]['server_config'] + self.assertEqual(host_config.command, "python") + self.assertEqual(host_config.args, ["server.py"]) + self.assertEqual(host_config.timeout, 30) + + @regression_test + def test_error_messages_remain_clear(self): + """Test R2: Error messages are clear and helpful (modified).""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = None # Server doesn't exist + + with patch('hatch.cli_hatch.print') as mock_print: + # Execute: Create without command or url + result 
= handle_mcp_configure( + host="claude-desktop", + server_name="new-server", + command=None, # No command + args=None, + env=None, + url=None, # No url + header=None, + timeout=60, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should fail + self.assertEqual(result, 1) + + # Validate: Error message is clear + mock_print.assert_called() + error_message = str(mock_print.call_args[0][0]) + self.assertIn("command", error_message.lower()) + self.assertIn("url", error_message.lower()) + # Should mention this is for creating a new server + self.assertTrue( + "creat" in error_message.lower() or "new" in error_message.lower(), + f"Error message should clarify this is for creating: {error_message}" + ) + + +class TestTypeFieldUpdating(unittest.TestCase): + """Test suite for type field updates during transport switching (Issue 1).""" + + @regression_test + def test_type_field_updates_command_to_url(self): + """Test type field updates from 'stdio' to 'sse' when switching to URL.""" + # Setup: Create existing command-based server with type='stdio' + existing_server = MCPServerConfig( + name="test-server", + type="stdio", + command="python", + args=["server.py"] + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print'): + # Execute: Switch to URL-based configuration + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command=None, + args=None, + env=None, + url='http://localhost:8080', + header=None, + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + 
input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Type field updated to 'sse' + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertEqual(server_config.type, "sse") + self.assertIsNone(server_config.command) + self.assertEqual(server_config.url, "http://localhost:8080") + + @regression_test + def test_type_field_updates_url_to_command(self): + """Test type field updates from 'sse' to 'stdio' when switching to command.""" + # Setup: Create existing URL-based server with type='sse' + existing_server = MCPServerConfig( + name="test-server", + type="sse", + url="http://localhost:8080", + headers={"Authorization": "Bearer token"} + ) + + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_manager_class.return_value = mock_manager + mock_manager.get_server_config.return_value = existing_server + mock_manager.configure_server.return_value = MagicMock(success=True) + + with patch('hatch.cli_hatch.print'): + # Execute: Switch to command-based configuration + result = handle_mcp_configure( + host='gemini', + server_name='test-server', + command='python', + args=['server.py'], + env=None, + url=None, + header=None, + timeout=None, + trust=False, + cwd=None, + env_file=None, + http_url=None, + include_tools=None, + exclude_tools=None, + input=None, + no_backup=False, + dry_run=False, + auto_approve=True + ) + + # Validate: Should succeed + self.assertEqual(result, 0) + + # Validate: Type field updated to 'stdio' + call_args = mock_manager.configure_server.call_args + server_config = call_args.kwargs['server_config'] + self.assertEqual(server_config.type, "stdio") + self.assertEqual(server_config.command, "python") + self.assertIsNone(server_config.url) + + +if __name__ == '__main__': + unittest.main() + diff --git 
a/tests/test_mcp_environment_integration.py b/tests/test_mcp_environment_integration.py new file mode 100644 index 0000000..47f14a0 --- /dev/null +++ b/tests/test_mcp_environment_integration.py @@ -0,0 +1,520 @@ +""" +Test suite for MCP environment integration. + +This module tests the integration between environment data and MCP host configuration +with the corrected data structure. +""" + +import unittest +import sys +from pathlib import Path +from datetime import datetime +from unittest.mock import MagicMock, patch +import json + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + +from test_data_utils import MCPHostConfigTestDataLoader +from hatch.mcp_host_config.models import ( + MCPServerConfig, EnvironmentData, EnvironmentPackageEntry, + PackageHostConfiguration, MCPHostType +) +from hatch.environment_manager import HatchEnvironmentManager + + +class TestMCPEnvironmentIntegration(unittest.TestCase): + """Test suite for MCP environment integration with corrected structure.""" + + def setUp(self): + """Set up test environment.""" + self.test_data_loader = MCPHostConfigTestDataLoader() + + @regression_test + def test_environment_data_validation_success(self): + """Test successful environment data validation.""" + env_data = self.test_data_loader.load_corrected_environment_data("simple") + environment = EnvironmentData(**env_data) + + self.assertEqual(environment.name, "test_environment") + self.assertEqual(len(environment.packages), 1) + + package = environment.packages[0] + self.assertEqual(package.name, "weather-toolkit") + self.assertEqual(package.version, "1.0.0") + self.assertIn("claude-desktop", 
package.configured_hosts) + + host_config = package.configured_hosts["claude-desktop"] + self.assertIsInstance(host_config, PackageHostConfiguration) + self.assertIsInstance(host_config.server_config, MCPServerConfig) + + @regression_test + def test_environment_data_multi_host_validation(self): + """Test environment data validation with multiple hosts.""" + env_data = self.test_data_loader.load_corrected_environment_data("multi_host") + environment = EnvironmentData(**env_data) + + self.assertEqual(environment.name, "multi_host_environment") + self.assertEqual(len(environment.packages), 1) + + package = environment.packages[0] + self.assertEqual(package.name, "file-manager") + self.assertEqual(len(package.configured_hosts), 2) + self.assertIn("claude-desktop", package.configured_hosts) + self.assertIn("cursor", package.configured_hosts) + + # Verify both host configurations + claude_config = package.configured_hosts["claude-desktop"] + cursor_config = package.configured_hosts["cursor"] + + self.assertIsInstance(claude_config, PackageHostConfiguration) + self.assertIsInstance(cursor_config, PackageHostConfiguration) + + # Verify server configurations are different for different hosts + self.assertEqual(claude_config.server_config.command, "/usr/local/bin/python") + self.assertEqual(cursor_config.server_config.command, "python") + + @regression_test + def test_package_host_configuration_validation(self): + """Test package host configuration validation.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + + host_config = PackageHostConfiguration( + config_path="~/test/config.json", + configured_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + last_synced=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + server_config=server_config + ) + + self.assertEqual(host_config.config_path, "~/test/config.json") + self.assertIsInstance(host_config.server_config, MCPServerConfig) 
+ self.assertEqual(host_config.server_config.command, "python") + self.assertEqual(len(host_config.server_config.args), 3) + + @regression_test + def test_environment_package_entry_validation_success(self): + """Test successful environment package entry validation.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + + host_config = PackageHostConfiguration( + config_path="~/test/config.json", + configured_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + last_synced=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + server_config=server_config + ) + + package = EnvironmentPackageEntry( + name="test-package", + version="1.0.0", + type="hatch", + source="github:user/test-package", + installed_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + configured_hosts={"claude-desktop": host_config} + ) + + self.assertEqual(package.name, "test-package") + self.assertEqual(package.version, "1.0.0") + self.assertEqual(package.type, "hatch") + self.assertEqual(len(package.configured_hosts), 1) + self.assertIn("claude-desktop", package.configured_hosts) + + @regression_test + def test_environment_package_entry_invalid_host_name(self): + """Test environment package entry validation with invalid host name.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + + host_config = PackageHostConfiguration( + config_path="~/test/config.json", + configured_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + last_synced=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + server_config=server_config + ) + + with self.assertRaises(Exception) as context: + EnvironmentPackageEntry( + name="test-package", + version="1.0.0", + type="hatch", + source="github:user/test-package", + installed_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + configured_hosts={"invalid-host": host_config} # 
Invalid host name + ) + + self.assertIn("Unsupported host", str(context.exception)) + + @regression_test + def test_environment_package_entry_invalid_package_name(self): + """Test environment package entry validation with invalid package name.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + + host_config = PackageHostConfiguration( + config_path="~/test/config.json", + configured_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + last_synced=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + server_config=server_config + ) + + with self.assertRaises(Exception) as context: + EnvironmentPackageEntry( + name="invalid@package!name", # Invalid characters + version="1.0.0", + type="hatch", + source="github:user/test-package", + installed_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + configured_hosts={"claude-desktop": host_config} + ) + + self.assertIn("Invalid package name format", str(context.exception)) + + @regression_test + def test_environment_data_get_mcp_packages(self): + """Test getting MCP packages from environment data.""" + env_data = self.test_data_loader.load_corrected_environment_data("multi_host") + environment = EnvironmentData(**env_data) + + mcp_packages = environment.get_mcp_packages() + + self.assertEqual(len(mcp_packages), 1) + self.assertEqual(mcp_packages[0].name, "file-manager") + self.assertEqual(len(mcp_packages[0].configured_hosts), 2) + + @regression_test + def test_environment_data_serialization_roundtrip(self): + """Test environment data serialization and deserialization.""" + env_data = self.test_data_loader.load_corrected_environment_data("simple") + environment = EnvironmentData(**env_data) + + # Serialize and deserialize + serialized = environment.model_dump() + roundtrip_environment = EnvironmentData(**serialized) + + self.assertEqual(environment.name, roundtrip_environment.name) + 
self.assertEqual(len(environment.packages), len(roundtrip_environment.packages)) + + original_package = environment.packages[0] + roundtrip_package = roundtrip_environment.packages[0] + + self.assertEqual(original_package.name, roundtrip_package.name) + self.assertEqual(original_package.version, roundtrip_package.version) + self.assertEqual(len(original_package.configured_hosts), len(roundtrip_package.configured_hosts)) + + # Verify host configuration roundtrip + original_host_config = original_package.configured_hosts["claude-desktop"] + roundtrip_host_config = roundtrip_package.configured_hosts["claude-desktop"] + + self.assertEqual(original_host_config.config_path, roundtrip_host_config.config_path) + self.assertEqual(original_host_config.server_config.command, roundtrip_host_config.server_config.command) + + @regression_test + def test_corrected_environment_structure_single_server_per_package(self): + """Test that corrected environment structure enforces single server per package.""" + env_data = self.test_data_loader.load_corrected_environment_data("simple") + environment = EnvironmentData(**env_data) + + # Verify single server per package constraint + for package in environment.packages: + # Each package should have one server configuration per host + for host_name, host_config in package.configured_hosts.items(): + self.assertIsInstance(host_config, PackageHostConfiguration) + self.assertIsInstance(host_config.server_config, MCPServerConfig) + + # The server configuration should be for this specific package + # (In real usage, the server would be the package's MCP server) + + @regression_test + def test_environment_data_json_serialization(self): + """Test JSON serialization compatibility.""" + import json + + env_data = self.test_data_loader.load_corrected_environment_data("simple") + environment = EnvironmentData(**env_data) + + # Test JSON serialization + json_str = environment.model_dump_json() + self.assertIsInstance(json_str, str) + + # Test JSON 
deserialization + parsed_data = json.loads(json_str) + roundtrip_environment = EnvironmentData(**parsed_data) + + self.assertEqual(environment.name, roundtrip_environment.name) + self.assertEqual(len(environment.packages), len(roundtrip_environment.packages)) + + +class TestMCPHostTypeIntegration(unittest.TestCase): + """Test suite for MCP host type integration.""" + + @regression_test + def test_mcp_host_type_enum_values(self): + """Test MCP host type enum values.""" + # Verify all expected host types are available + expected_hosts = [ + "claude-desktop", "claude-code", "vscode", + "cursor", "lmstudio", "gemini" + ] + + for host_name in expected_hosts: + host_type = MCPHostType(host_name) + self.assertEqual(host_type.value, host_name) + + @regression_test + def test_mcp_host_type_invalid_value(self): + """Test MCP host type with invalid value.""" + with self.assertRaises(ValueError): + MCPHostType("invalid-host") + + +class TestEnvironmentManagerHostSync(unittest.TestCase): + """Test suite for EnvironmentManager host synchronization methods.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + + # Load test fixture data + fixture_path = Path(__file__).parent / "test_data" / "fixtures" / "host_sync_scenarios.json" + with open(fixture_path, 'r') as f: + self.test_data = json.load(f) + + @regression_test + def test_remove_package_host_configuration_success(self): + """Test successful removal of host from package tracking. 
+ + Validates: + - Removes specified host from package's configured_hosts + - Updates environments.json file via _save_environments() + - Returns True when removal occurs + - Logs successful removal with package/host details + """ + # Setup: Environment with package having configured_hosts for multiple hosts + env_manager = HatchEnvironmentManager() + env_manager._environments = { + "test-env": self.test_data["remove_server_scenario"]["before"] + } + + with patch.object(env_manager, '_save_environments') as mock_save: + with patch.object(env_manager, 'logger') as mock_logger: + # Action: remove_package_host_configuration(env_name, package_name, hostname) + result = env_manager.remove_package_host_configuration("test-env", "weather-toolkit", "cursor") + + # Assert: Host removed from package, environments.json updated, returns True + self.assertTrue(result) + mock_save.assert_called_once() + mock_logger.info.assert_called_with("Removed host cursor from package weather-toolkit in env test-env") + + # Verify host was actually removed + packages = env_manager._environments["test-env"]["packages"] + weather_pkg = next(pkg for pkg in packages if pkg["name"] == "weather-toolkit") + self.assertNotIn("cursor", weather_pkg["configured_hosts"]) + self.assertIn("claude-desktop", weather_pkg["configured_hosts"]) + + @regression_test + def test_remove_package_host_configuration_not_found(self): + """Test removal when package or host not found. 
+ + Validates: + - Returns False when environment doesn't exist + - Returns False when package not found in environment + - Returns False when host not in package's configured_hosts + - No changes to environments.json when nothing to remove + """ + env_manager = HatchEnvironmentManager() + env_manager._environments = { + "test-env": self.test_data["remove_server_scenario"]["before"] + } + + with patch.object(env_manager, '_save_environments') as mock_save: + # Test scenarios: missing env, missing package, missing host + + # Missing environment + result = env_manager.remove_package_host_configuration("missing-env", "weather-toolkit", "cursor") + self.assertFalse(result) + + # Missing package + result = env_manager.remove_package_host_configuration("test-env", "missing-package", "cursor") + self.assertFalse(result) + + # Missing host + result = env_manager.remove_package_host_configuration("test-env", "weather-toolkit", "missing-host") + self.assertFalse(result) + + # Assert: No file changes when nothing to remove + mock_save.assert_not_called() + + @regression_test + def test_clear_host_from_all_packages_all_envs(self): + """Test host removal across multiple environments. 
+ + Validates: + - Iterates through all environments in _environments + - Removes hostname from all packages' configured_hosts + - Returns correct count of updated package entries + - Calls _save_environments() only once after all updates + """ + # Setup: Multiple environments with packages using same host + env_manager = HatchEnvironmentManager() + env_manager._environments = self.test_data["remove_host_scenario"]["multi_environment_before"] + + with patch.object(env_manager, '_save_environments') as mock_save: + with patch.object(env_manager, 'logger') as mock_logger: + # Action: clear_host_from_all_packages_all_envs(hostname) + updates_count = env_manager.clear_host_from_all_packages_all_envs("cursor") + + # Assert: Host removed from all packages, correct count returned + self.assertEqual(updates_count, 2) # 2 packages had cursor configured + mock_save.assert_called_once() + + # Verify cursor was removed from all packages + for env_name, env_data in env_manager._environments.items(): + for pkg in env_data["packages"]: + configured_hosts = pkg.get("configured_hosts", {}) + self.assertNotIn("cursor", configured_hosts) + + +class TestEnvironmentManagerHostSyncErrorHandling(unittest.TestCase): + """Test suite for error handling and edge cases.""" + + def setUp(self): + """Set up test fixtures.""" + self.env_manager = HatchEnvironmentManager() + + @regression_test + def test_remove_operations_exception_handling(self): + """Test exception handling in remove operations. 
+ + Validates: + - Catches and logs exceptions during removal operations + - Returns False/0 on exceptions rather than crashing + - Provides meaningful error messages in logs + - Maintains environment file integrity on errors + """ + # Setup: Mock scenarios that raise exceptions + # Create environment with package that has the host, so _save_environments will be called + self.env_manager._environments = { + "test-env": { + "packages": [ + { + "name": "test-pkg", + "configured_hosts": { + "test-host": {"config_path": "test"} + } + } + ] + } + } + + with patch.object(self.env_manager, '_save_environments', side_effect=Exception("File error")): + with patch.object(self.env_manager, 'logger') as mock_logger: + # Action: Call remove methods with exception-inducing conditions + result = self.env_manager.remove_package_host_configuration("test-env", "test-pkg", "test-host") + + # Assert: Graceful error handling, no crashes, appropriate returns + self.assertFalse(result) + mock_logger.error.assert_called() + + +class TestCLIHostMutationSync(unittest.TestCase): + """Test suite for CLI integration with environment tracking.""" + + def setUp(self): + """Set up test fixtures.""" + self.mock_env_manager = MagicMock(spec=HatchEnvironmentManager) + + @integration_test(scope="component") + def test_remove_server_updates_environment(self): + """Test that remove server updates current environment tracking. 
+ + Validates: + - CLI remove server calls environment manager update method + - Updates only current environment (not all environments) + - Passes correct parameters (env_name, server_name, hostname) + - Maintains existing CLI behavior and exit codes + """ + from hatch.cli_hatch import handle_mcp_remove_server + from hatch.mcp_host_config import MCPHostConfigurationManager + + # Setup: Environment with server configured on host + self.mock_env_manager.get_current_environment.return_value = "test-env" + + with patch.object(MCPHostConfigurationManager, 'remove_server') as mock_remove: + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_remove.return_value = mock_result + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print'): + # Action: hatch mcp remove server --host + result = handle_mcp_remove_server( + self.mock_env_manager, "test-server", "claude-desktop", + None, False, False, True + ) + + # Assert: Environment manager method called with correct parameters + self.mock_env_manager.get_current_environment.assert_called_once() + self.mock_env_manager.remove_package_host_configuration.assert_called_with( + "test-env", "test-server", "claude-desktop" + ) + + # Assert: Success exit code + self.assertEqual(result, 0) + + @integration_test(scope="component") + def test_remove_host_updates_all_environments(self): + """Test that remove host updates all environment tracking. 
+ + Validates: + - CLI remove host calls global environment update method + - Updates ALL environments (not just current) + - Passes correct hostname parameter + - Reports number of updates performed to user + """ + from hatch.cli_hatch import handle_mcp_remove_host + from hatch.mcp_host_config import MCPHostConfigurationManager + + # Setup: Multiple environments with packages using the host + with patch.object(MCPHostConfigurationManager, 'remove_host_configuration') as mock_remove: + mock_result = MagicMock() + mock_result.success = True + mock_result.backup_path = None + mock_remove.return_value = mock_result + + self.mock_env_manager.clear_host_from_all_packages_all_envs.return_value = 3 + + with patch('hatch.cli_hatch.request_confirmation', return_value=True): + with patch('builtins.print') as mock_print: + # Action: hatch mcp remove host + result = handle_mcp_remove_host( + self.mock_env_manager, "cursor", False, False, True + ) + + # Assert: Global environment update method called + self.mock_env_manager.clear_host_from_all_packages_all_envs.assert_called_with("cursor") + + # Assert: User informed of update count + print_calls = [call[0][0] for call in mock_print.call_args_list] + output = ' '.join(print_calls) + self.assertIn("Updated 3 package entries across environments", output) + + # Assert: Success exit code + self.assertEqual(result, 0) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_host_config_backup.py b/tests/test_mcp_host_config_backup.py new file mode 100644 index 0000000..55b5f5e --- /dev/null +++ b/tests/test_mcp_host_config_backup.py @@ -0,0 +1,257 @@ +"""Tests for MCPHostConfigBackupManager. + +This module contains tests for the MCP host configuration backup functionality, +including backup creation, restoration, and management with host-agnostic design. 
+""" + +import unittest +import tempfile +import shutil +import json +from pathlib import Path +from datetime import datetime +from unittest.mock import patch, Mock + +from wobble.decorators import regression_test, integration_test, slow_test +from test_data_utils import MCPBackupTestDataLoader + +from hatch.mcp_host_config.backup import ( + MCPHostConfigBackupManager, + BackupInfo, + BackupResult, + BackupError +) + + +class TestMCPHostConfigBackupManager(unittest.TestCase): + """Test MCPHostConfigBackupManager core functionality with host-agnostic design.""" + + def setUp(self): + """Set up test environment with host-agnostic configurations.""" + self.temp_dir = Path(tempfile.mkdtemp(prefix="test_mcp_backup_")) + self.backup_root = self.temp_dir / "backups" + self.config_dir = self.temp_dir / "configs" + self.config_dir.mkdir(parents=True) + + # Initialize test data loader + self.test_data = MCPBackupTestDataLoader() + + # Create host-agnostic test configuration files + self.test_configs = {} + for hostname in ['claude-desktop', 'vscode', 'cursor', 'lmstudio']: + config_data = self.test_data.load_host_agnostic_config("simple_server") + config_file = self.config_dir / f"{hostname}_config.json" + with open(config_file, 'w') as f: + json.dump(config_data, f, indent=2) + self.test_configs[hostname] = config_file + + self.backup_manager = MCPHostConfigBackupManager(backup_root=self.backup_root) + + def tearDown(self): + """Clean up test environment.""" + shutil.rmtree(self.temp_dir, ignore_errors=True) + + @regression_test + def test_backup_directory_creation(self): + """Test automatic backup directory creation.""" + self.assertTrue(self.backup_root.exists()) + self.assertTrue(self.backup_root.is_dir()) + + @regression_test + def test_create_backup_success_all_hosts(self): + """Test successful backup creation for all supported host types.""" + for hostname, config_file in self.test_configs.items(): + with self.subTest(hostname=hostname): + result = 
self.backup_manager.create_backup(config_file, hostname) + + # Validate BackupResult Pydantic model + self.assertIsInstance(result, BackupResult) + self.assertTrue(result.success) + self.assertIsNotNone(result.backup_path) + self.assertTrue(result.backup_path.exists()) + self.assertGreater(result.backup_size, 0) + self.assertEqual(result.original_size, result.backup_size) + + # Verify backup filename format (host-agnostic) + expected_pattern = rf"mcp\.json\.{hostname}\.\d{{8}}_\d{{6}}_\d{{6}}" + self.assertRegex(result.backup_path.name, expected_pattern) + + @regression_test + def test_create_backup_nonexistent_file(self): + """Test backup creation with nonexistent source file.""" + nonexistent = self.config_dir / "nonexistent.json" + result = self.backup_manager.create_backup(nonexistent, "claude-desktop") + + self.assertFalse(result.success) + self.assertIsNotNone(result.error_message) + self.assertIn("not found", result.error_message.lower()) + + @regression_test + def test_backup_content_integrity_host_agnostic(self): + """Test backup content matches original for any host configuration format.""" + hostname = 'claude-desktop' + config_file = self.test_configs[hostname] + original_content = config_file.read_text() + + result = self.backup_manager.create_backup(config_file, hostname) + + self.assertTrue(result.success) + backup_content = result.backup_path.read_text() + self.assertEqual(original_content, backup_content) + + # Verify JSON structure is preserved (host-agnostic validation) + original_json = json.loads(original_content) + backup_json = json.loads(backup_content) + self.assertEqual(original_json, backup_json) + + @regression_test + def test_multiple_backups_same_host(self): + """Test creating multiple backups for same host.""" + hostname = 'vscode' + config_file = self.test_configs[hostname] + + # Create first backup + result1 = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(result1.success) + + # Modify config and create 
second backup + modified_config = self.test_data.load_host_agnostic_config("complex_server") + with open(config_file, 'w') as f: + json.dump(modified_config, f, indent=2) + + result2 = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(result2.success) + + # Verify both backups exist and are different + self.assertTrue(result1.backup_path.exists()) + self.assertTrue(result2.backup_path.exists()) + self.assertNotEqual(result1.backup_path, result2.backup_path) + + @regression_test + def test_list_backups_empty(self): + """Test listing backups when none exist.""" + backups = self.backup_manager.list_backups("claude-desktop") + self.assertEqual(len(backups), 0) + + @regression_test + def test_list_backups_pydantic_validation(self): + """Test listing backups returns valid Pydantic models.""" + hostname = 'cursor' + config_file = self.test_configs[hostname] + + # Create multiple backups + self.backup_manager.create_backup(config_file, hostname) + self.backup_manager.create_backup(config_file, hostname) + + backups = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups), 2) + + # Verify BackupInfo Pydantic model validation + for backup in backups: + self.assertIsInstance(backup, BackupInfo) + self.assertEqual(backup.hostname, hostname) + self.assertIsInstance(backup.timestamp, datetime) + self.assertTrue(backup.file_path.exists()) + self.assertGreater(backup.file_size, 0) + + # Test Pydantic serialization + backup_dict = backup.dict() + self.assertIn('hostname', backup_dict) + self.assertIn('timestamp', backup_dict) + + # Test JSON serialization + backup_json = backup.json() + self.assertIsInstance(backup_json, str) + + # Verify sorting (newest first) + self.assertGreaterEqual(backups[0].timestamp, backups[1].timestamp) + + @regression_test + def test_backup_validation_unsupported_hostname(self): + """Test Pydantic validation rejects unsupported hostnames.""" + config_file = self.test_configs['claude-desktop'] + + # Test with 
unsupported hostname + result = self.backup_manager.create_backup(config_file, 'unsupported-host') + + self.assertFalse(result.success) + self.assertIn('unsupported', result.error_message.lower()) + + @regression_test + def test_multiple_hosts_isolation(self): + """Test backup isolation between different host types.""" + # Create backups for multiple hosts + results = {} + for hostname, config_file in self.test_configs.items(): + results[hostname] = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(results[hostname].success) + + # Verify separate backup directories + for hostname in self.test_configs.keys(): + backups = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups), 1) + + # Verify backup isolation (different directories) + backup_dir = backups[0].file_path.parent + self.assertEqual(backup_dir.name, hostname) + + # Verify no cross-contamination + for other_hostname in self.test_configs.keys(): + if other_hostname != hostname: + other_backups = self.backup_manager.list_backups(other_hostname) + self.assertNotEqual( + backups[0].file_path.parent, + other_backups[0].file_path.parent + ) + + @regression_test + def test_clean_backups_older_than_days(self): + """Test cleaning backups older than specified days.""" + hostname = 'lmstudio' + config_file = self.test_configs[hostname] + + # Create backup + result = self.backup_manager.create_backup(config_file, hostname) + self.assertTrue(result.success) + + # Mock old backup by modifying timestamp + old_backup_path = result.backup_path.parent / "mcp.json.lmstudio.20200101_120000_000000" + shutil.copy2(result.backup_path, old_backup_path) + + # Clean backups older than 1 day (should remove the old one) + cleaned_count = self.backup_manager.clean_backups(hostname, older_than_days=1) + + # Verify old backup was cleaned + self.assertGreater(cleaned_count, 0) + self.assertFalse(old_backup_path.exists()) + self.assertTrue(result.backup_path.exists()) # Recent backup should 
remain + + @regression_test + def test_clean_backups_keep_count(self): + """Test cleaning backups to keep only specified count.""" + hostname = 'claude-desktop' + config_file = self.test_configs[hostname] + + # Create multiple backups + for i in range(5): + self.backup_manager.create_backup(config_file, hostname) + + # Verify 5 backups exist + backups_before = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups_before), 5) + + # Clean to keep only 2 backups + cleaned_count = self.backup_manager.clean_backups(hostname, keep_count=2) + + # Verify only 2 backups remain + backups_after = self.backup_manager.list_backups(hostname) + self.assertEqual(len(backups_after), 2) + self.assertEqual(cleaned_count, 3) + + # Verify newest backups were kept + for backup in backups_after: + self.assertIn(backup, backups_before[:2]) # Should be the first 2 (newest) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_host_configuration_manager.py b/tests/test_mcp_host_configuration_manager.py new file mode 100644 index 0000000..9ff6d46 --- /dev/null +++ b/tests/test_mcp_host_configuration_manager.py @@ -0,0 +1,331 @@ +""" +Test suite for MCP host configuration manager. + +This module tests the core configuration manager with consolidated models +and integration with backup system. 
+""" + +import unittest +import sys +from pathlib import Path +import tempfile +import json +import os + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + +from test_data_utils import MCPHostConfigTestDataLoader +from hatch.mcp_host_config.host_management import MCPHostConfigurationManager, MCPHostRegistry, register_host_strategy +from hatch.mcp_host_config.models import MCPHostType, MCPServerConfig, HostConfiguration, ConfigurationResult, SyncResult +from hatch.mcp_host_config.strategies import MCPHostStrategy + + +class TestMCPHostConfigurationManager(unittest.TestCase): + """Test suite for MCP host configuration manager.""" + + def setUp(self): + """Set up test environment.""" + self.test_data_loader = MCPHostConfigTestDataLoader() + self.temp_dir = tempfile.mkdtemp() + self.temp_config_path = Path(self.temp_dir) / "test_config.json" + + # Clear registry before each test + MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + # Store temp_config_path for strategy access + temp_config_path = self.temp_config_path + + # Register test strategy + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestStrategy(MCPHostStrategy): + def get_config_path(self): + return temp_config_path + + def is_host_available(self): + return True + + def read_configuration(self): + if temp_config_path.exists(): + with open(temp_config_path, 'r') as f: + data = json.load(f) + + servers = {} + if "mcpServers" in data: + for name, config in data["mcpServers"].items(): + servers[name] = MCPServerConfig(**config) + + return HostConfiguration(servers=servers) + else: + return HostConfiguration(servers={}) + + def 
write_configuration(self, config, no_backup=False): + try: + # Convert MCPServerConfig objects to dict + servers_dict = {} + for name, server_config in config.servers.items(): + servers_dict[name] = server_config.model_dump(exclude_none=True) + + # Create configuration data + config_data = {"mcpServers": servers_dict} + + # Write to file + with open(temp_config_path, 'w') as f: + json.dump(config_data, f, indent=2) + + return True + except Exception: + return False + + def validate_server_config(self, server_config): + return True + + self.manager = MCPHostConfigurationManager() + self.temp_config_path = self.temp_config_path + + def tearDown(self): + """Clean up test environment.""" + # Clean up temp files + if self.temp_config_path.exists(): + self.temp_config_path.unlink() + os.rmdir(self.temp_dir) + + # Clear registry after each test + MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + @regression_test + def test_configure_server_success(self): + """Test successful server configuration.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + # Add name attribute for the manager to use + server_config.name = "test_server" + + result = self.manager.configure_server( + server_config=server_config, + hostname="claude-desktop" + ) + + self.assertIsInstance(result, ConfigurationResult) + if not result.success: + print(f"Configuration failed: {result.error_message}") + self.assertTrue(result.success) + self.assertIsNone(result.error_message) + self.assertEqual(result.hostname, "claude-desktop") + self.assertEqual(result.server_name, "test_server") + + # Verify configuration was written + self.assertTrue(self.temp_config_path.exists()) + + # Verify configuration content + with open(self.temp_config_path, 'r') as f: + config_data = json.load(f) + + self.assertIn("mcpServers", config_data) + self.assertIn("test_server", config_data["mcpServers"]) + 
self.assertEqual(config_data["mcpServers"]["test_server"]["command"], "python") + + @regression_test + def test_configure_server_unknown_host_type(self): + """Test configuration with unknown host type.""" + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + server_config.name = "test_server" + + # Clear registry to simulate unknown host type + MCPHostRegistry._strategies.clear() + + result = self.manager.configure_server( + server_config=server_config, + hostname="claude-desktop" + ) + + self.assertIsInstance(result, ConfigurationResult) + self.assertFalse(result.success) + self.assertIsNotNone(result.error_message) + self.assertIn("Unknown host type", result.error_message) + + @regression_test + def test_configure_server_validation_failure(self): + """Test configuration with validation failure.""" + # Create server config that will fail validation at the strategy level + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + server_config.name = "test_server" + + # Override the test strategy to always fail validation + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class FailingValidationStrategy(MCPHostStrategy): + def get_config_path(self): + return self.temp_config_path + + def is_host_available(self): + return True + + def read_configuration(self): + return HostConfiguration(servers={}) + + def write_configuration(self, config, no_backup=False): + return True + + def validate_server_config(self, server_config): + return False # Always fail validation + + result = self.manager.configure_server( + server_config=server_config, + hostname="claude-desktop" + ) + + self.assertIsInstance(result, ConfigurationResult) + self.assertFalse(result.success) + self.assertIsNotNone(result.error_message) + self.assertIn("Server configuration invalid", result.error_message) + + @regression_test + def 
test_remove_server_success(self): + """Test successful server removal.""" + # First configure a server + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + server_config.name = "test_server" + + self.manager.configure_server( + server_config=server_config, + hostname="claude-desktop" + ) + + # Verify server exists + with open(self.temp_config_path, 'r') as f: + config_data = json.load(f) + self.assertIn("test_server", config_data["mcpServers"]) + + # Remove server + result = self.manager.remove_server( + server_name="test_server", + hostname="claude-desktop" + ) + + self.assertIsInstance(result, ConfigurationResult) + self.assertTrue(result.success) + self.assertIsNone(result.error_message) + + # Verify server was removed + with open(self.temp_config_path, 'r') as f: + config_data = json.load(f) + self.assertNotIn("test_server", config_data["mcpServers"]) + + @regression_test + def test_remove_server_not_found(self): + """Test removing non-existent server.""" + result = self.manager.remove_server( + server_name="nonexistent_server", + hostname="claude-desktop" + ) + + self.assertIsInstance(result, ConfigurationResult) + self.assertFalse(result.success) + self.assertIsNotNone(result.error_message) + self.assertIn("Server 'nonexistent_server' not found", result.error_message) + + @regression_test + def test_sync_environment_to_hosts_success(self): + """Test successful environment synchronization.""" + from hatch.mcp_host_config.models import EnvironmentData, EnvironmentPackageEntry, PackageHostConfiguration + from datetime import datetime + + # Create test environment data + server_config_data = self.test_data_loader.load_mcp_server_config("local") + server_config = MCPServerConfig(**server_config_data) + + host_config = PackageHostConfiguration( + config_path="~/test/config.json", + configured_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + 
last_synced=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + server_config=server_config + ) + + package = EnvironmentPackageEntry( + name="test-package", + version="1.0.0", + type="hatch", + source="github:user/test-package", + installed_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + configured_hosts={"claude-desktop": host_config} + ) + + env_data = EnvironmentData( + name="test_env", + description="Test environment", + created_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + packages=[package] + ) + + # Sync environment to hosts + result = self.manager.sync_environment_to_hosts( + env_data=env_data, + target_hosts=["claude-desktop"] + ) + + self.assertIsInstance(result, SyncResult) + self.assertTrue(result.success) + self.assertEqual(result.servers_synced, 1) + self.assertEqual(result.hosts_updated, 1) + self.assertEqual(len(result.results), 1) + + # Verify configuration was written + self.assertTrue(self.temp_config_path.exists()) + + # Verify configuration content + with open(self.temp_config_path, 'r') as f: + config_data = json.load(f) + + self.assertIn("mcpServers", config_data) + self.assertIn("test-package", config_data["mcpServers"]) + self.assertEqual(config_data["mcpServers"]["test-package"]["command"], "python") + + @regression_test + def test_sync_environment_to_hosts_no_servers(self): + """Test environment synchronization with no servers.""" + from hatch.mcp_host_config.models import EnvironmentData + from datetime import datetime + + # Create empty environment data + env_data = EnvironmentData( + name="empty_env", + description="Empty environment", + created_at=datetime.fromisoformat("2025-09-21T10:00:00.000000"), + packages=[] + ) + + # Sync environment to hosts + result = self.manager.sync_environment_to_hosts( + env_data=env_data, + target_hosts=["claude-desktop"] + ) + + self.assertIsInstance(result, SyncResult) + self.assertTrue(result.success) # Success even with no servers + 
self.assertEqual(result.servers_synced, 0) + self.assertEqual(result.hosts_updated, 1) + self.assertEqual(len(result.results), 1) + + # Verify result message + self.assertEqual(result.results[0].error_message, "No servers to sync") + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_host_registry_decorator.py b/tests/test_mcp_host_registry_decorator.py new file mode 100644 index 0000000..2bc88ed --- /dev/null +++ b/tests/test_mcp_host_registry_decorator.py @@ -0,0 +1,348 @@ +""" +Test suite for decorator-based host registry. + +This module tests the decorator-based strategy registration system +following Hatchling patterns with inheritance validation. +""" + +import unittest +import sys +from pathlib import Path + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + +from hatch.mcp_host_config.host_management import MCPHostRegistry, register_host_strategy, MCPHostStrategy +from hatch.mcp_host_config.models import MCPHostType, MCPServerConfig, HostConfiguration +from pathlib import Path + + +class TestMCPHostRegistryDecorator(unittest.TestCase): + """Test suite for decorator-based host registry.""" + + def setUp(self): + """Set up test environment.""" + # Clear registry before each test + MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + def tearDown(self): + """Clean up test environment.""" + # Clear registry after each test + MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + @regression_test + def test_decorator_registration_functionality(self): + """Test that decorator registration works correctly.""" + + 
@register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestClaudeStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test/path") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + # Verify registration + self.assertIn(MCPHostType.CLAUDE_DESKTOP, MCPHostRegistry._strategies) + self.assertEqual( + MCPHostRegistry._strategies[MCPHostType.CLAUDE_DESKTOP], + TestClaudeStrategy + ) + + # Verify instance creation + strategy = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + self.assertIsInstance(strategy, TestClaudeStrategy) + + @regression_test + def test_decorator_registration_with_inheritance(self): + """Test decorator registration with inheritance patterns.""" + + class TestClaudeBase(MCPHostStrategy): + def __init__(self): + self.company_origin = "Anthropic" + self.config_format = "claude_format" + + def get_config_key(self): + return "mcpServers" + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestClaudeDesktop(TestClaudeBase): + def get_config_path(self): + return Path("/test/claude") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + strategy = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + + # Verify inheritance properties + self.assertEqual(strategy.company_origin, "Anthropic") + self.assertEqual(strategy.config_format, "claude_format") + self.assertEqual(strategy.get_config_key(), "mcpServers") + self.assertIsInstance(strategy, TestClaudeBase) + + @regression_test + def test_decorator_registration_duplicate_warning(self): + """Test warning on duplicate strategy registration.""" + import logging + + class 
BaseTestStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class FirstStrategy(BaseTestStrategy): + pass + + # Register second strategy for same host type - should log warning + with self.assertLogs('hatch.mcp_host_config.host_management', level='WARNING') as log: + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class SecondStrategy(BaseTestStrategy): + pass + + # Verify warning was logged + self.assertTrue(any("Overriding existing strategy" in message for message in log.output)) + + # Verify second strategy is now registered + strategy = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + self.assertIsInstance(strategy, SecondStrategy) + + @regression_test + def test_decorator_registration_inheritance_validation(self): + """Test that decorator validates inheritance from MCPHostStrategy.""" + + # Should raise ValueError for non-MCPHostStrategy class + with self.assertRaises(ValueError) as context: + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class InvalidStrategy: # Does not inherit from MCPHostStrategy + pass + + self.assertIn("must inherit from MCPHostStrategy", str(context.exception)) + + @regression_test + def test_registry_get_strategy_unknown_host_type(self): + """Test error handling for unknown host type.""" + # Clear registry to ensure no strategies are registered + MCPHostRegistry._strategies.clear() + + with self.assertRaises(ValueError) as context: + MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + + self.assertIn("Unknown host type", str(context.exception)) + self.assertIn("Available: []", str(context.exception)) + + @regression_test + def test_registry_singleton_instance_behavior(self): + 
"""Test that registry returns singleton instances.""" + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestStrategy(MCPHostStrategy): + def __init__(self): + self.instance_id = id(self) + + def get_config_path(self): + return Path("/test") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + # Get strategy multiple times + strategy1 = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + strategy2 = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + + # Should be the same instance + self.assertIs(strategy1, strategy2) + self.assertEqual(strategy1.instance_id, strategy2.instance_id) + + @regression_test + def test_registry_detect_available_hosts(self): + """Test host detection functionality.""" + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class AvailableStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test") + def is_host_available(self): + return True # Available + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + @register_host_strategy(MCPHostType.CURSOR) + class UnavailableStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test") + def is_host_available(self): + return False # Not available + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + @register_host_strategy(MCPHostType.VSCODE) + class ErrorStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test") + def is_host_available(self): + raise Exception("Detection error") # Error during detection + def 
read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + available_hosts = MCPHostRegistry.detect_available_hosts() + + # Only the available strategy should be detected + self.assertIn(MCPHostType.CLAUDE_DESKTOP, available_hosts) + self.assertNotIn(MCPHostType.CURSOR, available_hosts) + self.assertNotIn(MCPHostType.VSCODE, available_hosts) + + @regression_test + def test_registry_family_mappings(self): + """Test family host mappings.""" + claude_family = MCPHostRegistry.get_family_hosts("claude") + cursor_family = MCPHostRegistry.get_family_hosts("cursor") + unknown_family = MCPHostRegistry.get_family_hosts("unknown") + + # Verify family mappings + self.assertIn(MCPHostType.CLAUDE_DESKTOP, claude_family) + self.assertIn(MCPHostType.CLAUDE_CODE, claude_family) + self.assertIn(MCPHostType.CURSOR, cursor_family) + self.assertIn(MCPHostType.LMSTUDIO, cursor_family) + self.assertEqual(unknown_family, []) + + @regression_test + def test_registry_get_host_config_path(self): + """Test getting host configuration path through registry.""" + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestStrategy(MCPHostStrategy): + def get_config_path(self): + return Path("/test/claude/config.json") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + def validate_server_config(self, server_config): + return True + + config_path = MCPHostRegistry.get_host_config_path(MCPHostType.CLAUDE_DESKTOP) + self.assertEqual(config_path, Path("/test/claude/config.json")) + + +class TestFamilyBasedStrategyRegistration(unittest.TestCase): + """Test suite for family-based strategy registration with decorators.""" + + def setUp(self): + """Set up test environment.""" + # Clear registry before each test + 
MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + def tearDown(self): + """Clean up test environment.""" + # Clear registry after each test + MCPHostRegistry._strategies.clear() + MCPHostRegistry._instances.clear() + + @regression_test + def test_claude_family_decorator_registration(self): + """Test Claude family strategies register with decorators.""" + + class TestClaudeBase(MCPHostStrategy): + def __init__(self): + self.company_origin = "Anthropic" + self.config_format = "claude_format" + + def validate_server_config(self, server_config): + # Claude family accepts any valid command or URL + if server_config.command or server_config.url: + return True + return False + + @register_host_strategy(MCPHostType.CLAUDE_DESKTOP) + class TestClaudeDesktop(TestClaudeBase): + def get_config_path(self): + return Path("/test/claude_desktop") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + + @register_host_strategy(MCPHostType.CLAUDE_CODE) + class TestClaudeCode(TestClaudeBase): + def get_config_path(self): + return Path("/test/claude_code") + def is_host_available(self): + return True + def read_configuration(self): + return HostConfiguration() + def write_configuration(self, config, no_backup=False): + return True + + # Verify both strategies are registered + claude_desktop = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_DESKTOP) + claude_code = MCPHostRegistry.get_strategy(MCPHostType.CLAUDE_CODE) + + # Verify inheritance properties + self.assertEqual(claude_desktop.company_origin, "Anthropic") + self.assertEqual(claude_code.company_origin, "Anthropic") + self.assertIsInstance(claude_desktop, TestClaudeBase) + self.assertIsInstance(claude_code, TestClaudeBase) + + # Verify family mappings + claude_family = MCPHostRegistry.get_family_hosts("claude") + self.assertIn(MCPHostType.CLAUDE_DESKTOP, claude_family) + 
self.assertIn(MCPHostType.CLAUDE_CODE, claude_family) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_pydantic_architecture_v4.py b/tests/test_mcp_pydantic_architecture_v4.py new file mode 100644 index 0000000..4a332d9 --- /dev/null +++ b/tests/test_mcp_pydantic_architecture_v4.py @@ -0,0 +1,603 @@ +""" +Test suite for Round 04 v4 Pydantic Model Hierarchy. + +This module tests the new model hierarchy including MCPServerConfigBase, +host-specific models (Gemini, VS Code, Cursor, Claude), MCPServerConfigOmni, +HOST_MODEL_REGISTRY, and from_omni() conversion methods. +""" + +import unittest +import sys +from pathlib import Path + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test +except ImportError: + # Fallback decorator if wobble is not available + def regression_test(func): + return func + +from hatch.mcp_host_config.models import ( + MCPServerConfigBase, + MCPServerConfigGemini, + MCPServerConfigVSCode, + MCPServerConfigCursor, + MCPServerConfigClaude, + MCPServerConfigOmni, + HOST_MODEL_REGISTRY, + MCPHostType +) +from pydantic import ValidationError + + +class TestMCPServerConfigBase(unittest.TestCase): + """Test suite for MCPServerConfigBase model.""" + + @regression_test + def test_base_model_local_server_validation_success(self): + """Test successful local server configuration with type inference.""" + config = MCPServerConfigBase( + name="test-server", + command="python", + args=["server.py"], + env={"API_KEY": "test"} + ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.type, "stdio") # Inferred from command + self.assertEqual(len(config.args), 1) + self.assertEqual(config.env["API_KEY"], "test") + + @regression_test + def test_base_model_remote_server_validation_success(self): + """Test successful remote server configuration with type inference.""" + config = MCPServerConfigBase( + 
name="test-server", + url="https://api.example.com/mcp", + headers={"Authorization": "Bearer token"} + ) + + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertEqual(config.type, "sse") # Inferred from url (default to sse) + self.assertEqual(config.headers["Authorization"], "Bearer token") + + @regression_test + def test_base_model_mutual_exclusion_validation_fails(self): + """Test validation fails when both command and url provided.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfigBase( + name="test-server", + command="python", + url="https://api.example.com/mcp" + ) + + self.assertIn("Cannot specify both 'command' and 'url'", str(context.exception)) + + @regression_test + def test_base_model_type_field_stdio_validation(self): + """Test type=stdio validation.""" + # Valid: type=stdio with command + config = MCPServerConfigBase( + name="test-server", + type="stdio", + command="python" + ) + self.assertEqual(config.type, "stdio") + self.assertEqual(config.command, "python") + + # Invalid: type=stdio without command + with self.assertRaises(ValidationError) as context: + MCPServerConfigBase( + name="test-server", + type="stdio", + url="https://api.example.com/mcp" + ) + self.assertIn("'command' is required for stdio transport", str(context.exception)) + + @regression_test + def test_base_model_type_field_sse_validation(self): + """Test type=sse validation.""" + # Valid: type=sse with url + config = MCPServerConfigBase( + name="test-server", + type="sse", + url="https://api.example.com/mcp" + ) + self.assertEqual(config.type, "sse") + self.assertEqual(config.url, "https://api.example.com/mcp") + + # Invalid: type=sse without url + with self.assertRaises(ValidationError) as context: + MCPServerConfigBase( + name="test-server", + type="sse", + command="python" + ) + self.assertIn("'url' is required for sse/http transports", str(context.exception)) + + @regression_test + def test_base_model_type_field_http_validation(self): 
+ """Test type=http validation.""" + # Valid: type=http with url + config = MCPServerConfigBase( + name="test-server", + type="http", + url="https://api.example.com/mcp" + ) + self.assertEqual(config.type, "http") + self.assertEqual(config.url, "https://api.example.com/mcp") + + # Invalid: type=http without url + with self.assertRaises(ValidationError) as context: + MCPServerConfigBase( + name="test-server", + type="http", + command="python" + ) + self.assertIn("'url' is required for sse/http transports", str(context.exception)) + + @regression_test + def test_base_model_type_field_invalid_value(self): + """Test validation fails for invalid type value.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfigBase( + name="test-server", + type="invalid", + command="python" + ) + + # Pydantic will reject invalid Literal value + self.assertIn("Input should be 'stdio', 'sse' or 'http'", str(context.exception)) + + +class TestMCPServerConfigGemini(unittest.TestCase): + """Test suite for MCPServerConfigGemini model.""" + + @regression_test + def test_gemini_model_with_all_fields(self): + """Test Gemini model with all Gemini-specific fields.""" + config = MCPServerConfigGemini( + name="gemini-server", + command="npx", + args=["-y", "server"], + env={"API_KEY": "test"}, + cwd="/path/to/dir", + timeout=30000, + trust=True, + includeTools=["tool1", "tool2"], + excludeTools=["tool3"] + ) + + # Verify universal fields + self.assertEqual(config.command, "npx") + self.assertEqual(config.type, "stdio") # Inferred + + # Verify Gemini-specific fields + self.assertEqual(config.cwd, "/path/to/dir") + self.assertEqual(config.timeout, 30000) + self.assertTrue(config.trust) + self.assertEqual(len(config.includeTools), 2) + self.assertEqual(len(config.excludeTools), 1) + + @regression_test + def test_gemini_model_minimal_configuration(self): + """Test Gemini model with minimal configuration.""" + config = MCPServerConfigGemini( + name="gemini-server", + command="python" 
+ ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.type, "stdio") # Inferred + self.assertIsNone(config.cwd) + self.assertIsNone(config.timeout) + self.assertIsNone(config.trust) + + @regression_test + def test_gemini_model_field_filtering(self): + """Test Gemini model field filtering with model_dump.""" + config = MCPServerConfigGemini( + name="gemini-server", + command="python", + cwd="/path/to/dir" + ) + + # Use model_dump(exclude_unset=True) to get only set fields + data = config.model_dump(exclude_unset=True) + + # Should include name, command, cwd, type (inferred) + self.assertIn("name", data) + self.assertIn("command", data) + self.assertIn("cwd", data) + self.assertIn("type", data) + + # Should NOT include unset fields + self.assertNotIn("timeout", data) + self.assertNotIn("trust", data) + + +class TestMCPServerConfigVSCode(unittest.TestCase): + """Test suite for MCPServerConfigVSCode model.""" + + @regression_test + def test_vscode_model_with_inputs_array(self): + """Test VS Code model with inputs array.""" + config = MCPServerConfigVSCode( + name="vscode-server", + command="python", + args=["server.py"], + inputs=[ + { + "type": "promptString", + "id": "api-key", + "description": "API Key", + "password": True + } + ] + ) + + self.assertEqual(config.command, "python") + self.assertEqual(len(config.inputs), 1) + self.assertEqual(config.inputs[0]["id"], "api-key") + self.assertTrue(config.inputs[0]["password"]) + + @regression_test + def test_vscode_model_with_envFile(self): + """Test VS Code model with envFile field.""" + config = MCPServerConfigVSCode( + name="vscode-server", + command="python", + envFile=".env" + ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.envFile, ".env") + + @regression_test + def test_vscode_model_minimal_configuration(self): + """Test VS Code model with minimal configuration.""" + config = MCPServerConfigVSCode( + name="vscode-server", + command="python" + ) + + 
self.assertEqual(config.command, "python") + self.assertEqual(config.type, "stdio") # Inferred + self.assertIsNone(config.envFile) + self.assertIsNone(config.inputs) + + +class TestMCPServerConfigCursor(unittest.TestCase): + """Test suite for MCPServerConfigCursor model.""" + + @regression_test + def test_cursor_model_with_envFile(self): + """Test Cursor model with envFile field.""" + config = MCPServerConfigCursor( + name="cursor-server", + command="python", + envFile=".env" + ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.envFile, ".env") + + @regression_test + def test_cursor_model_minimal_configuration(self): + """Test Cursor model with minimal configuration.""" + config = MCPServerConfigCursor( + name="cursor-server", + command="python" + ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.type, "stdio") # Inferred + self.assertIsNone(config.envFile) + + @regression_test + def test_cursor_model_env_with_interpolation_syntax(self): + """Test Cursor model with env containing interpolation syntax.""" + # Our code writes the literal string value + # Cursor handles ${env:NAME}, ${userHome}, etc. 
expansion at runtime + config = MCPServerConfigCursor( + name="cursor-server", + command="python", + env={"API_KEY": "${env:API_KEY}", "HOME": "${userHome}"} + ) + + self.assertEqual(config.env["API_KEY"], "${env:API_KEY}") + self.assertEqual(config.env["HOME"], "${userHome}") + + +class TestMCPServerConfigClaude(unittest.TestCase): + """Test suite for MCPServerConfigClaude model.""" + + @regression_test + def test_claude_model_universal_fields_only(self): + """Test Claude model with universal fields only.""" + config = MCPServerConfigClaude( + name="claude-server", + command="python", + args=["server.py"], + env={"API_KEY": "test"} + ) + + # Verify universal fields work + self.assertEqual(config.command, "python") + self.assertEqual(config.type, "stdio") # Inferred + self.assertEqual(len(config.args), 1) + self.assertEqual(config.env["API_KEY"], "test") + + @regression_test + def test_claude_model_all_transport_types(self): + """Test Claude model supports all transport types.""" + # stdio transport + config_stdio = MCPServerConfigClaude( + name="claude-server", + type="stdio", + command="python" + ) + self.assertEqual(config_stdio.type, "stdio") + + # sse transport + config_sse = MCPServerConfigClaude( + name="claude-server", + type="sse", + url="https://api.example.com/mcp" + ) + self.assertEqual(config_sse.type, "sse") + + # http transport + config_http = MCPServerConfigClaude( + name="claude-server", + type="http", + url="https://api.example.com/mcp" + ) + self.assertEqual(config_http.type, "http") + + +class TestMCPServerConfigOmni(unittest.TestCase): + """Test suite for MCPServerConfigOmni model.""" + + @regression_test + def test_omni_model_all_fields_optional(self): + """Test Omni model with no fields (all optional).""" + # Should not raise ValidationError + config = MCPServerConfigOmni() + + self.assertIsNone(config.name) + self.assertIsNone(config.command) + self.assertIsNone(config.url) + + @regression_test + def 
test_omni_model_with_mixed_host_fields(self): + """Test Omni model with fields from multiple hosts.""" + config = MCPServerConfigOmni( + name="omni-server", + command="python", + cwd="/path/to/dir", # Gemini field + envFile=".env" # VS Code/Cursor field + ) + + self.assertEqual(config.command, "python") + self.assertEqual(config.cwd, "/path/to/dir") + self.assertEqual(config.envFile, ".env") + + @regression_test + def test_omni_model_exclude_unset(self): + """Test Omni model with exclude_unset.""" + config = MCPServerConfigOmni( + name="omni-server", + command="python", + args=["server.py"] + ) + + # Use model_dump(exclude_unset=True) + data = config.model_dump(exclude_unset=True) + + # Should only include set fields + self.assertIn("name", data) + self.assertIn("command", data) + self.assertIn("args", data) + + # Should NOT include unset fields + self.assertNotIn("url", data) + self.assertNotIn("cwd", data) + self.assertNotIn("envFile", data) + + +class TestHostModelRegistry(unittest.TestCase): + """Test suite for HOST_MODEL_REGISTRY dictionary dispatch.""" + + @regression_test + def test_registry_contains_all_host_types(self): + """Test registry contains entries for all MCPHostType values.""" + # Verify registry has entries for all host types + self.assertIn(MCPHostType.GEMINI, HOST_MODEL_REGISTRY) + self.assertIn(MCPHostType.CLAUDE_DESKTOP, HOST_MODEL_REGISTRY) + self.assertIn(MCPHostType.CLAUDE_CODE, HOST_MODEL_REGISTRY) + self.assertIn(MCPHostType.VSCODE, HOST_MODEL_REGISTRY) + self.assertIn(MCPHostType.CURSOR, HOST_MODEL_REGISTRY) + self.assertIn(MCPHostType.LMSTUDIO, HOST_MODEL_REGISTRY) + + # Verify correct model classes + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.GEMINI], MCPServerConfigGemini) + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.CLAUDE_DESKTOP], MCPServerConfigClaude) + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.CLAUDE_CODE], MCPServerConfigClaude) + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.VSCODE], 
MCPServerConfigVSCode) + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.CURSOR], MCPServerConfigCursor) + self.assertEqual(HOST_MODEL_REGISTRY[MCPHostType.LMSTUDIO], MCPServerConfigCursor) + + @regression_test + def test_registry_dictionary_dispatch(self): + """Test dictionary dispatch retrieves correct model class.""" + # Test Gemini + gemini_class = HOST_MODEL_REGISTRY[MCPHostType.GEMINI] + self.assertEqual(gemini_class, MCPServerConfigGemini) + + # Test VS Code + vscode_class = HOST_MODEL_REGISTRY[MCPHostType.VSCODE] + self.assertEqual(vscode_class, MCPServerConfigVSCode) + + # Test Cursor + cursor_class = HOST_MODEL_REGISTRY[MCPHostType.CURSOR] + self.assertEqual(cursor_class, MCPServerConfigCursor) + + # Test Claude Desktop + claude_class = HOST_MODEL_REGISTRY[MCPHostType.CLAUDE_DESKTOP] + self.assertEqual(claude_class, MCPServerConfigClaude) + + +class TestFromOmniConversion(unittest.TestCase): + """Test suite for from_omni() conversion methods.""" + + @regression_test + def test_gemini_from_omni_with_supported_fields(self): + """Test Gemini from_omni with supported fields.""" + omni = MCPServerConfigOmni( + name="gemini-server", + command="npx", + args=["-y", "server"], + cwd="/path/to/dir", + timeout=30000 + ) + + # Convert to Gemini model + gemini = MCPServerConfigGemini.from_omni(omni) + + # Verify all supported fields transferred + self.assertEqual(gemini.name, "gemini-server") + self.assertEqual(gemini.command, "npx") + self.assertEqual(len(gemini.args), 2) + self.assertEqual(gemini.cwd, "/path/to/dir") + self.assertEqual(gemini.timeout, 30000) + + @regression_test + def test_gemini_from_omni_with_unsupported_fields(self): + """Test Gemini from_omni excludes unsupported fields.""" + omni = MCPServerConfigOmni( + name="gemini-server", + command="python", + cwd="/path/to/dir", # Gemini field + envFile=".env" # VS Code field (unsupported by Gemini) + ) + + # Convert to Gemini model + gemini = MCPServerConfigGemini.from_omni(omni) + + # Verify Gemini 
fields transferred + self.assertEqual(gemini.command, "python") + self.assertEqual(gemini.cwd, "/path/to/dir") + + # Verify unsupported field NOT transferred + # (Gemini model doesn't have envFile field) + self.assertFalse(hasattr(gemini, 'envFile') and gemini.envFile is not None) + + @regression_test + def test_vscode_from_omni_with_supported_fields(self): + """Test VS Code from_omni with supported fields.""" + omni = MCPServerConfigOmni( + name="vscode-server", + command="python", + args=["server.py"], + envFile=".env", + inputs=[{"type": "promptString", "id": "api-key"}] + ) + + # Convert to VS Code model + vscode = MCPServerConfigVSCode.from_omni(omni) + + # Verify all supported fields transferred + self.assertEqual(vscode.name, "vscode-server") + self.assertEqual(vscode.command, "python") + self.assertEqual(vscode.envFile, ".env") + self.assertEqual(len(vscode.inputs), 1) + + @regression_test + def test_cursor_from_omni_with_supported_fields(self): + """Test Cursor from_omni with supported fields.""" + omni = MCPServerConfigOmni( + name="cursor-server", + command="python", + args=["server.py"], + envFile=".env" + ) + + # Convert to Cursor model + cursor = MCPServerConfigCursor.from_omni(omni) + + # Verify all supported fields transferred + self.assertEqual(cursor.name, "cursor-server") + self.assertEqual(cursor.command, "python") + self.assertEqual(cursor.envFile, ".env") + + @regression_test + def test_claude_from_omni_with_universal_fields(self): + """Test Claude from_omni with universal fields only.""" + omni = MCPServerConfigOmni( + name="claude-server", + command="python", + args=["server.py"], + env={"API_KEY": "test"}, + type="stdio" + ) + + # Convert to Claude model + claude = MCPServerConfigClaude.from_omni(omni) + + # Verify universal fields transferred + self.assertEqual(claude.name, "claude-server") + self.assertEqual(claude.command, "python") + self.assertEqual(claude.type, "stdio") + self.assertEqual(len(claude.args), 1) + 
self.assertEqual(claude.env["API_KEY"], "test") + + +class TestGeminiDualTransport(unittest.TestCase): + """Test suite for Gemini dual-transport validation (Issue 3).""" + + @regression_test + def test_gemini_sse_transport_with_url(self): + """Test Gemini SSE transport uses url field.""" + config = MCPServerConfigGemini( + name="gemini-server", + type="sse", + url="https://api.example.com/mcp" + ) + + self.assertEqual(config.type, "sse") + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertIsNone(config.httpUrl) + + @regression_test + def test_gemini_http_transport_with_httpUrl(self): + """Test Gemini HTTP transport uses httpUrl field.""" + config = MCPServerConfigGemini( + name="gemini-server", + type="http", + httpUrl="https://api.example.com/mcp" + ) + + self.assertEqual(config.type, "http") + self.assertEqual(config.httpUrl, "https://api.example.com/mcp") + self.assertIsNone(config.url) + + @regression_test + def test_gemini_mutual_exclusion_url_and_httpUrl(self): + """Test Gemini rejects both url and httpUrl simultaneously.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfigGemini( + name="gemini-server", + url="https://api.example.com/sse", + httpUrl="https://api.example.com/http" + ) + + self.assertIn("Cannot specify both 'url' and 'httpUrl'", str(context.exception)) + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_mcp_server_config_models.py b/tests/test_mcp_server_config_models.py new file mode 100644 index 0000000..92d3348 --- /dev/null +++ b/tests/test_mcp_server_config_models.py @@ -0,0 +1,242 @@ +""" +Test suite for consolidated MCPServerConfig Pydantic model. + +This module tests the consolidated MCPServerConfig model that supports +both local and remote server configurations with proper validation. 
+""" + +import unittest +import sys +from pathlib import Path + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test, integration_test +except ImportError: + # Fallback decorators if wobble is not available + def regression_test(func): + return func + + def integration_test(scope="component"): + def decorator(func): + return func + return decorator + +from test_data_utils import MCPHostConfigTestDataLoader +from hatch.mcp_host_config.models import MCPServerConfig +from pydantic import ValidationError + + +class TestMCPServerConfigModels(unittest.TestCase): + """Test suite for consolidated MCPServerConfig Pydantic model.""" + + def setUp(self): + """Set up test environment.""" + self.test_data_loader = MCPHostConfigTestDataLoader() + + @regression_test + def test_mcp_server_config_local_server_validation_success(self): + """Test successful local server configuration validation.""" + config_data = self.test_data_loader.load_mcp_server_config("local") + config = MCPServerConfig(**config_data) + + self.assertEqual(config.command, "python") + self.assertEqual(len(config.args), 3) + self.assertEqual(config.env["API_KEY"], "test") + self.assertTrue(config.is_local_server) + self.assertFalse(config.is_remote_server) + + @regression_test + def test_mcp_server_config_remote_server_validation_success(self): + """Test successful remote server configuration validation.""" + config_data = self.test_data_loader.load_mcp_server_config("remote") + config = MCPServerConfig(**config_data) + + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertEqual(config.headers["Authorization"], "Bearer token") + self.assertFalse(config.is_local_server) + self.assertTrue(config.is_remote_server) + + @regression_test + def test_mcp_server_config_validation_fails_both_command_and_url(self): + """Test validation fails when both command and URL are provided.""" + 
config_data = { + "command": "python", + "args": ["server.py"], + "url": "https://example.com/mcp" # Invalid: both command and URL + } + + with self.assertRaises(ValidationError) as context: + MCPServerConfig(**config_data) + + self.assertIn("Cannot specify both 'command' and 'url'", str(context.exception)) + + @regression_test + def test_mcp_server_config_validation_fails_neither_command_nor_url(self): + """Test validation fails when neither command nor URL are provided.""" + config_data = { + "env": {"TEST": "value"} + # Missing both command and url + } + + with self.assertRaises(ValidationError) as context: + MCPServerConfig(**config_data) + + self.assertIn("Either 'command' (local server) or 'url' (remote server) must be provided", + str(context.exception)) + + @regression_test + def test_mcp_server_config_validation_args_without_command_fails(self): + """Test validation fails when args provided without command.""" + config_data = { + "url": "https://example.com/mcp", + "args": ["--flag"] # Invalid: args without command + } + + with self.assertRaises(ValidationError) as context: + MCPServerConfig(**config_data) + + self.assertIn("'args' can only be specified with 'command'", str(context.exception)) + + @regression_test + def test_mcp_server_config_validation_headers_without_url_fails(self): + """Test validation fails when headers provided without URL.""" + config_data = { + "command": "python", + "headers": {"Authorization": "Bearer token"} # Invalid: headers without URL + } + + with self.assertRaises(ValidationError) as context: + MCPServerConfig(**config_data) + + self.assertIn("'headers' can only be specified with 'url'", str(context.exception)) + + @regression_test + def test_mcp_server_config_url_format_validation(self): + """Test URL format validation.""" + invalid_urls = ["ftp://example.com", "example.com", "not-a-url"] + + for invalid_url in invalid_urls: + with self.assertRaises(ValidationError): + MCPServerConfig(url=invalid_url) + + @regression_test 
+ def test_mcp_server_config_no_future_extension_fields(self): + """Test that extra fields are allowed for host-specific extensions.""" + # Current design allows extra fields to support host-specific configurations + # (e.g., Gemini's timeout, VS Code's envFile, etc.) + config_data = { + "command": "python", + "timeout": 30, # Allowed (host-specific field) + "retry_attempts": 3, # Allowed (host-specific field) + "ssl_verify": True # Allowed (host-specific field) + } + + # Should NOT raise ValidationError (extra="allow") + config = MCPServerConfig(**config_data) + + # Verify core fields are set correctly + self.assertEqual(config.command, "python") + + # Note: In Phase 3B, strict validation will be enforced in host-specific models + + @regression_test + def test_mcp_server_config_command_empty_validation(self): + """Test validation fails for empty command.""" + config_data = { + "command": " ", # Empty/whitespace command + "args": ["server.py"] + } + + with self.assertRaises(ValidationError) as context: + MCPServerConfig(**config_data) + + self.assertIn("Command cannot be empty", str(context.exception)) + + @regression_test + def test_mcp_server_config_command_strip_whitespace(self): + """Test command whitespace is stripped.""" + config_data = { + "command": " python ", + "args": ["server.py"] + } + + config = MCPServerConfig(**config_data) + self.assertEqual(config.command, "python") + + @regression_test + def test_mcp_server_config_minimal_local_server(self): + """Test minimal local server configuration.""" + config_data = self.test_data_loader.load_mcp_server_config("local_minimal") + config = MCPServerConfig(**config_data) + + self.assertEqual(config.command, "python") + self.assertEqual(config.args, ["minimal_server.py"]) + self.assertIsNone(config.env) + self.assertTrue(config.is_local_server) + self.assertFalse(config.is_remote_server) + + @regression_test + def test_mcp_server_config_minimal_remote_server(self): + """Test minimal remote server 
configuration.""" + config_data = self.test_data_loader.load_mcp_server_config("remote_minimal") + config = MCPServerConfig(**config_data) + + self.assertEqual(config.url, "https://minimal.example.com/mcp") + self.assertIsNone(config.headers) + self.assertFalse(config.is_local_server) + self.assertTrue(config.is_remote_server) + + @regression_test + def test_mcp_server_config_serialization_roundtrip(self): + """Test serialization and deserialization roundtrip.""" + # Test local server + local_config_data = self.test_data_loader.load_mcp_server_config("local") + local_config = MCPServerConfig(**local_config_data) + + # Serialize and deserialize + serialized = local_config.model_dump() + roundtrip_config = MCPServerConfig(**serialized) + + self.assertEqual(local_config.command, roundtrip_config.command) + self.assertEqual(local_config.args, roundtrip_config.args) + self.assertEqual(local_config.env, roundtrip_config.env) + self.assertEqual(local_config.is_local_server, roundtrip_config.is_local_server) + + # Test remote server + remote_config_data = self.test_data_loader.load_mcp_server_config("remote") + remote_config = MCPServerConfig(**remote_config_data) + + # Serialize and deserialize + serialized = remote_config.model_dump() + roundtrip_config = MCPServerConfig(**serialized) + + self.assertEqual(remote_config.url, roundtrip_config.url) + self.assertEqual(remote_config.headers, roundtrip_config.headers) + self.assertEqual(remote_config.is_remote_server, roundtrip_config.is_remote_server) + + @regression_test + def test_mcp_server_config_json_serialization(self): + """Test JSON serialization compatibility.""" + import json + + config_data = self.test_data_loader.load_mcp_server_config("local") + config = MCPServerConfig(**config_data) + + # Test JSON serialization + json_str = config.model_dump_json() + self.assertIsInstance(json_str, str) + + # Test JSON deserialization + parsed_data = json.loads(json_str) + roundtrip_config = MCPServerConfig(**parsed_data) + + 
self.assertEqual(config.command, roundtrip_config.command) + self.assertEqual(config.args, roundtrip_config.args) + self.assertEqual(config.env, roundtrip_config.env) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_server_config_type_field.py b/tests/test_mcp_server_config_type_field.py new file mode 100644 index 0000000..733eeb8 --- /dev/null +++ b/tests/test_mcp_server_config_type_field.py @@ -0,0 +1,221 @@ +""" +Test suite for MCPServerConfig type field (Phase 3A). + +This module tests the type field addition to MCPServerConfig model, +including validation and property behavior. +""" + +import unittest +import sys +from pathlib import Path + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test +except ImportError: + # Fallback decorator if wobble is not available + def regression_test(func): + return func + +from hatch.mcp_host_config.models import MCPServerConfig +from pydantic import ValidationError + + +class TestMCPServerConfigTypeField(unittest.TestCase): + """Test suite for MCPServerConfig type field validation.""" + + @regression_test + def test_type_stdio_with_command_success(self): + """Test successful stdio type with command.""" + config = MCPServerConfig( + name="test-server", + type="stdio", + command="python", + args=["server.py"] + ) + + self.assertEqual(config.type, "stdio") + self.assertEqual(config.command, "python") + self.assertTrue(config.is_local_server) + self.assertFalse(config.is_remote_server) + + @regression_test + def test_type_sse_with_url_success(self): + """Test successful sse type with url.""" + config = MCPServerConfig( + name="test-server", + type="sse", + url="https://api.example.com/mcp" + ) + + self.assertEqual(config.type, "sse") + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertFalse(config.is_local_server) + self.assertTrue(config.is_remote_server) + + 
@regression_test + def test_type_http_with_url_success(self): + """Test successful http type with url.""" + config = MCPServerConfig( + name="test-server", + type="http", + url="https://api.example.com/mcp", + headers={"Authorization": "Bearer token"} + ) + + self.assertEqual(config.type, "http") + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertFalse(config.is_local_server) + self.assertTrue(config.is_remote_server) + + @regression_test + def test_type_stdio_without_command_fails(self): + """Test validation fails when type=stdio without command.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfig( + name="test-server", + type="stdio", + url="https://api.example.com/mcp" # Invalid: stdio with url + ) + + self.assertIn("'type=stdio' requires 'command' field", str(context.exception)) + + @regression_test + def test_type_stdio_with_url_fails(self): + """Test validation fails when type=stdio with url.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfig( + name="test-server", + type="stdio", + command="python", + url="https://api.example.com/mcp" # Invalid: both command and url + ) + + # The validate_server_type() validator catches this first + self.assertIn("Cannot specify both 'command' and 'url'", str(context.exception)) + + @regression_test + def test_type_sse_without_url_fails(self): + """Test validation fails when type=sse without url.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfig( + name="test-server", + type="sse", + command="python" # Invalid: sse with command + ) + + self.assertIn("'type=sse' requires 'url' field", str(context.exception)) + + @regression_test + def test_type_http_without_url_fails(self): + """Test validation fails when type=http without url.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfig( + name="test-server", + type="http", + command="python" # Invalid: http with command + ) + + self.assertIn("'type=http' requires 
'url' field", str(context.exception)) + + @regression_test + def test_type_sse_with_command_fails(self): + """Test validation fails when type=sse with command.""" + with self.assertRaises(ValidationError) as context: + MCPServerConfig( + name="test-server", + type="sse", + command="python", + url="https://api.example.com/mcp" # Invalid: both command and url + ) + + # The validate_server_type() validator catches this first + self.assertIn("Cannot specify both 'command' and 'url'", str(context.exception)) + + @regression_test + def test_backward_compatibility_no_type_field_local(self): + """Test backward compatibility: local server without type field.""" + config = MCPServerConfig( + name="test-server", + command="python", + args=["server.py"] + ) + + self.assertIsNone(config.type) + self.assertEqual(config.command, "python") + self.assertTrue(config.is_local_server) + self.assertFalse(config.is_remote_server) + + @regression_test + def test_backward_compatibility_no_type_field_remote(self): + """Test backward compatibility: remote server without type field.""" + config = MCPServerConfig( + name="test-server", + url="https://api.example.com/mcp" + ) + + self.assertIsNone(config.type) + self.assertEqual(config.url, "https://api.example.com/mcp") + self.assertFalse(config.is_local_server) + self.assertTrue(config.is_remote_server) + + @regression_test + def test_type_field_with_env_variables(self): + """Test type field with environment variables.""" + config = MCPServerConfig( + name="test-server", + type="stdio", + command="python", + args=["server.py"], + env={"API_KEY": "test-key", "DEBUG": "true"} + ) + + self.assertEqual(config.type, "stdio") + self.assertEqual(config.env["API_KEY"], "test-key") + self.assertEqual(config.env["DEBUG"], "true") + + @regression_test + def test_type_field_serialization(self): + """Test type field is included in serialization.""" + config = MCPServerConfig( + name="test-server", + type="stdio", + command="python", + args=["server.py"] 
+ ) + + # Test model_dump includes type field + data = config.model_dump() + self.assertEqual(data["type"], "stdio") + self.assertEqual(data["command"], "python") + + # Test JSON serialization + import json + json_str = config.model_dump_json() + parsed = json.loads(json_str) + self.assertEqual(parsed["type"], "stdio") + + @regression_test + def test_type_field_roundtrip(self): + """Test type field survives serialization roundtrip.""" + original = MCPServerConfig( + name="test-server", + type="sse", + url="https://api.example.com/mcp", + headers={"Authorization": "Bearer token"} + ) + + # Serialize and deserialize + data = original.model_dump() + roundtrip = MCPServerConfig(**data) + + self.assertEqual(roundtrip.type, "sse") + self.assertEqual(roundtrip.url, "https://api.example.com/mcp") + self.assertEqual(roundtrip.headers["Authorization"], "Bearer token") + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_mcp_sync_functionality.py b/tests/test_mcp_sync_functionality.py new file mode 100644 index 0000000..0cd5b20 --- /dev/null +++ b/tests/test_mcp_sync_functionality.py @@ -0,0 +1,316 @@ +""" +Test suite for MCP synchronization functionality (Phase 3f). + +This module contains comprehensive tests for the advanced synchronization +features including cross-environment and cross-host synchronization. 
+""" + +import unittest +from unittest.mock import MagicMock, patch, call +from pathlib import Path +import tempfile +import json +from typing import Dict, List, Optional + +# Import test decorators from wobble framework +from wobble import integration_test, regression_test + +# Import the modules we'll be testing +from hatch.mcp_host_config.host_management import MCPHostConfigurationManager, MCPHostType +from hatch.mcp_host_config.models import ( + EnvironmentData, MCPServerConfig, SyncResult, ConfigurationResult +) +from hatch.cli_hatch import handle_mcp_sync, parse_host_list, main + + +class TestMCPSyncConfigurations(unittest.TestCase): + """Test suite for MCPHostConfigurationManager.sync_configurations() method.""" + + def setUp(self): + """Set up test fixtures.""" + self.temp_dir = tempfile.mkdtemp() + self.manager = MCPHostConfigurationManager() + + # We'll use mocks instead of real data objects to avoid validation issues + + @regression_test + def test_sync_from_environment_to_single_host(self): + """Test basic environment-to-host synchronization.""" + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=True, + results=[ConfigurationResult(success=True, hostname="claude-desktop")], + servers_synced=2, + hosts_updated=1 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_env="test-env", + to_hosts=["claude-desktop"] + ) + + self.assertTrue(result.success) + self.assertEqual(result.servers_synced, 2) + self.assertEqual(result.hosts_updated, 1) + mock_sync.assert_called_once() + + @integration_test(scope="component") + def test_sync_from_environment_to_multiple_hosts(self): + """Test environment-to-multiple-hosts synchronization.""" + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=True, + results=[ + ConfigurationResult(success=True, hostname="claude-desktop"), + ConfigurationResult(success=True, 
hostname="cursor") + ], + servers_synced=4, + hosts_updated=2 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_env="test-env", + to_hosts=["claude-desktop", "cursor"] + ) + + self.assertTrue(result.success) + self.assertEqual(result.servers_synced, 4) + self.assertEqual(result.hosts_updated, 2) + + @integration_test(scope="component") + def test_sync_from_host_to_host(self): + """Test host-to-host configuration synchronization.""" + # This test will validate the new host-to-host sync functionality + # that needs to be implemented + with patch.object(self.manager.host_registry, 'get_strategy') as mock_get_strategy: + mock_strategy = MagicMock() + mock_strategy.read_configuration.return_value = MagicMock() + mock_strategy.write_configuration.return_value = True + mock_get_strategy.return_value = mock_strategy + + # Mock the sync_configurations method that we'll implement + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=True, + results=[ConfigurationResult(success=True, hostname="cursor")], + servers_synced=2, + hosts_updated=1 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_host="claude-desktop", + to_hosts=["cursor"] + ) + + self.assertTrue(result.success) + self.assertEqual(result.hosts_updated, 1) + + @integration_test(scope="component") + def test_sync_with_server_name_filter(self): + """Test synchronization with specific server names.""" + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=True, + results=[ConfigurationResult(success=True, hostname="claude-desktop")], + servers_synced=1, # Only one server due to filtering + hosts_updated=1 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_env="test-env", + to_hosts=["claude-desktop"], + servers=["weather-toolkit"] + ) + + self.assertTrue(result.success) + 
self.assertEqual(result.servers_synced, 1) + + @integration_test(scope="component") + def test_sync_with_pattern_filter(self): + """Test synchronization with regex pattern filter.""" + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=True, + results=[ConfigurationResult(success=True, hostname="claude-desktop")], + servers_synced=1, # Only servers matching pattern + hosts_updated=1 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_env="test-env", + to_hosts=["claude-desktop"], + pattern="weather-.*" + ) + + self.assertTrue(result.success) + self.assertEqual(result.servers_synced, 1) + + @regression_test + def test_sync_invalid_source_environment(self): + """Test synchronization with non-existent source environment.""" + with patch.object(self.manager, 'sync_configurations') as mock_sync: + mock_result = SyncResult( + success=False, + results=[ConfigurationResult( + success=False, + hostname="claude-desktop", + error_message="Environment 'nonexistent' not found" + )], + servers_synced=0, + hosts_updated=0 + ) + mock_sync.return_value = mock_result + + result = self.manager.sync_configurations( + from_env="nonexistent", + to_hosts=["claude-desktop"] + ) + + self.assertFalse(result.success) + self.assertEqual(result.servers_synced, 0) + + @regression_test + def test_sync_no_source_specified(self): + """Test synchronization without source specification.""" + with self.assertRaises(ValueError) as context: + self.manager.sync_configurations(to_hosts=["claude-desktop"]) + + self.assertIn("Must specify either from_env or from_host", str(context.exception)) + + @regression_test + def test_sync_both_sources_specified(self): + """Test synchronization with both env and host sources.""" + with self.assertRaises(ValueError) as context: + self.manager.sync_configurations( + from_env="test-env", + from_host="claude-desktop", + to_hosts=["cursor"] + ) + + self.assertIn("Cannot 
specify both from_env and from_host", str(context.exception)) + + +class TestMCPSyncCommandParsing(unittest.TestCase): + """Test suite for MCP sync command argument parsing.""" + + @regression_test + def test_sync_command_basic_parsing(self): + """Test basic sync command argument parsing.""" + test_args = [ + 'hatch', 'mcp', 'sync', + '--from-env', 'test-env', + '--to-host', 'claude-desktop' + ] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_sync', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with( + from_env='test-env', + from_host=None, + to_hosts='claude-desktop', + servers=None, + pattern=None, + dry_run=False, + auto_approve=False, + no_backup=False + ) + except SystemExit as e: + self.assertEqual(e.code, 0) + + @regression_test + def test_sync_command_with_filters(self): + """Test sync command with server filters.""" + test_args = [ + 'hatch', 'mcp', 'sync', + '--from-env', 'test-env', + '--to-host', 'claude-desktop,cursor', + '--servers', 'weather-api,file-manager', + '--dry-run' + ] + + with patch('sys.argv', test_args): + with patch('hatch.cli_hatch.HatchEnvironmentManager'): + with patch('hatch.cli_hatch.handle_mcp_sync', return_value=0) as mock_handler: + try: + main() + mock_handler.assert_called_once_with( + from_env='test-env', + from_host=None, + to_hosts='claude-desktop,cursor', + servers='weather-api,file-manager', + pattern=None, + dry_run=True, + auto_approve=False, + no_backup=False + ) + except SystemExit as e: + self.assertEqual(e.code, 0) + + +class TestMCPSyncCommandHandler(unittest.TestCase): + """Test suite for MCP sync command handler.""" + + @integration_test(scope="component") + def test_handle_sync_environment_to_host(self): + """Test sync handler for environment-to-host operation.""" + with patch('hatch.cli_hatch.MCPHostConfigurationManager') as mock_manager_class: + mock_manager = MagicMock() + mock_result = 
SyncResult( + success=True, + results=[ConfigurationResult(success=True, hostname="claude-desktop")], + servers_synced=2, + hosts_updated=1 + ) + mock_manager.sync_configurations.return_value = mock_result + mock_manager_class.return_value = mock_manager + + with patch('builtins.print') as mock_print: + with patch('hatch.cli_hatch.parse_host_list') as mock_parse: + with patch('hatch.cli_hatch.request_confirmation', return_value=True) as mock_confirm: + from hatch.mcp_host_config.models import MCPHostType + mock_parse.return_value = [MCPHostType.CLAUDE_DESKTOP] + + result = handle_mcp_sync( + from_env="test-env", + to_hosts="claude-desktop" + ) + + self.assertEqual(result, 0) + mock_manager.sync_configurations.assert_called_once() + mock_confirm.assert_called_once() + + # Verify success output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[SUCCESS] Synchronization completed" in call for call in print_calls)) + + @integration_test(scope="component") + def test_handle_sync_dry_run(self): + """Test sync handler dry-run functionality.""" + with patch('builtins.print') as mock_print: + with patch('hatch.cli_hatch.parse_host_list') as mock_parse: + from hatch.mcp_host_config.models import MCPHostType + mock_parse.return_value = [MCPHostType.CLAUDE_DESKTOP] + + result = handle_mcp_sync( + from_env="test-env", + to_hosts="claude-desktop", + dry_run=True + ) + + self.assertEqual(result, 0) + + # Verify dry-run output + print_calls = [call[0][0] for call in mock_print.call_args_list] + self.assertTrue(any("[DRY RUN] Would synchronize" in call for call in print_calls)) + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_mcp_user_feedback_reporting.py b/tests/test_mcp_user_feedback_reporting.py new file mode 100644 index 0000000..6beff73 --- /dev/null +++ b/tests/test_mcp_user_feedback_reporting.py @@ -0,0 +1,359 @@ +""" +Test suite for MCP user feedback reporting system. 
+ +This module tests the FieldOperation and ConversionReport models, +generate_conversion_report() function, and display_report() function. +""" + +import unittest +import sys +from pathlib import Path +from io import StringIO + +# Add the parent directory to the path to import wobble +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from wobble.decorators import regression_test +except ImportError: + # Fallback decorator if wobble is not available + def regression_test(func): + return func + +from hatch.mcp_host_config.reporting import ( + FieldOperation, + ConversionReport, + generate_conversion_report, + display_report +) +from hatch.mcp_host_config.models import ( + MCPServerConfigOmni, + MCPHostType +) + + +class TestFieldOperation(unittest.TestCase): + """Test suite for FieldOperation model.""" + + @regression_test + def test_field_operation_updated_str_representation(self): + """Test UPDATED operation string representation.""" + field_op = FieldOperation( + field_name="command", + operation="UPDATED", + old_value="old_command", + new_value="new_command" + ) + + result = str(field_op) + + # Verify ASCII arrow used (not Unicode) + self.assertIn("-->", result) + self.assertNotIn("โ†’", result) + + # Verify format + self.assertEqual(result, "command: UPDATED 'old_command' --> 'new_command'") + + @regression_test + def test_field_operation_updated_with_none_old_value(self): + """Test UPDATED operation with None old_value (field added).""" + field_op = FieldOperation( + field_name="timeout", + operation="UPDATED", + old_value=None, + new_value=30000 + ) + + result = str(field_op) + + # Verify None is displayed + self.assertEqual(result, "timeout: UPDATED None --> 30000") + + @regression_test + def test_field_operation_unsupported_str_representation(self): + """Test UNSUPPORTED operation string representation.""" + field_op = FieldOperation( + field_name="envFile", + operation="UNSUPPORTED", + new_value=".env" + ) + + result = str(field_op) + + # 
Verify format + self.assertEqual(result, "envFile: UNSUPPORTED") + + @regression_test + def test_field_operation_unchanged_str_representation(self): + """Test UNCHANGED operation string representation.""" + field_op = FieldOperation( + field_name="name", + operation="UNCHANGED", + new_value="my-server" + ) + + result = str(field_op) + + # Verify format + self.assertEqual(result, "name: UNCHANGED 'my-server'") + + +class TestConversionReport(unittest.TestCase): + """Test suite for ConversionReport model.""" + + @regression_test + def test_conversion_report_create_operation(self): + """Test ConversionReport with create operation.""" + report = ConversionReport( + operation="create", + server_name="my-server", + target_host=MCPHostType.GEMINI, + field_operations=[ + FieldOperation(field_name="command", operation="UPDATED", old_value=None, new_value="python") + ] + ) + + self.assertEqual(report.operation, "create") + self.assertEqual(report.server_name, "my-server") + self.assertEqual(report.target_host, MCPHostType.GEMINI) + self.assertTrue(report.success) + self.assertIsNone(report.error_message) + self.assertEqual(len(report.field_operations), 1) + self.assertFalse(report.dry_run) + + @regression_test + def test_conversion_report_update_operation(self): + """Test ConversionReport with update operation.""" + report = ConversionReport( + operation="update", + server_name="my-server", + target_host=MCPHostType.VSCODE, + field_operations=[ + FieldOperation(field_name="command", operation="UPDATED", old_value="old", new_value="new"), + FieldOperation(field_name="name", operation="UNCHANGED", new_value="my-server") + ] + ) + + self.assertEqual(report.operation, "update") + self.assertEqual(len(report.field_operations), 2) + + @regression_test + def test_conversion_report_migrate_operation(self): + """Test ConversionReport with migrate operation.""" + report = ConversionReport( + operation="migrate", + server_name="my-server", + source_host=MCPHostType.GEMINI, + 
target_host=MCPHostType.VSCODE, + field_operations=[] + ) + + self.assertEqual(report.operation, "migrate") + self.assertEqual(report.source_host, MCPHostType.GEMINI) + self.assertEqual(report.target_host, MCPHostType.VSCODE) + + +class TestGenerateConversionReport(unittest.TestCase): + """Test suite for generate_conversion_report() function.""" + + @regression_test + def test_generate_report_create_operation_all_supported(self): + """Test generate_conversion_report for create with all supported fields.""" + omni = MCPServerConfigOmni( + name="gemini-server", + command="npx", + args=["-y", "server"], + cwd="/path/to/dir", + timeout=30000 + ) + + report = generate_conversion_report( + operation="create", + server_name="gemini-server", + target_host=MCPHostType.GEMINI, + omni=omni + ) + + # Verify all fields are UPDATED (create operation) + self.assertEqual(report.operation, "create") + self.assertEqual(report.server_name, "gemini-server") + self.assertEqual(report.target_host, MCPHostType.GEMINI) + + # All set fields should be UPDATED + updated_ops = [op for op in report.field_operations if op.operation == "UPDATED"] + self.assertEqual(len(updated_ops), 5) # name, command, args, cwd, timeout + + # No unsupported fields + unsupported_ops = [op for op in report.field_operations if op.operation == "UNSUPPORTED"] + self.assertEqual(len(unsupported_ops), 0) + + @regression_test + def test_generate_report_create_operation_with_unsupported(self): + """Test generate_conversion_report with unsupported fields.""" + omni = MCPServerConfigOmni( + name="gemini-server", + command="python", + cwd="/path/to/dir", # Gemini field + envFile=".env" # VS Code field (unsupported by Gemini) + ) + + report = generate_conversion_report( + operation="create", + server_name="gemini-server", + target_host=MCPHostType.GEMINI, + omni=omni + ) + + # Verify Gemini fields are UPDATED + updated_ops = [op for op in report.field_operations if op.operation == "UPDATED"] + updated_fields = 
{op.field_name for op in updated_ops} + self.assertIn("name", updated_fields) + self.assertIn("command", updated_fields) + self.assertIn("cwd", updated_fields) + + # Verify VS Code field is UNSUPPORTED + unsupported_ops = [op for op in report.field_operations if op.operation == "UNSUPPORTED"] + self.assertEqual(len(unsupported_ops), 1) + self.assertEqual(unsupported_ops[0].field_name, "envFile") + + @regression_test + def test_generate_report_update_operation(self): + """Test generate_conversion_report for update operation.""" + old_config = MCPServerConfigOmni( + name="my-server", + command="python", + args=["old.py"] + ) + + new_omni = MCPServerConfigOmni( + name="my-server", + command="python", + args=["new.py"] + ) + + report = generate_conversion_report( + operation="update", + server_name="my-server", + target_host=MCPHostType.GEMINI, + omni=new_omni, + old_config=old_config + ) + + # Verify name and command are UNCHANGED + unchanged_ops = [op for op in report.field_operations if op.operation == "UNCHANGED"] + unchanged_fields = {op.field_name for op in unchanged_ops} + self.assertIn("name", unchanged_fields) + self.assertIn("command", unchanged_fields) + + # Verify args is UPDATED + updated_ops = [op for op in report.field_operations if op.operation == "UPDATED"] + self.assertEqual(len(updated_ops), 1) + self.assertEqual(updated_ops[0].field_name, "args") + self.assertEqual(updated_ops[0].old_value, ["old.py"]) + self.assertEqual(updated_ops[0].new_value, ["new.py"]) + + @regression_test + def test_generate_report_dynamic_field_derivation(self): + """Test that generate_conversion_report uses dynamic field derivation.""" + omni = MCPServerConfigOmni( + name="test-server", + command="python" + ) + + # Generate report for Gemini + report_gemini = generate_conversion_report( + operation="create", + server_name="test-server", + target_host=MCPHostType.GEMINI, + omni=omni + ) + + # All fields should be UPDATED (no unsupported) + unsupported_ops = [op for op in 
report_gemini.field_operations if op.operation == "UNSUPPORTED"] + self.assertEqual(len(unsupported_ops), 0) + + +class TestDisplayReport(unittest.TestCase): + """Test suite for display_report() function.""" + + @regression_test + def test_display_report_create_operation(self): + """Test display_report for create operation.""" + report = ConversionReport( + operation="create", + server_name="my-server", + target_host=MCPHostType.GEMINI, + field_operations=[ + FieldOperation(field_name="command", operation="UPDATED", old_value=None, new_value="python") + ] + ) + + # Capture stdout + captured_output = StringIO() + sys.stdout = captured_output + + display_report(report) + + sys.stdout = sys.__stdout__ + output = captured_output.getvalue() + + # Verify header + self.assertIn("Server 'my-server' created for host", output) + self.assertIn("gemini", output.lower()) + + # Verify field operation displayed + self.assertIn("command: UPDATED", output) + + @regression_test + def test_display_report_update_operation(self): + """Test display_report for update operation.""" + report = ConversionReport( + operation="update", + server_name="my-server", + target_host=MCPHostType.VSCODE, + field_operations=[ + FieldOperation(field_name="args", operation="UPDATED", old_value=["old.py"], new_value=["new.py"]) + ] + ) + + # Capture stdout + captured_output = StringIO() + sys.stdout = captured_output + + display_report(report) + + sys.stdout = sys.__stdout__ + output = captured_output.getvalue() + + # Verify header + self.assertIn("Server 'my-server' updated for host", output) + + @regression_test + def test_display_report_dry_run(self): + """Test display_report for dry-run mode.""" + report = ConversionReport( + operation="create", + server_name="my-server", + target_host=MCPHostType.GEMINI, + field_operations=[], + dry_run=True + ) + + # Capture stdout + captured_output = StringIO() + sys.stdout = captured_output + + display_report(report) + + sys.stdout = sys.__stdout__ + output = 
captured_output.getvalue() + + # Verify dry-run header and footer + self.assertIn("[DRY RUN]", output) + self.assertIn("Preview of changes", output) + self.assertIn("No changes were made", output) + + +if __name__ == '__main__': + unittest.main() + diff --git a/tests/test_non_tty_integration.py b/tests/test_non_tty_integration.py index 19f47dc..962936a 100644 --- a/tests/test_non_tty_integration.py +++ b/tests/test_non_tty_integration.py @@ -11,7 +11,7 @@ from pathlib import Path from unittest.mock import patch from hatch.environment_manager import HatchEnvironmentManager -from wobble.decorators import integration_test +from wobble.decorators import integration_test, slow_test from test_data_utils import NonTTYTestDataLoader, TestDataLoader @@ -34,6 +34,7 @@ def _cleanup_temp_dir(self): shutil.rmtree(self.temp_dir, ignore_errors=True) @integration_test(scope="component") + @slow_test @patch('sys.stdin.isatty', return_value=False) def test_cli_package_add_non_tty(self, mock_isatty): """Test package addition in non-TTY environment via CLI.""" @@ -58,6 +59,7 @@ def test_cli_package_add_non_tty(self, mock_isatty): mock_isatty.assert_called() @integration_test(scope="component") + @slow_test @patch.dict(os.environ, {'HATCH_AUTO_APPROVE': '1'}) def test_environment_variable_integration(self): """Test HATCH_AUTO_APPROVE environment variable integration.""" @@ -81,6 +83,7 @@ def test_environment_variable_integration(self): self.assertTrue(result, "Package addition should succeed with HATCH_AUTO_APPROVE") @integration_test(scope="component") + @slow_test @patch('sys.stdin.isatty', return_value=False) def test_multiple_package_installation_non_tty(self, mock_isatty): """Test multiple package installation in non-TTY environment.""" @@ -110,6 +113,7 @@ def test_multiple_package_installation_non_tty(self, mock_isatty): self.assertTrue(result2, "Second package installation should succeed") @integration_test(scope="component") + @slow_test @patch.dict(os.environ, 
{'HATCH_AUTO_APPROVE': 'true'}) def test_environment_variable_case_insensitive_integration(self): """Test case-insensitive environment variable in full integration.""" @@ -131,6 +135,7 @@ def test_environment_variable_case_insensitive_integration(self): self.assertTrue(result, "Package addition should succeed with case-insensitive env var") @integration_test(scope="component") + @slow_test @patch('sys.stdin.isatty', return_value=True) @patch.dict(os.environ, {'HATCH_AUTO_APPROVE': 'invalid'}) @patch('builtins.input', return_value='y') @@ -175,6 +180,7 @@ def _cleanup_temp_dir(self): shutil.rmtree(self.temp_dir, ignore_errors=True) @integration_test(scope="component") + @slow_test @patch('sys.stdin.isatty', return_value=True) @patch('builtins.input', side_effect=KeyboardInterrupt()) def test_keyboard_interrupt_integration(self, mock_input, mock_isatty): @@ -198,6 +204,7 @@ def test_keyboard_interrupt_integration(self, mock_input, mock_isatty): self.assertFalse(result, "Package installation should be cancelled by user") @integration_test(scope="component") + @slow_test @patch('sys.stdin.isatty', return_value=True) @patch('builtins.input', side_effect=EOFError()) def test_eof_error_integration(self, mock_input, mock_isatty): @@ -240,6 +247,7 @@ def _cleanup_temp_dir(self): shutil.rmtree(self.temp_dir, ignore_errors=True) @integration_test(scope="component") + @slow_test def test_all_valid_environment_variables_integration(self): """Test all valid environment variable values in integration.""" # Create test environment diff --git a/tests/test_python_environment_manager.py b/tests/test_python_environment_manager.py index fe5296f..0652d46 100644 --- a/tests/test_python_environment_manager.py +++ b/tests/test_python_environment_manager.py @@ -22,14 +22,32 @@ def setUp(self): self.temp_dir = tempfile.mkdtemp() self.environments_dir = Path(self.temp_dir) / "envs" self.environments_dir.mkdir(exist_ok=True) - + # Create manager instance for testing self.manager = 
PythonEnvironmentManager(environments_dir=self.environments_dir) + # Track environments created during this test for cleanup + self.created_environments = [] + def tearDown(self): """Clean up test environment.""" + # Clean up any conda/mamba environments created during this test + if hasattr(self, 'manager') and self.manager.is_available(): + for env_name in self.created_environments: + try: + if self.manager.environment_exists(env_name): + self.manager.remove_python_environment(env_name) + except Exception: + pass # Best effort cleanup + + # Clean up temporary directory shutil.rmtree(self.temp_dir, ignore_errors=True) + def _track_environment(self, env_name): + """Track an environment for cleanup in tearDown.""" + if env_name not in self.created_environments: + self.created_environments.append(env_name) + @regression_test @patch('hatch.python_environment_manager.PythonEnvironmentManager._conda_env_exists', return_value=True) @patch('hatch.python_environment_manager.PythonEnvironmentManager._get_conda_env_name', return_value='hatch_test_env') @@ -349,26 +367,61 @@ def setUpClass(cls): cls.temp_dir = tempfile.mkdtemp() cls.environments_dir = Path(cls.temp_dir) / "envs" cls.environments_dir.mkdir(exist_ok=True) - + # Create manager instance for integration testing cls.manager = PythonEnvironmentManager(environments_dir=cls.environments_dir) + # Track all environments created during integration tests + cls.all_created_environments = set() + # Skip all tests if conda/mamba is not available if not cls.manager.is_available(): raise unittest.SkipTest("Conda/mamba not available for integration tests") + def setUp(self): + """Set up individual test.""" + # Track environments created during this specific test + self.test_environments = [] + + def tearDown(self): + """Clean up individual test.""" + # Clean up environments created during this specific test + for env_name in self.test_environments: + try: + if self.manager.environment_exists(env_name): + 
self.manager.remove_python_environment(env_name) + self.all_created_environments.discard(env_name) + except Exception: + pass # Best effort cleanup + + def _track_environment(self, env_name): + """Track an environment for cleanup.""" + if env_name not in self.test_environments: + self.test_environments.append(env_name) + self.all_created_environments.add(env_name) + @classmethod def tearDownClass(cls): """Clean up class-level test environment.""" - # Clean up any test environments that might have been created + # Clean up any remaining test environments try: - test_envs = ["test_integration_env", "test_python_311", "test_python_312", "test_diagnostics_env"] - for env_name in test_envs: + # Clean up tracked environments + for env_name in list(cls.all_created_environments): + if cls.manager.environment_exists(env_name): + cls.manager.remove_python_environment(env_name) + + # Clean up known test environment patterns (fallback) + known_patterns = [ + "test_integration_env", "test_python_311", "test_python_312", "test_diagnostics_env", + "test_env_1", "test_env_2", "test_env_3", "test_env_4", "test_env_5", + "test_python_39", "test_python_310", "test_python_312", "test_cache_env1", "test_cache_env2" + ] + for env_name in known_patterns: if cls.manager.environment_exists(env_name): cls.manager.remove_python_environment(env_name) except Exception: pass # Best effort cleanup - + shutil.rmtree(cls.temp_dir, ignore_errors=True) @integration_test(scope="system") @@ -418,11 +471,12 @@ def test_manager_diagnostics_real(self): def test_create_and_remove_python_environment_real(self): """Test real Python environment creation and removal.""" env_name = "test_integration_env" - + self._track_environment(env_name) + # Ensure environment doesn't exist initially if self.manager.environment_exists(env_name): self.manager.remove_python_environment(env_name) - + # Create environment result = self.manager.create_python_environment(env_name) self.assertTrue(result, "Failed to create Python 
environment") @@ -454,6 +508,7 @@ def test_create_and_remove_python_environment_real(self): def test_create_python_environment_with_version_real(self): """Test real Python environment creation with specific version.""" env_name = "test_python_311" + self._track_environment(env_name) python_version = "3.11" # Ensure environment doesn't exist initially @@ -553,6 +608,10 @@ def test_list_environments_real(self): test_envs = ["test_env_1", "test_env_2"] final_names = ["hatch_test_env_1", "hatch_test_env_2"] + # Track environments for cleanup + for env_name in test_envs: + self._track_environment(env_name) + # Clean up any existing test environments for env_name in test_envs: if self.manager.environment_exists(env_name): @@ -645,14 +704,32 @@ def setUp(self): self.temp_dir = tempfile.mkdtemp() self.environments_dir = Path(self.temp_dir) / "envs" self.environments_dir.mkdir(exist_ok=True) - + # Create manager instance for testing self.manager = PythonEnvironmentManager(environments_dir=self.environments_dir) + # Track environments created during this test for cleanup + self.created_environments = [] + def tearDown(self): """Clean up test environment.""" + # Clean up any conda/mamba environments created during this test + if hasattr(self, 'manager') and self.manager.is_available(): + for env_name in self.created_environments: + try: + if self.manager.environment_exists(env_name): + self.manager.remove_python_environment(env_name) + except Exception: + pass # Best effort cleanup + + # Clean up temporary directory shutil.rmtree(self.temp_dir, ignore_errors=True) + def _track_environment(self, env_name): + """Track an environment for cleanup in tearDown.""" + if env_name not in self.created_environments: + self.created_environments.append(env_name) + @regression_test @patch('subprocess.run') def test_launch_shell_with_command(self, mock_run):