diff --git a/.github/workflows/publish-npm.yml b/.github/workflows/publish-npm.yml index b130dd2..5de1e17 100644 --- a/.github/workflows/publish-npm.yml +++ b/.github/workflows/publish-npm.yml @@ -16,6 +16,8 @@ jobs: publish: name: publish runs-on: ubuntu-latest + permissions: + contents: write steps: - uses: actions/checkout@v4 @@ -43,4 +45,6 @@ jobs: - name: Upload MCP Server DXT GitHub release asset run: | gh release upload ${{ github.event.release.tag_name }} \ - packages/mcp-server/cas_parser_node_api.dxt + packages/mcp-server/cas_parser_node_api.mcpb + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 4b27d0f..d62bea5 100644 --- a/.gitignore +++ b/.gitignore @@ -7,5 +7,6 @@ dist dist-deno /*.tgz .idea/ +.eslintcache dist-bundle -*.dxt +*.mcpb diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 4918b25..dd8fde7 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "1.4.1" + ".": "1.5.0" } diff --git a/.stats.yml b/.stats.yml index 92721c7..48b33b3 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 5 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cas-parser%2Fcas-parser-b7fdba3d3f97c7debc22c7ca30b828bce81bcd64648df8c94029b27a3321ebb9.yml -openapi_spec_hash: 03f1315f1d32ada42445ca920f047dff +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/cas-parser%2Fcas-parser-38618cc5c938e87eeacf4893d6a6ba4e6ef7da390e6283dc7b50b484a7b97165.yml +openapi_spec_hash: b9e439ecee904ded01aa34efdee88856 config_hash: cb5d75abef6264b5d86448caf7295afa diff --git a/CHANGELOG.md b/CHANGELOG.md index fda1b9d..17b212b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,65 @@ # Changelog +## 1.5.0 (2026-01-01) + +Full Changelog: [v1.4.1...v1.5.0](https://github.com/CASParser/cas-parser-node/compare/v1.4.1...v1.5.0) + +### Features + +* **api:** api update ([3047e70](https://github.com/CASParser/cas-parser-node/commit/3047e70310ddf1dd571c70c4683b01041579bf5f)) +* **api:** api update ([7f71235](https://github.com/CASParser/cas-parser-node/commit/7f712351a3d5349fff33bf321ab946a58a8ae015)) +* **mcp:** add docs search tool ([e2afa83](https://github.com/CASParser/cas-parser-node/commit/e2afa8394a7f891e90526dffeab493f4033ebfe0)) +* **mcp:** add option for including docs tools ([e39c90e](https://github.com/CASParser/cas-parser-node/commit/e39c90ed1a75114d95ccae8183f69983331bf7c2)) +* **mcp:** enable experimental docs search tool ([86b23ff](https://github.com/CASParser/cas-parser-node/commit/86b23ff7a185038d52d18395f7cccb68716affed)) +* **mcp:** enable optional code execution tool on http mcp servers ([c65e4a3](https://github.com/CASParser/cas-parser-node/commit/c65e4a343bc9f6d6064e5c3a416630ef3880aa9e)) + + +### Bug Fixes + +* **ci:** set permissions for DXT publish action ([461a411](https://github.com/CASParser/cas-parser-node/commit/461a411c1491f80b951618ec83a117667da2c786)) +* **mcpb:** pin @anthropic-ai/mcpb version ([3a1120e](https://github.com/CASParser/cas-parser-node/commit/3a1120e08ae1f95374532f8711918b20ad6b74c1)) +* **mcp:** fix cli argument parsing logic ([6d18c3d](https://github.com/CASParser/cas-parser-node/commit/6d18c3d887c074a25205c62dd3a461fff1424d28)) +* **mcp:** resolve a linting issue in server code ([691d0c1](https://github.com/CASParser/cas-parser-node/commit/691d0c124350d54df029a9fa2873a85369615f7d)) +* **mcp:** return tool execution error on jq failure 
([b36d83b](https://github.com/CASParser/cas-parser-node/commit/b36d83bad328f0cfe7eb66ec2472ab0611ea079b)) + + +### Performance Improvements + +* faster formatting ([5e72001](https://github.com/CASParser/cas-parser-node/commit/5e72001cfce74462351337f831f066cfdd500db4)) + + +### Chores + +* **codegen:** internal codegen update ([63d6cd7](https://github.com/CASParser/cas-parser-node/commit/63d6cd75e8ec5d1985cbaf203bd60a4172c3cbd6)) +* do not install brew dependencies in ./scripts/bootstrap by default ([d4fb00e](https://github.com/CASParser/cas-parser-node/commit/d4fb00e57c79b1b2420cd1fd4d3843ee1e73675d)) +* extract some types in mcp docs ([464c4ad](https://github.com/CASParser/cas-parser-node/commit/464c4adaf3e55bba56c65b932686b196057816c8)) +* **internal:** codegen related update ([03494e0](https://github.com/CASParser/cas-parser-node/commit/03494e0cfbecc96d136297acb7cc3c44ec622c63)) +* **internal:** codegen related update ([4d1a0f4](https://github.com/CASParser/cas-parser-node/commit/4d1a0f4799890a888f01eb528ca0d2d3a798b0cd)) +* **internal:** codegen related update ([22c7ea2](https://github.com/CASParser/cas-parser-node/commit/22c7ea2ba63601478bc3d9ad534ff0df493f4c8c)) +* **internal:** fix incremental formatting in some cases ([170eef2](https://github.com/CASParser/cas-parser-node/commit/170eef2a9573b4a2c3a51f4a8979a4d8cb49ce77)) +* **internal:** gitignore .mcpb files ([037b423](https://github.com/CASParser/cas-parser-node/commit/037b423d403f4af547043cc73164df04fcbe8115)) +* **internal:** grammar fix (it's -> its) ([0f7008c](https://github.com/CASParser/cas-parser-node/commit/0f7008c68019fc03ae354f85839784008e838eac)) +* **internal:** ignore .eslintcache ([475202b](https://github.com/CASParser/cas-parser-node/commit/475202b2ce09234a57356ce5a1dd466f3122e9a8)) +* **internal:** remove .eslintcache ([8897700](https://github.com/CASParser/cas-parser-node/commit/8897700e7ebf141e8416adf62a78a679e37c55bc)) +* **internal:** remove deprecated `compilerOptions.baseUrl` from tsconfig.json ([3e2894e](https://github.com/CASParser/cas-parser-node/commit/3e2894e798b721339c48214a6b8b2d932a3e2c02)) +* **internal:** use npm pack for build uploads ([080eee4](https://github.com/CASParser/cas-parser-node/commit/080eee4a0de5730b86eabe4cdd9bf6831ed69713)) +* **jsdoc:** fix [@link](https://github.com/link) annotations to refer only to parts of the package‘s public interface ([2c53c6a](https://github.com/CASParser/cas-parser-node/commit/2c53c6a40eae947ff82bf62021471d770f3154a0)) +* mcp code tool explicit error message when missing a run function ([c1688e1](https://github.com/CASParser/cas-parser-node/commit/c1688e1762adac87ce781019f0c7e3797d6cbc20)) +* **mcp:** add friendlier MCP code tool errors on incorrect method invocations ([92613e9](https://github.com/CASParser/cas-parser-node/commit/92613e96bb14ed9de8d1042cbb30d580d44a4fa7)) +* **mcp:** add line numbers to code tool errors ([b3c083e](https://github.com/CASParser/cas-parser-node/commit/b3c083e9e828fa712b60904be1ab82a43679615c)) +* **mcp:** allow pointing `docs_search` tool at other URLs ([5df72aa](https://github.com/CASParser/cas-parser-node/commit/5df72aac38114806d999acd0e266eb6c6abff4cc)) +* **mcp:** clarify http auth error ([63e29fe](https://github.com/CASParser/cas-parser-node/commit/63e29fee0c13d900bab2f1abdc905c1cc519138a)) +* **mcp:** rename dxt to mcpb ([4206154](https://github.com/CASParser/cas-parser-node/commit/4206154f0e8c7f82ce8c62b0729e4859af7fbda9)) +* **mcp:** upgrade jq-web 
([fb5ee4c](https://github.com/CASParser/cas-parser-node/commit/fb5ee4cee2cfc01cb3718e7b71b6e0573e1128aa)) +* update lockfile ([952858c](https://github.com/CASParser/cas-parser-node/commit/952858c419b88f6a5205daece2c9ecdac5324dd1)) +* use structured error when code execution tool errors ([0b9eb86](https://github.com/CASParser/cas-parser-node/commit/0b9eb86e3ee6d9e2a35b396453bf4fbec66dc905)) + + +### Documentation + +* **mcp:** add a README button for one-click add to Cursor ([f57f923](https://github.com/CASParser/cas-parser-node/commit/f57f9234638bd30b09ee62fb45e4374d78e3b93f)) +* **mcp:** add a README link to add server to VS Code or Claude Code ([d687a4a](https://github.com/CASParser/cas-parser-node/commit/d687a4a380963a0e493b8a106e6c30f88e3d0047)) + ## 1.4.1 (2025-09-12) Full Changelog: [v1.4.0...v1.4.1](https://github.com/CASParser/cas-parser-node/compare/v1.4.0...v1.4.1) diff --git a/LICENSE b/LICENSE index f1756ce..6bbb512 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2025 Cas Parser + Copyright 2026 Cas Parser Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/package.json b/package.json index 0f1fb96..f19007c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cas-parser-node", - "version": "1.4.1", + "version": "1.5.0", "description": "The official TypeScript library for the Cas Parser API", "author": "Cas Parser ", "types": "dist/index.d.ts", @@ -35,7 +35,7 @@ "@types/node": "^20.17.6", "@typescript-eslint/eslint-plugin": "8.31.1", "@typescript-eslint/parser": "8.31.1", - "eslint": "^9.20.1", + "eslint": "^9.39.1", "eslint-plugin-prettier": "^5.4.1", "eslint-plugin-unused-imports": "^4.1.4", "iconv-lite": "^0.6.3", diff --git a/packages/mcp-server/README.md b/packages/mcp-server/README.md index 6dec8e6..ccea2f7 100644 --- a/packages/mcp-server/README.md +++ b/packages/mcp-server/README.md @@ -25,7 +25,7 @@ For clients with a configuration JSON, it might look something like this: "mcpServers": { "cas_parser_node_api": { "command": "npx", - "args": ["-y", "cas-parser-node-mcp", "--client=claude", "--tools=all"], + "args": ["-y", "cas-parser-node-mcp"], "env": { "CAS_PARSER_API_KEY": "My API Key" } @@ -34,97 +34,45 @@ For clients with a configuration JSON, it might look something like this: } ``` -## Exposing endpoints to your MCP Client +### Cursor -There are two ways to expose endpoints as tools in the MCP server: +If you use Cursor, you can install the MCP server by using the button below. You will need to set your environment variables +in Cursor's `mcp.json`, which can be found in Cursor Settings > Tools & MCP > New MCP Server. -1. Exposing one tool per endpoint, and filtering as necessary -2. Exposing a set of tools to dynamically discover and invoke endpoints from the API +[![Add to Cursor](https://cursor.com/deeplink/mcp-install-dark.svg)](https://cursor.com/en-US/install-mcp?name=cas-parser-node-mcp&config=eyJjb21tYW5kIjoibnB4IiwiYXJncyI6WyIteSIsImNhcy1wYXJzZXItbm9kZS1tY3AiXSwiZW52Ijp7IkNBU19QQVJTRVJfQVBJX0tFWSI6IlNldCB5b3VyIENBU19QQVJTRVJfQVBJX0tFWSBoZXJlLiJ9fQ) -### Filtering endpoints and tools +### VS Code -You can run the package on the command line to discover and filter the set of tools that are exposed by the -MCP Server. This can be helpful for large APIs where including all endpoints at once is too much for your AI's -context window. 
+If you use MCP, you can install the MCP server by clicking the link below. You will need to set your environment variables +in VS Code's `mcp.json`, which can be found via Command Palette > MCP: Open User Configuration. -You can filter by multiple aspects: +[Open VS Code](https://vscode.stainless.com/mcp/%7B%22name%22%3A%22cas-parser-node-mcp%22%2C%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22cas-parser-node-mcp%22%5D%2C%22env%22%3A%7B%22CAS_PARSER_API_KEY%22%3A%22Set%20your%20CAS_PARSER_API_KEY%20here.%22%7D%7D) -- `--tool` includes a specific tool by name -- `--resource` includes all tools under a specific resource, and can have wildcards, e.g. `my.resource*` -- `--operation` includes just read (get/list) or just write operations +### Claude Code -### Dynamic tools +If you use Claude Code, you can install the MCP server by running the command below in your terminal. You will need to set your +environment variables in Claude Code's `.claude.json`, which can be found in your home directory. -If you specify `--tools=dynamic` to the MCP server, instead of exposing one tool per endpoint in the API, it will -expose the following tools: - -1. `list_api_endpoints` - Discovers available endpoints, with optional filtering by search query -2. `get_api_endpoint_schema` - Gets detailed schema information for a specific endpoint -3. `invoke_api_endpoint` - Executes any endpoint with the appropriate parameters - -This allows you to have the full set of API endpoints available to your MCP Client, while not requiring that all -of their schemas be loaded into context at once. Instead, the LLM will automatically use these tools together to -search for, look up, and invoke endpoints dynamically. However, due to the indirect nature of the schemas, it -can struggle to provide the correct properties a bit more than when tools are imported explicitly. Therefore, -you can opt-in to explicit tools, the dynamic tools, or both. - -See more information with `--help`. - -All of these command-line options can be repeated, combined together, and have corresponding exclusion versions (e.g. `--no-tool`). - -Use `--list` to see the list of available tools, or see below. - -### Specifying the MCP Client - -Different clients have varying abilities to handle arbitrary tools and schemas. - -You can specify the client you are using with the `--client` argument, and the MCP server will automatically -serve tools and schemas that are more compatible with that client. - -- `--client=`: Set all capabilities based on a known MCP client - - - Valid values: `openai-agents`, `claude`, `claude-code`, `cursor` - - Example: `--client=cursor` - -Additionally, if you have a client not on the above list, or the client has gotten better -over time, you can manually enable or disable certain capabilities: - -- `--capability=`: Specify individual client capabilities - - Available capabilities: - - `top-level-unions`: Enable support for top-level unions in tool schemas - - `valid-json`: Enable JSON string parsing for arguments - - `refs`: Enable support for $ref pointers in schemas - - `unions`: Enable support for union types (anyOf) in schemas - - `formats`: Enable support for format validations in schemas (e.g. date-time, email) - - `tool-name-length=N`: Set maximum tool name length to N characters - - Example: `--capability=top-level-unions --capability=tool-name-length=40` - - Example: `--capability=top-level-unions,tool-name-length=40` - -### Examples - -1. 
Filter for read operations on cards: - -```bash ---resource=cards --operation=read ``` - -2. Exclude specific tools while including others: - -```bash ---resource=cards --no-tool=create_cards +claude mcp add --transport stdio cas_parser_node_api --env CAS_PARSER_API_KEY="Your CAS_PARSER_API_KEY here." -- npx -y cas-parser-node-mcp ``` -3. Configure for Cursor client with custom max tool name length: +## Code Mode -```bash ---client=cursor --capability=tool-name-length=40 -``` +This MCP server is built on the "Code Mode" tool scheme. In this MCP Server, +your agent will write code against the TypeScript SDK, which will then be executed in an +isolated sandbox. To accomplish this, the server will expose two tools to your agent: -4. Complex filtering with multiple criteria: +- The first tool is a docs search tool, which can be used to generically query for + documentation about your API/SDK. -```bash ---resource=cards,accounts --operation=read --tag=kyc --no-tool=create_cards -``` +- The second tool is a code tool, where the agent can write code against the TypeScript SDK. + The code will be executed in a sandbox environment without web or filesystem access. Then, + anything the code returns or prints will be returned to the agent as the result of the + tool call. + +Using this scheme, agents are capable of performing very complex tasks deterministically +and repeatably. ## Running remotely @@ -149,71 +97,3 @@ A configuration JSON for this server might look like this, assuming the server i } } ``` - -The command-line arguments for filtering tools and specifying clients can also be used as query parameters in the URL. -For example, to exclude specific tools while including others, use the URL: - -``` -http://localhost:3000?resource=cards&resource=accounts&no_tool=create_cards -``` - -Or, to configure for the Cursor client, with a custom max tool name length, use the URL: - -``` -http://localhost:3000?client=cursor&capability=tool-name-length%3D40 -``` - -## Importing the tools and server individually - -```js -// Import the server, generated endpoints, or the init function -import { server, endpoints, init } from "cas-parser-node-mcp/server"; - -// import a specific tool -import camsKfintechCasParser from "cas-parser-node-mcp/tools/cas-parser/cams-kfintech-cas-parser"; - -// initialize the server and all endpoints -init({ server, endpoints }); - -// manually start server -const transport = new StdioServerTransport(); -await server.connect(transport); - -// or initialize your own server with specific tools -const myServer = new McpServer(...); - -// define your own endpoint -const myCustomEndpoint = { - tool: { - name: 'my_custom_tool', - description: 'My custom tool', - inputSchema: zodToJsonSchema(z.object({ a_property: z.string() })), - }, - handler: async (client: client, args: any) => { - return { myResponse: 'Hello world!' }; - }) -}; - -// initialize the server with your custom endpoints -init({ server: myServer, endpoints: [camsKfintechCasParser, myCustomEndpoint] }); -``` - -## Available Tools - -The following tools are available in this MCP server. - -### Resource `CAS Parser`: - -- `cams_kfintech_cas_parser` (`write`): This endpoint specifically parses CAMS/KFintech CAS (Consolidated Account Statement) PDF files and returns data in a unified format. - Use this endpoint when you know the PDF is from CAMS or KFintech. -- `cdsl_cas_parser` (`write`): This endpoint specifically parses CDSL CAS (Consolidated Account Statement) PDF files and returns data in a unified format. 
- Use this endpoint when you know the PDF is from CDSL. -- `nsdl_cas_parser` (`write`): This endpoint specifically parses NSDL CAS (Consolidated Account Statement) PDF files and returns data in a unified format. - Use this endpoint when you know the PDF is from NSDL. -- `smart_parse_cas_parser` (`write`): This endpoint parses CAS (Consolidated Account Statement) PDF files from NSDL, CDSL, or CAMS/KFintech and returns data in a unified format. - It auto-detects the CAS type and transforms the data into a consistent structure regardless of the source. - -### Resource `CAS Generator`: - -- `generate_cas_cas_generator` (`write`): This endpoint generates CAS (Consolidated Account Statement) documents by submitting a mailback request to the specified CAS authority. - Currently only supports KFintech, with plans to support CAMS, CDSL, and NSDL in the future. diff --git a/packages/mcp-server/build b/packages/mcp-server/build index d842d02..90e5f33 100644 --- a/packages/mcp-server/build +++ b/packages/mcp-server/build @@ -32,7 +32,7 @@ chmod +x dist/index.js DIST_PATH=./dist PKG_IMPORT_PATH=cas-parser-node-mcp/ node ../../scripts/utils/postprocess-files.cjs # mcp bundle -rm -rf dist-bundle cas_parser_node_api.dxt; mkdir dist-bundle +rm -rf dist-bundle cas_parser_node_api.mcpb; mkdir dist-bundle # copy package.json PKG_JSON_PATH=../../packages/mcp-server/package.json node ../../scripts/utils/make-dist-package-json.cjs > dist-bundle/package.json @@ -48,9 +48,9 @@ cd .. # pack bundle cp manifest.json dist-bundle -npx dxt pack dist-bundle cas_parser_node_api.dxt +npx mcpb pack dist-bundle cas_parser_node_api.mcpb -npx dxt sign cas_parser_node_api.dxt --self-signed +npx mcpb sign cas_parser_node_api.mcpb --self-signed # clean up rm -rf dist-bundle diff --git a/packages/mcp-server/manifest.json b/packages/mcp-server/manifest.json index d407451..287df34 100644 --- a/packages/mcp-server/manifest.json +++ b/packages/mcp-server/manifest.json @@ -1,5 +1,5 @@ { - "dxt_version": "0.1", + "dxt_version": "0.2", "name": "cas-parser-node-mcp", "version": "1.3.0", "description": "The official MCP Server for the Cas Parser API", @@ -15,7 +15,7 @@ "documentation": "https://docs.casparser.in/reference", "server": { "type": "node", - "entry_point": "${__dirname}/index.js", + "entry_point": "index.js", "mcp_config": { "command": "node", "args": ["${__dirname}/index.js"], @@ -32,6 +32,7 @@ "type": "string" } }, + "tools": [], "tools_generated": true, "compatibility": { "runtimes": { diff --git a/packages/mcp-server/package.json b/packages/mcp-server/package.json index 40f498d..c1afae6 100644 --- a/packages/mcp-server/package.json +++ b/packages/mcp-server/package.json @@ -1,6 +1,6 @@ { "name": "cas-parser-node-mcp", - "version": "1.4.1", + "version": "1.5.0", "description": "The official MCP Server for the Cas Parser API", "author": "Cas Parser ", "types": "dist/index.d.ts", @@ -32,12 +32,14 @@ "dependencies": { "cas-parser-node": "file:../../dist/", "@cloudflare/cabidela": "^0.2.4", - "@modelcontextprotocol/sdk": "^1.11.5", + "@modelcontextprotocol/sdk": "^1.24.0", "@valtown/deno-http-worker": "^0.0.21", "cors": "^2.8.5", "express": "^5.1.0", - "jq-web": "https://github.com/stainless-api/jq-web/releases/download/v0.8.6/jq-web.tar.gz", + "fuse.js": "^7.1.0", + "jq-web": "https://github.com/stainless-api/jq-web/releases/download/v0.8.8/jq-web.tar.gz", "qs": "^6.14.0", + "typescript": "5.8.3", "yargs": "^17.7.2", "zod": "^3.25.20", "zod-to-json-schema": "^3.24.5", @@ -47,7 +49,7 @@ "mcp-server": "dist/index.js" }, 
"devDependencies": { - "@anthropic-ai/dxt": "^0.2.6", + "@anthropic-ai/mcpb": "1.1.0", "@types/cors": "^2.8.19", "@types/express": "^5.0.3", "@types/jest": "^29.4.0", @@ -64,8 +66,7 @@ "ts-morph": "^19.0.0", "ts-node": "^10.5.0", "tsc-multi": "https://github.com/stainless-api/tsc-multi/releases/download/v1.1.9/tsc-multi.tgz", - "tsconfig-paths": "^4.0.0", - "typescript": "5.8.3" + "tsconfig-paths": "^4.0.0" }, "imports": { "cas-parser-node-mcp": ".", diff --git a/packages/mcp-server/src/code-tool-paths.cts b/packages/mcp-server/src/code-tool-paths.cts deleted file mode 100644 index 15ce7f5..0000000 --- a/packages/mcp-server/src/code-tool-paths.cts +++ /dev/null @@ -1,3 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -export const workerPath = require.resolve('./code-tool-worker.mjs'); diff --git a/packages/mcp-server/src/code-tool-types.ts b/packages/mcp-server/src/code-tool-types.ts index 583728d..5d21166 100644 --- a/packages/mcp-server/src/code-tool-types.ts +++ b/packages/mcp-server/src/code-tool-types.ts @@ -11,4 +11,8 @@ export type WorkerSuccess = { logLines: string[]; errLines: string[]; }; -export type WorkerError = { message: string | undefined }; +export type WorkerError = { + message: string | undefined; + logLines: string[]; + errLines: string[]; +}; diff --git a/packages/mcp-server/src/code-tool-worker.ts b/packages/mcp-server/src/code-tool-worker.ts deleted file mode 100644 index f34d5ab..0000000 --- a/packages/mcp-server/src/code-tool-worker.ts +++ /dev/null @@ -1,46 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import util from 'node:util'; -import { WorkerInput, WorkerSuccess, WorkerError } from './code-tool-types'; -import { CasParser } from 'cas-parser-node'; - -const fetch = async (req: Request): Promise => { - const { opts, code } = (await req.json()) as WorkerInput; - const client = new CasParser({ - ...opts, - }); - - const logLines: string[] = []; - const errLines: string[] = []; - const console = { - log: (...args: unknown[]) => { - logLines.push(util.format(...args)); - }, - error: (...args: unknown[]) => { - errLines.push(util.format(...args)); - }, - }; - try { - let run_ = async (client: any) => {}; - eval(` - ${code} - run_ = run; - `); - const result = await run_(client); - return Response.json({ - result, - logLines, - errLines, - } satisfies WorkerSuccess); - } catch (e) { - const message = e instanceof Error ? e.message : undefined; - return Response.json( - { - message, - } satisfies WorkerError, - { status: 400, statusText: 'Code execution error' }, - ); - } -}; - -export default { fetch }; diff --git a/packages/mcp-server/src/code-tool.ts b/packages/mcp-server/src/code-tool.ts index 8428672..f65e97a 100644 --- a/packages/mcp-server/src/code-tool.ts +++ b/packages/mcp-server/src/code-tool.ts @@ -1,145 +1,61 @@ // File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
-import { dirname } from 'node:path'; -import { pathToFileURL } from 'node:url'; -import CasParser, { ClientOptions } from 'cas-parser-node'; -import { Endpoint, ContentBlock, Metadata } from './tools/types'; - +import { McpTool, Metadata, ToolCallResult, asTextContentResult } from './types'; import { Tool } from '@modelcontextprotocol/sdk/types.js'; - -import { WorkerInput, WorkerError, WorkerSuccess } from './code-tool-types'; - +import { readEnv } from './server'; +import { WorkerSuccess } from './code-tool-types'; /** * A tool that runs code against a copy of the SDK. * - * Instead of exposing every endpoint as it's own tool, which uses up too many tokens for LLMs to use at once, + * Instead of exposing every endpoint as its own tool, which uses up too many tokens for LLMs to use at once, * we expose a single tool that can be used to search for endpoints by name, resource, operation, or tag, and then * a generic endpoint that can be used to invoke any endpoint with the provided arguments. * * @param endpoints - The endpoints to include in the list. */ -export async function codeTool(): Promise { +export function codeTool(): McpTool { const metadata: Metadata = { resource: 'all', operation: 'write', tags: [] }; const tool: Tool = { name: 'execute', description: - 'Runs Typescript code to interact with the API.\nYou are a skilled programmer writing code to interface with the service.\nDefine an async function named "run" that takes a single parameter of an initialized client, and it will be run.\nDo not initialize a client, but instead use the client that you are given as a parameter.\nYou will be returned anything that your function returns, plus the results of any console.log statements.\nIf any code triggers an error, the tool will return an error response, so you do not need to add error handling unless you want to output something more helpful than the raw error.\nIt is not necessary to add comments to code, unless by adding those comments you believe that you can generate better code.\nThis code will run in a container, and you will not be able to use fetch or otherwise interact with the network calls other than through the client you are given.\nAny variables you define won\'t live between successive uses of this call, so make sure to return or log any data you might need later.', + 'Runs JavaScript code to interact with the API.\n\nYou are a skilled programmer writing code to interface with the service.\nDefine an async function named "run" that takes a single parameter of an initialized SDK client and it will be run.\nWrite code within this template:\n\n```\nasync function run(client) {\n // Fill this out\n}\n```\n\nYou will be returned anything that your function returns, plus the results of any console.log statements.\nIf any code triggers an error, the tool will return an error response, so you do not need to add error handling unless you want to output something more helpful than the raw error.\nIt is not necessary to add comments to code, unless by adding those comments you believe that you can generate better code.\nThis code will run in a container, and you will not be able to use fetch or otherwise interact with the network calls other than through the client you are given.\nAny variables you define won\'t live between successive uses of this call, so make sure to return or log any data you might need later.', inputSchema: { type: 'object', properties: { code: { type: 'string' } } }, }; - - // Import dynamically to avoid failing at import time in cases where the 
environment is not well-supported. - const { newDenoHTTPWorker } = await import('@valtown/deno-http-worker'); - const { workerPath } = await import('./code-tool-paths.cjs'); - - const handler = async (client: CasParser, args: unknown) => { - const baseURLHostname = new URL(client.baseURL).hostname; - const { code } = args as { code: string }; - - const worker = await newDenoHTTPWorker(pathToFileURL(workerPath), { - runFlags: [ - `--node-modules-dir=manual`, - `--allow-read=code-tool-worker.mjs,${workerPath.replace(/([\/\\]node_modules)[\/\\].+$/, '$1')}/`, - `--allow-net=${baseURLHostname}`, - // Allow environment variables because instantiating the client will try to read from them, - // even though they are not set. - '--allow-env', - ], - printOutput: true, - spawnOptions: { - cwd: dirname(workerPath), + const handler = async (_: unknown, args: any): Promise => { + const code = args.code as string; + + // this is not required, but passing a Stainless API key for the matching project_name + // will allow you to run code-mode queries against non-published versions of your SDK. + const stainlessAPIKey = readEnv('STAINLESS_API_KEY'); + const codeModeEndpoint = + readEnv('CODE_MODE_ENDPOINT_URL') ?? 'https://api.stainless.com/api/ai/code-tool'; + + const res = await fetch(codeModeEndpoint, { + method: 'POST', + headers: { + ...(stainlessAPIKey && { Authorization: stainlessAPIKey }), + 'Content-Type': 'application/json', + client_envs: JSON.stringify({ + CAS_PARSER_API_KEY: readEnv('CAS_PARSER_API_KEY'), + CAS_PARSER_BASE_URL: readEnv('CAS_PARSER_BASE_URL'), + }), }, + body: JSON.stringify({ + project_name: 'cas-parser', + client_opts: {}, + code, + }), }); - try { - const resp = await new Promise((resolve, reject) => { - worker.addEventListener('exit', (exitCode) => { - reject(new Error(`Worker exited with code ${exitCode}`)); - }); - - const opts: ClientOptions = { - baseURL: client.baseURL, - apiKey: client.apiKey, - defaultHeaders: { - 'X-Stainless-MCP': 'true', - }, - }; - - const req = worker.request( - 'http://localhost', - { - headers: { - 'content-type': 'application/json', - }, - method: 'POST', - }, - (resp) => { - const body: Uint8Array[] = []; - resp.on('error', (err) => { - reject(err); - }); - resp.on('data', (chunk) => { - body.push(chunk); - }); - resp.on('end', () => { - resolve( - new Response(Buffer.concat(body).toString(), { - status: resp.statusCode ?? 200, - headers: resp.headers as any, - }), - ); - }); - }, - ); - - const body = JSON.stringify({ - opts, - code, - } satisfies WorkerInput); - - req.write(body, (err) => { - if (err !== null && err !== undefined) { - reject(err); - } - }); - - req.end(); - }); - - if (resp.status === 200) { - const { result, logLines, errLines } = (await resp.json()) as WorkerSuccess; - const returnOutput: ContentBlock | null = - result === null ? null - : result === undefined ? null - : { - type: 'text', - text: typeof result === 'string' ? (result as string) : JSON.stringify(result), - }; - const logOutput: ContentBlock | null = - logLines.length === 0 ? - null - : { - type: 'text', - text: logLines.join('\n'), - }; - const errOutput: ContentBlock | null = - errLines.length === 0 ? 
- null - : { - type: 'text', - text: 'Error output:\n' + errLines.join('\n'), - }; - return { - content: [returnOutput, logOutput, errOutput].filter((block) => block !== null), - }; - } else { - const { message } = (await resp.json()) as WorkerError; - throw new Error(message); - } - } catch (e) { - throw e; - } finally { - worker.terminate(); + if (!res.ok) { + throw new Error( + `${res.status}: ${ + res.statusText + } error when trying to contact Code Tool server. Details: ${await res.text()}`, + ); } + + return asTextContentResult((await res.json()) as WorkerSuccess); }; return { metadata, tool, handler }; diff --git a/packages/mcp-server/src/compat.ts b/packages/mcp-server/src/compat.ts deleted file mode 100644 index f84053c..0000000 --- a/packages/mcp-server/src/compat.ts +++ /dev/null @@ -1,483 +0,0 @@ -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import { z } from 'zod'; -import { Endpoint } from './tools'; - -export interface ClientCapabilities { - topLevelUnions: boolean; - validJson: boolean; - refs: boolean; - unions: boolean; - formats: boolean; - toolNameLength: number | undefined; -} - -export const defaultClientCapabilities: ClientCapabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, -}; - -export const ClientType = z.enum(['openai-agents', 'claude', 'claude-code', 'cursor', 'infer']); -export type ClientType = z.infer; - -// Client presets for compatibility -// Note that these could change over time as models get better, so this is -// a best effort. -export const knownClients: Record, ClientCapabilities> = { - 'openai-agents': { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - claude: { - topLevelUnions: true, - validJson: false, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - 'claude-code': { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - cursor: { - topLevelUnions: false, - validJson: true, - refs: false, - unions: false, - formats: false, - toolNameLength: 50, - }, -}; - -/** - * Attempts to parse strings into JSON objects - */ -export function parseEmbeddedJSON(args: Record, schema: Record) { - let updated = false; - const newArgs: Record = Object.assign({}, args); - - for (const [key, value] of Object.entries(newArgs)) { - if (typeof value === 'string') { - try { - const parsed = JSON.parse(value); - // Only parse if result is a plain object (not array, null, or primitive) - if (parsed && typeof parsed === 'object' && !Array.isArray(parsed)) { - newArgs[key] = parsed; - updated = true; - } - } catch (e) { - // Not valid JSON, leave as is - } - } - } - - if (updated) { - return newArgs; - } - - return args; -} - -export type JSONSchema = { - type?: string; - properties?: Record; - required?: string[]; - anyOf?: JSONSchema[]; - $ref?: string; - $defs?: Record; - [key: string]: any; -}; - -/** - * Truncates tool names to the specified length while ensuring uniqueness. - * If truncation would cause duplicate names, appends a number to make them unique. 
- */ -export function truncateToolNames(names: string[], maxLength: number): Map { - if (maxLength <= 0) { - return new Map(); - } - - const renameMap = new Map(); - const usedNames = new Set(); - - const toTruncate = names.filter((name) => name.length > maxLength); - - if (toTruncate.length === 0) { - return renameMap; - } - - const willCollide = - new Set(toTruncate.map((name) => name.slice(0, maxLength - 1))).size < toTruncate.length; - - if (!willCollide) { - for (const name of toTruncate) { - const truncatedName = name.slice(0, maxLength); - renameMap.set(name, truncatedName); - } - } else { - const baseLength = maxLength - 1; - - for (const name of toTruncate) { - const baseName = name.slice(0, baseLength); - let counter = 1; - - while (usedNames.has(baseName + counter)) { - counter++; - } - - const finalName = baseName + counter; - renameMap.set(name, finalName); - usedNames.add(finalName); - } - } - - return renameMap; -} - -/** - * Removes top-level unions from a tool by splitting it into multiple tools, - * one for each variant in the union. - */ -export function removeTopLevelUnions(tool: Tool): Tool[] { - const inputSchema = tool.inputSchema as JSONSchema; - const variants = inputSchema.anyOf; - - if (!variants || !Array.isArray(variants) || variants.length === 0) { - return [tool]; - } - - const defs = inputSchema.$defs || {}; - - return variants.map((variant, index) => { - const variantSchema: JSONSchema = { - ...inputSchema, - ...variant, - type: 'object', - properties: { - ...(inputSchema.properties || {}), - ...(variant.properties || {}), - }, - }; - - delete variantSchema.anyOf; - - if (!variantSchema['description']) { - variantSchema['description'] = tool.description; - } - - const usedDefs = findUsedDefs(variant, defs); - if (Object.keys(usedDefs).length > 0) { - variantSchema.$defs = usedDefs; - } else { - delete variantSchema.$defs; - } - - return { - ...tool, - name: `${tool.name}_${toSnakeCase(variant['title'] || `variant${index + 1}`)}`, - description: variant['description'] || tool.description, - inputSchema: variantSchema, - } as Tool; - }); -} - -function findUsedDefs( - schema: JSONSchema, - defs: Record, - visited: Set = new Set(), -): Record { - const usedDefs: Record = {}; - - if (typeof schema !== 'object' || schema === null) { - return usedDefs; - } - - if (schema.$ref) { - const refParts = schema.$ref.split('/'); - if (refParts[0] === '#' && refParts[1] === '$defs' && refParts[2]) { - const defName = refParts[2]; - const def = defs[defName]; - if (def && !visited.has(schema.$ref)) { - usedDefs[defName] = def; - visited.add(schema.$ref); - Object.assign(usedDefs, findUsedDefs(def, defs, visited)); - visited.delete(schema.$ref); - } - } - return usedDefs; - } - - for (const key in schema) { - if (key !== '$defs' && typeof schema[key] === 'object' && schema[key] !== null) { - Object.assign(usedDefs, findUsedDefs(schema[key] as JSONSchema, defs, visited)); - } - } - - return usedDefs; -} - -// Export for testing -export { findUsedDefs }; - -/** - * Inlines all $refs in a schema, eliminating $defs. - * If a circular reference is detected, the circular property is removed. - */ -export function inlineRefs(schema: JSONSchema): JSONSchema { - if (!schema || typeof schema !== 'object') { - return schema; - } - - const clonedSchema = { ...schema }; - const defs: Record = schema.$defs || {}; - - delete clonedSchema.$defs; - - const result = inlineRefsRecursive(clonedSchema, defs, new Set()); - // The top level can never be null - return result === null ? 
{} : result; -} - -function inlineRefsRecursive( - schema: JSONSchema, - defs: Record, - refPath: Set, -): JSONSchema | null { - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => { - const processed = inlineRefsRecursive(item, defs, refPath); - return processed === null ? {} : processed; - }) as JSONSchema; - } - - const result = { ...schema }; - - if ('$ref' in result && typeof result.$ref === 'string') { - if (result.$ref.startsWith('#/$defs/')) { - const refName = result.$ref.split('/').pop() as string; - const def = defs[refName]; - - // If we've already seen this ref in our path, we have a circular reference - if (refPath.has(result.$ref)) { - // For circular references, we completely remove the property - // by returning null. The parent will remove it. - return null; - } - - if (def) { - const newRefPath = new Set(refPath); - newRefPath.add(result.$ref); - - const inlinedDef = inlineRefsRecursive({ ...def }, defs, newRefPath); - - if (inlinedDef === null) { - return { ...result }; - } - - // Merge the inlined definition with the original schema's properties - // but preserve things like description, etc. - const { $ref, ...rest } = result; - return { ...inlinedDef, ...rest }; - } - } - - // Keep external refs as-is - return result; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - const processed = inlineRefsRecursive(result[key] as JSONSchema, defs, refPath); - if (processed === null) { - // Remove properties that would cause circular references - delete result[key]; - } else { - result[key] = processed; - } - } - } - - return result; -} - -/** - * Removes anyOf fields from a schema, using only the first variant. - */ -export function removeAnyOf(schema: JSONSchema): JSONSchema { - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => removeAnyOf(item)) as JSONSchema; - } - - const result = { ...schema }; - - if ('anyOf' in result && Array.isArray(result.anyOf) && result.anyOf.length > 0) { - const firstVariant = result.anyOf[0]; - - if (firstVariant && typeof firstVariant === 'object') { - // Special handling for properties to ensure deep merge - if (firstVariant.properties && result.properties) { - result.properties = { - ...result.properties, - ...(firstVariant.properties as Record), - }; - } else if (firstVariant.properties) { - result.properties = { ...firstVariant.properties }; - } - - for (const key in firstVariant) { - if (key !== 'properties') { - result[key] = firstVariant[key]; - } - } - } - - delete result.anyOf; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - result[key] = removeAnyOf(result[key] as JSONSchema); - } - } - - return result; -} - -/** - * Removes format fields from a schema and appends them to the description. 
- */ -export function removeFormats(schema: JSONSchema, formatsCapability: boolean): JSONSchema { - if (formatsCapability) { - return schema; - } - - if (!schema || typeof schema !== 'object') { - return schema; - } - - if (Array.isArray(schema)) { - return schema.map((item) => removeFormats(item, formatsCapability)) as JSONSchema; - } - - const result = { ...schema }; - - if ('format' in result && typeof result['format'] === 'string') { - const formatStr = `(format: "${result['format']}")`; - - if ('description' in result && typeof result['description'] === 'string') { - result['description'] = `${result['description']} ${formatStr}`; - } else { - result['description'] = formatStr; - } - - delete result['format']; - } - - for (const key in result) { - if (result[key] && typeof result[key] === 'object') { - result[key] = removeFormats(result[key] as JSONSchema, formatsCapability); - } - } - - return result; -} - -/** - * Applies all compatibility transformations to the endpoints based on the provided capabilities. - */ -export function applyCompatibilityTransformations( - endpoints: Endpoint[], - capabilities: ClientCapabilities, -): Endpoint[] { - let transformedEndpoints = [...endpoints]; - - // Handle top-level unions first as this changes tool names - if (!capabilities.topLevelUnions) { - const newEndpoints: Endpoint[] = []; - - for (const endpoint of transformedEndpoints) { - const variantTools = removeTopLevelUnions(endpoint.tool); - - if (variantTools.length === 1) { - newEndpoints.push(endpoint); - } else { - for (const variantTool of variantTools) { - newEndpoints.push({ - ...endpoint, - tool: variantTool, - }); - } - } - } - - transformedEndpoints = newEndpoints; - } - - if (capabilities.toolNameLength) { - const toolNames = transformedEndpoints.map((endpoint) => endpoint.tool.name); - const renameMap = truncateToolNames(toolNames, capabilities.toolNameLength); - - transformedEndpoints = transformedEndpoints.map((endpoint) => ({ - ...endpoint, - tool: { - ...endpoint.tool, - name: renameMap.get(endpoint.tool.name) ?? endpoint.tool.name, - }, - })); - } - - if (!capabilities.refs || !capabilities.unions || !capabilities.formats) { - transformedEndpoints = transformedEndpoints.map((endpoint) => { - let schema = endpoint.tool.inputSchema as JSONSchema; - - if (!capabilities.refs) { - schema = inlineRefs(schema); - } - - if (!capabilities.unions) { - schema = removeAnyOf(schema); - } - - if (!capabilities.formats) { - schema = removeFormats(schema, capabilities.formats); - } - - return { - ...endpoint, - tool: { - ...endpoint.tool, - inputSchema: schema as typeof endpoint.tool.inputSchema, - }, - }; - }); - } - - return transformedEndpoints; -} - -function toSnakeCase(str: string): string { - return str - .replace(/\s+/g, '_') - .replace(/([a-z])([A-Z])/g, '$1_$2') - .toLowerCase(); -} diff --git a/packages/mcp-server/src/docs-search-tool.ts b/packages/mcp-server/src/docs-search-tool.ts new file mode 100644 index 0000000..23b984a --- /dev/null +++ b/packages/mcp-server/src/docs-search-tool.ts @@ -0,0 +1,59 @@ +// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +import { Metadata, asTextContentResult } from './types'; + +import { Tool } from '@modelcontextprotocol/sdk/types.js'; + +export const metadata: Metadata = { + resource: 'all', + operation: 'read', + tags: [], + httpMethod: 'get', +}; + +export const tool: Tool = { + name: 'search_docs', + description: 'Search for documentation for how to use the client to interact with the API.', + inputSchema: { + type: 'object', + properties: { + query: { + type: 'string', + description: 'The query to search for.', + }, + language: { + type: 'string', + description: 'The language for the SDK to search for.', + enum: ['http', 'python', 'go', 'typescript', 'javascript', 'terraform', 'ruby', 'java', 'kotlin'], + }, + detail: { + type: 'string', + description: 'The amount of detail to return.', + enum: ['default', 'verbose'], + }, + }, + required: ['query', 'language'], + }, + annotations: { + readOnlyHint: true, + }, +}; + +const docsSearchURL = + process.env['DOCS_SEARCH_URL'] || 'https://api.stainless.com/api/projects/cas-parser/docs/search'; + +export const handler = async (_: unknown, args: Record | undefined) => { + const body = args as any; + const query = new URLSearchParams(body).toString(); + const result = await fetch(`${docsSearchURL}?${query}`); + + if (!result.ok) { + throw new Error( + `${result.status}: ${result.statusText} when using doc search tool. Details: ${await result.text()}`, + ); + } + + return asTextContentResult(await result.json()); +}; + +export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/dynamic-tools.ts b/packages/mcp-server/src/dynamic-tools.ts deleted file mode 100644 index e0b4266..0000000 --- a/packages/mcp-server/src/dynamic-tools.ts +++ /dev/null @@ -1,159 +0,0 @@ -import CasParser from 'cas-parser-node'; -import { Endpoint, asTextContentResult, ToolCallResult } from './tools/types'; -import { zodToJsonSchema } from 'zod-to-json-schema'; -import { z } from 'zod'; -import { Cabidela } from '@cloudflare/cabidela'; - -function zodToInputSchema(schema: z.ZodSchema) { - return { - type: 'object' as const, - ...(zodToJsonSchema(schema) as any), - }; -} - -/** - * A list of tools that expose all the endpoints in the API dynamically. - * - * Instead of exposing every endpoint as it's own tool, which uses up too many tokens for LLMs to use at once, - * we expose a single tool that can be used to search for endpoints by name, resource, operation, or tag, and then - * a generic endpoint that can be used to invoke any endpoint with the provided arguments. - * - * @param endpoints - The endpoints to include in the list. - */ -export function dynamicTools(endpoints: Endpoint[]): Endpoint[] { - const listEndpointsSchema = z.object({ - search_query: z - .string() - .optional() - .describe( - 'An optional search query to filter the endpoints by. Provide a partial name, resource, operation, or tag to filter the endpoints returned.', - ), - }); - - const listEndpointsTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'read' as const, - tags: [], - }, - tool: { - name: 'list_api_endpoints', - description: 'List or search for all endpoints in the Cas Parser TypeScript API', - inputSchema: zodToInputSchema(listEndpointsSchema), - }, - handler: async ( - client: CasParser, - args: Record | undefined, - ): Promise => { - const query = args && listEndpointsSchema.parse(args).search_query?.trim(); - - const filteredEndpoints = - query && query.length > 0 ? 
- endpoints.filter((endpoint) => { - const fieldsToMatch = [ - endpoint.tool.name, - endpoint.tool.description, - endpoint.metadata.resource, - endpoint.metadata.operation, - ...endpoint.metadata.tags, - ]; - return fieldsToMatch.some((field) => field && field.toLowerCase().includes(query.toLowerCase())); - }) - : endpoints; - - return asTextContentResult({ - tools: filteredEndpoints.map(({ tool, metadata }) => ({ - name: tool.name, - description: tool.description, - resource: metadata.resource, - operation: metadata.operation, - tags: metadata.tags, - })), - }); - }, - }; - - const getEndpointSchema = z.object({ - endpoint: z.string().describe('The name of the endpoint to get the schema for.'), - }); - const getEndpointTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'read' as const, - tags: [], - }, - tool: { - name: 'get_api_endpoint_schema', - description: - 'Get the schema for an endpoint in the Cas Parser TypeScript API. You can use the schema returned by this tool to invoke an endpoint with the `invoke_api_endpoint` tool.', - inputSchema: zodToInputSchema(getEndpointSchema), - }, - handler: async (client: CasParser, args: Record | undefined) => { - if (!args) { - throw new Error('No endpoint provided'); - } - const endpointName = getEndpointSchema.parse(args).endpoint; - - const endpoint = endpoints.find((e) => e.tool.name === endpointName); - if (!endpoint) { - throw new Error(`Endpoint ${endpointName} not found`); - } - return asTextContentResult(endpoint.tool); - }, - }; - - const invokeEndpointSchema = z.object({ - endpoint_name: z.string().describe('The name of the endpoint to invoke.'), - args: z - .record(z.string(), z.any()) - .describe( - 'The arguments to pass to the endpoint. This must match the schema returned by the `get_api_endpoint_schema` tool.', - ), - }); - - const invokeEndpointTool = { - metadata: { - resource: 'dynamic_tools', - operation: 'write' as const, - tags: [], - }, - tool: { - name: 'invoke_api_endpoint', - description: - 'Invoke an endpoint in the Cas Parser TypeScript API. Note: use the `list_api_endpoints` tool to get the list of endpoints and `get_api_endpoint_schema` tool to get the schema for an endpoint.', - inputSchema: zodToInputSchema(invokeEndpointSchema), - }, - handler: async ( - client: CasParser, - args: Record | undefined, - ): Promise => { - if (!args) { - throw new Error('No endpoint provided'); - } - const { success, data, error } = invokeEndpointSchema.safeParse(args); - if (!success) { - throw new Error(`Invalid arguments for endpoint. ${error?.format()}`); - } - const { endpoint_name, args: endpointArgs } = data; - - const endpoint = endpoints.find((e) => e.tool.name === endpoint_name); - if (!endpoint) { - throw new Error( - `Endpoint ${endpoint_name} not found. 
Use the \`list_api_endpoints\` tool to get the list of available endpoints.`, - ); - } - - try { - // Try to validate the arguments for a better error message - const cabidela = new Cabidela(endpoint.tool.inputSchema, { fullErrors: true }); - cabidela.validate(endpointArgs); - } catch (error) { - throw new Error(`Invalid arguments for endpoint ${endpoint_name}:\n${error}`); - } - - return await endpoint.handler(client, endpointArgs); - }, - }; - - return [getEndpointTool, listEndpointsTool, invokeEndpointTool]; -} diff --git a/packages/mcp-server/src/filtering.ts b/packages/mcp-server/src/filtering.ts deleted file mode 100644 index 1aa9a40..0000000 --- a/packages/mcp-server/src/filtering.ts +++ /dev/null @@ -1,14 +0,0 @@ -// @ts-nocheck -import initJq from 'jq-web'; - -export async function maybeFilter(jqFilter: unknown | undefined, response: any): Promise { - if (jqFilter && typeof jqFilter === 'string') { - return await jq(response, jqFilter); - } else { - return response; - } -} - -async function jq(json: any, jqFilter: string) { - return (await initJq).json(json, jqFilter); -} diff --git a/packages/mcp-server/src/http.ts b/packages/mcp-server/src/http.ts index ec34ab4..2366d8f 100644 --- a/packages/mcp-server/src/http.ts +++ b/packages/mcp-server/src/http.ts @@ -4,38 +4,21 @@ import { McpServer } from '@modelcontextprotocol/sdk/server/mcp'; import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js'; import express from 'express'; -import { fromError } from 'zod-validation-error/v3'; -import { McpOptions, parseQueryOptions } from './options'; +import { McpOptions } from './options'; import { ClientOptions, initMcpServer, newMcpServer } from './server'; import { parseAuthHeaders } from './headers'; const newServer = ({ clientOptions, - mcpOptions: defaultMcpOptions, req, res, }: { clientOptions: ClientOptions; - mcpOptions: McpOptions; req: express.Request; res: express.Response; }): McpServer | null => { const server = newMcpServer(); - let mcpOptions: McpOptions; - try { - mcpOptions = parseQueryOptions(defaultMcpOptions, req.query); - } catch (error) { - res.status(400).json({ - jsonrpc: '2.0', - error: { - code: -32000, - message: `Invalid request: ${fromError(error)}`, - }, - }); - return null; - } - try { const authOptions = parseAuthHeaders(req); initMcpServer({ @@ -44,14 +27,13 @@ const newServer = ({ ...clientOptions, ...authOptions, }, - mcpOptions, }); - } catch { + } catch (error) { res.status(401).json({ jsonrpc: '2.0', error: { code: -32000, - message: 'Unauthorized', + message: `Unauthorized: ${error instanceof Error ? 
error.message : error}`, }, }); return null; diff --git a/packages/mcp-server/src/index.ts b/packages/mcp-server/src/index.ts index 4850a0e..0f6dd42 100644 --- a/packages/mcp-server/src/index.ts +++ b/packages/mcp-server/src/index.ts @@ -1,20 +1,15 @@ #!/usr/bin/env node import { selectTools } from './server'; -import { Endpoint, endpoints } from './tools'; import { McpOptions, parseCLIOptions } from './options'; import { launchStdioServer } from './stdio'; import { launchStreamableHTTPServer } from './http'; +import type { McpTool } from './types'; async function main() { const options = parseOptionsOrError(); - if (options.list) { - listAllTools(); - return; - } - - const selectedTools = await selectToolsOrError(endpoints, options); + const selectedTools = await selectToolsOrError(options); console.error( `MCP Server starting with ${selectedTools.length} tools:`, @@ -23,7 +18,7 @@ async function main() { switch (options.transport) { case 'stdio': - await launchStdioServer(options); + await launchStdioServer(); break; case 'http': await launchStreamableHTTPServer(options, options.port ?? options.socket); @@ -47,9 +42,9 @@ function parseOptionsOrError() { } } -async function selectToolsOrError(endpoints: Endpoint[], options: McpOptions): Promise { +async function selectToolsOrError(options: McpOptions): Promise { try { - const includedTools = await selectTools(endpoints, options); + const includedTools = selectTools(options); if (includedTools.length === 0) { console.error('No tools match the provided filters.'); process.exit(1); @@ -64,45 +59,3 @@ async function selectToolsOrError(endpoints: Endpoint[], options: McpOptions): P process.exit(1); } } - -function listAllTools() { - if (endpoints.length === 0) { - console.log('No tools available.'); - return; - } - console.log('Available tools:\n'); - - // Group endpoints by resource - const resourceGroups = new Map(); - - for (const endpoint of endpoints) { - const resource = endpoint.metadata.resource; - if (!resourceGroups.has(resource)) { - resourceGroups.set(resource, []); - } - resourceGroups.get(resource)!.push(endpoint); - } - - // Sort resources alphabetically - const sortedResources = Array.from(resourceGroups.keys()).sort(); - - // Display hierarchically by resource - for (const resource of sortedResources) { - console.log(`Resource: ${resource}`); - - const resourceEndpoints = resourceGroups.get(resource)!; - // Sort endpoints by tool name - resourceEndpoints.sort((a, b) => a.tool.name.localeCompare(b.tool.name)); - - for (const endpoint of resourceEndpoints) { - const { - tool, - metadata: { operation, tags }, - } = endpoint; - - console.log(` - ${tool.name} (${operation}) ${tags.length > 0 ? 
`tags: ${tags.join(', ')}` : ''}`); - console.log(` Description: ${tool.description}`); - } - console.log(''); - } -} diff --git a/packages/mcp-server/src/options.ts b/packages/mcp-server/src/options.ts index ecc9f10..6c8bb8d 100644 --- a/packages/mcp-server/src/options.ts +++ b/packages/mcp-server/src/options.ts @@ -2,139 +2,31 @@ import qs from 'qs'; import yargs from 'yargs'; import { hideBin } from 'yargs/helpers'; import z from 'zod'; -import { endpoints, Filter } from './tools'; -import { ClientCapabilities, knownClients, ClientType } from './compat'; export type CLIOptions = McpOptions & { - list: boolean; transport: 'stdio' | 'http'; port: number | undefined; socket: string | undefined; }; export type McpOptions = { - client?: ClientType | undefined; - includeDynamicTools?: boolean | undefined; - includeAllTools?: boolean | undefined; - includeCodeTools?: boolean | undefined; - filters?: Filter[] | undefined; - capabilities?: Partial | undefined; + includeDocsTools?: boolean | undefined; }; -const CAPABILITY_CHOICES = [ - 'top-level-unions', - 'valid-json', - 'refs', - 'unions', - 'formats', - 'tool-name-length', -] as const; - -type Capability = (typeof CAPABILITY_CHOICES)[number]; - -function parseCapabilityValue(cap: string): { name: Capability; value?: number } { - if (cap.startsWith('tool-name-length=')) { - const parts = cap.split('='); - if (parts.length === 2) { - const length = parseInt(parts[1]!, 10); - if (!isNaN(length)) { - return { name: 'tool-name-length', value: length }; - } - throw new Error(`Invalid tool-name-length value: ${parts[1]}. Expected a number.`); - } - throw new Error(`Invalid format for tool-name-length. Expected tool-name-length=N.`); - } - if (!CAPABILITY_CHOICES.includes(cap as Capability)) { - throw new Error(`Unknown capability: ${cap}. 
Valid capabilities are: ${CAPABILITY_CHOICES.join(', ')}`); - } - return { name: cap as Capability }; -} - export function parseCLIOptions(): CLIOptions { const opts = yargs(hideBin(process.argv)) .option('tools', { type: 'string', array: true, - choices: ['dynamic', 'all', 'code'], + choices: ['code', 'docs'], description: 'Use dynamic tools or all tools', }) .option('no-tools', { type: 'string', array: true, - choices: ['dynamic', 'all', 'code'], + choices: ['code', 'docs'], description: 'Do not use any dynamic or all tools', }) - .option('tool', { - type: 'string', - array: true, - description: 'Include tools matching the specified names', - }) - .option('resource', { - type: 'string', - array: true, - description: 'Include tools matching the specified resources', - }) - .option('operation', { - type: 'string', - array: true, - choices: ['read', 'write'], - description: 'Include tools matching the specified operations', - }) - .option('tag', { - type: 'string', - array: true, - description: 'Include tools with the specified tags', - }) - .option('no-tool', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified names', - }) - .option('no-resource', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified resources', - }) - .option('no-operation', { - type: 'string', - array: true, - description: 'Exclude tools matching the specified operations', - }) - .option('no-tag', { - type: 'string', - array: true, - description: 'Exclude tools with the specified tags', - }) - .option('list', { - type: 'boolean', - description: 'List all tools and exit', - }) - .option('client', { - type: 'string', - choices: Object.keys(knownClients), - description: 'Specify the MCP client being used', - }) - .option('capability', { - type: 'string', - array: true, - description: 'Specify client capabilities', - coerce: (values: string[]) => { - return values.flatMap((v) => v.split(',')); - }, - }) - .option('no-capability', { - type: 'string', - array: true, - description: 'Unset client capabilities', - choices: CAPABILITY_CHOICES, - coerce: (values: string[]) => { - return values.flatMap((v) => v.split(',')); - }, - }) - .option('describe-capabilities', { - type: 'boolean', - description: 'Print detailed explanation of client capabilities and exit', - }) .option('transport', { type: 'string', choices: ['stdio', 'http'], @@ -151,119 +43,19 @@ export function parseCLIOptions(): CLIOptions { }) .help(); - for (const [command, desc] of examples()) { - opts.example(command, desc); - } - const argv = opts.parseSync(); - // Handle describe-capabilities flag - if (argv.describeCapabilities) { - console.log(getCapabilitiesExplanation()); - process.exit(0); - } - - const filters: Filter[] = []; - - // Helper function to support comma-separated values - const splitValues = (values: string[] | undefined): string[] => { - if (!values) return []; - return values.flatMap((v) => v.split(',')); - }; - - for (const tag of splitValues(argv.tag)) { - filters.push({ type: 'tag', op: 'include', value: tag }); - } - - for (const tag of splitValues(argv.noTag)) { - filters.push({ type: 'tag', op: 'exclude', value: tag }); - } - - for (const resource of splitValues(argv.resource)) { - filters.push({ type: 'resource', op: 'include', value: resource }); - } - - for (const resource of splitValues(argv.noResource)) { - filters.push({ type: 'resource', op: 'exclude', value: resource }); - } - - for (const tool of splitValues(argv.tool)) { - filters.push({ type: 'tool', op: 
'include', value: tool }); - } - - for (const tool of splitValues(argv.noTool)) { - filters.push({ type: 'tool', op: 'exclude', value: tool }); - } + const shouldIncludeToolType = (toolType: 'code' | 'docs') => + argv.noTools?.includes(toolType) ? false + : argv.tools?.includes(toolType) ? true + : undefined; - for (const operation of splitValues(argv.operation)) { - filters.push({ type: 'operation', op: 'include', value: operation }); - } - - for (const operation of splitValues(argv.noOperation)) { - filters.push({ type: 'operation', op: 'exclude', value: operation }); - } - - // Parse client capabilities - const clientCapabilities: Partial = {}; - - // Apply individual capability overrides - if (Array.isArray(argv.capability)) { - for (const cap of argv.capability) { - const parsedCap = parseCapabilityValue(cap); - if (parsedCap.name === 'top-level-unions') { - clientCapabilities.topLevelUnions = true; - } else if (parsedCap.name === 'valid-json') { - clientCapabilities.validJson = true; - } else if (parsedCap.name === 'refs') { - clientCapabilities.refs = true; - } else if (parsedCap.name === 'unions') { - clientCapabilities.unions = true; - } else if (parsedCap.name === 'formats') { - clientCapabilities.formats = true; - } else if (parsedCap.name === 'tool-name-length') { - clientCapabilities.toolNameLength = parsedCap.value; - } - } - } - - // Handle no-capability options to unset capabilities - if (Array.isArray(argv.noCapability)) { - for (const cap of argv.noCapability) { - if (cap === 'top-level-unions') { - clientCapabilities.topLevelUnions = false; - } else if (cap === 'valid-json') { - clientCapabilities.validJson = false; - } else if (cap === 'refs') { - clientCapabilities.refs = false; - } else if (cap === 'unions') { - clientCapabilities.unions = false; - } else if (cap === 'formats') { - clientCapabilities.formats = false; - } else if (cap === 'tool-name-length') { - clientCapabilities.toolNameLength = undefined; - } - } - } - - const shouldIncludeToolType = (toolType: 'dynamic' | 'all' | 'code') => - explicitTools ? argv.tools?.includes(toolType) && !argv.noTools?.includes(toolType) : undefined; - - const explicitTools = Boolean(argv.tools || argv.noTools); - const includeDynamicTools = shouldIncludeToolType('dynamic'); - const includeAllTools = shouldIncludeToolType('all'); - const includeCodeTools = shouldIncludeToolType('code'); + const includeDocsTools = shouldIncludeToolType('docs'); const transport = argv.transport as 'stdio' | 'http'; - const client = argv.client as ClientType; return { - client: client && client !== 'infer' && knownClients[client] ? 
client : undefined, - includeDynamicTools, - includeAllTools, - includeCodeTools, - filters, - capabilities: clientCapabilities, - list: argv.list || false, + includeDocsTools, transport, port: argv.port, socket: argv.socket, @@ -280,177 +72,21 @@ const coerceArray = (zodType: T) => ); const QueryOptions = z.object({ - tools: coerceArray(z.enum(['dynamic', 'all'])).describe('Use dynamic tools or all tools'), - no_tools: coerceArray(z.enum(['dynamic', 'all'])).describe('Do not use dynamic tools or all tools'), + tools: coerceArray(z.enum(['code', 'docs'])).describe('Specify which MCP tools to use'), + no_tools: coerceArray(z.enum(['code', 'docs'])).describe('Specify which MCP tools to not use.'), tool: coerceArray(z.string()).describe('Include tools matching the specified names'), - resource: coerceArray(z.string()).describe('Include tools matching the specified resources'), - operation: coerceArray(z.enum(['read', 'write'])).describe( - 'Include tools matching the specified operations', - ), - tag: coerceArray(z.string()).describe('Include tools with the specified tags'), - no_tool: coerceArray(z.string()).describe('Exclude tools matching the specified names'), - no_resource: coerceArray(z.string()).describe('Exclude tools matching the specified resources'), - no_operation: coerceArray(z.enum(['read', 'write'])).describe( - 'Exclude tools matching the specified operations', - ), - no_tag: coerceArray(z.string()).describe('Exclude tools with the specified tags'), - client: ClientType.optional().describe('Specify the MCP client being used'), - capability: coerceArray(z.string()).describe('Specify client capabilities'), - no_capability: coerceArray(z.enum(CAPABILITY_CHOICES)).describe('Unset client capabilities'), }); export function parseQueryOptions(defaultOptions: McpOptions, query: unknown): McpOptions { const queryObject = typeof query === 'string' ? qs.parse(query) : query; const queryOptions = QueryOptions.parse(queryObject); - const filters: Filter[] = [...(defaultOptions.filters ?? 
[])]; - - for (const resource of queryOptions.resource || []) { - filters.push({ type: 'resource', op: 'include', value: resource }); - } - for (const operation of queryOptions.operation || []) { - filters.push({ type: 'operation', op: 'include', value: operation }); - } - for (const tag of queryOptions.tag || []) { - filters.push({ type: 'tag', op: 'include', value: tag }); - } - for (const tool of queryOptions.tool || []) { - filters.push({ type: 'tool', op: 'include', value: tool }); - } - for (const resource of queryOptions.no_resource || []) { - filters.push({ type: 'resource', op: 'exclude', value: resource }); - } - for (const operation of queryOptions.no_operation || []) { - filters.push({ type: 'operation', op: 'exclude', value: operation }); - } - for (const tag of queryOptions.no_tag || []) { - filters.push({ type: 'tag', op: 'exclude', value: tag }); - } - for (const tool of queryOptions.no_tool || []) { - filters.push({ type: 'tool', op: 'exclude', value: tool }); - } - - // Parse client capabilities - const clientCapabilities: Partial = { ...defaultOptions.capabilities }; - - for (const cap of queryOptions.capability || []) { - const parsed = parseCapabilityValue(cap); - if (parsed.name === 'top-level-unions') { - clientCapabilities.topLevelUnions = true; - } else if (parsed.name === 'valid-json') { - clientCapabilities.validJson = true; - } else if (parsed.name === 'refs') { - clientCapabilities.refs = true; - } else if (parsed.name === 'unions') { - clientCapabilities.unions = true; - } else if (parsed.name === 'formats') { - clientCapabilities.formats = true; - } else if (parsed.name === 'tool-name-length') { - clientCapabilities.toolNameLength = parsed.value; - } - } - - for (const cap of queryOptions.no_capability || []) { - if (cap === 'top-level-unions') { - clientCapabilities.topLevelUnions = false; - } else if (cap === 'valid-json') { - clientCapabilities.validJson = false; - } else if (cap === 'refs') { - clientCapabilities.refs = false; - } else if (cap === 'unions') { - clientCapabilities.unions = false; - } else if (cap === 'formats') { - clientCapabilities.formats = false; - } else if (cap === 'tool-name-length') { - clientCapabilities.toolNameLength = undefined; - } - } - - let dynamicTools: boolean | undefined = - queryOptions.no_tools && queryOptions.no_tools?.includes('dynamic') ? false - : queryOptions.tools?.includes('dynamic') ? true - : defaultOptions.includeDynamicTools; - - let allTools: boolean | undefined = - queryOptions.no_tools && queryOptions.no_tools?.includes('all') ? false - : queryOptions.tools?.includes('all') ? true - : defaultOptions.includeAllTools; + let docsTools: boolean | undefined = + queryOptions.no_tools && queryOptions.no_tools?.includes('docs') ? false + : queryOptions.tools?.includes('docs') ? true + : defaultOptions.includeDocsTools; return { - client: queryOptions.client ?? defaultOptions.client, - includeDynamicTools: dynamicTools, - includeAllTools: allTools, - includeCodeTools: undefined, - filters, - capabilities: clientCapabilities, + includeDocsTools: docsTools, }; } - -function getCapabilitiesExplanation(): string { - return ` -Client Capabilities Explanation: - -Different Language Models (LLMs) and the MCP clients that use them have varying limitations in how they handle tool schemas. Capability flags allow you to inform the MCP server about these limitations. 
- -When a capability flag is set to false, the MCP server will automatically adjust the tool schemas to work around that limitation, ensuring broader compatibility. - -Available Capabilities: - -# top-level-unions -Some clients/LLMs do not support JSON schemas with a union type (anyOf) at the root level. If a client lacks this capability, the MCP server splits tools with top-level unions into multiple separate tools, one for each variant in the union. - -# refs -Some clients/LLMs do not support $ref pointers for schema reuse. If a client lacks this capability, the MCP server automatically inlines all references ($defs) directly into the schema. Properties that would cause circular references are removed during this process. - -# valid-json -Some clients/LLMs may incorrectly send arguments as a JSON-encoded string instead of a proper JSON object. If a client *has* this capability, the MCP server will attempt to parse string values as JSON if the initial validation against the schema fails. - -# unions -Some clients/LLMs do not support union types (anyOf) in JSON schemas. If a client lacks this capability, the MCP server removes all anyOf fields and uses only the first variant as the schema. - -# formats -Some clients/LLMs do not support the 'format' keyword in JSON Schema specifications. If a client lacks this capability, the MCP server removes all format fields and appends the format information to the field's description in parentheses. - -# tool-name-length=N -Some clients/LLMs impose a maximum length on tool names. If this capability is set, the MCP server will automatically truncate tool names exceeding the specified length (N), ensuring uniqueness by appending numbers if necessary. - -Client Presets (--client): -Presets like '--client=openai-agents' or '--client=cursor' automatically configure these capabilities based on current known limitations of those clients, simplifying setup. - -Current presets: -${JSON.stringify(knownClients, null, 2)} - `; -} - -function examples(): [string, string][] { - const firstEndpoint = endpoints[0]!; - const secondEndpoint = - endpoints.find((e) => e.metadata.resource !== firstEndpoint.metadata.resource) || endpoints[1]; - const tag = endpoints.find((e) => e.metadata.tags.length > 0)?.metadata.tags[0]; - const otherEndpoint = secondEndpoint || firstEndpoint; - - return [ - [ - `--tool="${firstEndpoint.tool.name}" ${secondEndpoint ? `--tool="${secondEndpoint.tool.name}"` : ''}`, - 'Include tools by name', - ], - [ - `--resource="${firstEndpoint.metadata.resource}" --operation="read"`, - 'Filter by resource and operation', - ], - [ - `--resource="${otherEndpoint.metadata.resource}*" --no-tool="${otherEndpoint.tool.name}"`, - 'Use resource wildcards and exclusions', - ], - [`--client="cursor"`, 'Adjust schemas to be more compatible with Cursor'], - [ - `--capability="top-level-unions" --capability="tool-name-length=40"`, - 'Specify individual client capabilities', - ], - [ - `--client="cursor" --no-capability="tool-name-length"`, - 'Use cursor client preset but remove tool name length limit', - ], - ...(tag ? 
[[`--tag="${tag}"`, 'Filter based on tags'] as [string, string]] : []), - ]; -} diff --git a/packages/mcp-server/src/server.ts b/packages/mcp-server/src/server.ts index 13bcf36..c5a98ee 100644 --- a/packages/mcp-server/src/server.ts +++ b/packages/mcp-server/src/server.ts @@ -2,38 +2,26 @@ import { Server } from '@modelcontextprotocol/sdk/server/index.js'; import { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js'; -import { Endpoint, endpoints, HandlerFunction, query } from './tools'; import { CallToolRequestSchema, ListToolsRequestSchema, SetLevelRequestSchema, - Implementation, - Tool, } from '@modelcontextprotocol/sdk/types.js'; import { ClientOptions } from 'cas-parser-node'; import CasParser from 'cas-parser-node'; -import { - applyCompatibilityTransformations, - ClientCapabilities, - defaultClientCapabilities, - knownClients, - parseEmbeddedJSON, -} from './compat'; -import { dynamicTools } from './dynamic-tools'; import { codeTool } from './code-tool'; +import docsSearchTool from './docs-search-tool'; import { McpOptions } from './options'; +import { HandlerFunction, McpTool } from './types'; export { McpOptions } from './options'; -export { ClientType } from './compat'; -export { Filter } from './tools'; export { ClientOptions } from 'cas-parser-node'; -export { endpoints } from './tools'; export const newMcpServer = () => new McpServer( { name: 'cas_parser_node_api', - version: '1.4.1', + version: '1.5.0', }, { capabilities: { tools: {}, logging: {} } }, ); @@ -51,25 +39,6 @@ export function initMcpServer(params: { mcpOptions?: McpOptions; }) { const server = params.server instanceof McpServer ? params.server.server : params.server; - const mcpOptions = params.mcpOptions ?? {}; - - let providedEndpoints: Endpoint[] | null = null; - let endpointMap: Record | null = null; - - const initTools = async (implementation?: Implementation) => { - if (implementation && (!mcpOptions.client || mcpOptions.client === 'infer')) { - mcpOptions.client = - implementation.name.toLowerCase().includes('claude') ? 'claude' - : implementation.name.toLowerCase().includes('cursor') ? 
'cursor' - : undefined; - mcpOptions.capabilities = { - ...(mcpOptions.client && knownClients[mcpOptions.client]), - ...mcpOptions.capabilities, - }; - } - providedEndpoints ??= await selectTools(endpoints, mcpOptions); - endpointMap ??= Object.fromEntries(providedEndpoints.map((endpoint) => [endpoint.tool.name, endpoint])); - }; const logAtLevel = (level: 'debug' | 'info' | 'warning' | 'error') => @@ -95,26 +64,23 @@ export function initMcpServer(params: { }, }); + const providedTools = selectTools(params.mcpOptions); + const toolMap = Object.fromEntries(providedTools.map((mcpTool) => [mcpTool.tool.name, mcpTool])); + server.setRequestHandler(ListToolsRequestSchema, async () => { - if (providedEndpoints === null) { - await initTools(server.getClientVersion()); - } return { - tools: providedEndpoints!.map((endpoint) => endpoint.tool), + tools: providedTools.map((mcpTool) => mcpTool.tool), }; }); server.setRequestHandler(CallToolRequestSchema, async (request) => { - if (endpointMap === null) { - await initTools(server.getClientVersion()); - } const { name, arguments: args } = request.params; - const endpoint = endpointMap![name]; - if (!endpoint) { + const mcpTool = toolMap[name]; + if (!mcpTool) { throw new Error(`Unknown tool: ${name}`); } - return executeHandler(endpoint.tool, endpoint.handler, client, args, mcpOptions.capabilities); + return executeHandler(mcpTool.handler, client, args); }); server.setRequestHandler(SetLevelRequestSchema, async (request) => { @@ -144,45 +110,22 @@ export function initMcpServer(params: { /** * Selects the tools to include in the MCP Server based on the provided options. */ -export async function selectTools(endpoints: Endpoint[], options?: McpOptions): Promise { - const filteredEndpoints = query(options?.filters ?? [], endpoints); - - let includedTools = filteredEndpoints; - - if (includedTools.length > 0) { - if (options?.includeDynamicTools) { - includedTools = dynamicTools(includedTools); - } - } else { - if (options?.includeAllTools) { - includedTools = endpoints; - } else if (options?.includeDynamicTools) { - includedTools = dynamicTools(endpoints); - } else if (options?.includeCodeTools) { - includedTools = [await codeTool()]; - } else { - includedTools = endpoints; - } +export function selectTools(options?: McpOptions): McpTool[] { + const includedTools = [codeTool()]; + if (options?.includeDocsTools ?? true) { + includedTools.push(docsSearchTool); } - - const capabilities = { ...defaultClientCapabilities, ...options?.capabilities }; - return applyCompatibilityTransformations(includedTools, capabilities); + return includedTools; } /** * Runs the provided handler with the given client and arguments. 
*/ export async function executeHandler( - tool: Tool, handler: HandlerFunction, client: CasParser, args: Record | undefined, - compatibilityOptions?: Partial, ) { - const options = { ...defaultClientCapabilities, ...compatibilityOptions }; - if (!options.validJson && args) { - args = parseEmbeddedJSON(args, tool.inputSchema); - } return await handler(client, args || {}); } diff --git a/packages/mcp-server/src/stdio.ts b/packages/mcp-server/src/stdio.ts index d902a5b..f07696f 100644 --- a/packages/mcp-server/src/stdio.ts +++ b/packages/mcp-server/src/stdio.ts @@ -1,11 +1,10 @@ import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; import { initMcpServer, newMcpServer } from './server'; -import { McpOptions } from './options'; -export const launchStdioServer = async (options: McpOptions) => { +export const launchStdioServer = async () => { const server = newMcpServer(); - initMcpServer({ server, mcpOptions: options }); + initMcpServer({ server }); const transport = new StdioServerTransport(); await server.connect(transport); diff --git a/packages/mcp-server/src/tools.ts b/packages/mcp-server/src/tools.ts deleted file mode 100644 index 7e516de..0000000 --- a/packages/mcp-server/src/tools.ts +++ /dev/null @@ -1 +0,0 @@ -export * from './tools/index'; diff --git a/packages/mcp-server/src/tools/cas-generator/generate-cas-cas-generator.ts b/packages/mcp-server/src/tools/cas-generator/generate-cas-cas-generator.ts deleted file mode 100644 index 3075230..0000000 --- a/packages/mcp-server/src/tools/cas-generator/generate-cas-cas-generator.ts +++ /dev/null @@ -1,67 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { maybeFilter } from 'cas-parser-node-mcp/filtering'; -import { Metadata, asTextContentResult } from 'cas-parser-node-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import CasParser from 'cas-parser-node'; - -export const metadata: Metadata = { - resource: 'CAS Generator', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v4/generate', - operationId: 'generateCAS', -}; - -export const tool: Tool = { - name: 'generate_cas_cas_generator', - description: - "When using this tool, always use the `jq_filter` parameter to reduce the response size and improve performance.\n\nOnly omit if you're sure you don't need the data.\n\nThis endpoint generates CAS (Consolidated Account Statement) documents by submitting a mailback request to the specified CAS authority.\nCurrently only supports KFintech, with plans to support CAMS, CDSL, and NSDL in the future.\n\n\n# Response Schema\n```json\n{\n type: 'object',\n properties: {\n msg: {\n type: 'string'\n },\n status: {\n type: 'string'\n }\n }\n}\n```", - inputSchema: { - type: 'object', - properties: { - email: { - type: 'string', - description: 'Email address to receive the CAS document', - }, - from_date: { - type: 'string', - description: 'Start date for the CAS period (format YYYY-MM-DD)', - }, - password: { - type: 'string', - description: 'Password to protect the generated CAS PDF', - }, - to_date: { - type: 'string', - description: 'End date for the CAS period (format YYYY-MM-DD)', - }, - cas_authority: { - type: 'string', - description: 'CAS authority to generate the document from (currently only kfintech is supported)', - enum: ['kfintech', 'cams', 'cdsl', 'nsdl'], - }, - pan_no: { - type: 'string', - description: 'PAN number (optional for some CAS authorities)', - }, - jq_filter: { - type: 'string', - title: 
'jq Filter', - description: - 'A jq filter to apply to the response to include certain fields. Consult the output schema in the tool description to see the fields that are available.\n\nFor example: to include only the `name` field in every object of a results array, you can provide ".results[].name".\n\nFor more information, see the [jq documentation](https://jqlang.org/manual/).', - }, - }, - required: ['email', 'from_date', 'password', 'to_date'], - }, - annotations: {}, -}; - -export const handler = async (client: CasParser, args: Record | undefined) => { - const { jq_filter, ...body } = args as any; - return asTextContentResult(await maybeFilter(jq_filter, await client.casGenerator.generateCas(body))); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/cas-parser/cams-kfintech-cas-parser.ts b/packages/mcp-server/src/tools/cas-parser/cams-kfintech-cas-parser.ts deleted file mode 100644 index 1b14061..0000000 --- a/packages/mcp-server/src/tools/cas-parser/cams-kfintech-cas-parser.ts +++ /dev/null @@ -1,47 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asTextContentResult } from 'cas-parser-node-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import CasParser from 'cas-parser-node'; - -export const metadata: Metadata = { - resource: 'CAS Parser', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v4/cams_kfintech/parse', - operationId: 'camsKfintechParse', -}; - -export const tool: Tool = { - name: 'cams_kfintech_cas_parser', - description: - 'This endpoint specifically parses CAMS/KFintech CAS (Consolidated Account Statement) PDF files and returns data in a unified format.\nUse this endpoint when you know the PDF is from CAMS or KFintech.\n', - inputSchema: { - type: 'object', - properties: { - password: { - type: 'string', - description: 'Password for the PDF file (if required)', - }, - pdf_file: { - type: 'string', - description: 'Base64 encoded CAS PDF file', - }, - pdf_url: { - type: 'string', - description: 'URL to the CAS PDF file', - }, - }, - required: [], - }, - annotations: {}, -}; - -export const handler = async (client: CasParser, args: Record | undefined) => { - const body = args as any; - return asTextContentResult(await client.casParser.camsKfintech(body)); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/cas-parser/cdsl-cas-parser.ts b/packages/mcp-server/src/tools/cas-parser/cdsl-cas-parser.ts deleted file mode 100644 index ca21e12..0000000 --- a/packages/mcp-server/src/tools/cas-parser/cdsl-cas-parser.ts +++ /dev/null @@ -1,47 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asTextContentResult } from 'cas-parser-node-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import CasParser from 'cas-parser-node'; - -export const metadata: Metadata = { - resource: 'CAS Parser', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v4/cdsl/parse', - operationId: 'cdslParse', -}; - -export const tool: Tool = { - name: 'cdsl_cas_parser', - description: - 'This endpoint specifically parses CDSL CAS (Consolidated Account Statement) PDF files and returns data in a unified format.\nUse this endpoint when you know the PDF is from CDSL.\n', - inputSchema: { - type: 'object', - properties: { - password: { - type: 'string', - description: 'Password for the PDF file (if required)', - }, - pdf_file: { - type: 'string', - description: 'Base64 encoded CAS PDF file', - }, - pdf_url: { - type: 'string', - description: 'URL to the CAS PDF file', - }, - }, - required: [], - }, - annotations: {}, -}; - -export const handler = async (client: CasParser, args: Record | undefined) => { - const body = args as any; - return asTextContentResult(await client.casParser.cdsl(body)); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/cas-parser/nsdl-cas-parser.ts b/packages/mcp-server/src/tools/cas-parser/nsdl-cas-parser.ts deleted file mode 100644 index 756213e..0000000 --- a/packages/mcp-server/src/tools/cas-parser/nsdl-cas-parser.ts +++ /dev/null @@ -1,47 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, asTextContentResult } from 'cas-parser-node-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import CasParser from 'cas-parser-node'; - -export const metadata: Metadata = { - resource: 'CAS Parser', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v4/nsdl/parse', - operationId: 'nsdlParse', -}; - -export const tool: Tool = { - name: 'nsdl_cas_parser', - description: - 'This endpoint specifically parses NSDL CAS (Consolidated Account Statement) PDF files and returns data in a unified format.\nUse this endpoint when you know the PDF is from NSDL.\n', - inputSchema: { - type: 'object', - properties: { - password: { - type: 'string', - description: 'Password for the PDF file (if required)', - }, - pdf_file: { - type: 'string', - description: 'Base64 encoded CAS PDF file', - }, - pdf_url: { - type: 'string', - description: 'URL to the CAS PDF file', - }, - }, - required: [], - }, - annotations: {}, -}; - -export const handler = async (client: CasParser, args: Record | undefined) => { - const body = args as any; - return asTextContentResult(await client.casParser.nsdl(body)); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/cas-parser/smart-parse-cas-parser.ts b/packages/mcp-server/src/tools/cas-parser/smart-parse-cas-parser.ts deleted file mode 100644 index 2a764d1..0000000 --- a/packages/mcp-server/src/tools/cas-parser/smart-parse-cas-parser.ts +++ /dev/null @@ -1,47 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -import { Metadata, asTextContentResult } from 'cas-parser-node-mcp/tools/types'; - -import { Tool } from '@modelcontextprotocol/sdk/types.js'; -import CasParser from 'cas-parser-node'; - -export const metadata: Metadata = { - resource: 'CAS Parser', - operation: 'write', - tags: [], - httpMethod: 'post', - httpPath: '/v4/smart/parse', - operationId: 'smartParse', -}; - -export const tool: Tool = { - name: 'smart_parse_cas_parser', - description: - 'This endpoint parses CAS (Consolidated Account Statement) PDF files from NSDL, CDSL, or CAMS/KFintech and returns data in a unified format.\nIt auto-detects the CAS type and transforms the data into a consistent structure regardless of the source.\n', - inputSchema: { - type: 'object', - properties: { - password: { - type: 'string', - description: 'Password for the PDF file (if required)', - }, - pdf_file: { - type: 'string', - description: 'Base64 encoded CAS PDF file', - }, - pdf_url: { - type: 'string', - description: 'URL to the CAS PDF file', - }, - }, - required: [], - }, - annotations: {}, -}; - -export const handler = async (client: CasParser, args: Record | undefined) => { - const body = args as any; - return asTextContentResult(await client.casParser.smartParse(body)); -}; - -export default { metadata, tool, handler }; diff --git a/packages/mcp-server/src/tools/index.ts b/packages/mcp-server/src/tools/index.ts deleted file mode 100644 index bcae016..0000000 --- a/packages/mcp-server/src/tools/index.ts +++ /dev/null @@ -1,79 +0,0 @@ -// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import { Metadata, Endpoint, HandlerFunction } from './types'; - -export { Metadata, Endpoint, HandlerFunction }; - -import cams_kfintech_cas_parser from './cas-parser/cams-kfintech-cas-parser'; -import cdsl_cas_parser from './cas-parser/cdsl-cas-parser'; -import nsdl_cas_parser from './cas-parser/nsdl-cas-parser'; -import smart_parse_cas_parser from './cas-parser/smart-parse-cas-parser'; -import generate_cas_cas_generator from './cas-generator/generate-cas-cas-generator'; - -export const endpoints: Endpoint[] = []; - -function addEndpoint(endpoint: Endpoint) { - endpoints.push(endpoint); -} - -addEndpoint(cams_kfintech_cas_parser); -addEndpoint(cdsl_cas_parser); -addEndpoint(nsdl_cas_parser); -addEndpoint(smart_parse_cas_parser); -addEndpoint(generate_cas_cas_generator); - -export type Filter = { - type: 'resource' | 'operation' | 'tag' | 'tool'; - op: 'include' | 'exclude'; - value: string; -}; - -export function query(filters: Filter[], endpoints: Endpoint[]): Endpoint[] { - const allExcludes = filters.length > 0 && filters.every((filter) => filter.op === 'exclude'); - const unmatchedFilters = new Set(filters); - - const filtered = endpoints.filter((endpoint: Endpoint) => { - let included = false || allExcludes; - - for (const filter of filters) { - if (match(filter, endpoint)) { - unmatchedFilters.delete(filter); - included = filter.op === 'include'; - } - } - - return included; - }); - - // Check if any filters didn't match - const unmatched = Array.from(unmatchedFilters).filter((f) => f.type === 'tool' || f.type === 'resource'); - if (unmatched.length > 0) { - throw new Error( - `The following filters did not match any endpoints: ${unmatched - .map((f) => `${f.type}=${f.value}`) - .join(', ')}`, - ); - } - - return filtered; -} - -function match({ type, value }: Filter, endpoint: Endpoint): boolean { - switch (type) { - case 'resource': { - const regexStr = '^' + normalizeResource(value).replace(/\*/g, '.*') + 
'$'; - const regex = new RegExp(regexStr); - return regex.test(normalizeResource(endpoint.metadata.resource)); - } - case 'operation': - return endpoint.metadata.operation === value; - case 'tag': - return endpoint.metadata.tags.includes(value); - case 'tool': - return endpoint.tool.name === value; - } -} - -function normalizeResource(resource: string): string { - return resource.toLowerCase().replace(/[^a-z.*\-_]*/g, ''); -} diff --git a/packages/mcp-server/src/tools/types.ts b/packages/mcp-server/src/types.ts similarity index 91% rename from packages/mcp-server/src/tools/types.ts rename to packages/mcp-server/src/types.ts index 3cf0758..36dd727 100644 --- a/packages/mcp-server/src/tools/types.ts +++ b/packages/mcp-server/src/types.ts @@ -87,6 +87,18 @@ export async function asBinaryContentResult(response: Response): Promise { - it('should return original names when maxLength is 0 or negative', () => { - const names = ['tool1', 'tool2', 'tool3']; - expect(truncateToolNames(names, 0)).toEqual(new Map()); - expect(truncateToolNames(names, -1)).toEqual(new Map()); - }); - - it('should return original names when all names are shorter than maxLength', () => { - const names = ['tool1', 'tool2', 'tool3']; - expect(truncateToolNames(names, 10)).toEqual(new Map()); - }); - - it('should truncate names longer than maxLength', () => { - const names = ['very-long-tool-name', 'another-long-tool-name', 'short']; - expect(truncateToolNames(names, 10)).toEqual( - new Map([ - ['very-long-tool-name', 'very-long-'], - ['another-long-tool-name', 'another-lo'], - ]), - ); - }); - - it('should handle duplicate truncated names by appending numbers', () => { - const names = ['tool-name-a', 'tool-name-b', 'tool-name-c']; - expect(truncateToolNames(names, 8)).toEqual( - new Map([ - ['tool-name-a', 'tool-na1'], - ['tool-name-b', 'tool-na2'], - ['tool-name-c', 'tool-na3'], - ]), - ); - }); -}); - -describe('removeTopLevelUnions', () => { - const createTestTool = (overrides = {}): Tool => ({ - name: 'test-tool', - description: 'Test tool', - inputSchema: { - type: 'object', - properties: {}, - }, - ...overrides, - }); - - it('should return the original tool if it has no anyOf at the top level', () => { - const tool = createTestTool({ - inputSchema: { - type: 'object', - properties: { - foo: { type: 'string' }, - }, - }, - }); - - expect(removeTopLevelUnions(tool)).toEqual([tool]); - }); - - it('should split a tool with top-level anyOf into multiple tools', () => { - const tool = createTestTool({ - name: 'union-tool', - description: 'A tool with unions', - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - description: 'Its the first variant', - properties: { - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - ], - }, - }); - - const result = removeTopLevelUnions(tool); - - expect(result).toEqual([ - { - name: 'union-tool_first_variant', - description: 'Its the first variant', - inputSchema: { - type: 'object', - title: 'first variant', - description: 'Its the first variant', - properties: { - common: { type: 'string' }, - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - }, - { - name: 'union-tool_second_variant', - description: 'A tool with unions', - inputSchema: { - type: 'object', - title: 'second variant', - description: 'A tool with unions', - properties: { - common: { type: 'string' }, - 
variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - }, - ]); - }); - - it('should handle $defs and only include those used by the variant', () => { - const tool = createTestTool({ - name: 'defs-tool', - description: 'A tool with $defs', - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - $defs: { - def1: { type: 'string', format: 'email' }, - def2: { type: 'number', minimum: 0 }, - unused: { type: 'boolean' }, - }, - anyOf: [ - { - properties: { - email: { $ref: '#/$defs/def1' }, - }, - }, - { - properties: { - count: { $ref: '#/$defs/def2' }, - }, - }, - ], - }, - }); - - const result = removeTopLevelUnions(tool); - - expect(result).toEqual([ - { - name: 'defs-tool_variant1', - description: 'A tool with $defs', - inputSchema: { - type: 'object', - description: 'A tool with $defs', - properties: { - common: { type: 'string' }, - email: { $ref: '#/$defs/def1' }, - }, - $defs: { - def1: { type: 'string', format: 'email' }, - }, - }, - }, - { - name: 'defs-tool_variant2', - description: 'A tool with $defs', - inputSchema: { - type: 'object', - description: 'A tool with $defs', - properties: { - common: { type: 'string' }, - count: { $ref: '#/$defs/def2' }, - }, - $defs: { - def2: { type: 'number', minimum: 0 }, - }, - }, - }, - ]); - }); -}); - -describe('removeAnyOf', () => { - it('should return original schema if it has no anyOf', () => { - const schema = { - type: 'object', - properties: { - foo: { type: 'string' }, - bar: { type: 'number' }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(schema); - }); - - it('should remove anyOf field and use the first variant', () => { - const schema = { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - properties: { - variant1: { type: 'string' }, - }, - required: ['variant1'], - }, - { - properties: { - variant2: { type: 'number' }, - }, - required: ['variant2'], - }, - ], - }; - - const expected = { - type: 'object', - properties: { - common: { type: 'string' }, - variant1: { type: 'string' }, - }, - required: ['variant1'], - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); - - it('should recursively remove anyOf fields from nested properties', () => { - const schema = { - type: 'object', - properties: { - foo: { type: 'string' }, - nested: { - type: 'object', - properties: { - bar: { type: 'number' }, - }, - anyOf: [ - { - properties: { - option1: { type: 'boolean' }, - }, - }, - { - properties: { - option2: { type: 'array' }, - }, - }, - ], - }, - }, - }; - - const expected = { - type: 'object', - properties: { - foo: { type: 'string' }, - nested: { - type: 'object', - properties: { - bar: { type: 'number' }, - option1: { type: 'boolean' }, - }, - }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); - - it('should handle arrays', () => { - const schema = { - type: 'object', - properties: { - items: { - type: 'array', - items: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - items: { - type: 'array', - items: { - type: 'string', - }, - }, - }, - }; - - expect(removeAnyOf(schema)).toEqual(expected); - }); -}); - -describe('findUsedDefs', () => { - it('should handle circular references without stack overflow', () => { - const defs = { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - friend: { $ref: '#/$defs/person' }, // Circular reference - }, - }, - }; - - const schema = { - type: 'object', - properties: { - user: { $ref: 
'#/$defs/person' }, - }, - }; - - // This should not throw a stack overflow error - expect(() => { - const result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('person'); - }).not.toThrow(); - }); - - it('should handle indirect circular references without stack overflow', () => { - const defs = { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { $ref: '#/$defs/childNode' }, - }, - }, - childNode: { - type: 'object', - properties: { - value: { type: 'string' }, - parent: { $ref: '#/$defs/node' }, // Indirect circular reference - }, - }, - }; - - const schema = { - type: 'object', - properties: { - root: { $ref: '#/$defs/node' }, - }, - }; - - // This should not throw a stack overflow error - expect(() => { - const result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('node'); - expect(result).toHaveProperty('childNode'); - }).not.toThrow(); - }); - - it('should find all used definitions in non-circular schemas', () => { - const defs = { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - address: { $ref: '#/$defs/address' }, - }, - }, - address: { - type: 'object', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - }, - unused: { - type: 'object', - properties: { - data: { type: 'string' }, - }, - }, - }; - - const schema = { - type: 'object', - properties: { - person: { $ref: '#/$defs/user' }, - }, - }; - - const result = findUsedDefs(schema, defs); - expect(result).toHaveProperty('user'); - expect(result).toHaveProperty('address'); - expect(result).not.toHaveProperty('unused'); - }); -}); - -describe('inlineRefs', () => { - it('should return the original schema if it does not contain $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - name: { type: 'string' }, - age: { type: 'number' }, - }, - }; - - expect(inlineRefs(schema)).toEqual(schema); - }); - - it('should inline simple $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should inline nested $refs', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - order: { $ref: '#/$defs/order' }, - }, - $defs: { - order: { - type: 'object', - properties: { - id: { type: 'string' }, - items: { type: 'array', items: { $ref: '#/$defs/item' } }, - }, - }, - item: { - type: 'object', - properties: { - product: { type: 'string' }, - quantity: { type: 'integer' }, - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - order: { - type: 'object', - properties: { - id: { type: 'string' }, - items: { - type: 'array', - items: { - type: 'object', - properties: { - product: { type: 'string' }, - quantity: { type: 'integer' }, - }, - }, - }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should handle circular references by removing the circular part', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - person: { $ref: '#/$defs/person' }, - }, - $defs: { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - friend: { $ref: 
'#/$defs/person' }, // Circular reference - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - person: { - type: 'object', - properties: { - name: { type: 'string' }, - // friend property is removed to break the circular reference - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should handle indirect circular references', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - node: { $ref: '#/$defs/node' }, - }, - $defs: { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { $ref: '#/$defs/childNode' }, - }, - }, - childNode: { - type: 'object', - properties: { - value: { type: 'string' }, - parent: { $ref: '#/$defs/node' }, // Circular reference through childNode - }, - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - node: { - type: 'object', - properties: { - value: { type: 'string' }, - child: { - type: 'object', - properties: { - value: { type: 'string' }, - // parent property is removed to break the circular reference - }, - }, - }, - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); - - it('should preserve other properties when inlining references', () => { - const schema: JSONSchema = { - type: 'object', - properties: { - address: { $ref: '#/$defs/address', description: 'User address' }, - }, - $defs: { - address: { - type: 'object', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - required: ['street'], - }, - }, - }; - - const expected: JSONSchema = { - type: 'object', - properties: { - address: { - type: 'object', - description: 'User address', - properties: { - street: { type: 'string' }, - city: { type: 'string' }, - }, - required: ['street'], - }, - }, - }; - - expect(inlineRefs(schema)).toEqual(expected); - }); -}); - -describe('removeFormats', () => { - it('should return original schema if formats capability is true', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - email: { type: 'string', description: 'An email field', format: 'email' }, - }, - }; - - expect(removeFormats(schema, true)).toEqual(schema); - }); - - it('should move format to description when formats capability is false', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - email: { type: 'string', description: 'An email field', format: 'email' }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field (format: "date")' }, - email: { type: 'string', description: 'An email field (format: "email")' }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle properties without description', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', format: 'date' }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: '(format: "date")' }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle nested properties', () => { - const schema = { - type: 'object', - properties: { - user: { - type: 'object', - properties: { - created_at: { type: 'string', description: 'Creation date', format: 'date-time' }, - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - user: { - type: 'object', - 
properties: { - created_at: { type: 'string', description: 'Creation date (format: "date-time")' }, - }, - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle arrays of objects', () => { - const schema = { - type: 'object', - properties: { - dates: { - type: 'array', - items: { - type: 'object', - properties: { - start: { type: 'string', description: 'Start date', format: 'date' }, - end: { type: 'string', description: 'End date', format: 'date' }, - }, - }, - }, - }, - }; - - const expected = { - type: 'object', - properties: { - dates: { - type: 'array', - items: { - type: 'object', - properties: { - start: { type: 'string', description: 'Start date (format: "date")' }, - end: { type: 'string', description: 'End date (format: "date")' }, - }, - }, - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); - - it('should handle schemas with $defs', () => { - const schema = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field', format: 'date' }, - }, - $defs: { - timestamp: { - type: 'string', - description: 'A timestamp field', - format: 'date-time', - }, - }, - }; - - const expected = { - type: 'object', - properties: { - date: { type: 'string', description: 'A date field (format: "date")' }, - }, - $defs: { - timestamp: { - type: 'string', - description: 'A timestamp field (format: "date-time")', - }, - }, - }; - - expect(removeFormats(schema, false)).toEqual(expected); - }); -}); - -describe('applyCompatibilityTransformations', () => { - const createTestTool = (name: string, overrides = {}): Tool => ({ - name, - description: 'Test tool', - inputSchema: { - type: 'object', - properties: {}, - }, - ...overrides, - }); - - const createTestEndpoint = (tool: Tool): Endpoint => ({ - tool, - handler: jest.fn(), - metadata: { - resource: 'test', - operation: 'read' as const, - tags: [], - }, - }); - - it('should not modify endpoints when all capabilities are enabled', () => { - const tool = createTestTool('test-tool'); - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed).toEqual(endpoints); - }); - - it('should split tools with top-level unions when topLevelUnions is disabled', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - properties: { - variant1: { type: 'string' }, - }, - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - expect(transformed[0]!.tool.name).toBe('union-tool_first_variant'); - expect(transformed[1]!.tool.name).toBe('union-tool_second_variant'); - }); - - it('should handle variants without titles in removeTopLevelUnions', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - properties: { - variant1: { type: 
'string' }, - }, - }, - { - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - expect(transformed[0]!.tool.name).toBe('union-tool_variant1'); - expect(transformed[1]!.tool.name).toBe('union-tool_variant2'); - }); - - it('should truncate tool names when toolNameLength is set', () => { - const tools = [ - createTestTool('very-long-tool-name-that-exceeds-limit'), - createTestTool('another-long-tool-name-to-truncate'), - createTestTool('short-name'), - ]; - - const endpoints = tools.map(createTestEndpoint); - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 20, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed[0]!.tool.name).toBe('very-long-tool-name-'); - expect(transformed[1]!.tool.name).toBe('another-long-tool-na'); - expect(transformed[2]!.tool.name).toBe('short-name'); - }); - - it('should inline refs when refs capability is disabled', () => { - const tool = createTestTool('ref-tool', { - inputSchema: { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - expect(schema.$defs).toBeUndefined(); - - if (schema.properties) { - expect(schema.properties['user']).toEqual({ - type: 'object', - properties: { - name: { type: 'string' }, - email: { type: 'string' }, - }, - }); - } - }); - - it('should preserve external refs when inlining', () => { - const tool = createTestTool('ref-tool', { - inputSchema: { - type: 'object', - properties: { - internal: { $ref: '#/$defs/internal' }, - external: { $ref: 'https://example.com/schemas/external.json' }, - }, - $defs: { - internal: { - type: 'object', - properties: { - name: { type: 'string' }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: true, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties) { - expect(schema.properties['internal']).toEqual({ - type: 'object', - properties: { - name: { type: 'string' }, - }, - }); - expect(schema.properties['external']).toEqual({ - $ref: 'https://example.com/schemas/external.json', - }); - } - }); - - it('should remove anyOf fields when unions capability is disabled', () => { - const tool = createTestTool('union-tool', { - inputSchema: { - type: 'object', - properties: { - field: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - 
topLevelUnions: true, - validJson: true, - refs: true, - unions: false, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties && schema.properties['field']) { - const field = schema.properties['field']; - expect(field.anyOf).toBeUndefined(); - expect(field.type).toBe('string'); - } - }); - - it('should correctly combine topLevelUnions and toolNameLength transformations', () => { - const tool = createTestTool('very-long-union-tool-name', { - inputSchema: { - type: 'object', - properties: { - common: { type: 'string' }, - }, - anyOf: [ - { - title: 'first variant', - properties: { - variant1: { type: 'string' }, - }, - }, - { - title: 'second variant', - properties: { - variant2: { type: 'number' }, - }, - }, - ], - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: false, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 20, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - expect(transformed.length).toBe(2); - - // Both names should be truncated because they exceed 20 characters - expect(transformed[0]!.tool.name).toBe('very-long-union-too1'); - expect(transformed[1]!.tool.name).toBe('very-long-union-too2'); - }); - - it('should correctly combine refs and unions transformations', () => { - const tool = createTestTool('complex-tool', { - inputSchema: { - type: 'object', - properties: { - user: { $ref: '#/$defs/user' }, - }, - $defs: { - user: { - type: 'object', - properties: { - preference: { - anyOf: [{ type: 'string' }, { type: 'number' }], - }, - }, - }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: false, - unions: false, - formats: true, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - // Refs should be inlined - expect(schema.$defs).toBeUndefined(); - - // Safely access nested properties - if (schema.properties && schema.properties['user']) { - const user = schema.properties['user']; - // User should be inlined - expect(user.type).toBe('object'); - - // AnyOf in the inlined user.preference should be removed - if (user.properties && user.properties['preference']) { - const preference = user.properties['preference']; - expect(preference.anyOf).toBeUndefined(); - expect(preference.type).toBe('string'); - } - } - }); - - it('should handle formats capability being false', () => { - const tool = createTestTool('format-tool', { - inputSchema: { - type: 'object', - properties: { - date: { type: 'string', description: 'A date', format: 'date' }, - }, - }, - }); - - const endpoints = [createTestEndpoint(tool)]; - - const capabilities = { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: false, - toolNameLength: undefined, - }; - - const transformed = applyCompatibilityTransformations(endpoints, capabilities); - const schema = transformed[0]!.tool.inputSchema as JSONSchema; - - if (schema.properties && schema.properties['date']) { - const dateField = schema.properties['date']; - expect(dateField['format']).toBeUndefined(); - expect(dateField['description']).toBe('A date (format: "date")'); - } - }); -}); diff --git 
a/packages/mcp-server/tests/dynamic-tools.test.ts b/packages/mcp-server/tests/dynamic-tools.test.ts deleted file mode 100644 index 08963af..0000000 --- a/packages/mcp-server/tests/dynamic-tools.test.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { dynamicTools } from '../src/dynamic-tools'; -import { Endpoint } from '../src/tools'; - -describe('dynamicTools', () => { - const fakeClient = {} as any; - - const endpoints: Endpoint[] = [ - makeEndpoint('test_read_endpoint', 'test_resource', 'read', ['test']), - makeEndpoint('test_write_endpoint', 'test_resource', 'write', ['test']), - makeEndpoint('user_endpoint', 'user', 'read', ['user', 'admin']), - makeEndpoint('admin_endpoint', 'admin', 'write', ['admin']), - ]; - - const tools = dynamicTools(endpoints); - - const toolsMap = { - list_api_endpoints: toolOrError('list_api_endpoints'), - get_api_endpoint_schema: toolOrError('get_api_endpoint_schema'), - invoke_api_endpoint: toolOrError('invoke_api_endpoint'), - }; - - describe('list_api_endpoints', () => { - it('should return all endpoints when no search query is provided', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, {}); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(endpoints.length); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('test_read_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('test_write_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('user_endpoint'); - expect(result.tools.map((t: { name: string }) => t.name)).toContain('admin_endpoint'); - }); - - it('should filter endpoints by name', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'user' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - }); - - it('should filter endpoints by resource', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'admin' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.some((t: { resource: string }) => t.resource === 'admin')).toBeTruthy(); - }); - - it('should filter endpoints by tag', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'admin' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.some((t: { tags: string[] }) => t.tags.includes('admin'))).toBeTruthy(); - }); - - it('should be case insensitive in search', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { search_query: 'ADMIN' }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools.length).toBe(2); - result.tools.forEach((tool: { name: string; resource: string; tags: string[] }) => { - expect( - tool.name.toLowerCase().includes('admin') || - tool.resource.toLowerCase().includes('admin') || - tool.tags.some((tag: string) => tag.toLowerCase().includes('admin')), - ).toBeTruthy(); - }); - }); - - it('should filter endpoints by description', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { - search_query: 'Test endpoint for user_endpoint', - }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - expect(result.tools[0].description).toBe('Test endpoint for 
user_endpoint'); - }); - - it('should filter endpoints by partial description match', async () => { - const content = await toolsMap.list_api_endpoints.handler(fakeClient, { - search_query: 'endpoint for user', - }); - const result = JSON.parse(content.content[0].text); - - expect(result.tools).toHaveLength(1); - expect(result.tools[0].name).toBe('user_endpoint'); - }); - }); - - describe('get_api_endpoint_schema', () => { - it('should return schema for existing endpoint', async () => { - const content = await toolsMap.get_api_endpoint_schema.handler(fakeClient, { - endpoint: 'test_read_endpoint', - }); - const result = JSON.parse(content.content[0].text); - - expect(result).toEqual(endpoints[0]?.tool); - }); - - it('should throw error for non-existent endpoint', async () => { - await expect( - toolsMap.get_api_endpoint_schema.handler(fakeClient, { endpoint: 'non_existent_endpoint' }), - ).rejects.toThrow('Endpoint non_existent_endpoint not found'); - }); - - it('should throw error when no endpoint provided', async () => { - await expect(toolsMap.get_api_endpoint_schema.handler(fakeClient, undefined)).rejects.toThrow( - 'No endpoint provided', - ); - }); - }); - - describe('invoke_api_endpoint', () => { - it('should successfully invoke endpoint with valid arguments', async () => { - const mockHandler = endpoints[0]?.handler as jest.Mock; - mockHandler.mockClear(); - - await toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'test_read_endpoint', - args: { testParam: 'test value' }, - }); - - expect(mockHandler).toHaveBeenCalledWith(fakeClient, { testParam: 'test value' }); - }); - - it('should throw error for non-existent endpoint', async () => { - await expect( - toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'non_existent_endpoint', - args: { testParam: 'test value' }, - }), - ).rejects.toThrow(/Endpoint non_existent_endpoint not found/); - }); - - it('should throw error when no arguments provided', async () => { - await expect(toolsMap.invoke_api_endpoint.handler(fakeClient, undefined)).rejects.toThrow( - 'No endpoint provided', - ); - }); - - it('should throw error for invalid argument schema', async () => { - await expect( - toolsMap.invoke_api_endpoint.handler(fakeClient, { - endpoint_name: 'test_read_endpoint', - args: { wrongParam: 'test value' }, // Missing required testParam - }), - ).rejects.toThrow(/Invalid arguments for endpoint/); - }); - }); - - function toolOrError(name: string) { - const tool = tools.find((tool) => tool.tool.name === name); - if (!tool) throw new Error(`Tool ${name} not found`); - return tool; - } -}); - -function makeEndpoint( - name: string, - resource: string, - operation: 'read' | 'write', - tags: string[] = [], -): Endpoint { - return { - metadata: { - resource, - operation, - tags, - }, - tool: { - name, - description: `Test endpoint for ${name}`, - inputSchema: { - type: 'object', - properties: { - testParam: { type: 'string' }, - }, - required: ['testParam'], - }, - }, - handler: jest.fn().mockResolvedValue({ success: true }), - }; -} diff --git a/packages/mcp-server/tests/options.test.ts b/packages/mcp-server/tests/options.test.ts index a8a5b81..532666a 100644 --- a/packages/mcp-server/tests/options.test.ts +++ b/packages/mcp-server/tests/options.test.ts @@ -1,6 +1,4 @@ import { parseCLIOptions, parseQueryOptions } from '../src/options'; -import { Filter } from '../src/tools'; -import { parseEmbeddedJSON } from '../src/compat'; // Mock process.argv const mockArgv = (args: string[]) => { @@ -12,337 +10,35 @@ 
const mockArgv = (args: string[]) => { }; describe('parseCLIOptions', () => { - it('should parse basic filter options', () => { - const cleanup = mockArgv([ - '--tool=test-tool', - '--resource=test-resource', - '--operation=read', - '--tag=test-tag', - ]); + it('default parsing should be stdio', () => { + const cleanup = mockArgv([]); const result = parseCLIOptions(); - expect(result.filters).toEqual([ - { type: 'tag', op: 'include', value: 'test-tag' }, - { type: 'resource', op: 'include', value: 'test-resource' }, - { type: 'tool', op: 'include', value: 'test-tool' }, - { type: 'operation', op: 'include', value: 'read' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({}); - - expect(result.list).toBe(false); - - cleanup(); - }); - - it('should parse exclusion filters', () => { - const cleanup = mockArgv([ - '--no-tool=exclude-tool', - '--no-resource=exclude-resource', - '--no-operation=write', - '--no-tag=exclude-tag', - ]); - - const result = parseCLIOptions(); - - expect(result.filters).toEqual([ - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'resource', op: 'exclude', value: 'exclude-resource' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - { type: 'operation', op: 'exclude', value: 'write' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({}); - - cleanup(); - }); - - it('should parse client presets', () => { - const cleanup = mockArgv(['--client=openai-agents']); - - const result = parseCLIOptions(); - - expect(result.client).toEqual('openai-agents'); - - cleanup(); - }); - - it('should parse individual capabilities', () => { - const cleanup = mockArgv([ - '--capability=top-level-unions', - '--capability=valid-json', - '--capability=refs', - '--capability=unions', - '--capability=tool-name-length=40', - ]); - - const result = parseCLIOptions(); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - toolNameLength: 40, - }); - - cleanup(); - }); - - it('should handle list option', () => { - const cleanup = mockArgv(['--list']); - - const result = parseCLIOptions(); - - expect(result.list).toBe(true); - - cleanup(); - }); - - it('should handle multiple filters of the same type', () => { - const cleanup = mockArgv(['--tool=tool1', '--tool=tool2', '--resource=res1', '--resource=res2']); - - const result = parseCLIOptions(); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ] as Filter[]); + expect(result.transport).toBe('stdio'); cleanup(); }); - it('should handle comma-separated values in array options', () => { - const cleanup = mockArgv([ - '--tool=tool1,tool2', - '--resource=res1,res2', - '--capability=top-level-unions,valid-json,unions', - ]); + it('using http transport with a port', () => { + const cleanup = mockArgv(['--transport=http', '--port=2222']); const result = parseCLIOptions(); - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ] as Filter[]); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - unions: true, - }); - - cleanup(); - }); - - it('should handle invalid tool-name-length format', () => { - const cleanup = 
mockArgv(['--capability=tool-name-length=invalid']); - - // Mock console.error to prevent output during test - const originalError = console.error; - console.error = jest.fn(); - - expect(() => parseCLIOptions()).toThrow(); - - console.error = originalError; - cleanup(); - }); - - it('should handle unknown capability', () => { - const cleanup = mockArgv(['--capability=unknown-capability']); - - // Mock console.error to prevent output during test - const originalError = console.error; - console.error = jest.fn(); - - expect(() => parseCLIOptions()).toThrow(); - - console.error = originalError; + expect(result.transport).toBe('http'); + expect(result.port).toBe('2222'); cleanup(); }); }); describe('parseQueryOptions', () => { - const defaultOptions = { - client: undefined, - includeDynamicTools: undefined, - includeAllTools: undefined, - filters: [], - capabilities: { - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }, - }; - - it('should parse basic filter options from query string', () => { - const query = 'tool=test-tool&resource=test-resource&operation=read&tag=test-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'test-resource' }, - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'test-tag' }, - { type: 'tool', op: 'include', value: 'test-tool' }, - ]); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: undefined, - }); - }); - - it('should parse exclusion filters from query string', () => { - const query = 'no_tool=exclude-tool&no_resource=exclude-resource&no_operation=write&no_tag=exclude-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'exclude', value: 'exclude-resource' }, - { type: 'operation', op: 'exclude', value: 'write' }, - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - ]); - }); - - it('should parse client option from query string', () => { - const query = 'client=openai-agents'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.client).toBe('openai-agents'); - }); - - it('should parse client capabilities from query string', () => { - const query = 'capability=top-level-unions&capability=valid-json&capability=tool-name-length%3D40'; - const result = parseQueryOptions(defaultOptions, query); + const defaultOptions = {}; - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: true, - refs: true, - unions: true, - formats: true, - toolNameLength: 40, - }); - }); - - it('should parse no-capability options from query string', () => { - const query = 'no_capability=top-level-unions&no_capability=refs&no_capability=formats'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.capabilities).toEqual({ - topLevelUnions: false, - validJson: true, - refs: false, - unions: true, - formats: false, - toolNameLength: undefined, - }); - }); - - it('should parse tools options from query string', () => { - const query = 'tools=dynamic&tools=all'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.includeDynamicTools).toBe(true); - expect(result.includeAllTools).toBe(true); - }); - - it('should parse no-tools options from query string', () => { - 
const query = 'tools=dynamic&tools=all&no_tools=dynamic'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.includeDynamicTools).toBe(false); - expect(result.includeAllTools).toBe(true); - }); - - it('should handle array values in query string', () => { - const query = 'tool[]=tool1&tool[]=tool2&resource[]=res1&resource[]=res2'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 'resource', op: 'include', value: 'res1' }, - { type: 'resource', op: 'include', value: 'res2' }, - { type: 'tool', op: 'include', value: 'tool1' }, - { type: 'tool', op: 'include', value: 'tool2' }, - ]); - }); - - it('should merge with default options', () => { - const defaultWithFilters = { - ...defaultOptions, - filters: [{ type: 'tag' as const, op: 'include' as const, value: 'existing-tag' }], - client: 'cursor' as const, - includeDynamicTools: true, - }; - - const query = 'tool=new-tool&resource=new-resource'; - const result = parseQueryOptions(defaultWithFilters, query); - - expect(result.filters).toEqual([ - { type: 'tag', op: 'include', value: 'existing-tag' }, - { type: 'resource', op: 'include', value: 'new-resource' }, - { type: 'tool', op: 'include', value: 'new-tool' }, - ]); - - expect(result.client).toBe('cursor'); - expect(result.includeDynamicTools).toBe(true); - }); - - it('should override client from default options', () => { - const defaultWithClient = { - ...defaultOptions, - client: 'cursor' as const, - }; - - const query = 'client=openai-agents'; - const result = parseQueryOptions(defaultWithClient, query); - - expect(result.client).toBe('openai-agents'); - }); - - it('should merge capabilities with default options', () => { - const defaultWithCapabilities = { - ...defaultOptions, - capabilities: { - topLevelUnions: false, - validJson: false, - refs: true, - unions: true, - formats: true, - toolNameLength: 30, - }, - }; - - const query = 'capability=top-level-unions&no_capability=refs'; - const result = parseQueryOptions(defaultWithCapabilities, query); - - expect(result.capabilities).toEqual({ - topLevelUnions: true, - validJson: false, - refs: false, - unions: true, - formats: true, - toolNameLength: 30, - }); - }); - - it('should handle empty query string', () => { + it('default parsing should be empty', () => { const query = ''; const result = parseQueryOptions(defaultOptions, query); - expect(result).toEqual(defaultOptions); + expect(result).toEqual({}); }); it('should handle invalid query string gracefully', () => { @@ -351,168 +47,4 @@ describe('parseQueryOptions', () => { // Should throw due to Zod validation for invalid operation expect(() => parseQueryOptions(defaultOptions, query)).toThrow(); }); - - it('should preserve default undefined values when not specified', () => { - const defaultWithUndefined = { - ...defaultOptions, - client: undefined, - includeDynamicTools: undefined, - includeAllTools: undefined, - }; - - const query = 'tool=test-tool'; - const result = parseQueryOptions(defaultWithUndefined, query); - - expect(result.client).toBeUndefined(); - expect(result.includeDynamicTools).toBeFalsy(); - expect(result.includeAllTools).toBeFalsy(); - }); - - it('should handle complex query with mixed include and exclude filters', () => { - const query = - 'tool=include-tool&no_tool=exclude-tool&resource=include-res&no_resource=exclude-res&operation=read&tag=include-tag&no_tag=exclude-tag'; - const result = parseQueryOptions(defaultOptions, query); - - expect(result.filters).toEqual([ - { type: 
'resource', op: 'include', value: 'include-res' }, - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'include-tag' }, - { type: 'tool', op: 'include', value: 'include-tool' }, - { type: 'resource', op: 'exclude', value: 'exclude-res' }, - { type: 'tag', op: 'exclude', value: 'exclude-tag' }, - { type: 'tool', op: 'exclude', value: 'exclude-tool' }, - ]); - }); -}); - -describe('parseEmbeddedJSON', () => { - it('should not change non-string values', () => { - const args = { - numberProp: 42, - booleanProp: true, - objectProp: { nested: 'value' }, - arrayProp: [1, 2, 3], - nullProp: null, - undefinedProp: undefined, - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['numberProp']).toBe(42); - expect(result['booleanProp']).toBe(true); - expect(result['objectProp']).toEqual({ nested: 'value' }); - expect(result['arrayProp']).toEqual([1, 2, 3]); - expect(result['nullProp']).toBe(null); - expect(result['undefinedProp']).toBe(undefined); - }); - - it('should parse valid JSON objects in string properties', () => { - const args = { - jsonObjectString: '{"key": "value", "number": 123}', - regularString: 'not json', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).not.toBe(args); // Should return new object since changes were made - expect(result['jsonObjectString']).toEqual({ key: 'value', number: 123 }); - expect(result['regularString']).toBe('not json'); - }); - - it('should leave invalid JSON in string properties unchanged', () => { - const args = { - invalidJson1: '{"key": value}', // Missing quotes around value - invalidJson2: '{key: "value"}', // Missing quotes around key - invalidJson3: '{"key": "value",}', // Trailing comma - invalidJson4: 'just a regular string', - emptyString: '', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['invalidJson1']).toBe('{"key": value}'); - expect(result['invalidJson2']).toBe('{key: "value"}'); - expect(result['invalidJson3']).toBe('{"key": "value",}'); - expect(result['invalidJson4']).toBe('just a regular string'); - expect(result['emptyString']).toBe(''); - }); - - it('should not parse JSON primitives in string properties', () => { - const args = { - numberString: '123', - floatString: '45.67', - negativeNumberString: '-89', - booleanTrueString: 'true', - booleanFalseString: 'false', - nullString: 'null', - jsonArrayString: '[1, 2, 3, "test"]', - regularString: 'not json', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - expect(result['numberString']).toBe('123'); - expect(result['floatString']).toBe('45.67'); - expect(result['negativeNumberString']).toBe('-89'); - expect(result['booleanTrueString']).toBe('true'); - expect(result['booleanFalseString']).toBe('false'); - expect(result['nullString']).toBe('null'); - expect(result['jsonArrayString']).toBe('[1, 2, 3, "test"]'); - expect(result['regularString']).toBe('not json'); - }); - - it('should handle mixed valid objects and other JSON types', () => { - const args = { - validObject: '{"success": true}', - invalidObject: '{"missing": quote}', - validNumber: '42', - validArray: '[1, 2, 3]', - keepAsString: 'hello world', - nonString: 123, - }; - 
const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).not.toBe(args); // Should return new object since some changes were made - expect(result['validObject']).toEqual({ success: true }); - expect(result['invalidObject']).toBe('{"missing": quote}'); - expect(result['validNumber']).toBe('42'); // Not parsed, remains string - expect(result['validArray']).toBe('[1, 2, 3]'); // Not parsed, remains string - expect(result['keepAsString']).toBe('hello world'); - expect(result['nonString']).toBe(123); - }); - - it('should return original object when no strings are present', () => { - const args = { - number: 42, - boolean: true, - object: { key: 'value' }, - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - }); - - it('should return original object when all strings are invalid JSON', () => { - const args = { - string1: 'hello', - string2: 'world', - string3: 'not json at all', - }; - const schema = {}; - - const result = parseEmbeddedJSON(args, schema); - - expect(result).toBe(args); // Should return original object since no changes made - }); }); diff --git a/packages/mcp-server/tests/tools.test.ts b/packages/mcp-server/tests/tools.test.ts deleted file mode 100644 index cfff24a..0000000 --- a/packages/mcp-server/tests/tools.test.ts +++ /dev/null @@ -1,225 +0,0 @@ -import { Endpoint, Filter, Metadata, query } from '../src/tools'; - -describe('Endpoint filtering', () => { - const endpoints: Endpoint[] = [ - endpoint({ - resource: 'user', - operation: 'read', - tags: ['admin'], - toolName: 'retrieve_user', - }), - endpoint({ - resource: 'user.profile', - operation: 'write', - tags: [], - toolName: 'create_user_profile', - }), - endpoint({ - resource: 'user.profile', - operation: 'read', - tags: [], - toolName: 'get_user_profile', - }), - endpoint({ - resource: 'user.roles.permissions', - operation: 'write', - tags: ['admin', 'security'], - toolName: 'update_user_role_permissions', - }), - endpoint({ - resource: 'documents.metadata.tags', - operation: 'write', - tags: ['taxonomy', 'metadata'], - toolName: 'create_document_metadata_tags', - }), - endpoint({ - resource: 'organization.settings', - operation: 'read', - tags: ['admin', 'configuration'], - toolName: 'get_organization_settings', - }), - ]; - - const tests: { name: string; filters: Filter[]; expected: string[] }[] = [ - { - name: 'match none', - filters: [], - expected: [], - }, - - // Resource tests - { - name: 'simple resource', - filters: [{ type: 'resource', op: 'include', value: 'user' }], - expected: ['retrieve_user'], - }, - { - name: 'exclude resource', - filters: [{ type: 'resource', op: 'exclude', value: 'user' }], - expected: [ - 'create_user_profile', - 'get_user_profile', - 'update_user_role_permissions', - 'create_document_metadata_tags', - 'get_organization_settings', - ], - }, - { - name: 'resource and subresources', - filters: [{ type: 'resource', op: 'include', value: 'user*' }], - expected: ['retrieve_user', 'create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - { - name: 'just subresources', - filters: [{ type: 'resource', op: 'include', value: 'user.*' }], - expected: ['create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - { - name: 'specific subresource', - filters: [{ type: 'resource', op: 'include', value: 'user.roles.permissions' }], - expected: ['update_user_role_permissions'], - }, - { - name: 'deep wildcard 
match', - filters: [{ type: 'resource', op: 'include', value: '*.*.tags' }], - expected: ['create_document_metadata_tags'], - }, - - // Operation tests - { - name: 'read operation', - filters: [{ type: 'operation', op: 'include', value: 'read' }], - expected: ['retrieve_user', 'get_user_profile', 'get_organization_settings'], - }, - { - name: 'write operation', - filters: [{ type: 'operation', op: 'include', value: 'write' }], - expected: ['create_user_profile', 'update_user_role_permissions', 'create_document_metadata_tags'], - }, - { - name: 'resource and operation combined', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'operation', op: 'exclude', value: 'write' }, - ], - expected: ['get_user_profile'], - }, - - // Tag tests - { - name: 'admin tag', - filters: [{ type: 'tag', op: 'include', value: 'admin' }], - expected: ['retrieve_user', 'update_user_role_permissions', 'get_organization_settings'], - }, - { - name: 'taxonomy tag', - filters: [{ type: 'tag', op: 'include', value: 'taxonomy' }], - expected: ['create_document_metadata_tags'], - }, - { - name: 'multiple tags (OR logic)', - filters: [ - { type: 'tag', op: 'include', value: 'admin' }, - { type: 'tag', op: 'include', value: 'security' }, - ], - expected: ['retrieve_user', 'update_user_role_permissions', 'get_organization_settings'], - }, - { - name: 'excluding a tag', - filters: [ - { type: 'tag', op: 'include', value: 'admin' }, - { type: 'tag', op: 'exclude', value: 'security' }, - ], - expected: ['retrieve_user', 'get_organization_settings'], - }, - - // Tool name tests - { - name: 'tool name match', - filters: [{ type: 'tool', op: 'include', value: 'get_organization_settings' }], - expected: ['get_organization_settings'], - }, - { - name: 'two tools match', - filters: [ - { type: 'tool', op: 'include', value: 'get_organization_settings' }, - { type: 'tool', op: 'include', value: 'create_user_profile' }, - ], - expected: ['create_user_profile', 'get_organization_settings'], - }, - { - name: 'excluding tool by name', - filters: [ - { type: 'resource', op: 'include', value: 'user*' }, - { type: 'tool', op: 'exclude', value: 'retrieve_user' }, - ], - expected: ['create_user_profile', 'get_user_profile', 'update_user_role_permissions'], - }, - - // Complex combinations - { - name: 'complex filter: read operations with admin tag', - filters: [ - { type: 'operation', op: 'include', value: 'read' }, - { type: 'tag', op: 'include', value: 'admin' }, - ], - expected: [ - 'retrieve_user', - 'get_user_profile', - 'update_user_role_permissions', - 'get_organization_settings', - ], - }, - { - name: 'complex filter: user resources with no tags', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'tag', op: 'exclude', value: 'admin' }, - ], - expected: ['create_user_profile', 'get_user_profile'], - }, - { - name: 'complex filter: user resources and tags', - filters: [ - { type: 'resource', op: 'include', value: 'user.profile' }, - { type: 'tag', op: 'include', value: 'admin' }, - ], - expected: [ - 'retrieve_user', - 'create_user_profile', - 'get_user_profile', - 'update_user_role_permissions', - 'get_organization_settings', - ], - }, - ]; - - tests.forEach((test) => { - it(`filters by ${test.name}`, () => { - const filtered = query(test.filters, endpoints); - expect(filtered.map((e) => e.tool.name)).toEqual(test.expected); - }); - }); -}); - -function endpoint({ - resource, - operation, - tags, - toolName, -}: { - resource: string; - operation: 
Metadata['operation']; - tags: string[]; - toolName: string; -}): Endpoint { - return { - metadata: { - resource, - operation, - tags, - }, - tool: { name: toolName, inputSchema: { type: 'object', properties: {} } }, - handler: jest.fn(), - }; -} diff --git a/packages/mcp-server/tsconfig.build.json b/packages/mcp-server/tsconfig.build.json index 702ec64..83df2c1 100644 --- a/packages/mcp-server/tsconfig.build.json +++ b/packages/mcp-server/tsconfig.build.json @@ -5,8 +5,8 @@ "compilerOptions": { "rootDir": "./dist/src", "paths": { - "cas-parser-node-mcp/*": ["dist/src/*"], - "cas-parser-node-mcp": ["dist/src/index.ts"] + "cas-parser-node-mcp/*": ["./dist/src/*"], + "cas-parser-node-mcp": ["./dist/src/index.ts"] }, "noEmit": false, "declaration": true, diff --git a/packages/mcp-server/tsconfig.json b/packages/mcp-server/tsconfig.json index e24e096..5c4c76e 100644 --- a/packages/mcp-server/tsconfig.json +++ b/packages/mcp-server/tsconfig.json @@ -7,10 +7,9 @@ "module": "commonjs", "moduleResolution": "node", "esModuleInterop": true, - "baseUrl": "./", "paths": { - "cas-parser-node-mcp/*": ["src/*"], - "cas-parser-node-mcp": ["src/index.ts"] + "cas-parser-node-mcp/*": ["./src/*"], + "cas-parser-node-mcp": ["./src/index.ts"] }, "noEmit": true, diff --git a/packages/mcp-server/yarn.lock b/packages/mcp-server/yarn.lock index ad81983..38be884 100644 --- a/packages/mcp-server/yarn.lock +++ b/packages/mcp-server/yarn.lock @@ -10,10 +10,10 @@ "@jridgewell/gen-mapping" "^0.3.5" "@jridgewell/trace-mapping" "^0.3.24" -"@anthropic-ai/dxt@^0.2.6": - version "0.2.6" - resolved "https://registry.yarnpkg.com/@anthropic-ai/dxt/-/dxt-0.2.6.tgz#636197c3d083c9136ac3b5a11d2ba82477fdc2c6" - integrity sha512-5VSqKRpkytTYh5UJz9jOaI8zLXNCe4Gc+ArKGFV6IeWnEPP0Qnd0k+V3pO8cYzp92Puf/+Cgo0xc4haE0azTXg== +"@anthropic-ai/mcpb@1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@anthropic-ai/mcpb/-/mcpb-1.1.0.tgz#1af18de2ab9499d321d6310d0be095f5fef5161b" + integrity sha512-nOnhG1eNpGKSIDv6lt3xsI3w2p2k0D/rPTMGXXugLovCEaJ7svh8XMfCe145vs8qo384t8wKbokWAvx9PkQMDA== dependencies: "@inquirer/prompts" "^6.0.1" commander "^13.1.0" @@ -406,9 +406,9 @@ yoctocolors-cjs "^2.1.2" "@inquirer/figures@^1.0.6": - version "1.0.13" - resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.13.tgz#ad0afd62baab1c23175115a9b62f511b6a751e45" - integrity sha512-lGPVU3yO9ZNqA7vTYz26jny41lE7yoQansmqdMLBEfqaGsmdg7V3W9mK9Pvb5IL4EVZ9GnSDGMO/cJXud5dMaw== + version "1.0.15" + resolved "https://registry.yarnpkg.com/@inquirer/figures/-/figures-1.0.15.tgz#dbb49ed80df11df74268023b496ac5d9acd22b3a" + integrity sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g== "@inquirer/input@^3.0.1": version "3.0.1" @@ -736,12 +736,13 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@modelcontextprotocol/sdk@^1.11.5": - version "1.17.3" - resolved "https://registry.yarnpkg.com/@modelcontextprotocol/sdk/-/sdk-1.17.3.tgz#cf92354220f0183d28179e96a9bf3a8f6d3211ae" - integrity sha512-JPwUKWSsbzx+DLFznf/QZ32Qa+ptfbUlHhRLrBQBAFu9iI1iYvizM4p+zhhRDceSsPutXp4z+R/HPVphlIiclg== +"@modelcontextprotocol/sdk@^1.24.0": + version "1.24.3" + resolved "https://registry.yarnpkg.com/@modelcontextprotocol/sdk/-/sdk-1.24.3.tgz#81a3fcc919cb4ce8630e2bcecf59759176eb331a" + integrity sha512-YgSHW29fuzKKAHTGe9zjNoo+yF8KaQPzDC2W9Pv41E7/57IfY+AMGJ/aDFlgTLcVVELoggKE4syABCE75u3NCw== dependencies: - ajv "^6.12.6" + ajv "^8.17.1" + ajv-formats "^3.0.1" content-type "^1.0.5" cors "^2.8.5" cross-spawn 
"^7.0.5" @@ -749,10 +750,11 @@ eventsource-parser "^3.0.0" express "^5.0.1" express-rate-limit "^7.5.0" + jose "^6.1.1" pkce-challenge "^5.0.0" raw-body "^3.0.0" - zod "^3.23.8" - zod-to-json-schema "^3.24.1" + zod "^3.25 || ^4.0" + zod-to-json-schema "^3.25.0" "@nodelib/fs.scandir@2.1.5": version "2.1.5" @@ -962,9 +964,9 @@ undici-types "~6.21.0" "@types/node@^22.5.5": - version "22.18.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-22.18.0.tgz#9e4709be4f104e3568f7dd1c71e2949bf147a47b" - integrity sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ== + version "22.19.2" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.19.2.tgz#2f0956fba46518aaf7578c84e37bddab55f85d01" + integrity sha512-LPM2G3Syo1GLzXLGJAKdqoU35XvrWzGJ21/7sgZTUpbkBaOasTj8tjwn6w+hCkqaa1TfJ/w67rJSwYItlJ2mYw== dependencies: undici-types "~6.21.0" @@ -1141,7 +1143,14 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv@^6.12.4, ajv@^6.12.6: +ajv-formats@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/ajv-formats/-/ajv-formats-3.0.1.tgz#3d5dc762bca17679c3c2ea7e90ad6b7532309578" + integrity sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ== + dependencies: + ajv "^8.0.0" + +ajv@^6.12.4: version "6.12.6" resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -1151,6 +1160,16 @@ ajv@^6.12.4, ajv@^6.12.6: json-schema-traverse "^0.4.1" uri-js "^4.2.2" +ajv@^8.0.0, ajv@^8.17.1: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + ansi-escapes@^4.2.1, ansi-escapes@^4.3.2: version "4.3.2" resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" @@ -1911,6 +1930,11 @@ fast-levenshtein@^2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== +fast-uri@^3.0.1: + version "3.1.0" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.1.0.tgz#66eecff6c764c0df9b762e62ca7edcfb53b4edfa" + integrity sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA== + fastq@^1.6.0: version "1.19.1" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.19.1.tgz#d50eaba803c8846a883c16492821ebcd2cda55f5" @@ -2035,6 +2059,11 @@ function-bind@^1.1.2: resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== +fuse.js@^7.1.0: + version "7.1.0" + resolved "https://registry.yarnpkg.com/fuse.js/-/fuse.js-7.1.0.tgz#306228b4befeee11e05b027087c2744158527d09" + integrity sha512-trLf4SzuuUxfusZADLINj+dE8clK1frKdmqiJNb1Es75fmI5oY6X2mxLVUciLLjxqw/xr72Dhy+lER6dGd02FQ== + galactus@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/galactus/-/galactus-1.0.0.tgz#c2615182afa0c6d0859b92e56ae36d052827db7e" @@ -2732,9 +2761,14 @@ jest@^29.4.0: import-local "^3.0.2" jest-cli "^29.7.0" -"jq-web@https://github.com/stainless-api/jq-web/releases/download/v0.8.6/jq-web.tar.gz": - version "0.8.6" - resolved "https://github.com/stainless-api/jq-web/releases/download/v0.8.6/jq-web.tar.gz#14d0e126987736e82e964d675c3838b5944faa6f" +jose@^6.1.1: + version "6.1.3" + resolved "https://registry.yarnpkg.com/jose/-/jose-6.1.3.tgz#8453d7be88af7bb7d64a0481d6a35a0145ba3ea5" + integrity sha512-0TpaTfihd4QMNwrz/ob2Bp7X04yuxJkjRGi4aKmOqwhov54i6u79oCv7T+C7lo70MKH6BesI3vscD1yb/yzKXQ== + +"jq-web@https://github.com/stainless-api/jq-web/releases/download/v0.8.8/jq-web.tar.gz": + version "0.8.8" + resolved "https://github.com/stainless-api/jq-web/releases/download/v0.8.8/jq-web.tar.gz#7849ef64bdfc28f70cbfc9888f886860e96da10d" js-tokens@^4.0.0: version "4.0.0" @@ -2776,6 +2810,11 @@ json-schema-traverse@^0.4.1: resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== +json-schema-traverse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" + integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== + json-stable-stringify-without-jsonify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" @@ -2984,9 +3023,9 @@ negotiator@^1.0.0: integrity sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg== node-forge@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3" - integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA== + version "1.3.3" + resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.3.tgz#0ad80f6333b3a0045e827ac20b7f735f93716751" + integrity sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg== node-int64@^0.4.0: version "0.4.0" @@ -3287,6 +3326,11 @@ require-directory@^2.1.1: resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q== +require-from-string@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909" + integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw== + resolve-cwd@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" @@ -3895,20 +3939,25 @@ yoctocolors-cjs@^2.1.2: resolved "https://registry.yarnpkg.com/yoctocolors-cjs/-/yoctocolors-cjs-2.1.3.tgz#7e4964ea8ec422b7a40ac917d3a344cfd2304baa" integrity sha512-U/PBtDf35ff0D8X8D0jfdzHYEPFxAI7jJlxZXwCSez5M3190m+QobIfh+sWDWSHMCWWJN2AWamkegn6vr6YBTw== -zod-to-json-schema@^3.24.1, zod-to-json-schema@^3.24.5: +zod-to-json-schema@^3.24.5: version 
"3.24.5" resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz#d1095440b147fb7c2093812a53c54df8d5df50a3" integrity sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g== +zod-to-json-schema@^3.25.0: + version "3.25.0" + resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.25.0.tgz#df504c957c4fb0feff467c74d03e6aab0b013e1c" + integrity sha512-HvWtU2UG41LALjajJrML6uQejQhNJx+JBO9IflpSja4R03iNWfKXrj6W2h7ljuLyc1nKS+9yDyL/9tD1U/yBnQ== + zod-validation-error@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/zod-validation-error/-/zod-validation-error-4.0.1.tgz#a105723eb40299578a6a38cb86647068f6d005b1" integrity sha512-F3rdaCOHs5ViJ5YTz5zzRtfkQdMdIeKudJAoxy7yB/2ZMEHw73lmCAcQw11r7++20MyGl4WV59EVh7A9rNAyog== -zod@^3.23.8: - version "3.24.4" - resolved "https://registry.yarnpkg.com/zod/-/zod-3.24.4.tgz#e2e2cca5faaa012d76e527d0d36622e0a90c315f" - integrity sha512-OdqJE9UDRPwWsrHjLN2F8bPxvwJBK22EHLWtanu0LSYr5YqzsaaW3RMgmjwr8Rypg5k+meEJdSPXJZXE/yqOMg== +"zod@^3.25 || ^4.0": + version "4.1.13" + resolved "https://registry.yarnpkg.com/zod/-/zod-4.1.13.tgz#93699a8afe937ba96badbb0ce8be6033c0a4b6b1" + integrity sha512-AvvthqfqrAhNH9dnfmrfKzX5upOdjUVJYFqNSlkmGf64gRaTzlPwz99IHYnVs28qYAybvAlBV+H7pn0saFY4Ig== zod@^3.25.20, zod@^3.25.67: version "3.25.76" diff --git a/scripts/bootstrap b/scripts/bootstrap index 062a034..a8b69ff 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -4,10 +4,18 @@ set -e cd "$(dirname "$0")/.." -if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ]; then +if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then brew bundle check >/dev/null 2>&1 || { - echo "==> Installing Homebrew dependencies…" - brew bundle + echo -n "==> Install Homebrew dependencies? (y/N): " + read -r response + case "$response" in + [yY][eE][sS]|[yY]) + brew bundle + ;; + *) + ;; + esac + echo } fi diff --git a/scripts/fast-format b/scripts/fast-format new file mode 100755 index 0000000..53721ac --- /dev/null +++ b/scripts/fast-format @@ -0,0 +1,40 @@ +#!/usr/bin/env bash + +set -euo pipefail + +echo "Script started with $# arguments" +echo "Arguments: $*" +echo "Script location: $(dirname "$0")" + +cd "$(dirname "$0")/.." +echo "Changed to directory: $(pwd)" + +if [ $# -eq 0 ]; then + echo "Usage: $0 [additional-formatter-args...]" + echo "The file should contain one file path per line" + exit 1 +fi + +FILE_LIST="$1" + +echo "Looking for file: $FILE_LIST" + +if [ ! -f "$FILE_LIST" ]; then + echo "Error: File '$FILE_LIST' not found" + exit 1 +fi + +echo "==> Running eslint --fix" +ESLINT_FILES="$(grep '\.ts$' "$FILE_LIST" || true)" +if ! [ -z "$ESLINT_FILES" ]; then + echo "$ESLINT_FILES" | xargs ./node_modules/.bin/eslint --cache --fix +fi + +echo "==> Running prettier --write" +# format things eslint didn't +PRETTIER_FILES="$(grep '\.\(js\|json\)$' "$FILE_LIST" || true)" +if ! 
[ -z "$PRETTIER_FILES" ]; then + echo "$PRETTIER_FILES" | xargs ./node_modules/.bin/prettier \ + --write --cache --cache-strategy metadata --no-error-on-unmatched-pattern \ + '!**/dist' '!**/*.ts' '!**/*.mts' '!**/*.cts' '!**/*.js' '!**/*.mjs' '!**/*.cjs' +fi diff --git a/scripts/utils/upload-artifact.sh b/scripts/utils/upload-artifact.sh index 89be9e7..1c37c70 100755 --- a/scripts/utils/upload-artifact.sh +++ b/scripts/utils/upload-artifact.sh @@ -12,9 +12,11 @@ if [[ "$SIGNED_URL" == "null" ]]; then exit 1 fi -UPLOAD_RESPONSE=$(tar "${BASE_PATH:+-C$BASE_PATH}" -cz "${ARTIFACT_PATH:-dist}" | curl -v -X PUT \ +TARBALL=$(cd dist && npm pack --silent) + +UPLOAD_RESPONSE=$(curl -v -X PUT \ -H "Content-Type: application/gzip" \ - --data-binary @- "$SIGNED_URL" 2>&1) + --data-binary "@dist/$TARBALL" "$SIGNED_URL" 2>&1) if echo "$UPLOAD_RESPONSE" | grep -q "HTTP/[0-9.]* 200"; then echo -e "\033[32mUploaded build to Stainless storage.\033[0m" diff --git a/src/client.ts b/src/client.ts index a2f16e8..435af42 100644 --- a/src/client.ts +++ b/src/client.ts @@ -128,7 +128,7 @@ export class CasParser { baseURL: string; maxRetries: number; timeout: number; - logger: Logger | undefined; + logger: Logger; logLevel: LogLevel | undefined; fetchOptions: MergedRequestInit | undefined; diff --git a/src/internal/to-file.ts b/src/internal/to-file.ts index 245e849..30eada3 100644 --- a/src/internal/to-file.ts +++ b/src/internal/to-file.ts @@ -73,7 +73,7 @@ export type ToFileInput = /** * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats - * @param value the raw content of the file. Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s + * @param value the raw content of the file. Can be an {@link Uploadable}, BlobLikePart, or AsyncIterable of BlobLikeParts * @param {string=} name the name of the file. 
If omitted, toFile will try to determine a file name from bits if possible * @param {Object=} options additional properties * @param {string=} options.type the MIME type of the content diff --git a/src/resources/cas-parser.ts b/src/resources/cas-parser.ts index 9bd1cef..171ec55 100644 --- a/src/resources/cas-parser.ts +++ b/src/resources/cas-parser.ts @@ -67,6 +67,11 @@ export interface UnifiedResponse { mutual_funds?: Array; + /** + * List of NPS accounts + */ + nps?: Array; + summary?: UnifiedResponse.Summary; } @@ -104,6 +109,11 @@ export namespace UnifiedResponse { holdings?: DematAccount.Holdings; + /** + * List of account holders linked to this demat account + */ + linked_holders?: Array; + /** * Total value of the demat account */ @@ -173,7 +183,7 @@ export namespace UnifiedResponse { /** * Additional information specific to the AIF */ - additional_info?: unknown; + additional_info?: Aif.AdditionalInfo; /** * ISIN code of the AIF @@ -185,6 +195,11 @@ export namespace UnifiedResponse { */ name?: string; + /** + * List of transactions for this holding (beta) + */ + transactions?: Array; + /** * Number of units held */ @@ -196,11 +211,140 @@ export namespace UnifiedResponse { value?: number; } + export namespace Aif { + /** + * Additional information specific to the AIF + */ + export interface AdditionalInfo { + /** + * Closing balance units for the statement period (beta) + */ + close_units?: number | null; + + /** + * Opening balance units for the statement period (beta) + */ + open_units?: number | null; + } + + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ + export interface Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; + + /** + * Balance units after transaction + */ + balance?: number; + + /** + * Transaction date (YYYY-MM-DD) + */ + date?: string; + + /** + * Transaction description/particulars + */ + description?: string; + + /** + * Dividend rate (for DIVIDEND_PAYOUT transactions) + */ + dividend_rate?: number | null; + + /** + * NAV/price per unit on transaction date + */ + nav?: number | null; + + /** + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. 
+ */ + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; + + /** + * Number of units involved in transaction + */ + units?: number; + } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + export interface CorporateBond { /** * Additional information specific to the corporate bond */ - additional_info?: unknown; + additional_info?: CorporateBond.AdditionalInfo; /** * ISIN code of the corporate bond @@ -212,6 +356,11 @@ export namespace UnifiedResponse { */ name?: string; + /** + * List of transactions for this holding (beta) + */ + transactions?: Array; + /** * Number of units held */ @@ -223,11 +372,140 @@ export namespace UnifiedResponse { value?: number; } + export namespace CorporateBond { + /** + * Additional information specific to the corporate bond + */ + export interface AdditionalInfo { + /** + * Closing balance units for the statement period (beta) + */ + close_units?: number | null; + + /** + * Opening balance units for the statement period (beta) + */ + open_units?: number | null; + } + + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ + export interface Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; + + /** + * Balance units after transaction + */ + balance?: number; + + /** + * Transaction date (YYYY-MM-DD) + */ + date?: string; + + /** + * Transaction description/particulars + */ + description?: string; + + /** + * Dividend rate (for DIVIDEND_PAYOUT transactions) + */ + dividend_rate?: number | null; + + /** + * NAV/price per unit on transaction date + */ + nav?: number | null; + + /** + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. 
+ */ + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; + + /** + * Number of units involved in transaction + */ + units?: number; + } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + export interface DematMutualFund { /** * Additional information specific to the mutual fund */ - additional_info?: unknown; + additional_info?: DematMutualFund.AdditionalInfo; /** * ISIN code of the mutual fund @@ -239,6 +517,11 @@ export namespace UnifiedResponse { */ name?: string; + /** + * List of transactions for this holding (beta) + */ + transactions?: Array; + /** * Number of units held */ @@ -250,11 +533,140 @@ export namespace UnifiedResponse { value?: number; } + export namespace DematMutualFund { + /** + * Additional information specific to the mutual fund + */ + export interface AdditionalInfo { + /** + * Closing balance units for the statement period (beta) + */ + close_units?: number | null; + + /** + * Opening balance units for the statement period (beta) + */ + open_units?: number | null; + } + + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ + export interface Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; + + /** + * Balance units after transaction + */ + balance?: number; + + /** + * Transaction date (YYYY-MM-DD) + */ + date?: string; + + /** + * Transaction description/particulars + */ + description?: string; + + /** + * Dividend rate (for DIVIDEND_PAYOUT transactions) + */ + dividend_rate?: number | null; + + /** + * NAV/price per unit on transaction date + */ + nav?: number | null; + + /** + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. 
+ */ + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; + + /** + * Number of units involved in transaction + */ + units?: number; + } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + export interface Equity { /** * Additional information specific to the equity */ - additional_info?: unknown; + additional_info?: Equity.AdditionalInfo; /** * ISIN code of the equity @@ -266,6 +678,11 @@ export namespace UnifiedResponse { */ name?: string; + /** + * List of transactions for this holding (beta) + */ + transactions?: Array; + /** * Number of units held */ @@ -277,11 +694,140 @@ export namespace UnifiedResponse { value?: number; } + export namespace Equity { + /** + * Additional information specific to the equity + */ + export interface AdditionalInfo { + /** + * Closing balance units for the statement period (beta) + */ + close_units?: number | null; + + /** + * Opening balance units for the statement period (beta) + */ + open_units?: number | null; + } + + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ + export interface Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; + + /** + * Balance units after transaction + */ + balance?: number; + + /** + * Transaction date (YYYY-MM-DD) + */ + date?: string; + + /** + * Transaction description/particulars + */ + description?: string; + + /** + * Dividend rate (for DIVIDEND_PAYOUT transactions) + */ + dividend_rate?: number | null; + + /** + * NAV/price per unit on transaction date + */ + nav?: number | null; + + /** + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. 
+ */ + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; + + /** + * Number of units involved in transaction + */ + units?: number; + } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + export interface GovernmentSecurity { /** * Additional information specific to the government security */ - additional_info?: unknown; + additional_info?: GovernmentSecurity.AdditionalInfo; /** * ISIN code of the government security @@ -293,6 +839,11 @@ export namespace UnifiedResponse { */ name?: string; + /** + * List of transactions for this holding (beta) + */ + transactions?: Array; + /** * Number of units held */ @@ -303,6 +854,147 @@ export namespace UnifiedResponse { */ value?: number; } + + export namespace GovernmentSecurity { + /** + * Additional information specific to the government security + */ + export interface AdditionalInfo { + /** + * Closing balance units for the statement period (beta) + */ + close_units?: number | null; + + /** + * Opening balance units for the statement period (beta) + */ + open_units?: number | null; + } + + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ + export interface Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; + + /** + * Balance units after transaction + */ + balance?: number; + + /** + * Transaction date (YYYY-MM-DD) + */ + date?: string; + + /** + * Transaction description/particulars + */ + description?: string; + + /** + * Dividend rate (for DIVIDEND_PAYOUT transactions) + */ + dividend_rate?: number | null; + + /** + * NAV/price per unit on transaction date + */ + nav?: number | null; + + /** + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. 
+ */ + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; + + /** + * Number of units involved in transaction + */ + units?: number; + } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + } + + export interface LinkedHolder { + /** + * Name of the account holder + */ + name?: string; + + /** + * PAN of the account holder + */ + pan?: string; } } @@ -440,6 +1132,11 @@ export namespace UnifiedResponse { */ folio_number?: string; + /** + * List of account holders linked to this mutual fund folio + */ + linked_holders?: Array; + /** * Registrar and Transfer Agent name */ @@ -474,6 +1171,18 @@ export namespace UnifiedResponse { pankyc?: string; } + export interface LinkedHolder { + /** + * Name of the account holder + */ + name?: string; + + /** + * PAN of the account holder + */ + pan?: string; + } + export interface Scheme { /** * Additional information specific to the scheme @@ -541,14 +1250,14 @@ export namespace UnifiedResponse { amfi?: string; /** - * Closing balance units (CAMS/KFintech) + * Closing balance units for the statement period */ - close_units?: number; + close_units?: number | null; /** - * Opening balance units (CAMS/KFintech) + * Opening balance units for the statement period */ - open_units?: number; + open_units?: number | null; /** * RTA code for the scheme (CAMS/KFintech) @@ -568,11 +1277,20 @@ export namespace UnifiedResponse { percentage?: number; } + /** + * Unified transaction schema for all holding types (MF folios, equities, bonds, + * etc.) + */ export interface Transaction { /** - * Transaction amount + * Additional transaction-specific fields that vary by source */ - amount?: number; + additional_info?: Transaction.AdditionalInfo; + + /** + * Transaction amount in currency (computed from units × price/NAV) + */ + amount?: number | null; /** * Balance units after transaction @@ -580,38 +1298,188 @@ export namespace UnifiedResponse { balance?: number; /** - * Transaction date + * Transaction date (YYYY-MM-DD) */ date?: string; /** - * Transaction description + * Transaction description/particulars */ description?: string; /** - * Dividend rate (for dividend transactions) + * Dividend rate (for DIVIDEND_PAYOUT transactions) */ - dividend_rate?: number; + dividend_rate?: number | null; /** - * NAV on transaction date + * NAV/price per unit on transaction date */ - nav?: number; + nav?: number | null; /** - * Transaction type detected based on description. Possible values are - * PURCHASE,PURCHASE_SIP,REDEMPTION,SWITCH_IN,SWITCH_IN_MERGER,SWITCH_OUT,SWITCH_OUT_MERGER,DIVIDEND_PAYOUT,DIVIDEND_REINVESTMENT,SEGREGATION,STAMP_DUTY_TAX,TDS_TAX,STT_TAX,MISC. 
- * If dividend_rate is present, then possible values are dividend_rate is - * applicable only for DIVIDEND_PAYOUT and DIVIDEND_REINVESTMENT. + * Transaction type. Possible values are PURCHASE, PURCHASE_SIP, REDEMPTION, + * SWITCH_IN, SWITCH_IN_MERGER, SWITCH_OUT, SWITCH_OUT_MERGER, DIVIDEND_PAYOUT, + * DIVIDEND_REINVEST, SEGREGATION, STAMP_DUTY_TAX, TDS_TAX, STT_TAX, MISC, + * REVERSAL, UNKNOWN. */ - type?: string; + type?: + | 'PURCHASE' + | 'PURCHASE_SIP' + | 'REDEMPTION' + | 'SWITCH_IN' + | 'SWITCH_IN_MERGER' + | 'SWITCH_OUT' + | 'SWITCH_OUT_MERGER' + | 'DIVIDEND_PAYOUT' + | 'DIVIDEND_REINVEST' + | 'SEGREGATION' + | 'STAMP_DUTY_TAX' + | 'TDS_TAX' + | 'STT_TAX' + | 'MISC' + | 'REVERSAL' + | 'UNKNOWN'; /** - * Number of units involved + * Number of units involved in transaction */ units?: number; } + + export namespace Transaction { + /** + * Additional transaction-specific fields that vary by source + */ + export interface AdditionalInfo { + /** + * Capital withdrawal amount (CDSL MF transactions) + */ + capital_withdrawal?: number; + + /** + * Units credited (demat transactions) + */ + credit?: number; + + /** + * Units debited (demat transactions) + */ + debit?: number; + + /** + * Income distribution amount (CDSL MF transactions) + */ + income_distribution?: number; + + /** + * Order/transaction reference number (demat transactions) + */ + order_no?: string; + + /** + * Price per unit (NSDL/CDSL MF transactions) + */ + price?: number; + + /** + * Stamp duty charged + */ + stamp_duty?: number; + } + } + } + } + + export interface Np { + /** + * Additional information specific to the NPS account + */ + additional_info?: unknown; + + /** + * Central Record Keeping Agency name + */ + cra?: string; + + funds?: Array; + + /** + * List of account holders linked to this NPS account + */ + linked_holders?: Array; + + /** + * Permanent Retirement Account Number (PRAN) + */ + pran?: string; + + /** + * Total value of the NPS account + */ + value?: number; + } + + export namespace Np { + export interface Fund { + /** + * Additional information specific to the NPS fund + */ + additional_info?: Fund.AdditionalInfo; + + /** + * Cost of investment + */ + cost?: number; + + /** + * Name of the NPS fund + */ + name?: string; + + /** + * Net Asset Value per unit + */ + nav?: number; + + /** + * Number of units held + */ + units?: number; + + /** + * Current market value of the holding + */ + value?: number; + } + + export namespace Fund { + /** + * Additional information specific to the NPS fund + */ + export interface AdditionalInfo { + /** + * Fund manager name + */ + manager?: string; + + /** + * NPS tier (Tier I or Tier II) + */ + tier?: 1 | 2 | null; + } + } + + export interface LinkedHolder { + /** + * Name of the account holder + */ + name?: string; + + /** + * PAN of the account holder + */ + pan?: string; } } @@ -631,6 +1499,8 @@ export namespace UnifiedResponse { insurance?: Accounts.Insurance; mutual_funds?: Accounts.MutualFunds; + + nps?: Accounts.Nps; } export namespace Accounts { @@ -669,6 +1539,18 @@ export namespace UnifiedResponse { */ total_value?: number; } + + export interface Nps { + /** + * Number of NPS accounts + */ + count?: number; + + /** + * Total value of NPS accounts + */ + total_value?: number; + } } } } diff --git a/src/version.ts b/src/version.ts index 45b0c3a..e514fa0 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = '1.4.1'; // x-release-please-version +export const VERSION = '1.5.0'; // x-release-please-version diff --git 
a/tsconfig.build.json b/tsconfig.build.json index 820f85c..bc46a37 100644 --- a/tsconfig.build.json +++ b/tsconfig.build.json @@ -5,8 +5,8 @@ "compilerOptions": { "rootDir": "./dist/src", "paths": { - "cas-parser-node/*": ["dist/src/*"], - "cas-parser-node": ["dist/src/index.ts"] + "cas-parser-node/*": ["./dist/src/*"], + "cas-parser-node": ["./dist/src/index.ts"] }, "noEmit": false, "declaration": true, diff --git a/tsconfig.json b/tsconfig.json index 3f4f112..dbd3476 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -7,10 +7,9 @@ "module": "commonjs", "moduleResolution": "node", "esModuleInterop": true, - "baseUrl": "./", "paths": { - "cas-parser-node/*": ["src/*"], - "cas-parser-node": ["src/index.ts"] + "cas-parser-node/*": ["./src/*"], + "cas-parser-node": ["./src/index.ts"] }, "noEmit": true, diff --git a/yarn.lock b/yarn.lock index 8311caf..5f56a20 100644 --- a/yarn.lock +++ b/yarn.lock @@ -350,45 +350,52 @@ dependencies: "@cspotcode/source-map-consumer" "0.8.0" -"@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": +"@eslint-community/eslint-utils@^4.4.0": version "4.4.0" resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59" integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA== dependencies: eslint-visitor-keys "^3.3.0" +"@eslint-community/eslint-utils@^4.8.0": + version "4.9.0" + resolved "https://registry.yarnpkg.com/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz#7308df158e064f0dd8b8fdb58aa14fa2a7f913b3" + integrity sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g== + dependencies: + eslint-visitor-keys "^3.4.3" + "@eslint-community/regexpp@^4.10.0", "@eslint-community/regexpp@^4.12.1": version "4.12.1" resolved "https://registry.yarnpkg.com/@eslint-community/regexpp/-/regexpp-4.12.1.tgz#cfc6cffe39df390a3841cde2abccf92eaa7ae0e0" integrity sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ== -"@eslint/config-array@^0.19.0": - version "0.19.2" - resolved "https://registry.yarnpkg.com/@eslint/config-array/-/config-array-0.19.2.tgz#3060b809e111abfc97adb0bb1172778b90cb46aa" - integrity sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w== +"@eslint/config-array@^0.21.1": + version "0.21.1" + resolved "https://registry.yarnpkg.com/@eslint/config-array/-/config-array-0.21.1.tgz#7d1b0060fea407f8301e932492ba8c18aff29713" + integrity sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA== dependencies: - "@eslint/object-schema" "^2.1.6" + "@eslint/object-schema" "^2.1.7" debug "^4.3.1" minimatch "^3.1.2" -"@eslint/core@^0.10.0": - version "0.10.0" - resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.10.0.tgz#23727063c21b335f752dbb3a16450f6f9cbc9091" - integrity sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw== +"@eslint/config-helpers@^0.4.2": + version "0.4.2" + resolved "https://registry.yarnpkg.com/@eslint/config-helpers/-/config-helpers-0.4.2.tgz#1bd006ceeb7e2e55b2b773ab318d300e1a66aeda" + integrity sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw== dependencies: - "@types/json-schema" "^7.0.15" + "@eslint/core" "^0.17.0" -"@eslint/core@^0.11.0": - version "0.11.0" - resolved 
"https://registry.yarnpkg.com/@eslint/core/-/core-0.11.0.tgz#7a9226e850922e42cbd2ba71361eacbe74352a12" - integrity sha512-DWUB2pksgNEb6Bz2fggIy1wh6fGgZP4Xyy/Mt0QZPiloKKXerbqq9D3SBQTlCRYOrcRPu4vuz+CGjwdfqxnoWA== +"@eslint/core@^0.17.0": + version "0.17.0" + resolved "https://registry.yarnpkg.com/@eslint/core/-/core-0.17.0.tgz#77225820413d9617509da9342190a2019e78761c" + integrity sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ== dependencies: "@types/json-schema" "^7.0.15" -"@eslint/eslintrc@^3.2.0": - version "3.2.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-3.2.0.tgz#57470ac4e2e283a6bf76044d63281196e370542c" - integrity sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w== +"@eslint/eslintrc@^3.3.1": + version "3.3.3" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-3.3.3.tgz#26393a0806501b5e2b6a43aa588a4d8df67880ac" + integrity sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ== dependencies: ajv "^6.12.4" debug "^4.3.2" @@ -396,26 +403,26 @@ globals "^14.0.0" ignore "^5.2.0" import-fresh "^3.2.1" - js-yaml "^4.1.0" + js-yaml "^4.1.1" minimatch "^3.1.2" strip-json-comments "^3.1.1" -"@eslint/js@9.20.0": - version "9.20.0" - resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.20.0.tgz#7421bcbe74889fcd65d1be59f00130c289856eb4" - integrity sha512-iZA07H9io9Wn836aVTytRaNqh00Sad+EamwOVJT12GTLw1VGMFV/4JaME+JjLtr9fiGaoWgYnS54wrfWsSs4oQ== +"@eslint/js@9.39.1": + version "9.39.1" + resolved "https://registry.yarnpkg.com/@eslint/js/-/js-9.39.1.tgz#0dd59c3a9f40e3f1882975c321470969243e0164" + integrity sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw== -"@eslint/object-schema@^2.1.6": - version "2.1.6" - resolved "https://registry.yarnpkg.com/@eslint/object-schema/-/object-schema-2.1.6.tgz#58369ab5b5b3ca117880c0f6c0b0f32f6950f24f" - integrity sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA== +"@eslint/object-schema@^2.1.7": + version "2.1.7" + resolved "https://registry.yarnpkg.com/@eslint/object-schema/-/object-schema-2.1.7.tgz#6e2126a1347e86a4dedf8706ec67ff8e107ebbad" + integrity sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA== -"@eslint/plugin-kit@^0.2.5": - version "0.2.5" - resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz#ee07372035539e7847ef834e3f5e7b79f09e3a81" - integrity sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A== +"@eslint/plugin-kit@^0.4.1": + version "0.4.1" + resolved "https://registry.yarnpkg.com/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz#9779e3fd9b7ee33571a57435cf4335a1794a6cb2" + integrity sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA== dependencies: - "@eslint/core" "^0.10.0" + "@eslint/core" "^0.17.0" levn "^0.4.1" "@humanfs/core@^0.19.1": @@ -441,10 +448,10 @@ resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.3.1.tgz#c72a5c76a9fbaf3488e231b13dc52c0da7bab42a" integrity sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA== -"@humanwhocodes/retry@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.4.1.tgz#9a96ce501bc62df46c4031fbd970e3cc6b10f07b" - integrity 
sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA== +"@humanwhocodes/retry@^0.4.2": + version "0.4.3" + resolved "https://registry.yarnpkg.com/@humanwhocodes/retry/-/retry-0.4.3.tgz#c2b9d2e374ee62c586d3adbea87199b1d7a7a6ba" + integrity sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ== "@istanbuljs/load-nyc-config@^1.0.0": version "1.1.0" @@ -1057,6 +1064,11 @@ acorn@^8.14.0: resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== +acorn@^8.15.0: + version "8.15.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.15.0.tgz#a360898bc415edaac46c8241f6383975b930b816" + integrity sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg== + acorn@^8.4.1: version "8.7.0" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.7.0.tgz#90951fde0f8f09df93549481e5fc141445b791cf" @@ -1560,15 +1572,15 @@ eslint-plugin-unused-imports@^4.1.4: resolved "https://registry.yarnpkg.com/eslint-plugin-unused-imports/-/eslint-plugin-unused-imports-4.1.4.tgz#62ddc7446ccbf9aa7b6f1f0b00a980423cda2738" integrity sha512-YptD6IzQjDardkl0POxnnRBhU1OEePMV0nd6siHaRBbd+lyh6NAhFEobiznKU7kTsSsDeSD62Pe7kAM1b7dAZQ== -eslint-scope@^8.2.0: - version "8.2.0" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-8.2.0.tgz#377aa6f1cb5dc7592cfd0b7f892fd0cf352ce442" - integrity sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A== +eslint-scope@^8.4.0: + version "8.4.0" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-8.4.0.tgz#88e646a207fad61436ffa39eb505147200655c82" + integrity sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg== dependencies: esrecurse "^4.3.0" estraverse "^5.2.0" -eslint-visitor-keys@^3.3.0: +eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.3: version "3.4.3" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== @@ -1578,31 +1590,36 @@ eslint-visitor-keys@^4.2.0: resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz#687bacb2af884fcdda8a6e7d65c606f46a14cd45" integrity sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw== -eslint@^9.20.1: - version "9.20.1" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.20.1.tgz#923924c078f5226832449bac86662dd7e53c91d6" - integrity sha512-m1mM33o6dBUjxl2qb6wv6nGNwCAsns1eKtaQ4l/NPHeTvhiUPbtdfMyktxN4B3fgHIgsYh1VT3V9txblpQHq+g== +eslint-visitor-keys@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz#4cfea60fe7dd0ad8e816e1ed026c1d5251b512c1" + integrity sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ== + +eslint@^9.39.1: + version "9.39.1" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-9.39.1.tgz#be8bf7c6de77dcc4252b5a8dcb31c2efff74a6e5" + integrity sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g== dependencies: - "@eslint-community/eslint-utils" "^4.2.0" + "@eslint-community/eslint-utils" "^4.8.0" "@eslint-community/regexpp" "^4.12.1" - 
"@eslint/config-array" "^0.19.0" - "@eslint/core" "^0.11.0" - "@eslint/eslintrc" "^3.2.0" - "@eslint/js" "9.20.0" - "@eslint/plugin-kit" "^0.2.5" + "@eslint/config-array" "^0.21.1" + "@eslint/config-helpers" "^0.4.2" + "@eslint/core" "^0.17.0" + "@eslint/eslintrc" "^3.3.1" + "@eslint/js" "9.39.1" + "@eslint/plugin-kit" "^0.4.1" "@humanfs/node" "^0.16.6" "@humanwhocodes/module-importer" "^1.0.1" - "@humanwhocodes/retry" "^0.4.1" + "@humanwhocodes/retry" "^0.4.2" "@types/estree" "^1.0.6" - "@types/json-schema" "^7.0.15" ajv "^6.12.4" chalk "^4.0.0" cross-spawn "^7.0.6" debug "^4.3.2" escape-string-regexp "^4.0.0" - eslint-scope "^8.2.0" - eslint-visitor-keys "^4.2.0" - espree "^10.3.0" + eslint-scope "^8.4.0" + eslint-visitor-keys "^4.2.1" + espree "^10.4.0" esquery "^1.5.0" esutils "^2.0.2" fast-deep-equal "^3.1.3" @@ -1618,7 +1635,7 @@ eslint@^9.20.1: natural-compare "^1.4.0" optionator "^0.9.3" -espree@^10.0.1, espree@^10.3.0: +espree@^10.0.1: version "10.3.0" resolved "https://registry.yarnpkg.com/espree/-/espree-10.3.0.tgz#29267cf5b0cb98735b65e64ba07e0ed49d1eed8a" integrity sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg== @@ -1627,6 +1644,15 @@ espree@^10.0.1, espree@^10.3.0: acorn-jsx "^5.3.2" eslint-visitor-keys "^4.2.0" +espree@^10.4.0: + version "10.4.0" + resolved "https://registry.yarnpkg.com/espree/-/espree-10.4.0.tgz#d54f4949d4629005a1fa168d937c3ff1f7e2a837" + integrity sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ== + dependencies: + acorn "^8.15.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^4.2.1" + esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" @@ -2440,10 +2466,10 @@ js-yaml@^3.13.1: argparse "^1.0.7" esprima "^4.0.0" -js-yaml@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" - integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== +js-yaml@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.1.tgz#854c292467705b699476e1a2decc0c8a3458806b" + integrity sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA== dependencies: argparse "^2.0.1"