-# Simple MCP Client to Explore MCP Servers / TypeScript [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/hideya/mcp-langchain-client-ts/blob/main/LICENSE) [![npm version](https://img.shields.io/npm/v/@h1deya/mcp-client-cli.svg)](https://www.npmjs.com/package/@h1deya/mcp-client-cli)
+# Simple MCP Client to Explore MCP Servers [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://github.com/hideya/mcp-langchain-client-ts/blob/main/LICENSE) [![npm version](https://img.shields.io/npm/v/@h1deya/mcp-client-cli.svg)](https://www.npmjs.com/package/@h1deya/mcp-client-cli)
 
 
 **Quickly test and explore MCP servers from the command line!**
@@ -21,11 +21,12 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
 - Node.js 18+
 - [optional] [`uv` (`uvx`)](https://docs.astral.sh/uv/getting-started/installation/)
   installed to run Python-based local (stdio) MCP servers
-- LLM API keys from
+- LLM API key(s) from
   [OpenAI](https://platform.openai.com/api-keys),
   [Anthropic](https://console.anthropic.com/settings/keys),
+  [Google AI Studio (for GenAI/Gemini)](https://aistudio.google.com/apikey),
   and/or
-  [Google AI Studio (for GenAI/Gemini)](https://aistudio.google.com/apikey)
+  [xAI](https://console.x.ai/),
   as needed
 
 ## Quick Start
@@ -51,6 +52,8 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
     // "model": "claude-3-5-haiku-latest",
     // "model_provider": "google_genai",
     // "model": "gemini-2.5-flash",
+    // "model_provider": "xai",
+    // "model": "grok-3-mini",
   },
 
   "mcp_servers": {
@@ -71,7 +74,8 @@ A Python equivalent of this utility is available [here](https://pypi.org/project
 ```bash
 echo "ANTHROPIC_API_KEY=sk-ant-...
 OPENAI_API_KEY=sk-proj-...
-GOOGLE_API_KEY=AI..." > .env
+GOOGLE_API_KEY=AI...
+XAI_API_KEY=xai-..." > .env
 
 code .env
 ```
@@ -163,7 +167,7 @@ Create a `llm_mcp_config.json5` file:
 {
   "llm": {
     "model_provider": "openai",
-    "model": "gpt-4.1-nano",
+    "model": "gpt-4o-mini",
     // model: "o4-mini",
   },
 
@@ -177,7 +181,13 @@ Create a `llm_mcp_config.json5` file:
   // "model_provider": "google_genai",
   // "model": "gemini-2.5-flash",
   // // "model": "gemini-2.5-pro",
-  // }
+  // },
+
+  // "llm": {
+  //   "model_provider": "xai",
+  //   "model": "grok-3-mini",
+  //   // "model": "grok-4",
+  // },
 
   "example_queries": [
     "Tell me how LLMs work in a few sentences",