Skip to content

Commit f61f3c6

Browse files
STetsing and Aniket-Engg
authored and committed
stable mcp handling
1 parent b82f742 commit f61f3c6

File tree

6 files changed

+105
-24
lines changed

6 files changed

+105
-24
lines changed

libs/remix-ai-core/src/helpers/streamHandler.ts

Lines changed: 71 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,14 @@ export const HandleStreamResponse = async (streamResponse, cb: (streamText: stri
2424
// Check for missing body in the streamResponse
2525
if (!reader) {
2626
// most likely no stream response, so we can just return the result
27-
cb(streamResponse.result)
28-
done_cb?.("");
27+
if (streamResponse.result) {
28+
cb(streamResponse.result)
29+
done_cb?.(streamResponse.result);
30+
} else {
31+
const errorMessage = "Error: Unable to process your request. Try again!";
32+
cb(errorMessage);
33+
done_cb?.(errorMessage);
34+
}
2935
return;
3036
}
3137

@@ -44,7 +50,10 @@ export const HandleStreamResponse = async (streamResponse, cb: (streamText: stri
4450
}
4551
} catch (error) {
4652
console.error('Error parsing JSON:', error);
47-
return; // Just log the error, without unnecessary return value
53+
const errorMessage = "Error: Unable to decode the AI response. Please try again.";
54+
cb(errorMessage);
55+
done_cb?.(errorMessage);
56+
return;
4857
}
4958
}
5059

@@ -68,8 +77,14 @@ export const HandleOpenAIResponse = async (aiResponse: IAIStreamResponse | any,
6877
const toolCalls: Map<number, any> = new Map(); // Accumulate tool calls by index
6978

7079
if (!reader) { // normal response, not a stream
71-
cb(streamResponse.result)
72-
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
80+
if (streamResponse.result) {
81+
cb(streamResponse.result)
82+
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
83+
} else {
84+
const errorMessage = "Error: Unable to process your request. Try again!";
85+
cb(errorMessage);
86+
done_cb?.(errorMessage, streamResponse?.threadId || "");
87+
}
7388
return;
7489
}
7590

@@ -90,6 +105,12 @@ export const HandleOpenAIResponse = async (aiResponse: IAIStreamResponse | any,
90105
done_cb?.(resultText, threadId);
91106
return;
92107
}
108+
109+
// Skip empty JSON strings
110+
if (!jsonStr || jsonStr.length === 0) {
111+
continue;
112+
}
113+
93114
try {
94115
const json = JSON.parse(jsonStr);
95116
threadId = json?.thread_id;
@@ -158,6 +179,9 @@ export const HandleOpenAIResponse = async (aiResponse: IAIStreamResponse | any,
158179
}
159180
} catch (e) {
160181
console.error("⚠️ OpenAI Stream parse error:", e);
182+
console.error("Problematic JSON string:", jsonStr);
183+
// Skip this chunk and continue processing the stream
184+
continue;
161185
}
162186
}
163187
}
@@ -175,8 +199,14 @@ export const HandleMistralAIResponse = async (aiResponse: IAIStreamResponse | an
175199
let resultText = "";
176200

177201
if (!reader) { // normal response, not a stream
178-
cb(streamResponse.result)
179-
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
202+
if (streamResponse.result) {
203+
cb(streamResponse.result)
204+
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
205+
} else {
206+
const errorMessage = "Error: Unable to process your request. Try again!";
207+
cb(errorMessage);
208+
done_cb?.(errorMessage, streamResponse?.threadId || "");
209+
}
180210
return;
181211
}
182212

@@ -196,6 +226,11 @@ export const HandleMistralAIResponse = async (aiResponse: IAIStreamResponse | an
196226
return;
197227
}
198228

229+
// Skip empty JSON strings
230+
if (!jsonStr || jsonStr.length === 0) {
231+
continue;
232+
}
233+
199234
try {
200235
const json = JSON.parse(jsonStr);
201236
threadId = json?.id || threadId;
@@ -212,6 +247,9 @@ export const HandleMistralAIResponse = async (aiResponse: IAIStreamResponse | an
212247
}
213248
} catch (e) {
214249
console.error("MistralAI Stream parse error:", e);
250+
console.error("Problematic JSON string:", jsonStr);
251+
// Skip this chunk and continue processing the stream
252+
continue;
215253
}
216254
}
217255
}
@@ -230,8 +268,14 @@ export const HandleAnthropicResponse = async (aiResponse: IAIStreamResponse | an
230268
let currentBlockIndex: number = -1;
231269

232270
if (!reader) { // normal response, not a stream
233-
cb(streamResponse.result)
234-
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
271+
if (streamResponse.result) {
272+
cb(streamResponse.result)
273+
done_cb?.(streamResponse.result, streamResponse?.threadId || "");
274+
} else {
275+
const errorMessage = "Error: Unable to process your request. Try again!";
276+
cb(errorMessage);
277+
done_cb?.(errorMessage, streamResponse?.threadId || "");
278+
}
235279
return;
236280
}
237281

@@ -246,6 +290,12 @@ export const HandleAnthropicResponse = async (aiResponse: IAIStreamResponse | an
246290
for (const line of lines) {
247291
if (line.startsWith("data: ")) {
248292
const jsonStr = line.replace(/^data: /, "").trim();
293+
294+
// Skip empty or invalid JSON strings
295+
if (!jsonStr || jsonStr.length === 0) {
296+
continue;
297+
}
298+
249299
try {
250300
const json = JSON.parse(jsonStr);
251301

@@ -299,6 +349,9 @@ export const HandleAnthropicResponse = async (aiResponse: IAIStreamResponse | an
299349
}
300350
} catch (e) {
301351
console.error("Anthropic Stream parse error:", e);
352+
console.error("Problematic JSON string:", jsonStr);
353+
// Skip this chunk and continue processing the stream
354+
continue;
302355
}
303356
}
304357
}
@@ -315,8 +368,15 @@ export const HandleOllamaResponse = async (aiResponse: IAIStreamResponse | any,
315368
let inThinking = false;
316369

317370
if (!reader) { // normal response, not a stream
318-
cb(streamResponse.result || streamResponse.response || "");
319-
done_cb?.(streamResponse.result || streamResponse.response || "");
371+
const result = streamResponse.result || streamResponse.response;
372+
if (result) {
373+
cb(result);
374+
done_cb?.(result);
375+
} else {
376+
const errorMessage = "Error: Unable to process your request. Try again!";
377+
cb(errorMessage);
378+
done_cb?.(errorMessage);
379+
}
320380
return;
321381
}
322382

libs/remix-ai-core/src/inferencers/mcp/mcpInferencer.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1189,7 +1189,8 @@ export class MCPInferencer extends RemoteInferencer implements ICompletions, IGe
11891189

11901190
// Send empty prompt - the tool results are in toolsMessages
11911191
// Don't add extra prompts as they cause Anthropic to summarize instead of using full tool results
1192-
return { streamResponse: await super.answer('', followUpOptions), callback: toolExecutionCallback } as IAIStreamResponse;
1192+
if (options.provider === 'openai' || options.provider === 'mistralai') return { streamResponse: await super.answer(prompt, followUpOptions), callback: toolExecutionCallback } as IAIStreamResponse;
1193+
else return { streamResponse: await super.answer("", followUpOptions), callback: toolExecutionCallback } as IAIStreamResponse;
11931194
}
11941195
}
11951196
}

libs/remix-ai-core/src/inferencers/remote/remoteInference.ts

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,17 @@ export class RemoteInferencer implements ICompletions, IGeneration {
2121
this.event = new EventEmitter()
2222
}
2323

24-
protected sanitizePromptByteSize(prompt: string, maxBytes: number = 50000): string {
24+
protected sanitizePromptByteSize(prompt: string, provider?: string): string {
25+
// Provider-specific max byte limits
26+
const providerLimits: Record<string, number> = {
27+
'mistralai': 30000,
28+
'anthropic': 40000,
29+
'openai': 40000
30+
};
31+
32+
// Get max bytes based on provider, default to 50KB
33+
const maxBytes = provider ? (providerLimits[provider.toLowerCase()] || 50000) : 50000;
34+
2535
const encoder = new TextEncoder();
2636
const promptBytes = encoder.encode(prompt); // rough estimation, real size might be 10% more
2737

@@ -39,7 +49,7 @@ export class RemoteInferencer implements ICompletions, IGeneration {
3949
currentBytes = encoder.encode(trimmedPrompt).length;
4050
}
4151

42-
console.warn(`[RemoteInferencer] Prompt exceeded ${maxBytes} bytes. Trimmed from ${promptBytes.length} to ${currentBytes} bytes.`);
52+
console.warn(`[RemoteInferencer] Prompt exceeded ${maxBytes} bytes for provider '${provider || 'default'}'. Trimmed from ${promptBytes.length} to ${currentBytes} bytes.`);
4353
return trimmedPrompt;
4454
}
4555

@@ -49,7 +59,7 @@ export class RemoteInferencer implements ICompletions, IGeneration {
4959

5060
// Sanitize prompt in payload if it exists
5161
if (payload.prompt) {
52-
payload.prompt = this.sanitizePromptByteSize(payload.prompt);
62+
payload.prompt = this.sanitizePromptByteSize(payload.prompt, payload.provider);
5363
}
5464

5565
try {
@@ -88,7 +98,7 @@ export class RemoteInferencer implements ICompletions, IGeneration {
8898

8999
// Sanitize prompt in payload if it exists
90100
if (payload.prompt) {
91-
payload.prompt = this.sanitizePromptByteSize(payload.prompt);
101+
payload.prompt = this.sanitizePromptByteSize(payload.prompt, payload.provider);
92102
}
93103

94104
try {

libs/remix-ai-core/src/prompts/chat.ts

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,18 @@ export abstract class ChatHistory{
66
static queueSize:number = 7 // change the queue size wrt the GPU size
77

88
public static pushHistory(prompt, result){
9-
if (result === "") return // do not allow empty assistant message due to nested stream handles on toolcalls
10-
const chat:ChatEntry = [prompt, result]
11-
this.chatEntries.push(chat)
12-
if (this.chatEntries.length > this.queueSize){this.chatEntries.shift()}
9+
if (result === "" || !result) return // do not allow empty assistant message due to nested stream handles on toolcalls
10+
11+
// Check if an entry with the same prompt already exists
12+
const existingEntryIndex = this.chatEntries.findIndex(entry => entry[0] === prompt)
13+
14+
if (existingEntryIndex !== -1) {
15+
this.chatEntries[existingEntryIndex][1] = result
16+
} else {
17+
const chat:ChatEntry = [prompt, result]
18+
this.chatEntries.push(chat)
19+
if (this.chatEntries.length > this.queueSize){this.chatEntries.shift()}
20+
}
1321
}
1422

1523
public static getHistory(){

libs/remix-ai-core/src/remix-mcp-server/handlers/DeploymentHandler.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -115,8 +115,12 @@ export class DeployContractHandler extends BaseToolHandler {
115115
continueTxExecution()
116116
}, promptCb: () => {}, statusCb: (error) => {
117117
console.log(error)
118+
return this.createErrorResult(`Deployment error: ${error.message || error}`);
118119
}, finalCb: (error, contractObject, address: string, txResult: TxResult) => {
119-
if (error) return reject(error)
120+
if (error) {
121+
reject(error)
122+
return this.createErrorResult(`Deployment error: ${error.message || error}`);
123+
}
120124
resolve({ contractObject, address, txResult })
121125
} }
122126
const confirmationCb = (network, tx, gasEstimation, continueTxExecution, cancelCb) => {
@@ -261,7 +265,7 @@ export class CallContractHandler extends BaseToolHandler {
261265
let txReturn
262266
try {
263267
txReturn = await new Promise((resolve, reject) => {
264-
const params = funcABI.type !== 'fallback' ? args.args.join(',') : ''
268+
const params = funcABI.type !== 'fallback' ? (args.args? args.args.join(',') : ''): ''
265269
plugin.call('blockchain', 'runOrCallContractMethod',
266270
args.contractName,
267271
args.abi,

libs/remix-ai-core/src/remix-mcp-server/providers/TutorialsResourceProvider.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,6 @@ export class TutorialsResourceProvider extends BaseResourceProvider {
4343
}
4444

4545
async getResourceContent(uri: string, plugin: Plugin): Promise<IMCPResourceContent> {
46-
console.log('Getting resource content for URI:', uri);
4746
if (uri === 'tutorials://list') {
4847
return this.getTutorialsList(plugin);
4948
}
@@ -58,7 +57,6 @@ export class TutorialsResourceProvider extends BaseResourceProvider {
5857
private async getTutorialsList(plugin: Plugin): Promise<IMCPResourceContent> {
5958
try {
6059
const tutorials = await axios('https://raw.githubusercontent.com/remix-project-org/remix-workshops/refs/heads/json_desc/config-properties.json')
61-
console.log(tutorials)
6260
return this.createJsonContent('tutorials://list', tutorials);
6361
} catch (error) {
6462
return this.createTextContent('tutorials://list', `Error getting tutorials: ${error.message}`);

0 commit comments

Comments
 (0)