diff --git a/src/libs/agent-runtime/minimax/index.test.ts b/src/libs/agent-runtime/minimax/index.test.ts index 4b1591de59b0..01146687f94f 100644 --- a/src/libs/agent-runtime/minimax/index.test.ts +++ b/src/libs/agent-runtime/minimax/index.test.ts @@ -253,10 +253,10 @@ describe('LobeMinimaxAI', () => { }); }); - it('should include max tokens when model is abab6.5-chat', () => { + it('should include max tokens when model is abab6.5t-chat', () => { const payload: ChatStreamPayload = { messages: [{ content: 'Hello', role: 'user' }], - model: 'abab6.5-chat', + model: 'abab6.5t-chat', temperature: 0, top_p: 0, }; @@ -265,9 +265,9 @@ describe('LobeMinimaxAI', () => { expect(result).toEqual({ messages: [{ content: 'Hello', role: 'user' }], - model: 'abab6.5-chat', + model: 'abab6.5t-chat', stream: true, - max_tokens: 2048, + max_tokens: 4096, }); }); }); diff --git a/src/libs/agent-runtime/minimax/index.ts b/src/libs/agent-runtime/minimax/index.ts index c3da21886430..07b0ff5435cb 100644 --- a/src/libs/agent-runtime/minimax/index.ts +++ b/src/libs/agent-runtime/minimax/index.ts @@ -127,9 +127,14 @@ export class LobeMinimaxAI implements LobeRuntimeAI { // https://www.minimaxi.com/document/guides/chat-model/V2 private getMaxTokens(model: string): number | undefined { switch (model) { - case 'abab6.5-chat': + case 'abab6.5t-chat': + case 'abab6.5g-chat': + case 'abab5.5s-chat': + case 'abab5.5-chat':{ + return 4096; + } case 'abab6.5s-chat': { - return 2048; + return 8192; } } } @@ -139,12 +144,17 @@ export class LobeMinimaxAI implements LobeRuntimeAI { return { ...params, - max_tokens: this.getMaxTokens(payload.model), + frequency_penalty: undefined, + max_tokens: + payload.max_tokens !== undefined + ? payload.max_tokens + : this.getMaxTokens(payload.model), + presence_penalty: undefined, stream: true, temperature: - temperature === undefined || temperature <= 0 - ? undefined - : temperature / 2, + temperature === undefined || temperature <= 0 + ? 
undefined + : temperature / 2, tools: params.tools?.map((tool) => ({ function: { diff --git a/src/libs/agent-runtime/utils/streams/minimax.test.ts b/src/libs/agent-runtime/utils/streams/minimax.test.ts new file mode 100644 index 000000000000..307aa126745b --- /dev/null +++ b/src/libs/agent-runtime/utils/streams/minimax.test.ts @@ -0,0 +1,24 @@ +import { describe, it, expect } from 'vitest'; +import { processDoubleData } from './minimax'; // assumes the file is named minimax.ts + +describe('processDoubleData', () => { + it('should remove the second "data: {"id": and everything after it when matchCount is 2', () => { + const chunkValue = `data: {"id":"first"} some other text + + data: {"id":"second"} more text`; + const result = processDoubleData(chunkValue); + expect(result).toBe('data: {"id":"first"} some other text'); + }); + + it('should not modify chunkValue when matchCount is not 2', () => { + const chunkValue = `data: {"id":"first"} some other text`; + const result = processDoubleData(chunkValue); + expect(result).toBe(chunkValue); + }); + + it('should not modify chunkValue when matchCount is more than 2', () => { + const chunkValue = `data: {"id":"first"} some other text data: {"id":"second"} more text data: {"id":"third"} even more text`; + const result = processDoubleData(chunkValue); + expect(result).toBe(chunkValue); + }); +}); diff --git a/src/libs/agent-runtime/utils/streams/minimax.ts b/src/libs/agent-runtime/utils/streams/minimax.ts index 05629bafdc01..e71ce6c5c3cc 100644 --- a/src/libs/agent-runtime/utils/streams/minimax.ts +++ b/src/libs/agent-runtime/utils/streams/minimax.ts @@ -4,6 +4,19 @@ import { ChatStreamCallbacks } from '../../types'; import { transformOpenAIStream } from './openai'; import { createCallbacksTransformer, createSSEProtocolTransformer } from './protocol'; +export const processDoubleData = (chunkValue: string): string => { + const dataPattern = /data: {"id":"/g; + const matchCount = (chunkValue.match(dataPattern) || []).length; + let 
modifiedChunkValue = chunkValue; + if (matchCount === 2) { + const secondDataIdIndex = chunkValue.indexOf('data: {"id":', chunkValue.indexOf('data: {"id":') + 1); + if (secondDataIdIndex !== -1) { + modifiedChunkValue = chunkValue.slice(0, secondDataIdIndex).trim(); + } + } + return modifiedChunkValue; +}; + const unit8ArrayToJSONChunk = (unit8Array: Uint8Array): OpenAI.ChatCompletionChunk => { const decoder = new TextDecoder(); @@ -12,6 +25,8 @@ const unit8ArrayToJSONChunk = (unit8Array: Uint8Array): OpenAI.ChatCompletionChu // chunkValue example: // data: {"id":"028a65377137d57aaceeffddf48ae99f","choices":[{"finish_reason":"tool_calls","index":0,"delta":{"role":"assistant","tool_calls":[{"id":"call_function_7371372822","type":"function","function":{"name":"realtime-weather____fetchCurrentWeather","arguments":"{\"city\": [\"杭州\", \"北京\"]}"}}]}}],"created":155511,"model":"abab6.5s-chat","object":"chat.completion.chunk"} + chunkValue = processDoubleData(chunkValue); + // so we need to remove `data:` prefix and then parse it as JSON if (chunkValue.startsWith('data:')) { chunkValue = chunkValue.slice(5).trim();