diff --git a/README.md b/README.md
index c6d69674..18b907ea 100644
--- a/README.md
+++ b/README.md
@@ -87,7 +87,7 @@ English | [Indonesia](README_IN.md) | &
- 📱 Support for mobile devices.
- 📓 Summarize any page with right-click menu. (Alt+B)
- 📖 Independent conversation page. (Ctrl+Shift+H)
-- 🔗 Multiple API support (Web API for Free and Plus users, GPT-3.5, GPT-4, Claude, New Bing, Moonshot, Self-Hosted, Azure etc.).
+- 🔗 Multiple API support (Web API for Free and Plus users, GPT-3.5, GPT-4, Claude, New Bing, Moonshot, MiniMax, DeepSeek, Self-Hosted, Azure etc.).
- 📦 Integration for various commonly used websites (Reddit, Quora, YouTube, GitHub, GitLab, StackOverflow, Zhihu, Bilibili). (Inspired by [wimdenherder](https://github.com/wimdenherder))
- 🔍 Integration to all mainstream search engines, and custom queries to support additional sites.
- 🧰 Selection tool and right-click menu to perform various tasks, such as translation, summarization, polishing,
diff --git a/src/background/index.mjs b/src/background/index.mjs
index 7fcaa428..1b370d91 100644
--- a/src/background/index.mjs
+++ b/src/background/index.mjs
@@ -38,6 +38,7 @@ import {
isUsingOpenRouterApiModel,
isUsingAimlApiModel,
isUsingDeepSeekApiModel,
+ isUsingMiniMaxApiModel,
} from '../config/index.mjs'
import '../_locales/i18n'
import { openUrl } from '../utils/open-url'
@@ -56,6 +57,7 @@ import { generateAnswersWithMoonshotCompletionApi } from '../services/apis/moons
import { generateAnswersWithMoonshotWebApi } from '../services/apis/moonshot-web.mjs'
import { isUsingModelName } from '../utils/model-name-convert.mjs'
import { generateAnswersWithDeepSeekApi } from '../services/apis/deepseek-api.mjs'
+import { generateAnswersWithMiniMaxApi } from '../services/apis/minimax-api.mjs'
import { redactSensitiveFields } from './redact.mjs'
const RECONNECT_CONFIG = {
@@ -527,6 +529,9 @@ async function executeApi(session, port, config) {
} else if (isUsingDeepSeekApiModel(session)) {
console.debug('[background] Using DeepSeek API Model')
await generateAnswersWithDeepSeekApi(port, session.question, session, config.deepSeekApiKey)
+ } else if (isUsingMiniMaxApiModel(session)) {
+ console.debug('[background] Using MiniMax API Model')
+ await generateAnswersWithMiniMaxApi(port, session.question, session, config.minimaxApiKey)
} else if (isUsingOllamaApiModel(session)) {
console.debug('[background] Using Ollama API Model')
await generateAnswersWithOllamaApi(port, session.question, session)
diff --git a/src/config/index.mjs b/src/config/index.mjs
index cb1e0c3e..9b5da4b0 100644
--- a/src/config/index.mjs
+++ b/src/config/index.mjs
@@ -112,6 +112,7 @@ export const moonshotApiModelKeys = [
'moonshot_v1_128k',
]
export const deepSeekApiModelKeys = ['deepseek_chat', 'deepseek_reasoner']
+export const miniMaxApiModelKeys = ['minimax_m27', 'minimax_m25', 'minimax_m25_highspeed']
export const openRouterApiModelKeys = [
'openRouter_auto',
'openRouter_free',
@@ -202,6 +203,10 @@ export const ModelGroups = {
value: deepSeekApiModelKeys,
desc: 'DeepSeek (API)',
},
+ miniMaxApiModelKeys: {
+ value: miniMaxApiModelKeys,
+ desc: 'MiniMax (API)',
+ },
openRouterApiModelKeys: {
value: openRouterApiModelKeys,
desc: 'OpenRouter (API)',
@@ -390,6 +395,19 @@ export const Models = {
    value: 'deepseek-reasoner',
    desc: 'DeepSeek (Reasoner)',
},
+ minimax_m27: {
+ value: 'MiniMax-M2.7',
+ desc: 'MiniMax (M2.7)',
+ },
+ minimax_m25: {
+ value: 'MiniMax-M2.5',
+ desc: 'MiniMax (M2.5)',
+ },
+ minimax_m25_highspeed: {
+ value: 'MiniMax-M2.5-highspeed',
+ desc: 'MiniMax (M2.5 Highspeed, 204K)',
+ },
+
openRouter_anthropic_claude_sonnet4: {
value: 'anthropic/claude-sonnet-4',
desc: 'OpenRouter (Claude Sonnet 4)',
@@ -521,6 +539,7 @@ export const defaultConfig = {
chatglmApiKey: '',
moonshotApiKey: '',
deepSeekApiKey: '',
+ minimaxApiKey: '',
customApiKey: '',
@@ -717,6 +736,10 @@ export function isUsingDeepSeekApiModel(configOrSession) {
return isInApiModeGroup(deepSeekApiModelKeys, configOrSession)
}
+export function isUsingMiniMaxApiModel(configOrSession) {
+ return isInApiModeGroup(miniMaxApiModelKeys, configOrSession)
+}
+
export function isUsingOpenRouterApiModel(configOrSession) {
return isInApiModeGroup(openRouterApiModelKeys, configOrSession)
}
diff --git a/src/popup/sections/GeneralPart.jsx b/src/popup/sections/GeneralPart.jsx
index 22b46513..fce59c14 100644
--- a/src/popup/sections/GeneralPart.jsx
+++ b/src/popup/sections/GeneralPart.jsx
@@ -24,6 +24,7 @@ import {
isUsingOpenRouterApiModel,
isUsingAimlApiModel,
isUsingDeepSeekApiModel,
+ isUsingMiniMaxApiModel,
} from '../../config/index.mjs'
import Browser from 'webextension-polyfill'
import { languageList } from '../../config/language.mjs'
@@ -342,6 +343,17 @@ export function GeneralPart({ config, updateConfig, setTabIndex }) {
}}
/>
)}
+        {isUsingMiniMaxApiModel(config) && (
+          <input
+            type="password"
+            value={config.minimaxApiKey}
+            placeholder={t('MiniMax API Key')}
+            onChange={(e) => {
+              const apiKey = e.target.value
+              updateConfig({ minimaxApiKey: apiKey })
+            }}
+          />
+        )}
        {isUsingOllamaApiModel(config) && (
          <input
            onChange={(e) => {
diff --git a/tests/unit/config/minimax-models.test.mjs b/tests/unit/config/minimax-models.test.mjs
new file mode 100644
index 00000000..5f3a9c21
--- /dev/null
+++ b/tests/unit/config/minimax-models.test.mjs
@@ -0,0 +1,59 @@
+import assert from 'node:assert/strict'
+import { describe, test } from 'node:test'
+import {
+  Models,
+  ModelGroups,
+  miniMaxApiModelKeys,
+  isUsingMiniMaxApiModel,
+} from '../../../src/config/index.mjs'
+
+describe('MiniMax model configuration', () => {
+ test('all MiniMax model keys have corresponding Models entries', () => {
+ for (const key of miniMaxApiModelKeys) {
+ assert.ok(Models[key], `Models entry missing for ${key}`)
+ assert.ok(Models[key].value, `Models[${key}].value is empty`)
+ assert.ok(Models[key].desc, `Models[${key}].desc is empty`)
+ }
+ })
+
+ test('MiniMax model group is registered in ModelGroups', () => {
+ assert.ok(ModelGroups.miniMaxApiModelKeys, 'MiniMax group missing from ModelGroups')
+ assert.equal(ModelGroups.miniMaxApiModelKeys.desc, 'MiniMax (API)')
+ assert.deepEqual(ModelGroups.miniMaxApiModelKeys.value, miniMaxApiModelKeys)
+ })
+
+ test('MiniMax model values match expected API model names', () => {
+ assert.equal(Models.minimax_m27.value, 'MiniMax-M2.7')
+ assert.equal(Models.minimax_m25.value, 'MiniMax-M2.5')
+ assert.equal(Models.minimax_m25_highspeed.value, 'MiniMax-M2.5-highspeed')
+ })
+
+ test('isUsingMiniMaxApiModel does not match other provider models', () => {
+ const otherModels = [
+ 'chatgptApi4oMini',
+ 'deepseek_chat',
+ 'moonshot_v1_8k',
+ 'claude37SonnetApi',
+ 'customModel',
+ 'ollamaModel',
+ ]
+ for (const modelName of otherModels) {
+ assert.equal(isUsingMiniMaxApiModel({ modelName }), false, `Should not match ${modelName}`)
+ }
+ })
+
+ test('MiniMax model keys are unique and do not overlap with other groups', () => {
+ const allOtherKeys = []
+ for (const [groupName, group] of Object.entries(ModelGroups)) {
+ if (groupName === 'miniMaxApiModelKeys') continue
+ allOtherKeys.push(...group.value)
+ }
+ for (const key of miniMaxApiModelKeys) {
+ assert.equal(
+ allOtherKeys.includes(key),
+ false,
+ `MiniMax key ${key} overlaps with another group`,
+ )
+ }
+ })
+})
diff --git a/tests/unit/config/config-predicates.test.mjs b/tests/unit/config/config-predicates.test.mjs
index 8ea2f9a6..ee47a3a4 100644
--- a/tests/unit/config/config-predicates.test.mjs
+++ b/tests/unit/config/config-predicates.test.mjs
@@ -8,6 +8,7 @@ import {
claudeApiModelKeys,
openRouterApiModelKeys,
aimlApiModelKeys,
+ miniMaxApiModelKeys,
isUsingAimlApiModel,
isUsingAzureOpenAiApiModel,
isUsingBingWebModel,
@@ -19,6 +20,7 @@ import {
isUsingDeepSeekApiModel,
isUsingGeminiWebModel,
isUsingGithubThirdPartyApiModel,
+ isUsingMiniMaxApiModel,
isUsingMoonshotApiModel,
isUsingMoonshotWebModel,
isUsingMultiModeModel,
@@ -43,10 +45,7 @@ const representativeOpenRouterApiModelNames = [
'openRouter_anthropic_claude_sonnet4',
'openRouter_openai_o3',
]
-const representativeAimlApiModelNames = [
- 'aiml_claude_sonnet_4_6_20260218',
- 'aiml_openai_gpt_5_2',
-]
+const representativeAimlApiModelNames = ['aiml_claude_sonnet_4_6_20260218', 'aiml_openai_gpt_5_2']
const originalNavigatorDescriptor = Object.getOwnPropertyDescriptor(globalThis, 'navigator')
@@ -194,6 +193,19 @@ test('isUsingDeepSeekApiModel detects DeepSeek models', () => {
assert.equal(isUsingDeepSeekApiModel({ modelName: 'chatgptApi4oMini' }), false)
})
+test('isUsingMiniMaxApiModel detects MiniMax models', () => {
+ assert.equal(isUsingMiniMaxApiModel({ modelName: 'minimax_m27' }), true)
+ assert.equal(isUsingMiniMaxApiModel({ modelName: 'minimax_m25' }), true)
+ assert.equal(isUsingMiniMaxApiModel({ modelName: 'minimax_m25_highspeed' }), true)
+ assert.equal(isUsingMiniMaxApiModel({ modelName: 'chatgptApi4oMini' }), false)
+})
+
+test('isUsingMiniMaxApiModel accepts exported MiniMax API model keys', () => {
+ for (const modelName of miniMaxApiModelKeys) {
+ assert.equal(isUsingMiniMaxApiModel({ modelName }), true)
+ }
+})
+
test('isUsingOpenRouterApiModel matches representative OpenRouter API keys', () => {
for (const modelName of representativeOpenRouterApiModelNames) {
assert.equal(isUsingOpenRouterApiModel({ modelName }), true)
diff --git a/tests/unit/services/apis/minimax-api.test.mjs b/tests/unit/services/apis/minimax-api.test.mjs
new file mode 100644
index 00000000..403d5d77
--- /dev/null
+++ b/tests/unit/services/apis/minimax-api.test.mjs
@@ -0,0 +1,187 @@
+import assert from 'node:assert/strict'
+import { beforeEach, test } from 'node:test'
+import { generateAnswersWithMiniMaxApi } from '../../../../src/services/apis/minimax-api.mjs'
+import { createFakePort } from '../../helpers/port.mjs'
+import { createMockSseResponse } from '../../helpers/sse-response.mjs'
+
+const setStorage = (values) => {
+ globalThis.__TEST_BROWSER_SHIM__.replaceStorage(values)
+}
+
+beforeEach(() => {
+ globalThis.__TEST_BROWSER_SHIM__.clearStorage()
+})
+
+test('generateAnswersWithMiniMaxApi sends request to MiniMax base URL', async (t) => {
+ t.mock.method(console, 'debug', () => {})
+ setStorage({
+ maxConversationContextLength: 3,
+ maxResponseTokenLength: 256,
+ temperature: 0.7,
+ })
+
+ const session = {
+ modelName: 'minimax_m27',
+ conversationRecords: [{ question: 'PrevQ', answer: 'PrevA' }],
+ isRetry: false,
+ }
+ const port = createFakePort()
+
+ let capturedInput
+ let capturedInit
+ t.mock.method(globalThis, 'fetch', async (input, init) => {
+ capturedInput = input
+ capturedInit = init
+ return createMockSseResponse([
+ 'data: {"choices":[{"delta":{"content":"Hi"}}]}\n\n',
+ 'data: {"choices":[{"delta":{"content":" there"},"finish_reason":"stop"}]}\n\n',
+ ])
+ })
+
+ await generateAnswersWithMiniMaxApi(port, 'Hello', session, 'sk-minimax-test')
+
+ assert.equal(capturedInput, 'https://api.minimax.io/v1/chat/completions')
+ assert.equal(capturedInit.method, 'POST')
+ assert.equal(capturedInit.headers.Authorization, 'Bearer sk-minimax-test')
+
+ const body = JSON.parse(capturedInit.body)
+ assert.equal(body.stream, true)
+ assert.equal(body.max_tokens, 256)
+ assert.equal(body.temperature, 0.7)
+ assert.equal(Array.isArray(body.messages), true)
+ assert.deepEqual(body.messages[0], { role: 'user', content: 'PrevQ' })
+ assert.deepEqual(body.messages[1], { role: 'assistant', content: 'PrevA' })
+ assert.deepEqual(body.messages.at(-1), { role: 'user', content: 'Hello' })
+})
+
+test('generateAnswersWithMiniMaxApi aggregates SSE deltas and posts messages', async (t) => {
+ t.mock.method(console, 'debug', () => {})
+ setStorage({
+ maxConversationContextLength: 3,
+ maxResponseTokenLength: 128,
+ temperature: 0.5,
+ })
+
+ const session = {
+ modelName: 'minimax_m25',
+ conversationRecords: [],
+ isRetry: false,
+ }
+ const port = createFakePort()
+
+ t.mock.method(globalThis, 'fetch', async () =>
+ createMockSseResponse([
+ 'data: {"choices":[{"delta":{"content":"Hel"}}]}\n\n',
+ 'data: {"choices":[{"delta":{"content":"lo"},"finish_reason":"stop"}]}\n\n',
+ ]),
+ )
+
+ await generateAnswersWithMiniMaxApi(port, 'Test', session, 'sk-test')
+
+ assert.equal(
+ port.postedMessages.some((m) => m.done === false && m.answer === 'Hel'),
+ true,
+ )
+ assert.equal(
+ port.postedMessages.some((m) => m.done === false && m.answer === 'Hello'),
+ true,
+ )
+ assert.equal(
+ port.postedMessages.some((m) => m.done === true && m.session === session),
+ true,
+ )
+ assert.deepEqual(session.conversationRecords.at(-1), {
+ question: 'Test',
+ answer: 'Hello',
+ })
+})
+
+test('generateAnswersWithMiniMaxApi throws on non-ok response', async (t) => {
+ t.mock.method(console, 'debug', () => {})
+ setStorage({
+ maxConversationContextLength: 3,
+ maxResponseTokenLength: 128,
+ temperature: 0.5,
+ })
+
+ const session = {
+ modelName: 'minimax_m25_highspeed',
+ conversationRecords: [],
+ isRetry: false,
+ }
+ const port = createFakePort()
+
+ t.mock.method(globalThis, 'fetch', async () =>
+ createMockSseResponse([], {
+ ok: false,
+ status: 401,
+ statusText: 'Unauthorized',
+ json: async () => ({ error: { message: 'invalid api key' } }),
+ }),
+ )
+
+ await assert.rejects(async () => {
+ await generateAnswersWithMiniMaxApi(port, 'Test', session, 'sk-bad')
+ }, /invalid api key/)
+
+ assert.deepEqual(port.listenerCounts(), { onMessage: 0, onDisconnect: 0 })
+})
+
+test('generateAnswersWithMiniMaxApi supports message.content fallback', async (t) => {
+ t.mock.method(console, 'debug', () => {})
+ setStorage({
+ maxConversationContextLength: 2,
+ maxResponseTokenLength: 256,
+ temperature: 0.5,
+ })
+
+ const session = {
+ modelName: 'minimax_m27',
+ conversationRecords: [],
+ isRetry: false,
+ }
+ const port = createFakePort()
+
+ t.mock.method(globalThis, 'fetch', async () =>
+ createMockSseResponse([
+ 'data: {"choices":[{"message":{"content":"Full response"},"finish_reason":"stop"}]}\n\n',
+ ]),
+ )
+
+ await generateAnswersWithMiniMaxApi(port, 'Question', session, 'sk-test')
+
+ assert.equal(
+ port.postedMessages.some((m) => m.done === false && m.answer === 'Full response'),
+ true,
+ )
+ assert.deepEqual(session.conversationRecords.at(-1), {
+ question: 'Question',
+ answer: 'Full response',
+ })
+})
+
+test('generateAnswersWithMiniMaxApi throws on network error', async (t) => {
+ t.mock.method(console, 'debug', () => {})
+ setStorage({
+ maxConversationContextLength: 3,
+ maxResponseTokenLength: 128,
+ temperature: 0.5,
+ })
+
+ const session = {
+ modelName: 'minimax_m27',
+ conversationRecords: [],
+ isRetry: false,
+ }
+ const port = createFakePort()
+
+ t.mock.method(globalThis, 'fetch', async () => {
+ throw new TypeError('Failed to fetch')
+ })
+
+ await assert.rejects(async () => {
+ await generateAnswersWithMiniMaxApi(port, 'Test', session, 'sk-test')
+ }, /Failed to fetch/)
+
+ assert.deepEqual(port.listenerCounts(), { onMessage: 0, onDisconnect: 0 })
+})