Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 15 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,21 @@ npm run dev

---

## 🌐 Multi-Provider Support

All Model Chat now supports multiple AI providers beyond Google Gemini:

| Provider | Models | Context Window | Configuration |
| :--- | :--- | :--- | :--- |
| **Google Gemini** | Gemini 3.0, 2.5, Gemma | Up to 2M tokens | API Key or Environment Variable |
| **MiniMax AI** | MiniMax-M2.7, MiniMax-M2.7-highspeed | 204K tokens | Settings > API Configuration |

To use MiniMax models, enter your MiniMax API key in **Settings > API Configuration > MiniMax AI Configuration**, then select a MiniMax model from the model picker.

> Get your MiniMax API key at [platform.minimax.chat](https://platform.minimax.chat)

---

## 🛠️ 技术架构 / Technical Architecture

<table width="100%">
Expand Down
2 changes: 2 additions & 0 deletions all-model-chat/components/settings/SettingsContent.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,8 @@ export const SettingsContent: React.FC<SettingsContentProps> = ({
setApiProxyUrl={(val) => updateSetting('apiProxyUrl', val)}
useApiProxy={currentSettings.useApiProxy ?? false}
setUseApiProxy={(val) => updateSetting('useApiProxy', val)}
minimaxApiKey={currentSettings.minimaxApiKey ?? null}
setMinimaxApiKey={(val) => updateSetting('minimaxApiKey', val)}
availableModels={availableModels}
t={t as any}
/>
Expand Down
31 changes: 30 additions & 1 deletion all-model-chat/components/settings/sections/ApiConfigSection.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@

import React, { useState, useEffect } from 'react';
import { KeyRound } from 'lucide-react';
import { KeyRound, Sparkles } from 'lucide-react';
import { useResponsiveValue } from '../../../hooks/useDevice';
import { getClient } from '../../../services/api/baseApi';
import { parseApiKeys } from '../../../utils/apiUtils';
Expand All @@ -9,6 +9,7 @@ import { ApiKeyInput } from './api-config/ApiKeyInput';
import { ApiProxySettings } from './api-config/ApiProxySettings';
import { ApiConnectionTester } from './api-config/ApiConnectionTester';
import { ModelOption } from '../../../types';
import { SETTINGS_INPUT_CLASS } from '../../../constants/appConstants';

interface ApiConfigSectionProps {
useCustomApiConfig: boolean;
Expand All @@ -19,6 +20,8 @@ interface ApiConfigSectionProps {
setApiProxyUrl: (value: string | null) => void;
useApiProxy: boolean;
setUseApiProxy: (value: boolean) => void;
minimaxApiKey: string | null;
setMinimaxApiKey: (value: string | null) => void;
availableModels: ModelOption[];
t: (key: string) => string;
}
Expand All @@ -42,6 +45,8 @@ export const ApiConfigSection: React.FC<ApiConfigSectionProps> = ({
setApiProxyUrl,
useApiProxy,
setUseApiProxy,
minimaxApiKey,
setMinimaxApiKey,
availableModels,
t,
}) => {
Expand Down Expand Up @@ -170,6 +175,30 @@ export const ApiConfigSection: React.FC<ApiConfigSectionProps> = ({
</div>
</div>
</div>

{/* MiniMax API Configuration */}
<div className="mt-8 pt-6 border-t border-[var(--theme-border-primary)]">
<h3 className="text-base font-semibold text-[var(--theme-text-primary)] flex items-center gap-2 mb-4">
<Sparkles size={iconSize} className="text-[var(--theme-text-link)]" strokeWidth={1.5} />
{t('settingsMiniMaxConfig')}
</h3>
<p className="text-xs text-[var(--theme-text-tertiary)] mb-3">
{t('settingsMiniMaxHelp')}
</p>
<div className="relative">
<input
type="password"
value={minimaxApiKey || ''}
onChange={(e) => setMinimaxApiKey(e.target.value || null)}
placeholder={t('settingsMiniMaxKeyPlaceholder')}
className={`w-full px-4 py-2.5 rounded-lg text-sm border ${SETTINGS_INPUT_CLASS} focus:outline-none focus:ring-2 transition-colors duration-200`}
autoComplete="off"
/>
</div>
<p className="text-xs text-[var(--theme-text-tertiary)] mt-2">
{t('settingsMiniMaxModelsInfo')}
</p>
</div>
</div>
);
};
1 change: 1 addition & 0 deletions all-model-chat/constants/appConstants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,7 @@ export const DEFAULT_APP_SETTINGS: AppSettings = {
apiKey: null,
apiProxyUrl: "https://api-proxy.de/gemini/v1beta",
useApiProxy: false,
minimaxApiKey: null,
language: 'system',
isStreamingEnabled: DEFAULT_IS_STREAMING_ENABLED,
transcriptionModelId: DEFAULT_TRANSCRIPTION_MODEL_ID,
Expand Down
49 changes: 49 additions & 0 deletions all-model-chat/constants/providerConstants.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@

import { ModelOption } from '../types';

/**
 * Provider type identifiers.
 *
 * - 'gemini': requests go through the @google/genai SDK directly.
 * - 'openai-compatible': requests use the provider's OpenAI-compatible
 *   /v1/chat/completions endpoint.
 */
export type ProviderType = 'gemini' | 'openai-compatible';

/**
 * Static description of a chat provider preset.
 */
export interface ProviderPreset {
  /** Human-readable provider name. */
  name: string;
  /** API style used to talk to this provider (Gemini SDK vs OpenAI-compatible). */
  type: ProviderType;
  /** Root URL that API requests are issued against. */
  baseUrl: string;
  /** Models this provider exposes. */
  models: ModelOption[];
  /** Model ID prefix used for detection. */
  prefix: string;
}

// Root of MiniMax's OpenAI-compatible REST API; chat-completion requests
// for MiniMax models are issued relative to this URL.
export const MINIMAX_BASE_URL = 'https://api.minimax.io/v1';

// MiniMax chat models offered by this provider.
// NOTE(review): `isPinned: true` presumably keeps these at the top of the
// model picker — confirm against the picker's sorting logic.
export const MINIMAX_MODELS: ModelOption[] = [
  { id: 'MiniMax-M2.7', name: 'MiniMax M2.7', isPinned: true },
  { id: 'MiniMax-M2.7-highspeed', name: 'MiniMax M2.7 Highspeed', isPinned: true },
];

/**
 * Check whether a model ID belongs to the MiniMax provider.
 *
 * MiniMax model IDs always carry the literal "MiniMax-" prefix
 * (e.g. "MiniMax-M2.7"); the comparison is case-sensitive.
 */
export const isMiniMaxModel = (modelId: string): boolean =>
  modelId.startsWith('MiniMax-');

/**
 * Check if a model uses the OpenAI-compatible API path.
 *
 * Currently only MiniMax models (IDs prefixed "MiniMax-") are routed
 * through the OpenAI-compatible /v1/chat/completions endpoint; all other
 * models keep using the Gemini SDK path.
 */
export const isOpenAICompatModel = (modelId: string): boolean =>
  modelId.startsWith('MiniMax-');

/**
 * Get the base URL for an OpenAI-compatible model.
 *
 * @param modelId - Model identifier, e.g. "MiniMax-M2.7".
 * @returns The provider's API root (MINIMAX_BASE_URL for MiniMax models),
 *   or the empty string when the model is not served by a known
 *   OpenAI-compatible provider.
 */
export const getOpenAICompatBaseUrl = (modelId: string): string =>
  isMiniMaxModel(modelId) ? MINIMAX_BASE_URL : '';
85 changes: 67 additions & 18 deletions all-model-chat/hooks/message-sender/standard/useApiInteraction.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@
import React, { useCallback, Dispatch, SetStateAction } from 'react';
import { AppSettings, ChatMessage, ChatSettings as IndividualChatSettings, UploadedFile } from '../../../types';
import { createChatHistoryForApi, isGemini3Model, logService } from '../../../utils/appUtils';
import { isOpenAICompatModel } from '../../../utils/modelHelpers';
import { buildGenerationConfig } from '../../../services/api/baseApi';
import { geminiServiceInstance } from '../../../services/geminiService';
import { sendOpenAICompatMessageStream, sendOpenAICompatMessageNonStream } from '../../../services/api/openaiCompatApi';
import { pyodideService } from '../../../services/pyodideService';
import { isLikelyHtml } from '../../../utils/codeUtils';
import { GetStreamHandlers } from '../types';
Expand Down Expand Up @@ -112,24 +114,6 @@ export const useApiInteraction = ({
const shouldStripThinking = sessionToUpdate.hideThinkingInContext ?? appSettings.hideThinkingInContext;
const historyForChat = await createChatHistoryForApi(baseMessagesForApi, shouldStripThinking);

const config = buildGenerationConfig(
activeModelId,
sessionToUpdate.systemInstruction,
{ temperature: sessionToUpdate.temperature, topP: sessionToUpdate.topP },
sessionToUpdate.showThoughts,
sessionToUpdate.thinkingBudget,
!!sessionToUpdate.isGoogleSearchEnabled,
!!sessionToUpdate.isCodeExecutionEnabled,
!!sessionToUpdate.isUrlContextEnabled,
sessionToUpdate.thinkingLevel,
aspectRatio,
sessionToUpdate.isDeepSearchEnabled,
imageSize,
sessionToUpdate.safetySettings,
sessionToUpdate.mediaResolution,
!!sessionToUpdate.isLocalPythonEnabled
);

const { streamOnError, streamOnComplete, streamOnPart, onThoughtChunk } = getStreamHandlers(
finalSessionId,
generationId,
Expand All @@ -149,6 +133,71 @@ export const useApiInteraction = ({
setSessionLoading(finalSessionId, true);
activeJobs.current.set(generationId, newAbortController);

// Route through OpenAI-compatible API for MiniMax and other non-Gemini providers
if (isOpenAICompatModel(activeModelId)) {
const minimaxKey = appSettings.minimaxApiKey;
if (!minimaxKey) {
streamOnError(new Error('MiniMax API Key is not configured. Please set it in Settings > API Configuration.'));
return;
}

const openaiConfig = {
temperature: sessionToUpdate.temperature,
topP: sessionToUpdate.topP,
systemInstruction: sessionToUpdate.systemInstruction,
};

if (appSettings.isStreamingEnabled) {
await sendOpenAICompatMessageStream(
minimaxKey,
activeModelId,
historyForChat,
finalParts,
openaiConfig,
newAbortController.signal,
streamOnPart,
onThoughtChunk,
streamOnError,
streamOnComplete,
finalRole
);
} else {
await sendOpenAICompatMessageNonStream(
minimaxKey,
activeModelId,
historyForChat,
finalParts,
openaiConfig,
newAbortController.signal,
streamOnError,
(parts, thoughts, usage, grounding) => {
for (const part of parts) streamOnPart(part);
if (thoughts) onThoughtChunk(thoughts);
streamOnComplete(usage, grounding);
}
);
}
return;
}

const config = buildGenerationConfig(
activeModelId,
sessionToUpdate.systemInstruction,
{ temperature: sessionToUpdate.temperature, topP: sessionToUpdate.topP },
sessionToUpdate.showThoughts,
sessionToUpdate.thinkingBudget,
!!sessionToUpdate.isGoogleSearchEnabled,
!!sessionToUpdate.isCodeExecutionEnabled,
!!sessionToUpdate.isUrlContextEnabled,
sessionToUpdate.thinkingLevel,
aspectRatio,
sessionToUpdate.isDeepSearchEnabled,
imageSize,
sessionToUpdate.safetySettings,
sessionToUpdate.mediaResolution,
!!sessionToUpdate.isLocalPythonEnabled
);

if (appSettings.isStreamingEnabled) {
await geminiServiceInstance.sendMessageStream(
keyToUse,
Expand Down
49 changes: 31 additions & 18 deletions all-model-chat/hooks/message-sender/useStandardChat.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@

import { useCallback } from 'react';
import { generateUniqueId, buildContentParts, getKeyForRequest, performOptimisticSessionUpdate, logService } from '../../utils/appUtils';
import { isOpenAICompatModel } from '../../utils/modelHelpers';
import { DEFAULT_CHAT_SETTINGS, MODELS_SUPPORTING_RAW_MODE } from '../../constants/appConstants';
import { UploadedFile, ChatMessage } from '../../types';
import { StandardChatProps } from './types';
Expand Down Expand Up @@ -66,25 +67,37 @@ export const useStandardChat = ({
logService.info(`Fast Mode activated (One-off): Overriding thinking level to ${targetLevel}.`);
}

const keyResult = getKeyForRequest(appSettings, settingsForApi);
if ('error' in keyResult) {
logService.error("Send message failed: API Key not configured.");
const errorMsg: ChatMessage = { id: generateUniqueId(), role: 'error', content: keyResult.error, timestamp: new Date() };
const newSessionId = generateUniqueId();

updateAndPersistSessions(prev => performOptimisticSessionUpdate(prev, {
activeSessionId: null,
newSessionId,
newMessages: [errorMsg],
settings: { ...DEFAULT_CHAT_SETTINGS, ...appSettings },
appSettings,
title: "API Key Error"
}));
setActiveSessionId(newSessionId);
return;
// For OpenAI-compatible providers (MiniMax), bypass Gemini key check
let keyToUse: string;
let isNewKey = false;
let shouldLockKey = false;

if (isOpenAICompatModel(activeModelId)) {
// MiniMax models use their own API key, managed in useApiInteraction
keyToUse = 'openai-compat-placeholder';
isNewKey = false;
} else {
const keyResult = getKeyForRequest(appSettings, settingsForApi);
if ('error' in keyResult) {
logService.error("Send message failed: API Key not configured.");
const errorMsg: ChatMessage = { id: generateUniqueId(), role: 'error', content: keyResult.error, timestamp: new Date() };
const newSessionId = generateUniqueId();

updateAndPersistSessions(prev => performOptimisticSessionUpdate(prev, {
activeSessionId: null,
newSessionId,
newMessages: [errorMsg],
settings: { ...DEFAULT_CHAT_SETTINGS, ...appSettings },
appSettings,
title: "API Key Error"
}));
setActiveSessionId(newSessionId);
return;
}
keyToUse = keyResult.key;
isNewKey = keyResult.isNewKey;
shouldLockKey = isNewKey && filesToUse.some(f => f.fileUri && f.uploadState === 'active');
}
const { key: keyToUse, isNewKey } = keyResult;
const shouldLockKey = isNewKey && filesToUse.some(f => f.fileUri && f.uploadState === 'active');

const newAbortController = new AbortController();

Expand Down
10 changes: 8 additions & 2 deletions all-model-chat/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,9 @@
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
"preview": "vite preview",
"test": "vitest run",
"test:watch": "vitest"
},
"dependencies": {
"@formkit/auto-animate": "^0.8.2",
Expand Down Expand Up @@ -39,6 +41,8 @@
"xlsx": "^0.18.5"
},
"devDependencies": {
"@testing-library/jest-dom": "^6.9.1",
"@testing-library/react": "^16.3.2",
"@types/dompurify": "^3.0.5",
"@types/katex": "^0.16.7",
"@types/node": "^20.14.10",
Expand All @@ -47,10 +51,12 @@
"@types/turndown": "^5.0.5",
"@vitejs/plugin-react": "^4.3.1",
"autoprefixer": "^10.4.19",
"jsdom": "^29.0.1",
"postcss": "^8.4.39",
"tailwindcss": "^3.4.4",
"typescript": "^5.5.3",
"vite": "^5.3.3",
"vite-plugin-static-copy": "^1.0.0"
"vite-plugin-static-copy": "^1.0.0",
"vitest": "^4.1.2"
}
}
Loading