Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ The PaperDebugger backend is built with modern technologies:
- **Language**: Go 1.24+
- **Framework**: Gin (HTTP) + gRPC (API)
- **Database**: MongoDB
- **AI Integration**: OpenAI API
- **AI Integration**: OpenAI API, [MiniMax](https://www.minimaxi.com/) (via OpenAI-compatible API)
- **Architecture**: Microservices with Protocol Buffers
- **Authentication**: JWT-based with OAuth support

Expand Down
14 changes: 14 additions & 0 deletions docs/DEVELOPMENT.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,20 @@ cp .env.example .env
# Edit the .env file based on your configuration
```

**Required environment variables:**
| Variable | Description |
|---|---|
| `OPENAI_API_KEY` | OpenAI API key (server-side default) |
| `OPENAI_BASE_URL` | OpenAI base URL (default: `https://api.openai.com/v1`) |
| `INFERENCE_API_KEY` | OpenRouter inference API key |
| `INFERENCE_BASE_URL` | Inference endpoint URL |

**Optional provider keys (for direct API access):**
| Variable | Description |
|---|---|
| `MINIMAX_API_KEY` | [MiniMax](https://www.minimaxi.com/) API key for direct access to MiniMax models (M2.7, M2.7-highspeed) |
| `MINIMAX_BASE_URL` | MiniMax base URL (default: `https://api.minimax.io/v1`) |

#### 4. Custom MCP Backend Orchestration [OPTIONAL FOR LOCAL DEV]
Our enhanced orchestration backend, [**XtraMCP**](https://github.com/4ndrelim/academic-paper-mcp-server), is partially deployed in-production, with selected components enabled to **balance stability and operational cost** at this stage. It is still under active development and remains closed-source for now.

Expand Down
31 changes: 31 additions & 0 deletions internal/api/chat/list_supported_models_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,26 @@ var allModels = []modelConfig{
outputPrice: 300,
requireOwnKey: false,
},
{
name: "MiniMax M2.7",
slugOpenRouter: "minimax/MiniMax-M2.7",
slugOpenAI: "MiniMax-M2.7",
totalContext: 1000000,
maxOutput: 128000,
inputPrice: 100, // $1.00
outputPrice: 400, // $4.00
requireOwnKey: false,
},
{
name: "MiniMax M2.7 Highspeed",
slugOpenRouter: "minimax/MiniMax-M2.7-highspeed",
slugOpenAI: "MiniMax-M2.7-highspeed",
totalContext: 1000000,
maxOutput: 128000,
inputPrice: 50, // $0.50
outputPrice: 200, // $2.00
requireOwnKey: false,
},
{
name: "o1 Mini",
slugOpenRouter: "openai/o1-mini",
Expand Down Expand Up @@ -221,6 +241,7 @@ func (s *ChatServerV2) ListSupportedModels(
}

hasOwnAPIKey := strings.TrimSpace(settings.OpenAIAPIKey) != ""
hasMiniMaxAPIKey := strings.TrimSpace(s.cfg.MiniMaxAPIKey) != ""

var models []*chatv2.SupportedModel
for _, config := range allModels {
Expand All @@ -233,6 +254,11 @@ func (s *ChatServerV2) ListSupportedModels(
slug = config.slugOpenAI
}

// For MiniMax models, use direct API slug when server has MiniMax API key
if hasMiniMaxAPIKey && isMiniMaxModelConfig(config) && strings.TrimSpace(config.slugOpenAI) != "" {
slug = config.slugOpenAI
}

model := &chatv2.SupportedModel{
Name: config.name,
Slug: slug,
Expand All @@ -256,6 +282,11 @@ func (s *ChatServerV2) ListSupportedModels(
}, nil
}

// isMiniMaxModelConfig reports whether a model config belongs to the
// MiniMax provider, based on a case-insensitive match against its
// OpenRouter slug (e.g. "minimax/MiniMax-M2.7").
func isMiniMaxModelConfig(config modelConfig) bool {
	slug := strings.ToLower(config.slugOpenRouter)
	return strings.Contains(slug, "minimax")
}

// stringPtr returns a pointer to the given string
func stringPtr(s string) *string {
return &s
Expand Down
130 changes: 130 additions & 0 deletions internal/api/chat/list_supported_models_v2_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
package chat

import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

// TestAllModels_ContainsMiniMax verifies that exactly two MiniMax models
// are registered with the expected slugs, context limits, and key
// requirements. Entries are looked up by OpenRouter slug so the test does
// not depend on the ordering of allModels.
func TestAllModels_ContainsMiniMax(t *testing.T) {
	bySlug := make(map[string]modelConfig)
	for _, m := range allModels {
		if isMiniMaxModelConfig(m) {
			bySlug[m.slugOpenRouter] = m
		}
	}

	require.Len(t, bySlug, 2, "expected 2 MiniMax models in the registry")

	// Verify MiniMax M2.7
	std, ok := bySlug["minimax/MiniMax-M2.7"]
	require.True(t, ok, "MiniMax M2.7 should be registered")
	assert.Equal(t, "MiniMax M2.7", std.name)
	assert.Equal(t, "MiniMax-M2.7", std.slugOpenAI)
	assert.Equal(t, int64(1000000), std.totalContext)
	assert.Equal(t, int64(128000), std.maxOutput)
	assert.False(t, std.requireOwnKey)

	// Verify MiniMax M2.7 Highspeed
	hs, ok := bySlug["minimax/MiniMax-M2.7-highspeed"]
	require.True(t, ok, "MiniMax M2.7 Highspeed should be registered")
	assert.Equal(t, "MiniMax M2.7 Highspeed", hs.name)
	assert.Equal(t, "MiniMax-M2.7-highspeed", hs.slugOpenAI)
	assert.Equal(t, int64(1000000), hs.totalContext)
	assert.Equal(t, int64(128000), hs.maxOutput)
	assert.False(t, hs.requireOwnKey)
}

func TestIsMiniMaxModelConfig(t *testing.T) {
tests := []struct {
name string
config modelConfig
expected bool
}{
{
name: "MiniMax model",
config: modelConfig{slugOpenRouter: "minimax/MiniMax-M2.7"},
expected: true,
},
{
name: "MiniMax highspeed model",
config: modelConfig{slugOpenRouter: "minimax/MiniMax-M2.7-highspeed"},
expected: true,
},
{
name: "OpenAI model",
config: modelConfig{slugOpenRouter: "openai/gpt-5.1"},
expected: false,
},
{
name: "Qwen model",
config: modelConfig{slugOpenRouter: "qwen/qwen-plus"},
expected: false,
},
{
name: "Gemini model",
config: modelConfig{slugOpenRouter: "google/gemini-2.5-flash"},
expected: false,
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.expected, isMiniMaxModelConfig(tt.config))
})
}
}

// TestAllModels_UniqueSlugOpenRouter ensures no two registry entries
// share an OpenRouter slug.
func TestAllModels_UniqueSlugOpenRouter(t *testing.T) {
	seen := make(map[string]struct{}, len(allModels))
	for _, m := range allModels {
		if _, dup := seen[m.slugOpenRouter]; dup {
			t.Errorf("duplicate OpenRouter slug: %s", m.slugOpenRouter)
		}
		seen[m.slugOpenRouter] = struct{}{}
	}
}

// TestAllModels_UniqueSlugOpenAI ensures non-empty OpenAI slugs are
// unique across the registry. Empty slugs are skipped because several
// models are OpenRouter-only.
func TestAllModels_UniqueSlugOpenAI(t *testing.T) {
	seen := make(map[string]struct{}, len(allModels))
	for _, m := range allModels {
		if m.slugOpenAI == "" {
			continue
		}
		if _, dup := seen[m.slugOpenAI]; dup {
			t.Errorf("duplicate OpenAI slug: %s", m.slugOpenAI)
		}
		seen[m.slugOpenAI] = struct{}{}
	}
}

// TestAllModels_ValidPricing sanity-checks pricing and context limits
// for every registered model: prices must be non-negative, limits must
// be strictly positive.
func TestAllModels_ValidPricing(t *testing.T) {
	for _, cfg := range allModels {
		t.Run(cfg.name, func(t *testing.T) {
			assert.GreaterOrEqual(t, cfg.inputPrice, int64(0), "input price should be non-negative")
			assert.GreaterOrEqual(t, cfg.outputPrice, int64(0), "output price should be non-negative")
			assert.Greater(t, cfg.totalContext, int64(0), "total context should be positive")
			assert.Greater(t, cfg.maxOutput, int64(0), "max output should be positive")
		})
	}
}

func TestAllModels_MiniMaxPricingReasonable(t *testing.T) {
for _, m := range allModels {
if !isMiniMaxModelConfig(m) {
continue
}
t.Run(m.name, func(t *testing.T) {
assert.Greater(t, m.inputPrice, int64(0), "MiniMax input price should be positive")
assert.Greater(t, m.outputPrice, int64(0), "MiniMax output price should be positive")
// Highspeed variant should have lower or equal pricing
if m.slugOpenAI == "MiniMax-M2.7-highspeed" {
for _, other := range allModels {
if other.slugOpenAI == "MiniMax-M2.7" {
assert.LessOrEqual(t, m.inputPrice, other.inputPrice, "highspeed input should be <= standard")
assert.LessOrEqual(t, m.outputPrice, other.outputPrice, "highspeed output should be <= standard")
}
}
}
})
}
}
13 changes: 13 additions & 0 deletions internal/libs/cfg/cfg.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ type Cfg struct {
InferenceAPIKey string
JwtSigningKey string

MiniMaxBaseURL string
MiniMaxAPIKey string

MongoURI string
XtraMCPURI string
MCPServerURL string
Expand All @@ -28,6 +31,8 @@ func GetCfg() *Cfg {
InferenceBaseURL: inferenceBaseURL(),
InferenceAPIKey: os.Getenv("INFERENCE_API_KEY"),
JwtSigningKey: os.Getenv("JWT_SIGNING_KEY"),
MiniMaxBaseURL: miniMaxBaseURL(),
MiniMaxAPIKey: os.Getenv("MINIMAX_API_KEY"),
MongoURI: mongoURI(),
XtraMCPURI: xtraMCPURI(),
MCPServerURL: mcpServerURL(),
Expand Down Expand Up @@ -69,6 +74,14 @@ func mongoURI() string {
return "mongodb://localhost:27017"
}

// miniMaxBaseURL returns the MiniMax API base URL, preferring the
// MINIMAX_BASE_URL environment variable and falling back to the public
// endpoint when it is unset or empty.
func miniMaxBaseURL() string {
	if v := os.Getenv("MINIMAX_BASE_URL"); v != "" {
		return v
	}
	return "https://api.minimax.io/v1"
}

func mcpServerURL() string {
val := os.Getenv("MCP_SERVER_URL")
if val != "" {
Expand Down
44 changes: 44 additions & 0 deletions internal/libs/cfg/minimax_cfg_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
package cfg

import (
"os"
"testing"

"github.com/stretchr/testify/assert"
)

func TestMiniMaxBaseURL_Default(t *testing.T) {
os.Unsetenv("MINIMAX_BASE_URL")
url := miniMaxBaseURL()
assert.Equal(t, "https://api.minimax.io/v1", url)
}

func TestMiniMaxBaseURL_Custom(t *testing.T) {
os.Setenv("MINIMAX_BASE_URL", "https://custom.minimax.io/v1")
defer os.Unsetenv("MINIMAX_BASE_URL")

url := miniMaxBaseURL()
assert.Equal(t, "https://custom.minimax.io/v1", url)
}

func TestCfg_MiniMaxFields(t *testing.T) {
os.Setenv("MINIMAX_API_KEY", "test-minimax-key")
os.Setenv("MINIMAX_BASE_URL", "https://test.minimax.io/v1")
defer func() {
os.Unsetenv("MINIMAX_API_KEY")
os.Unsetenv("MINIMAX_BASE_URL")
}()

c := GetCfg()
assert.Equal(t, "test-minimax-key", c.MiniMaxAPIKey)
assert.Equal(t, "https://test.minimax.io/v1", c.MiniMaxBaseURL)
}

func TestCfg_MiniMaxFieldsEmpty(t *testing.T) {
os.Unsetenv("MINIMAX_API_KEY")
os.Unsetenv("MINIMAX_BASE_URL")

c := GetCfg()
assert.Empty(t, c.MiniMaxAPIKey)
assert.Equal(t, "https://api.minimax.io/v1", c.MiniMaxBaseURL)
}
7 changes: 7 additions & 0 deletions internal/models/llm_provider.go
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
package models

import "strings"

// LLMProviderConfig holds the configuration for LLM API calls.
// If both Endpoint and APIKey are empty, the system default will be used.
type LLMProviderConfig struct {
Expand All @@ -12,3 +14,8 @@ type LLMProviderConfig struct {
// IsCustom reports whether the config carries a user-supplied API key,
// in which case a custom provider client is used instead of the system
// default. Safe to call on a nil receiver.
func (c *LLMProviderConfig) IsCustom() bool {
	if c == nil {
		return false
	}
	return c.APIKey != ""
}

// IsMiniMaxModel checks if the given model slug belongs to MiniMax.
// Matching is case-insensitive and accepts both bare slugs
// ("MiniMax-M2.7") and provider-prefixed slugs ("minimax/MiniMax-M2.7").
func IsMiniMaxModel(slug string) bool {
	lowered := strings.ToLower(slug)
	return strings.Contains(lowered, "minimax")
}
53 changes: 53 additions & 0 deletions internal/models/llm_provider_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package models

import (
"testing"

"github.com/stretchr/testify/assert"
)

// TestIsMiniMaxModel covers bare, provider-prefixed, upper-case, and
// non-MiniMax slugs, plus the empty string.
func TestIsMiniMaxModel(t *testing.T) {
	tests := []struct {
		slug     string
		expected bool
	}{
		{"MiniMax-M2.7", true},
		{"MiniMax-M2.7-highspeed", true},
		{"minimax/MiniMax-M2.7", true},
		{"minimax/MiniMax-M2.7-highspeed", true},
		{"MINIMAX-M2.7", true},
		{"gpt-5.1", false},
		{"openai/gpt-5.1", false},
		{"qwen/qwen-plus", false},
		{"google/gemini-2.5-flash", false},
		{"", false},
	}

	for _, tt := range tests {
		// Give the empty-slug case a readable subtest name instead of
		// the auto-generated numeric name t.Run produces for "".
		name := tt.slug
		if name == "" {
			name = "empty slug"
		}
		t.Run(name, func(t *testing.T) {
			result := IsMiniMaxModel(tt.slug)
			assert.Equal(t, tt.expected, result)
		})
	}
}

// TestLLMProviderConfig_IsCustom checks nil-safety and the
// API-key-driven custom/default decision: only a non-empty APIKey makes
// a config "custom"; an endpoint alone does not.
func TestLLMProviderConfig_IsCustom(t *testing.T) {
	cases := []struct {
		name string
		cfg  *LLMProviderConfig
		want bool
	}{
		{name: "nil config", cfg: nil, want: false},
		{name: "empty config", cfg: &LLMProviderConfig{}, want: false},
		{name: "with API key", cfg: &LLMProviderConfig{APIKey: "sk-test"}, want: true},
		{name: "with endpoint only", cfg: &LLMProviderConfig{Endpoint: "https://api.example.com"}, want: false},
		{name: "with both", cfg: &LLMProviderConfig{APIKey: "sk-test", Endpoint: "https://api.example.com"}, want: true},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.want, tc.cfg.IsCustom())
		})
	}
}
7 changes: 6 additions & 1 deletion internal/services/toolkit/client/client_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,17 @@ type AIClientV2 struct {
// If the config specifies a custom endpoint and API key, a new client is created for that endpoint.
// V2 uses the inference endpoint by default.
// When a user provides their own API key, use the /openai endpoint instead of /openrouter.
// MiniMax models are routed to the MiniMax API when MINIMAX_API_KEY is configured.
func (a *AIClientV2) GetOpenAIClient(llmConfig *models.LLMProviderConfig) *openai.Client {
var Endpoint string = llmConfig.Endpoint
var APIKey string = llmConfig.APIKey

if Endpoint == "" {
if APIKey != "" {
if models.IsMiniMaxModel(llmConfig.ModelName) && a.cfg.MiniMaxAPIKey != "" {
// Route MiniMax models to MiniMax API when server has MiniMax API key
Endpoint = a.cfg.MiniMaxBaseURL
APIKey = a.cfg.MiniMaxAPIKey
} else if APIKey != "" {
// User provided their own API key, use the OpenAI-compatible endpoint
Endpoint = a.cfg.OpenAIBaseURL // standard openai base url
} else {
Expand Down
2 changes: 2 additions & 0 deletions internal/services/toolkit/client/completion_v2.go
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ func (a *AIClientV2) ChatCompletionStreamV2(ctx context.Context, callbackStream
streamHandler.SendFinalization()
}()

// Set model name for provider routing (e.g., MiniMax direct API)
llmProvider.ModelName = modelSlug
oaiClient := a.GetOpenAIClient(llmProvider)
params := getDefaultParamsV2(modelSlug, a.toolCallHandler.Registry)

Expand Down
Loading
Loading