9 changes: 9 additions & 0 deletions packages/core/src/core/openaiContentGenerator/index.ts
@@ -13,6 +13,7 @@ import { OpenAIContentGenerator } from './openaiContentGenerator.js';
import {
  DashScopeOpenAICompatibleProvider,
  DeepSeekOpenAICompatibleProvider,
  ModelScopeOpenAICompatibleProvider,
  OpenRouterOpenAICompatibleProvider,
  type OpenAICompatibleProvider,
  DefaultOpenAICompatibleProvider,
@@ -78,6 +79,14 @@ export function determineProvider(
    );
  }

  // Check for ModelScope provider
  if (ModelScopeOpenAICompatibleProvider.isModelScopeProvider(config)) {
    return new ModelScopeOpenAICompatibleProvider(
      contentGeneratorConfig,
      cliConfig,
    );
  }

  // Default provider for standard OpenAI-compatible APIs
  return new DefaultOpenAICompatibleProvider(contentGeneratorConfig, cliConfig);
}
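Aside for reviewers: the routing added above is a pure substring match on the configured base URL, as the unit tests further down confirm. A minimal sketch of the check in isolation (the import paths are assumptions inferred from the relative paths visible elsewhere in this diff, and the URLs are illustrative):

// Sketch only - not part of the diff. The paths assume the provider barrel
// re-exported below sits in a directory next to this index.ts.
import { ModelScopeOpenAICompatibleProvider } from './provider/index.js';
import type { ContentGeneratorConfig } from '../contentGenerator.js';

// Any baseUrl containing the substring "modelscope" routes to the new provider...
ModelScopeOpenAICompatibleProvider.isModelScopeProvider({
  baseUrl: 'https://api.modelscope.cn/v1',
} as ContentGeneratorConfig); // true

// ...and everything else falls through to DefaultOpenAICompatibleProvider.
ModelScopeOpenAICompatibleProvider.isModelScopeProvider({
  baseUrl: 'https://api.openai.com/v1',
} as ContentGeneratorConfig); // false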
@@ -1,3 +1,4 @@
export { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
export { DashScopeOpenAICompatibleProvider } from './dashscope.js';
export { DeepSeekOpenAICompatibleProvider } from './deepseek.js';
export { OpenRouterOpenAICompatibleProvider } from './openrouter.js';
@@ -0,0 +1,96 @@
/**
 * @license
 * Copyright 2025 Qwen
 * SPDX-License-Identifier: Apache-2.0
 */

import { describe, it, expect, vi, beforeEach } from 'vitest';
import type OpenAI from 'openai';
import { ModelScopeOpenAICompatibleProvider } from './modelscope.js';
import type { Config } from '../../../config/config.js';
import type { ContentGeneratorConfig } from '../../contentGenerator.js';

vi.mock('openai');

describe('ModelScopeOpenAICompatibleProvider', () => {
  let provider: ModelScopeOpenAICompatibleProvider;
  let mockContentGeneratorConfig: ContentGeneratorConfig;
  let mockCliConfig: Config;

  beforeEach(() => {
    mockContentGeneratorConfig = {
      apiKey: 'test-api-key',
      baseUrl: 'https://api.modelscope.cn/v1',
      model: 'qwen-max',
    } as ContentGeneratorConfig;

    mockCliConfig = {
      getCliVersion: vi.fn().mockReturnValue('1.0.0'),
    } as unknown as Config;

    provider = new ModelScopeOpenAICompatibleProvider(
      mockContentGeneratorConfig,
      mockCliConfig,
    );
  });

  describe('isModelScopeProvider', () => {
    it('should return true if baseUrl includes "modelscope"', () => {
      const config = { baseUrl: 'https://api.modelscope.cn/v1' };
      expect(
        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
          config as ContentGeneratorConfig,
        ),
      ).toBe(true);
    });

    it('should return false if baseUrl does not include "modelscope"', () => {
      const config = { baseUrl: 'https://api.openai.com/v1' };
      expect(
        ModelScopeOpenAICompatibleProvider.isModelScopeProvider(
          config as ContentGeneratorConfig,
        ),
      ).toBe(false);
    });
  });

  describe('buildRequest', () => {
    it('should remove stream_options when stream is false', () => {
      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
        model: 'qwen-max',
        messages: [{ role: 'user', content: 'Hello!' }],
        stream: false,
        stream_options: { include_usage: true },
      };

      const result = provider.buildRequest(originalRequest, 'prompt-id');

      expect(result).not.toHaveProperty('stream_options');
    });

    it('should keep stream_options when stream is true', () => {
      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
        model: 'qwen-max',
        messages: [{ role: 'user', content: 'Hello!' }],
        stream: true,
        stream_options: { include_usage: true },
      };

      const result = provider.buildRequest(originalRequest, 'prompt-id');

      expect(result).toHaveProperty('stream_options');
    });

    it('should handle requests without stream_options', () => {
      const originalRequest: OpenAI.Chat.ChatCompletionCreateParams = {
        model: 'qwen-max',
        messages: [{ role: 'user', content: 'Hello!' }],
        stream: false,
      };

      const result = provider.buildRequest(originalRequest, 'prompt-id');

      expect(result).not.toHaveProperty('stream_options');
    });
  });
});
@@ -0,0 +1,32 @@
import type OpenAI from 'openai';
import { DefaultOpenAICompatibleProvider } from './default.js';
import type { ContentGeneratorConfig } from '../../contentGenerator.js';

/**
 * Provider for ModelScope API
 */
export class ModelScopeOpenAICompatibleProvider extends DefaultOpenAICompatibleProvider {
  /**
   * Checks if the configuration is for ModelScope.
   */
  static isModelScopeProvider(config: ContentGeneratorConfig): boolean {
    return !!config.baseUrl?.includes('modelscope');
  }

  /**
   * ModelScope does not support `stream_options` when `stream` is false.
   * This method removes `stream_options` if `stream` is not true.
   */
  override buildRequest(
    request: OpenAI.Chat.ChatCompletionCreateParams,
    userPromptId: string,
  ): OpenAI.Chat.ChatCompletionCreateParams {
    const newRequest = super.buildRequest(request, userPromptId);
    if (!newRequest.stream) {
      delete (newRequest as OpenAI.Chat.ChatCompletionCreateParamsNonStreaming)
        .stream_options;
    }

    return newRequest;
  }
}
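The net effect of the override, as a standalone sketch: it assumes a provider instance constructed exactly as in the test setup above and reuses the request shape from those tests, so nothing here is part of the diff itself.

// Sketch only - mirrors the buildRequest tests above.
const request: OpenAI.Chat.ChatCompletionCreateParams = {
  model: 'qwen-max',
  messages: [{ role: 'user', content: 'Hello!' }],
  stream: false,
  stream_options: { include_usage: true }, // ModelScope rejects this when stream is false
};

// super.buildRequest() runs first, then stream_options is deleted because
// the request is non-streaming; with stream: true it would be left intact.
const sanitized = provider.buildRequest(request, 'prompt-id');
console.log('stream_options' in sanitized); // false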
Empty file added settings.json
A collaborator commented:
@DS-Controller2 Thanks for the contribution! Please go ahead and remove the settings.json file, and we'll get it merged.
