Add support for ModelScope API

This commit is contained in:
yrk 2025-05-21 10:37:40 +08:00
parent 235362b044
commit 10f177adac
3 changed files with 58 additions and 0 deletions

View File

@ -74,6 +74,12 @@ def _generate_response(prompt: str) -> str:
base_url = config.app.get("deepseek_base_url")
if not base_url:
base_url = "https://api.deepseek.com"
elif llm_provider == "modelscope":
api_key = config.app.get("modelscope_api_key")
model_name = config.app.get("modelscope_model_name")
base_url = config.app.get("modelscope_base_url")
if not base_url:
base_url = "https://api-inference.modelscope.cn/v1/"
elif llm_provider == "ernie":
api_key = config.app.get("ernie_api_key")
secret_key = config.app.get("ernie_secret_key")
@ -264,6 +270,34 @@ def _generate_response(prompt: str) -> str:
api_version=api_version,
azure_endpoint=base_url,
)
if llm_provider == "modelscope":
content = ''
client = OpenAI(
api_key=api_key,
base_url=base_url,
)
response = client.chat.completions.create(
model=model_name,
messages=[{"role": "user", "content": prompt}],
extra_body={"enable_thinking": False},
stream=True
)
if response:
for chunk in response:
if not chunk.choices:
continue
delta = chunk.choices[0].delta
if delta and delta.content:
content += delta.content
if not content.strip():
raise ValueError("Empty content in stream response")
return content.replace("\n", "")
else:
raise Exception(f"[{llm_provider}] returned an empty response")
else:
client = OpenAI(
api_key=api_key,
@ -399,6 +433,8 @@ Please note that you must use English for generating video search terms; Chinese
for i in range(_max_retries):
try:
response = _generate_response(prompt)
print(response)
print('1111111111')
if "Error: " in response:
logger.error(f"failed to generate video script: {response}")
return response

View File

@ -99,6 +99,14 @@ deepseek_api_key = ""
deepseek_base_url = "https://api.deepseek.com"
deepseek_model_name = "deepseek-chat"
########## ModelScope API Key
# Visit https://modelscope.cn/docs/model-service/API-Inference/intro to obtain an API key.
# NOTE: you must bind your Alibaba Cloud account to ModelScope before the API can be used.
modelscope_api_key = ""
# Base URL of the ModelScope OpenAI-compatible inference endpoint (trailing slash required).
modelscope_base_url = "https://api-inference.modelscope.cn/v1/"
# Model identifier in "namespace/model" form; see https://modelscope.cn/models?filter=inference_type for the list.
modelscope_model_name = "Qwen/Qwen3-32B"
# Subtitle Provider, "edge" or "whisper"
# If empty, the subtitle will not be generated
subtitle_provider = "edge"

View File

@ -231,6 +231,7 @@ if not config.app.get("hide_config", False):
"Azure",
"Qwen",
"DeepSeek",
"ModelScope",
"Gemini",
"Ollama",
"G4f",
@ -373,6 +374,19 @@ if not config.app.get("hide_config", False):
- **Model Name**: 固定为 deepseek-chat
"""
if llm_provider == "modelscope":
if not llm_model_name:
llm_model_name = "Qwen/Qwen3-32B"
if not llm_base_url:
llm_base_url = "https://api-inference.modelscope.cn/v1/"
with llm_helper:
tips = """
##### ModelScope 配置说明
- **API Key**: [点击到官网申请](https://modelscope.cn/docs/model-service/API-Inference/intro)
- **Base Url**: 固定为 https://api-inference.modelscope.cn/v1/
- **Model Name**: 比如 Qwen/Qwen3-32B[点击查看模型列表](https://modelscope.cn/models?filter=inference_type&page=1)
"""
if llm_provider == "ernie":
with llm_helper:
tips = """