Merge pull request #704 from yrk111222/main

Add support for ModelScope API
Harry 2025-12-14 11:55:51 +08:00 committed by GitHub
commit aa2f16eb3e
5 changed files with 59 additions and 2 deletions
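For orientation: the change wires ModelScope's OpenAI-compatible inference endpoint into the existing `llm_provider` switch. Below is a minimal standalone sketch of the request the new branch issues, using the default base URL and model name added in this diff (the `MODELSCOPE_API_KEY` environment variable is an assumption for illustration, not part of the change):

```python
# Minimal sketch (not part of the diff): call ModelScope's OpenAI-compatible
# endpoint directly, the same way the new "modelscope" branch does.
import os

from openai import OpenAI

client = OpenAI(
    api_key=os.environ["MODELSCOPE_API_KEY"],  # assumed env var for illustration
    base_url="https://api-inference.modelscope.cn/v1/",
)

response = client.chat.completions.create(
    model="Qwen/Qwen3-32B",
    messages=[{"role": "user", "content": "Say hello in one word."}],
    extra_body={"enable_thinking": False},  # skip Qwen3's "thinking" output
    stream=True,
)

print("".join(
    chunk.choices[0].delta.content
    for chunk in response
    if chunk.choices and chunk.choices[0].delta.content
))
```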

View File

@@ -65,7 +65,7 @@ Picwish focuses on the **image processing field**, providing a rich set of **ima
supports `subtitle outlining`
- [x] Supports **background music**, either random or specified music files, with adjustable `background music volume`
- [x] Video material sources are **high-definition** and **royalty-free**, and you can also use your own **local materials**
- [x] Supports integration with various models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**, **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE**, **Pollinations** and more
- [x] Supports integration with various models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**, **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE**, **Pollinations**, **ModelScope** and more
### Future Plans 📅

View File

@@ -58,7 +58,7 @@
- [x] Supports **subtitle generation**, with adjustable `font`, `position`, `color`, and `size`, plus `subtitle outline` settings
- [x] Supports **background music**, either random or specified music files, with adjustable `background music volume`
- [x] Video material sources are **high-definition** and **royalty-free**, and you can also use your own **local materials**
- [x] Supports integration with various models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**, **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE**, **Pollinations** and more
- [x] Supports integration with various models such as **OpenAI**, **Moonshot**, **Azure**, **gpt4free**, **one-api**, **Qwen**, **Google Gemini**, **Ollama**, **DeepSeek**, **ERNIE**, **Pollinations**, **ModelScope** and more
- Users in China are advised to use **DeepSeek** or **Moonshot** as the LLM provider: both are directly accessible from mainland China without a VPN, and the free credits granted on sign-up are generally sufficient

View File

@@ -74,6 +74,12 @@ def _generate_response(prompt: str) -> str:
        base_url = config.app.get("deepseek_base_url")
        if not base_url:
            base_url = "https://api.deepseek.com"
    elif llm_provider == "modelscope":
        api_key = config.app.get("modelscope_api_key")
        model_name = config.app.get("modelscope_model_name")
        base_url = config.app.get("modelscope_base_url")
        if not base_url:
            base_url = "https://api-inference.modelscope.cn/v1/"
    elif llm_provider == "ernie":
        api_key = config.app.get("ernie_api_key")
        secret_key = config.app.get("ernie_secret_key")
@@ -267,6 +273,34 @@ def _generate_response(prompt: str) -> str:
            api_version=api_version,
            azure_endpoint=base_url,
        )
    if llm_provider == "modelscope":
        content = ''
        client = OpenAI(
            api_key=api_key,
            base_url=base_url,
        )
        # ModelScope's API-Inference endpoint is OpenAI-compatible; request a
        # streamed completion with the "thinking" output (a Qwen3 feature)
        # turned off via extra_body.
        response = client.chat.completions.create(
            model=model_name,
            messages=[{"role": "user", "content": prompt}],
            extra_body={"enable_thinking": False},
            stream=True
        )
        if response:
            # Accumulate the streamed delta fragments into a single string.
            for chunk in response:
                if not chunk.choices:
                    continue
                delta = chunk.choices[0].delta
                if delta and delta.content:
                    content += delta.content
            if not content.strip():
                raise ValueError("Empty content in stream response")
            return content.replace("\n", "")
        else:
            raise Exception(f"[{llm_provider}] returned an empty response")
    else:
        client = OpenAI(
            api_key=api_key,
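The branch above assumes each streamed chunk exposes `choices[0].delta.content`. Here is a small self-contained sketch of that accumulation logic run against stubbed chunks (the `SimpleNamespace` stubs and the `collect_stream` helper are illustrative, not part of the PR):

```python
# Sketch of the accumulation logic used by the new streaming branch,
# exercised against stubbed chunk objects (stub shapes are assumptions
# standing in for the openai response types).
from types import SimpleNamespace


def collect_stream(chunks) -> str:
    content = ""
    for chunk in chunks:
        if not chunk.choices:
            continue
        delta = chunk.choices[0].delta
        if delta and delta.content:
            content += delta.content
    if not content.strip():
        raise ValueError("Empty content in stream response")
    return content.replace("\n", "")


fake_stream = [
    SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content="Hello, "))]),
    SimpleNamespace(choices=[]),  # chunks without choices are skipped
    SimpleNamespace(choices=[SimpleNamespace(delta=SimpleNamespace(content="world\n"))]),
]
assert collect_stream(fake_stream) == "Hello, world"
```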

View File

@@ -30,6 +30,7 @@ pixabay_api_keys = []
# oneapi
# cloudflare
# ernie (文心一言)
# modelscope (魔搭社区)
llm_provider = "openai"
########## Pollinations AI Settings
@@ -99,6 +100,14 @@ deepseek_api_key = ""
deepseek_base_url = "https://api.deepseek.com"
deepseek_model_name = "deepseek-chat"
########## ModelScope API Key
# Visit https://modelscope.cn/docs/model-service/API-Inference/intro to get your API key
# Note that you must bind your Alibaba Cloud account before you can use the API.
modelscope_api_key = ""
modelscope_base_url = "https://api-inference.modelscope.cn/v1/"
modelscope_model_name = "Qwen/Qwen3-32B"
# Subtitle Provider, "edge" or "whisper"
# If empty, the subtitle will not be generated
subtitle_provider = "edge"
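The ModelScope keys above are consumed in `app/services/llm.py` via `config.app.get(...)` (see the hunk earlier in this diff), with the base URL falling back to the default when left empty. A standalone sketch of that lookup, assuming the keys sit under an `[app]` table in `config.toml` (the table name is an assumption here; `tomllib` needs Python 3.11+):

```python
# Standalone sketch of how the new keys are read; the "[app]" table name is an
# assumption based on the existing keys shown above.
import tomllib

with open("config.toml", "rb") as f:
    app_cfg = tomllib.load(f).get("app", {})

api_key = app_cfg.get("modelscope_api_key", "")
base_url = app_cfg.get("modelscope_base_url") or "https://api-inference.modelscope.cn/v1/"
model_name = app_cfg.get("modelscope_model_name") or "Qwen/Qwen3-32B"
print(base_url, model_name)
```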

View File

@@ -232,6 +232,7 @@ if not config.app.get("hide_config", False):
            "Azure",
            "Qwen",
            "DeepSeek",
            "ModelScope",
            "Gemini",
            "Ollama",
            "G4f",
@@ -374,6 +375,19 @@ if not config.app.get("hide_config", False):
                - **Model Name**: fixed to deepseek-chat
                """
        if llm_provider == "modelscope":
            if not llm_model_name:
                llm_model_name = "Qwen/Qwen3-32B"
            if not llm_base_url:
                llm_base_url = "https://api-inference.modelscope.cn/v1/"
            with llm_helper:
                tips = """
                ##### ModelScope configuration notes
                - **API Key**: [apply for one on the official site](https://modelscope.cn/docs/model-service/API-Inference/intro)
                - **Base Url**: fixed to https://api-inference.modelscope.cn/v1/
                - **Model Name**: e.g. Qwen/Qwen3-32B; [view the model list](https://modelscope.cn/models?filter=inference_type&page=1)
                """
        if llm_provider == "ernie":
            with llm_helper:
                tips = """