add hide_config

harry 2024-04-26 14:48:33 +08:00
parent 376955d4a0
commit c161ab3124
2 changed files with 149 additions and 143 deletions


@ -142,6 +142,11 @@ listen_port = 8502
# Maximum number of concurrent text-to-video tasks
max_concurrent_tasks = 5
# Whether the webui hides the basic config panel
hide_config = false
[whisper]
# Only effective when subtitle_provider is "whisper"
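
The new `hide_config` option above is read by the webui to decide whether to render the Basic Settings panel (see the second file below). A minimal standalone Streamlit sketch of that gating pattern, using a plain dict and a stub `render_basic_settings` in place of the project's real config object and panel code (both illustrative):

```python
import streamlit as st

# Stand-in for the project's config object; the real app reads config.app.
app_config = {"hide_config": False}


def render_basic_settings():
    # Illustrative stub for the panel body (language, LLM provider, API keys, ...).
    st.write("basic settings widgets go here")


# Render the panel only when hide_config is false (the default).
if not app_config.get("hide_config", False):
    with st.expander("Basic Settings", expanded=False):
        render_basic_settings()
```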


@ -150,167 +150,168 @@ def tr(key):
st.write(tr("Get Help"))
if not config.app.get("hide_config", False):
    with st.expander(tr("Basic Settings"), expanded=False):
        config_panels = st.columns(3)
        left_config_panel = config_panels[0]
        middle_config_panel = config_panels[1]
        right_config_panel = config_panels[2]
        with left_config_panel:
            display_languages = []
            selected_index = 0
            for i, code in enumerate(locales.keys()):
                display_languages.append(f"{code} - {locales[code].get('Language')}")
                if code == st.session_state['ui_language']:
                    selected_index = i
            selected_language = st.selectbox(tr("Language"), options=display_languages,
                                             index=selected_index)
            if selected_language:
                code = selected_language.split(" - ")[0].strip()
                st.session_state['ui_language'] = code
                config.ui['language'] = code
        with middle_config_panel:
            # Supported providers:
            #   openai
            #   moonshot (月之暗面)
            #   oneapi
            #   g4f
            #   azure
            #   qwen (通义千问)
            #   gemini
            #   ollama
            llm_providers = ['OpenAI', 'Moonshot', 'Azure', 'Qwen', 'Gemini', 'Ollama', 'G4f', 'OneAPI', "Cloudflare"]
            saved_llm_provider = config.app.get("llm_provider", "OpenAI").lower()
            saved_llm_provider_index = 0
            for i, provider in enumerate(llm_providers):
                if provider.lower() == saved_llm_provider:
                    saved_llm_provider_index = i
                    break
            llm_provider = st.selectbox(tr("LLM Provider"), options=llm_providers, index=saved_llm_provider_index)
            llm_helper = st.container()
            llm_provider = llm_provider.lower()
            config.app["llm_provider"] = llm_provider
            llm_api_key = config.app.get(f"{llm_provider}_api_key", "")
            llm_base_url = config.app.get(f"{llm_provider}_base_url", "")
            llm_model_name = config.app.get(f"{llm_provider}_model_name", "")
            llm_account_id = config.app.get(f"{llm_provider}_account_id", "")
            tips = ""
            if llm_provider == 'ollama':
                if not llm_model_name:
                    llm_model_name = "qwen:7b"
                if not llm_base_url:
                    llm_base_url = "http://localhost:11434/v1"
                with llm_helper:
                    tips = """
                        ##### Ollama configuration notes
                        - **API Key**: any value works, e.g. 123
                        - **Base Url**: usually http://localhost:11434/v1
                          - if `MoneyPrinterTurbo` and `Ollama` are **not on the same machine**, use the IP address of the `Ollama` machine
                          - if `MoneyPrinterTurbo` is deployed with `Docker`, use `http://host.docker.internal:11434/v1`
                        - **Model Name**: check with `ollama list`, e.g. `qwen:7b`
                        """
            if llm_provider == 'openai':
                if not llm_model_name:
                    llm_model_name = "gpt-3.5-turbo"
                with llm_helper:
                    tips = """
                        ##### OpenAI configuration notes
                        > A VPN with global traffic mode enabled is required
                        - **API Key**: [apply on the official site](https://platform.openai.com/api-keys)
                        - **Base Url**: can be left empty
                        - **Model Name**: fill in a model you **have access to**, [see the model list](https://platform.openai.com/settings/organization/limits)
                        """
            if llm_provider == 'moonshot':
                if not llm_model_name:
                    llm_model_name = "moonshot-v1-8k"
                with llm_helper:
                    tips = """
                        ##### Moonshot configuration notes
                        - **API Key**: [apply on the official site](https://platform.moonshot.cn/console/api-keys)
                        - **Base Url**: fixed to https://api.moonshot.cn/v1
                        - **Model Name**: e.g. moonshot-v1-8k, [see the model list](https://platform.moonshot.cn/docs/intro#%E6%A8%A1%E5%9E%8B%E5%88%97%E8%A1%A8)
                        """
            if llm_provider == 'qwen':
                if not llm_model_name:
                    llm_model_name = "qwen-max"
                with llm_helper:
                    tips = """
                        ##### Qwen (通义千问) configuration notes
                        - **API Key**: [apply on the official site](https://dashscope.console.aliyun.com/apiKey)
                        - **Base Url**: leave empty
                        - **Model Name**: e.g. qwen-max, [see the model list](https://help.aliyun.com/zh/dashscope/developer-reference/model-introduction#3ef6d0bcf91wy)
                        """
            if llm_provider == 'g4f':
                if not llm_model_name:
                    llm_model_name = "gpt-3.5-turbo"
                with llm_helper:
                    tips = """
                        ##### gpt4free configuration notes
                        > [Open-source project on GitHub](https://github.com/xtekky/gpt4free), free access to GPT models, but **less stable**
                        - **API Key**: any value works, e.g. 123
                        - **Base Url**: leave empty
                        - **Model Name**: e.g. gpt-3.5-turbo, [see the model list](https://github.com/xtekky/gpt4free/blob/main/g4f/models.py#L308)
                        """
            if llm_provider == 'azure':
                with llm_helper:
                    tips = """
                        ##### Azure configuration notes
                        > [How to deploy a model](https://learn.microsoft.com/zh-cn/azure/ai-services/openai/how-to/create-resource)
                        - **API Key**: [create one in the Azure portal](https://portal.azure.com/#view/Microsoft_Azure_ProjectOxford/CognitiveServicesHub/~/OpenAI)
                        - **Base Url**: leave empty
                        - **Model Name**: fill in your actual deployment name
                        """
            if llm_provider == 'gemini':
                if not llm_model_name:
                    llm_model_name = "gemini-1.0-pro"
                with llm_helper:
                    tips = """
                        ##### Gemini configuration notes
                        > A VPN with global traffic mode enabled is required
                        - **API Key**: [apply on the official site](https://ai.google.dev/)
                        - **Base Url**: leave empty
                        - **Model Name**: e.g. gemini-1.0-pro
                        """
            if tips and config.ui['language'] == 'zh':
                st.info(tips)
            st_llm_api_key = st.text_input(tr("API Key"), value=llm_api_key, type="password")
            st_llm_base_url = st.text_input(tr("Base Url"), value=llm_base_url)
            st_llm_model_name = st.text_input(tr("Model Name"), value=llm_model_name)
            if st_llm_api_key:
                config.app[f"{llm_provider}_api_key"] = st_llm_api_key
            if st_llm_base_url:
                config.app[f"{llm_provider}_base_url"] = st_llm_base_url
            if st_llm_model_name:
                config.app[f"{llm_provider}_model_name"] = st_llm_model_name
            if llm_provider == 'cloudflare':
                st_llm_account_id = st.text_input(tr("Account ID"), value=llm_account_id)
                if st_llm_account_id:
                    config.app[f"{llm_provider}_account_id"] = st_llm_account_id
        with right_config_panel:
            pexels_api_keys = config.app.get("pexels_api_keys", [])
            if isinstance(pexels_api_keys, str):
                pexels_api_keys = [pexels_api_keys]
            pexels_api_key = ", ".join(pexels_api_keys)
            pexels_api_key = st.text_input(tr("Pexels API Key"), value=pexels_api_key, type="password")
            pexels_api_key = pexels_api_key.replace(" ", "")
            if pexels_api_key:
                config.app["pexels_api_keys"] = pexels_api_key.split(",")
panel = st.columns(3)
left_panel = panel[0]
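
The Pexels field above accepts several API keys as one comma-separated string, which the panel strips of spaces and stores back as a list. A minimal standalone sketch of that normalization, with `normalize_pexels_keys` as an illustrative helper name (not part of the project):

```python
def normalize_pexels_keys(raw: str) -> list[str]:
    # Mirror the panel's handling: drop spaces, then split on commas.
    return raw.replace(" ", "").split(",")


# Example: two keys entered in the text box as "key1, key2".
print(normalize_pexels_keys("key1, key2"))  # ['key1', 'key2']
```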