From 416a446b5e2ae43e9e3159de862b82d823803dcb Mon Sep 17 00:00:00 2001
From: harry
Date: Fri, 5 Apr 2024 09:55:31 +0800
Subject: [PATCH] Code optimization

---
 app/controllers/v1/llm.py   |  5 ++---
 app/controllers/v1/video.py | 16 ++++++++--------
 requirements.txt            |  3 ++-
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/app/controllers/v1/llm.py b/app/controllers/v1/llm.py
index e382637..e30df67 100644
--- a/app/controllers/v1/llm.py
+++ b/app/controllers/v1/llm.py
@@ -3,14 +3,13 @@ from app.controllers.v1.base import new_router
 from app.models.schema import VideoScriptResponse, VideoScriptRequest, VideoTermsResponse, VideoTermsRequest
 from app.services import llm
 from app.utils import utils
-from app.controllers import base
 
 # 认证依赖项
 # router = new_router(dependencies=[Depends(base.verify_token)])
 router = new_router()
 
 
-@router.post("/generate_video_script", response_model=VideoScriptResponse, summary="Generate a video script")
+@router.post("/scripts", response_model=VideoScriptResponse, summary="Create a script for the video")
 def generate_video_script(request: Request, body: VideoScriptRequest):
     video_script = llm.generate_script(video_subject=body.video_subject,
                                        language=body.video_language,
@@ -21,7 +20,7 @@ def generate_video_script(request: Request, body: VideoScriptRequest):
     return utils.get_response(200, response)
 
 
-@router.post("/generate_video_terms", response_model=VideoTermsResponse, summary="Generate video terms by video script")
+@router.post("/terms", response_model=VideoTermsResponse, summary="Generate video terms based on the video script")
 def generate_video_terms(request: Request, body: VideoTermsRequest):
     video_terms = llm.generate_terms(video_subject=body.video_subject,
                                      video_script=body.video_script,
diff --git a/app/controllers/v1/video.py b/app/controllers/v1/video.py
index 685fde6..ca4a3ca 100644
--- a/app/controllers/v1/video.py
+++ b/app/controllers/v1/video.py
@@ -8,8 +8,8 @@ from app.config import config
 from app.controllers import base
 from app.controllers.v1.base import new_router
 from app.models.exception import HttpException
-from app.models.schema import TaskVideoRequest, TaskQueryResponse, TaskResponse, TaskQueryRequest, BgmListResponse, \
-    BgmUploadResponse
+from app.models.schema import TaskVideoRequest, TaskQueryResponse, TaskResponse, TaskQueryRequest, \
+    BgmUploadResponse, BgmRetrieveResponse
 from app.services import task as tm
 from app.services import state as sm
 from app.utils import utils
@@ -69,7 +69,7 @@ def get_task(request: Request, task_id: str = Path(..., description="Task ID"),
         raise HttpException(task_id=task_id, status_code=404, message=f"{request_id}: task not found")
 
 
-@router.get("/get_bgm_list", response_model=BgmListResponse, summary="get local bgm file list")
+@router.get("/musics", response_model=BgmRetrieveResponse, summary="Retrieve local BGM files")
 def get_bgm_list(request: Request):
     suffix = "*.mp3"
     song_dir = utils.song_dir()
@@ -77,17 +77,17 @@ def get_bgm_list(request: Request):
     bgm_list = []
     for file in files:
         bgm_list.append({
-            "filename": os.path.basename(file),
+            "name": os.path.basename(file),
             "size": os.path.getsize(file),
-            "filepath": file,
+            "file": file,
         })
     response = {
-        "bgm_list": bgm_list
+        "files": bgm_list
     }
     return utils.get_response(200, response)
 
 
-@router.post("/upload_bgm_file", response_model=BgmUploadResponse, summary="upload bgm file to songs directory")
+@router.post("/musics", response_model=BgmUploadResponse, summary="Upload the BGM file to the songs directory")
 def upload_bgm_file(request: Request, file: UploadFile = File(...)):
     request_id = base.get_task_id(request)
     # check file ext
@@ -100,7 +100,7 @@ def upload_bgm_file(request: Request, file: UploadFile = File(...)):
         file.file.seek(0)
         buffer.write(file.file.read())
     response = {
-        "uploaded_path": save_path
+        "file": save_path
     }
     return utils.get_response(200, response)
 
diff --git a/requirements.txt b/requirements.txt
index d67aa75..3098c5c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -14,4 +14,5 @@ pillow~=9.5.0
 pydantic~=2.6.3
 g4f~=0.2.5.4
 dashscope~=1.15.0
-google.generativeai~=0.4.1
\ No newline at end of file
+google.generativeai~=0.4.1
+python-multipart~=0.0.9
\ No newline at end of file