From 5a11a870e2abb5201f62c253ca811e52035864ed Mon Sep 17 00:00:00 2001
From: zhaoqingang <zhaoqg0118@163.com>
Date: Fri, 10 Jan 2025 18:25:13 +0800
Subject: [PATCH] Knowledge base selection returns only items created by the current user
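
/upload/{agent_id} now accepts a list of files and resolves the agent via
MenuCapacityModel by chat_id. RAGFLOW and BISHENG agents are limited to a
single file per request; the BASIC excel_talk agent forwards the whole list
to BasicService.excel_talk_upload. A new DIFY branch looks up the workflow
token through DfTokenDao (imageTalk, reportWorkflow, documentIa, paperTalk)
and uploads each file with DifyService. /download/ gains optional
file_id/file_type parameters and routes BASIC agents to the new
download_basic_file helper (image/excel/word streaming), and a new
GET /image/{imageId} endpoint streams locally stored PNG files.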
---
app/api/files.py | 199 ++++++++++++++++++++++++++++++++++++++++---------
1 files changed, 161 insertions(+), 38 deletions(-)
diff --git a/app/api/files.py b/app/api/files.py
index fe80f4a..380ea9c 100644
--- a/app/api/files.py
+++ b/app/api/files.py
@@ -1,18 +1,25 @@
-from typing import Optional
+import io
+from typing import Optional, List
import requests
from fastapi import Depends, APIRouter, HTTPException, UploadFile, File, Query, Form
from pydantic import BaseModel
from sqlalchemy.orm import Session
from starlette.responses import StreamingResponse
+from werkzeug.utils import send_file
from app.api import Response, get_current_user, ResponseList
from app.config.config import settings
+from app.config.const import DOCUMENT_TO_REPORT, IMAGE_TO_TEXT, DOCUMENT_TO_REPORT_TITLE, DOCUMENT_IA_QUESTIONS, \
+ DOCUMENT_TO_PAPER
+from app.models import MenuCapacityModel
from app.models.agent_model import AgentType, AgentModel
from app.models.base_model import get_db
from app.models.user_model import UserModel
from app.service.basic import BasicService
from app.service.bisheng import BishengService
+from app.service.v2.api_token import DfTokenDao
+from app.service.difyService import DifyService
from app.service.ragflow import RagflowService
from app.service.service_token import get_ragflow_token, get_bisheng_token
import urllib.parse
@@ -21,53 +28,117 @@
@router.post("/upload/{agent_id}", response_model=Response)
-async def upload_file(agent_id: str,
- file: UploadFile = File(...),
- chat_id: str = Query(None, description="The ID of the chat"),
- db: Session = Depends(get_db),
- current_user: UserModel = Depends(get_current_user)
- ):
- agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first()
+async def upload_files(
+ agent_id: str,
+ file: List[UploadFile] = File(...), # changed here to accept a list of files
+ chat_id: str = Query(None, description="The ID of the chat"),
+ db: Session = Depends(get_db),
+ current_user: UserModel = Depends(get_current_user)
+):
+ agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first()
if not agent:
- return Response(code=404, msg="Agent not found")
- # Read the uploaded file content
- try:
- file_content = await file.read()
- except Exception as e:
- return Response(code=400, msg=str(e))
+ return ResponseList(code=404, msg="Agent not found")
+ agent_type = int(agent.capacity_type)
- if agent.agent_type == AgentType.RAGFLOW:
- token = get_ragflow_token(db, current_user.id)
- ragflow_service = RagflowService(base_url=settings.fwr_base_url)
- # Check whether the session exists; if not, create it first
- history = await ragflow_service.get_session_history(token, chat_id)
- if len(history) == 0:
- message = {"role": "user", "message": file.filename}
- await ragflow_service.set_session(token, agent_id, message, chat_id, True)
+ # Check the agent type to determine whether multiple file uploads are allowed
+ if agent_type in [AgentType.RAGFLOW, AgentType.BISHENG]:
+ if len(file) > 1:
+ return Response(code=400, msg="This agent only supports uploading a single file")
- ragflow_service = RagflowService(base_url=settings.fwr_base_url)
- token = get_ragflow_token(db, current_user.id)
- doc_ids = await ragflow_service.upload_and_parse(token, chat_id, file.filename, file_content)
- return Response(code=200, msg="", data={"doc_ids": doc_ids, "file_name": file.filename})
-
- elif agent.agent_type == AgentType.BISHENG:
- bisheng_service = BishengService(base_url=settings.sgb_base_url)
+ if agent_type == AgentType.RAGFLOW or agent_type == AgentType.BISHENG:
+ file = file[0]
+ # Read the uploaded file content
try:
- token = get_bisheng_token(db, current_user.id)
- result = await bisheng_service.upload(token, file.filename, file_content)
+ file_content = await file.read()
except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
- result["file_name"] = file.filename
- return Response(code=200, msg="", data=result)
- elif agent.agent_type == AgentType.BASIC:
+ return Response(code=400, msg=str(e))
+
+ if agent_type == AgentType.RAGFLOW:
+ token = await get_ragflow_token(db, current_user.id)
+ ragflow_service = RagflowService(base_url=settings.fwr_base_url)
+ # Check whether the session exists; if not, create it first
+ history = await ragflow_service.get_session_history(token, chat_id)
+ if len(history) == 0:
+ message = {"role": "user", "message": file.filename}
+ await ragflow_service.set_session(token, agent_id, message, chat_id, True)
+
+ doc_ids = await ragflow_service.upload_and_parse(token, chat_id, file.filename, file_content)
+ # For multiple files, all doc_ids may need to be collected
+ return Response(code=200, msg="", data={"doc_ids": doc_ids, "file_name": file.filename})
+
+ elif agent_type == AgentType.BISHENG:
+ bisheng_service = BishengService(base_url=settings.sgb_base_url)
+ try:
+ token = await get_bisheng_token(db, current_user.id)
+ result = await bisheng_service.upload(token, file.filename, file_content)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+ result["file_name"] = file.filename
+ return Response(code=200, msg="", data=result)
+
+ elif agent_type == AgentType.BASIC:
if agent_id == "basic_excel_talk":
+ # Handle the single-file case
+ file_list = file
+ if len(file) == 1: # and agent.agent_type != AgentType.BASIC
+ file_list = [file[0]] # if there is only one file, make sure it is still a list
service = BasicService(base_url=settings.basic_base_url)
- result = await service.excel_talk_upload(chat_id, file.filename, file_content)
+ # Iterate over file_list and collect the entries into the files list
+ files = []
+ for item in file_list:
+ file_content = await item.read()
+ files.append(('files', (item.filename, file_content, 'application/octet-stream')))
+
+ result = await service.excel_talk_upload(chat_id, files)
+ if not result:
+ return Response(code=400, msg="File upload failed")
return Response(code=200, msg="", data=result)
- else:
- return Response(code=200, msg="Unsupported agent type")
+ elif agent_id == "basic_paper_agent":
+ ...
+ # service = BasicService(base_url=settings.basic_paper_url)
+ # result = await service.paper_file_upload(chat_id, file.filename, file_content)
+
+ elif agent_type == AgentType.DIFY:
+ dify_service = DifyService(base_url=settings.dify_base_url)
+ if agent.chat_type == "imageTalk":
+ token = DfTokenDao(db).get_token_by_id(IMAGE_TO_TEXT)
+ if not token:
+ raise HTTPException(status_code=500, detail="Failed to get token (image_and_text_conversion)")
+ file = file[0]
+ # Read the uploaded file content
+ try:
+ file_content = await file.read()
+ except Exception as e:
+ return Response(code=400, msg=str(e))
+ try:
+ data = await dify_service.upload(token, file.filename, file_content, current_user.id)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+ elif agent.chat_type == "reportWorkflow" or agent.chat_type == "documentIa" or agent.chat_type == "paperTalk":
+ token_dict = {
+ "reportWorkflow": DOCUMENT_TO_REPORT,
+ "documentIa": DOCUMENT_IA_QUESTIONS,
+ "paperTalk": DOCUMENT_TO_PAPER,
+ }
+ token = DfTokenDao(db).get_token_by_id(token_dict[agent.chat_type])
+ if not token:
+ raise HTTPException(status_code=500, detail="Failed to get token (document_to_report)")
+ result = []
+ for f in file:
+ try:
+ file_content = await f.read()
+ except Exception as e:
+ return Response(code=400, msg=str(e))
+ try:
+ file_upload = await dify_service.upload(token, f.filename, file_content, current_user.id)
+ result.append(file_upload)
+ except Exception as e:
+ raise HTTPException(status_code=500, detail=str(e))
+ # result["file_name"] = file.filename
+ data = {"files": result}
+ return Response(code=200, msg="", data=data)
@router.get("/download/", response_model=Response)
@@ -76,6 +147,8 @@
agent_id: str = Query(..., description="Agent ID"),
doc_id: Optional[str] = Query(None, description="Optional doc id for ragflow agents"),
doc_name: Optional[str] = Query(None, description="Optional doc name for ragflow agents"),
+ file_id: Optional[str] = Query(None, description="Optional file id for basic agents"),
+ file_type: Optional[str] = Query(None, description="Optional file type for basic agents"),
db: Session = Depends(get_db)
):
agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first()
@@ -93,6 +166,12 @@
return Response(code=400, msg="doc_id is required")
url = f"{settings.fwr_base_url}/v1/document/get/{doc_id}"
filename = doc_name
+ elif agent.agent_type == AgentType.BASIC:
+ if agent_id == "basic_excel_talk":
+ return await download_basic_file(file_id, file_type)
+
+ elif agent_id == "basic_question_talk":
+ return await download_basic_file(file_id, file_type)
else:
return Response(code=400, msg="Unsupported agent type")
@@ -109,3 +188,47 @@
)
except Exception as e:
raise HTTPException(status_code=400, detail=f"Error downloading file: {e}")
+
+
+async def download_basic_file(file_id: str, file_type: str):
+ service = BasicService(base_url=settings.basic_base_url)
+ if not file_type or not file_id:
+ return Response(code=400, msg="file_type and file_id is required")
+ if file_type == "image":
+ content, filename, mimetype = await service.excel_talk_image_download(file_id)
+ return StreamingResponse(
+ io.BytesIO(content),
+ media_type=mimetype,
+ headers={"Content-Disposition": f"attachment; filename={filename}"}
+ )
+ elif file_type == "excel":
+ content, filename, mimetype = await service.excel_talk_excel_download(file_id)
+ return StreamingResponse(
+ io.BytesIO(content),
+ media_type=mimetype,
+ headers={"Content-Disposition": f"attachment; filename={filename}"}
+ )
+ elif file_type == "word":
+ content, filename, mimetype = await service.questions_talk_word_download(file_id)
+ return StreamingResponse(
+ io.BytesIO(content),
+ media_type=mimetype,
+ headers={"Content-Disposition": f"attachment; filename={filename}"}
+ )
+ else:
+ return Response(code=400, msg="Unsupported file type")
+
+
+@router.get("/image/{imageId}", response_model=Response)
+async def download_image_file(imageId: str, db=Depends(get_db)):
+ file_path = f"app/images/{imageId}.png"
+
+ def generate():
+ with open(file_path, "rb") as file:
+ while True:
+ data = file.read(1048576) # read 1 MB at a time
+ if not data:
+ break
+ yield data
+
+ return StreamingResponse(generate(), media_type="application/octet-stream")
\ No newline at end of file
--
Gitblit v1.8.0