zhaoqingang
2025-03-12 3b1bf7da6771bd1d6852d3dcc1f906d5ae5c95d1
app/service/v2/chat.py
@@ -1,6 +1,8 @@
import asyncio
import io
import json
import time
import uuid
import fitz
from fastapi import HTTPException
@@ -10,7 +12,8 @@
    DF_CHAT_WORKFLOW, DF_UPLOAD_FILE, RG_ORIGINAL_URL
from app.config.config import settings
from app.config.const import *
from app.models import DialogModel, ApiTokenModel, UserTokenModel
from app.models import DialogModel, ApiTokenModel, UserTokenModel, ComplexChatSessionDao, ChatDataRequest, \
    ComplexChatDao
from app.models.v2.session_model import ChatSessionDao, ChatData
from app.service.v2.app_driver.chat_agent import ChatAgent
from app.service.v2.app_driver.chat_data import ChatBaseApply
@@ -47,12 +50,12 @@
        logger.error(e)
    return None
async def get_app_token(db, app_id):
    """Look up the stored access token for the application *app_id*.

    Returns the token string, or "" when no matching record exists.
    """
    record = db.query(UserTokenModel).filter_by(id=app_id).first()
    return record.access_token if record else ""
async def get_chat_token(db, app_id):
@@ -69,7 +72,6 @@
        db.commit()
    except Exception as e:
        logger.error(e)
async def get_chat_info(db, chat_id: str):
@@ -90,7 +92,7 @@
    token = await get_chat_token(db, rg_api_token)
    url = settings.fwr_base_url + RG_CHAT_DIALOG.format(chat_id)
    chat = ChatDialog()
    session = await add_session_log(db, session_id, question, chat_id, user_id, mode, session_id, 1)
    session = await add_session_log(db, session_id, question, chat_id, user_id, mode, session_id, RG_TYPE)
    if session:
        conversation_id = session.conversation_id
    message = {"role": "assistant", "answer": "", "reference": {}}
@@ -134,6 +136,7 @@
        message["role"] = "assistant"
        await update_session_log(db, session_id, message, conversation_id)
async def data_process(data):
    if isinstance(data, str):
        return data.replace("dify", "smart")
@@ -170,8 +173,9 @@
    if hasattr(chat_data, "query"):
        query = chat_data.query
    else:
        query = "start new workflow"
    session = await add_session_log(db, session_id,query if query else "start new conversation", chat_id, user_id, mode, conversation_id, 3)
        query = "start new conversation"
    session = await add_session_log(db, session_id, query if query else "start new conversation", chat_id, user_id,
                                    mode, conversation_id, DF_TYPE)
    if session:
        conversation_id = session.conversation_id
    try:
@@ -215,7 +219,7 @@
                    [workflow_started, node_started, node_finished].index(ans.get("event"))]
            elif ans.get("event") == workflow_finished:
                data = ans.get("data", {})
                answer_workflow = data.get("outputs", {}).get("output")
                answer_workflow = data.get("outputs", {}).get("output", data.get("outputs", {}).get("answer"))
                download_url = data.get("outputs", {}).get("download_url")
                event = smart_workflow_finished
                if data.get("status") == "failed":
@@ -242,14 +246,22 @@
        except:
            ...
    finally:
        await update_session_log(db, session_id, {"role": "assistant", "answer": answer_event or answer_agent or answer_workflow or error,
                                                  "download_url":download_url,
        await update_session_log(db, session_id, {"role": "assistant",
                                                  "answer": answer_event or answer_agent or answer_workflow or error,
                                                  "download_url": download_url,
                                                  "node_list": node_list, "task_id": task_id, "id": message_id,
                                                  "error": error}, conversation_id)
async def service_chat_basic(db, chat_id: str, chat_data: ChatData, session_id: str, user_id, mode: str):
    # NOTE(review): stub — the `...` bodies below are placeholders; only the
    # complex-chat dispatch branch is sketched so far.
    ...
    # For the basic report dialog, resolve the complex-chat config that
    # matches the requested report mode before continuing.
    if chat_id == basic_report_talk:
        complex_chat = await ComplexChatDao(db).get_complex_chat_by_mode(chat_data.report_mode)
        if complex_chat:
            ...
async def service_chat_parameters(db, chat_id, user_id):
@@ -257,6 +269,7 @@
    if not chat_info:
        return {}
    return chat_info.parameters
async def service_chat_sessions(db, chat_id, name):
    token = await get_chat_token(db, rg_api_token)
@@ -276,14 +289,20 @@
        page=current,
        page_size=page_size
    )
    return json.dumps({"total":total, "rows": [session.to_dict() for session in session_list]})
    return json.dumps({"total": total, "rows": [session.to_dict() for session in session_list]})
async def service_chat_session_log(db, session_id):
    """Return the log of one chat session as a JSON string.

    For complex-chat sessions the per-message records are loaded from
    ComplexChatSessionDao and attached under "message", oldest first.

    Returns:
        JSON string of the session log, or {} when the session does not
        exist.  NOTE(review): the empty case returns a dict while the
        normal case returns a string — confirm callers handle both.
    """
    session_log = await ChatSessionDao(db).get_session_by_id(session_id)
    if not session_log:
        return {}
    log_info = session_log.log_to_json()
    if session_log.event_type == complex_chat:
        # DAO returns newest-first; reverse so the transcript reads in order.
        total, message_list = await ComplexChatSessionDao(db).get_session_list(session_id)
        log_info["message"] = [message.log_to_json() for message in message_list[::-1]]
    return json.dumps(log_info)
async def service_chat_upload(db, chat_id, file, user_id):
@@ -316,6 +335,7 @@
    tokens = tokenizer.encode(input_str)
    return len(tokens)
async def read_pdf(pdf_stream):
    text = ""
    with fitz.open(stream=pdf_stream, filetype="pdf") as pdf_document:
@@ -335,6 +355,7 @@
    return text
async def read_file(file, filename, content_type):
    text = ""
    if content_type == "application/pdf" or filename.endswith('.pdf'):
@@ -348,28 +369,40 @@
    return await get_str_token(text)
async def service_chunk_retrieval(query, top_k, similarity_threshold, api_key):
    print(query)
async def service_chunk_retrieval(query, knowledge_id, top_k, similarity_threshold, api_key):
    # print(query)
    try:
        request_data = json.loads(query)
        payload = {
            "question": request_data.get("query", ""),
            "dataset_ids": request_data.get("dataset_ids", []),
            "page_size": top_k,
            "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
        }
    except json.JSONDecodeError as e:
        fixed_json = query.replace("'", '"')
        print("Fixed JSON:", fixed_json)
        request_data = json.loads(fixed_json)
    payload = {
        "question": request_data.get("query", ""),
        "dataset_ids": request_data.get("dataset_ids", []),
        "page_size": top_k,
        "similarity_threshold": similarity_threshold
    }
        try:
            request_data = json.loads(fixed_json)
            payload = {
                "question": request_data.get("query", ""),
                "dataset_ids": request_data.get("dataset_ids", []),
                "page_size": top_k,
                "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
            }
        except Exception:
            payload = {
                "question": query,
                "dataset_ids": [knowledge_id],
                "page_size": top_k,
                "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
            }
    # print(payload)
    url = settings.fwr_base_url + RG_ORIGINAL_URL
    # url = "http://192.168.20.116:11080/" + RG_ORIGINAL_URL
    chat = ChatBaseApply()
    response = await  chat.chat_post(url, payload, await chat.get_headers(api_key))
    if not response:
        raise HTTPException(status_code=500, detail="服务异常!")
    print(response)
    records = [
        {
            "content": chunk["content"],
@@ -377,11 +410,15 @@
            "title": chunk.get("document_keyword", "Unknown Document"),
            "metadata": {"document_id": chunk["document_id"],
                         "path": f"{settings.fwr_base_url}/document/{chunk['document_id']}?ext={chunk.get('document_keyword').split('.')[-1]}&prefix=document",
                         'highlight': chunk.get("highlight") , "image_id":  chunk.get("image_id"), "positions": chunk.get("positions"),}
                         'highlight': chunk.get("highlight"), "image_id": chunk.get("image_id"),
                         "positions": chunk.get("positions"), }
        }
        for chunk in response.get("data", {}).get("chunks", [])
    ]
    # print(len(records))
    # print(records)
    return records
async def service_base_chunk_retrieval(query, knowledge_id, top_k, similarity_threshold, api_key):
    # request_data = json.loads(query)
@@ -409,15 +446,175 @@
    return records
async def add_complex_log(db, message_id, chat_id, session_id, chat_mode, query, user_id, mode, agent_type, message_type, conversation_id="", node_data=None, query_data=None):
    """Persist one complex-chat message record.

    When *conversation_id* is empty, it is recovered from an existing
    session for (session_id, chat_id) if one exists.

    Returns:
        (conversation_id, ok) — ok is False when persisting failed; the
        error is logged, never raised.
    """
    node_data = node_data if node_data else []
    query_data = query_data if query_data else {}
    try:
        dao = ComplexChatSessionDao(db)
        if not conversation_id:
            existing = await dao.get_session_by_session_id(session_id, chat_id)
            if existing:
                conversation_id = existing.conversation_id
        await dao.create_session(
            message_id,
            chat_id=chat_id,
            session_id=session_id,
            chat_mode=chat_mode,
            message_type=message_type,
            content=query,
            event_type=mode,
            tenant_id=user_id,
            conversation_id=conversation_id,
            node_data=json.dumps(node_data),
            query=json.dumps(query_data),
            agent_type=agent_type,
        )
        return conversation_id, True
    except Exception as exc:
        logger.error(exc)
        return conversation_id, False
async def service_complex_chat(db, chat_id, mode, user_id, chat_request: ChatDataRequest):
    """Stream a complex chat: proxy upstream SSE events to the client.

    Yields "data: {...}\n\n" SSE frames. Creates session/complex-log rows
    before streaming (except in content-optimization mode) and records the
    final assistant message in the complex log when the stream ends.
    """
    # Accumulators for the three possible answer channels plus metadata;
    # whichever is non-empty at the end becomes the logged answer.
    answer_event = ""
    answer_agent = ""
    answer_workflow = ""
    download_url = ""
    message_id = ""
    task_id = ""
    error = ""
    files = []
    node_list = []
    conversation_id = ""
    token = await get_chat_token(db, chat_id)
    chat, url = await get_chat_object(mode)
    # Content-optimization chats are stateless: no session rows are created.
    if chat_request.chatMode != complex_content_optimization_chat:
        await add_session_log(db, chat_request.sessionId, chat_request.query if chat_request.query else "未命名会话", chat_id, user_id,
                                mode, "", DF_TYPE)
        # message_type=1 marks the user's question row.
        conversation_id, message = await add_complex_log(db, str(uuid.uuid4()),chat_id, chat_request.sessionId, chat_request.chatMode, chat_request.query, user_id, mode, DF_TYPE, 1, query_data=chat_request.to_dict())
        if not message:
            # Could not persist the question — abort before contacting upstream.
            yield "data: " + json.dumps({"message": smart_message_error,
                                         "error": "\n**ERROR**: 创建会话失败!", "status": http_500},
                                        ensure_ascii=False) + "\n\n"
            return
    # Mode-specific workflow inputs for the upstream request.
    inputs = {"is_deep": chat_request.isDeep}
    if chat_request.chatMode == complex_knowledge_chat:
        inputs["query_json"] = json.dumps({"query": chat_request.query, "dataset_ids": chat_request.knowledgeId})
    elif chat_request.chatMode == complex_content_optimization_chat:
        inputs["type"] = chat_request.optimizeType
    try:
        async for ans in chat.chat_completions(url,
                                               await chat.complex_request_data(chat_request.query, conversation_id, str(user_id), files=chat_request.files, inputs=inputs),
                                               await chat.get_headers(token)):
            # print(ans)
            data = {}
            status = http_200
            conversation_id = ans.get("conversation_id")
            task_id = ans.get("task_id")
            # Map each upstream event type to the client-facing "smart" event.
            if ans.get("event") == message_error:
                error = ans.get("message", "参数异常!")
                status = http_400
                event = smart_message_error
            elif ans.get("event") == message_agent:
                # Agent-mode token chunk: accumulate and forward.
                data = {"answer": ans.get("answer", ""), "id": ans.get("message_id", "")}
                answer_agent += ans.get("answer", "")
                message_id = ans.get("message_id", "")
                event = smart_message_stream
            elif ans.get("event") == message_event:
                # Plain message token chunk: accumulate and forward.
                data = {"answer": ans.get("answer", ""), "id": ans.get("message_id", "")}
                answer_event += ans.get("answer", "")
                message_id = ans.get("message_id", "")
                event = smart_message_stream
            elif ans.get("event") == message_file:
                # File attachment produced mid-stream.
                data = {"url": ans.get("url", ""), "id": ans.get("id", ""),
                        "type": ans.get("type", "")}
                files.append(data)
                event = smart_message_file
            elif ans.get("event") in [workflow_started, node_started, node_finished]:
                # Workflow progress: sanitize payloads (data_process rewrites
                # vendor strings) and drop bulky process_data before forwarding.
                data = ans.get("data", {})
                data["inputs"] = await data_process(data.get("inputs", {}))
                data["outputs"] = await data_process(data.get("outputs", {}))
                data["files"] = await data_process(data.get("files", []))
                data["process_data"] = ""
                if data.get("status") == "failed":
                    status = http_500
                    error = data.get("error", "")
                node_list.append(ans)
                # Parallel lists: pick the smart event at the same index as
                # the upstream event.
                event = [smart_workflow_started, smart_node_started, smart_node_finished][
                    [workflow_started, node_started, node_finished].index(ans.get("event"))]
            elif ans.get("event") == workflow_finished:
                # Final workflow output may live under "output" or "answer".
                data = ans.get("data", {})
                answer_workflow = data.get("outputs", {}).get("output", data.get("outputs", {}).get("answer"))
                download_url = data.get("outputs", {}).get("download_url")
                event = smart_workflow_finished
                if data.get("status") == "failed":
                    status = http_500
                    error = data.get("error", "")
                node_list.append(ans)
            elif ans.get("event") == message_end:
                event = smart_message_end
            else:
                # Unknown event types are dropped, not forwarded.
                continue
            yield "data: " + json.dumps(
                {"event": event, "data": data, "error": error, "status": status, "task_id": task_id, "message_id":message_id,
                 "session_id": chat_request.sessionId},
                ensure_ascii=False) + "\n\n"
    except Exception as e:
        logger.error(e)
        try:
            # Best-effort error frame; the client may already be gone.
            yield "data: " + json.dumps({"message": smart_message_error,
                                         "error": "\n**ERROR**: " + str(e), "status": http_500},
                                        ensure_ascii=False) + "\n\n"
        except:
            ...
    finally:
        # await update_session_log(db, session_id, {"role": "assistant",
        #                                           "answer": answer_event or answer_agent or answer_workflow or error,
        #                                           "download_url": download_url,
        #                                           "node_list": node_list, "task_id": task_id, "id": message_id,
        #                                           "error": error}, conversation_id)
        # message_type=2 marks the assistant's answer row; skipped when no
        # message id was ever received (stream failed before first token).
        if message_id:
            await add_complex_log(db, message_id, chat_id, chat_request.sessionId, chat_request.chatMode, answer_event or answer_agent or answer_workflow or error, user_id, mode, DF_TYPE, 2, conversation_id, node_data=node_list, query_data=chat_request.to_dict())
async def service_complex_upload(db, chat_id, file, user_id):
    """Upload each file in *file* to the Dify upload endpoint for *user_id*.

    Failed uploads are logged and skipped; successful results are collected.

    Returns:
        JSON string of upload results, or "" when every upload failed.
        NOTE(review): the missing-token path returns [] (a list), not "" —
        confirm callers only test falsiness.
    """
    files = []
    token = await get_chat_token(db, chat_id)
    if not token:
        return files
    url = settings.dify_base_url + DF_UPLOAD_FILE
    chat = ChatBaseApply()
    for f in file:
        try:
            file_content = await f.read()
            file_upload = await chat.chat_upload(url, {"file": (f.filename, file_content)}, {"user": str(user_id)},
                                                 {'Authorization': f'Bearer {token}'})
            files.append(file_upload)
        except Exception as e:
            # Best effort: one bad file must not abort the batch.
            logger.error(e)
    return json.dumps(files) if files else ""
if __name__ == "__main__":
    # Ad-hoc manual smoke test for service_chunk_retrieval (hits a live
    # RAGFlow endpoint; not a unit test).
    q = json.dumps({"query": "设备", "dataset_ids": ["fc68db52f43111efb94a0242ac120004"]})
    knowledge_id = "fc68db52f43111efb94a0242ac120004"
    top_k = 2
    similarity_threshold = 0.5
    api_key = "ragflow-Y4MGYwY2JlZjM2YjExZWY4ZWU5MDI0Mm"

    async def a():
        # service_chunk_retrieval takes knowledge_id as its second argument;
        # the old 4-argument call raised TypeError.
        b = await service_chunk_retrieval(q, knowledge_id, top_k, similarity_threshold, api_key)
        print(b)

    # Run exactly once (the duplicated asyncio.run was diff residue and
    # issued the request twice).
    asyncio.run(a())