From af86455055918d26a0f6eebc270074c4863db0be Mon Sep 17 00:00:00 2001
From: zhaoqingang <zhaoqg0118@163.com>
Date: Fri, 07 Mar 2025 09:57:19 +0800
Subject: [PATCH] Add deep-thinking logic

---
 app/service/v2/chat.py |  279 +++++++++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 266 insertions(+), 13 deletions(-)

diff --git a/app/service/v2/chat.py b/app/service/v2/chat.py
index a24f88d..83ea02a 100644
--- a/app/service/v2/chat.py
+++ b/app/service/v2/chat.py
@@ -1,14 +1,17 @@
+import asyncio
 import io
 import json
+import uuid
 
 import fitz
+from fastapi import HTTPException
 
 from Log import logger
 from app.config.agent_base_url import RG_CHAT_DIALOG, DF_CHAT_AGENT, DF_CHAT_PARAMETERS, RG_CHAT_SESSIONS, \
-    DF_CHAT_WORKFLOW, DF_UPLOAD_FILE
+    DF_CHAT_WORKFLOW, DF_UPLOAD_FILE, RG_ORIGINAL_URL
 from app.config.config import settings
 from app.config.const import *
-from app.models import DialogModel, ApiTokenModel, UserTokenModel
+from app.models import DialogModel, ApiTokenModel, UserTokenModel, ComplexChatSessionDao, ChatDataRequest
 from app.models.v2.session_model import ChatSessionDao, ChatData
 from app.service.v2.app_driver.chat_agent import ChatAgent
 from app.service.v2.app_driver.chat_data import ChatBaseApply
@@ -45,12 +48,12 @@
         logger.error(e)
     return None
 
+
 async def get_app_token(db, app_id):
     app_token = db.query(UserTokenModel).filter_by(id=app_id).first()
     if app_token:
         return app_token.access_token
     return ""
-
 
 
 async def get_chat_token(db, app_id):
@@ -67,7 +70,6 @@
         db.commit()
     except Exception as e:
         logger.error(e)
-
 
 
 async def get_chat_info(db, chat_id: str):
@@ -132,6 +134,7 @@
         message["role"] = "assistant"
         await update_session_log(db, session_id, message, conversation_id)
 
+
 async def data_process(data):
     if isinstance(data, str):
         return data.replace("dify", "smart")
@@ -168,8 +171,9 @@
     if hasattr(chat_data, "query"):
         query = chat_data.query
     else:
-        query = "start new workflow"
-    session = await add_session_log(db, session_id,query if query else "start new conversation", chat_id, user_id, mode, conversation_id, 3)
+        query = "start new conversation"
+    session = await add_session_log(db, session_id, query if query else "start new conversation", chat_id, user_id,
+                                    mode, conversation_id, 3)
     if session:
         conversation_id = session.conversation_id
     try:
@@ -213,7 +217,7 @@
                     [workflow_started, node_started, node_finished].index(ans.get("event"))]
             elif ans.get("event") == workflow_finished:
                 data = ans.get("data", {})
-                answer_workflow = data.get("outputs", {}).get("output")
+                answer_workflow = data.get("outputs", {}).get("output", data.get("outputs", {}).get("answer"))
                 download_url = data.get("outputs", {}).get("download_url")
                 event = smart_workflow_finished
                 if data.get("status") == "failed":
@@ -240,8 +244,9 @@
         except:
             ...
     finally:
-        await update_session_log(db, session_id, {"role": "assistant", "answer": answer_event or answer_agent or answer_workflow or error,
-                                                  "download_url":download_url,
+        await update_session_log(db, session_id, {"role": "assistant",
+                                                  "answer": answer_event or answer_agent or answer_workflow or error,
+                                                  "download_url": download_url,
                                                   "node_list": node_list, "task_id": task_id, "id": message_id,
                                                   "error": error}, conversation_id)
 
@@ -256,8 +261,10 @@
         return {}
     return chat_info.parameters
 
+
 async def service_chat_sessions(db, chat_id, name):
     token = await get_chat_token(db, rg_api_token)
     if not token:
         return {}
     url = settings.fwr_base_url + RG_CHAT_SESSIONS.format(chat_id)
@@ -273,14 +280,12 @@
         page=current,
         page_size=page_size
     )
-    return json.dumps({"total":total, "rows": [session.to_dict() for session in session_list]})
-
+    return json.dumps({"total": total, "rows": [session.to_dict() for session in session_list]})
 
 
 async def service_chat_session_log(db, session_id):
     session_log = await ChatSessionDao(db).get_session_by_id(session_id)
-    return json.dumps(session_log.log_to_json())
-
+    return json.dumps(session_log.log_to_json() if session_log else {})
 
 
 async def service_chat_upload(db, chat_id, file, user_id):
@@ -313,6 +318,7 @@
     tokens = tokenizer.encode(input_str)
     return len(tokens)
 
+
 async def read_pdf(pdf_stream):
     text = ""
     with fitz.open(stream=pdf_stream, filetype="pdf") as pdf_document:
@@ -332,6 +338,7 @@
 
     return text
 
+
 async def read_file(file, filename, content_type):
     text = ""
     if content_type == "application/pdf" or filename.endswith('.pdf'):
@@ -343,3 +350,249 @@
         text = await read_word(file)
 
     return await get_str_token(text)
+
+
+async def service_chunk_retrieval(query, knowledge_id, top_k, similarity_threshold, api_key):
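+    """Query the RAGFlow retrieval endpoint and normalise the returned chunks.
+
+    ``query`` may itself be a JSON string carrying ``query`` and ``dataset_ids``;
+    anything unparseable falls back to a plain-text search of ``knowledge_id``.
+    """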
+    try:
+        request_data = json.loads(query)
+        payload = {
+            "question": request_data.get("query", ""),
+            "dataset_ids": request_data.get("dataset_ids", []),
+            "page_size": top_k,
+            "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
+        }
+    except json.JSONDecodeError:
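+        # The upstream model sometimes emits single-quoted pseudo-JSON; swap the
+        # quotes and retry before treating the whole string as a plain query.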
+        fixed_json = query.replace("'", '"')
+        try:
+            request_data = json.loads(fixed_json)
+            payload = {
+                "question": request_data.get("query", ""),
+                "dataset_ids": request_data.get("dataset_ids", []),
+                "page_size": top_k,
+                "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
+            }
+        except Exception:
+            payload = {
+                "question": query,
+                "dataset_ids": [knowledge_id],
+                "page_size": top_k,
+                "similarity_threshold": similarity_threshold if similarity_threshold else 0.2
+            }
+    url = settings.fwr_base_url + RG_ORIGINAL_URL
+    chat = ChatBaseApply()
+    response = await chat.chat_post(url, payload, await chat.get_headers(api_key))
+    if not response:
+        raise HTTPException(status_code=500, detail="鏈嶅姟寮傚父锛�")
+    records = [
+        {
+            "content": chunk["content"],
+            "score": chunk["similarity"],
+            "title": chunk.get("document_keyword", "Unknown Document"),
+            "metadata": {"document_id": chunk["document_id"],
+                         "path": f"{settings.fwr_base_url}/document/{chunk['document_id']}?ext={chunk.get('document_keyword').split('.')[-1]}&prefix=document",
+                         'highlight': chunk.get("highlight"), "image_id": chunk.get("image_id"),
+                         "positions": chunk.get("positions"), }
+        }
+        for chunk in response.get("data", {}).get("chunks", [])
+    ]
+    return records
+
+
+async def service_base_chunk_retrieval(query, knowledge_id, top_k, similarity_threshold, api_key):
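+    """Plain retrieval: search ``knowledge_id`` with the raw query string."""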
+    payload = {
+        "question": query,
+        "dataset_ids": [knowledge_id],
+        "page_size": top_k,
+        "similarity_threshold": similarity_threshold
+    }
+    url = settings.fwr_base_url + RG_ORIGINAL_URL
+    # url = "http://192.168.20.116:11080/" + RG_ORIGINAL_URL
+    chat = ChatBaseApply()
+    response = await chat.chat_post(url, payload, await chat.get_headers(api_key))
+    if not response:
+        raise HTTPException(status_code=500, detail="鏈嶅姟寮傚父锛�")
+    records = [
+        {
+            "content": chunk["content"],
+            "score": chunk["similarity"],
+            "title": chunk.get("document_keyword", "Unknown Document"),
+            "metadata": {"document_id": chunk["document_id"]}
+        }
+        for chunk in response.get("data", {}).get("chunks", [])
+    ]
+    return records
+
+
+async def add_complex_log(db, message_id, chat_id, session_id, chat_mode, query, user_id, mode, agent_type,
+                          message_type, conversation_id="", node_data=None, query_data=None):
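+    """Write one complex-chat message to the log; returns (conversation_id, success)."""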
+    if not node_data:
+        node_data = []
+    if not query_data:
+        query_data = {}
+    try:
+        complex_log = ComplexChatSessionDao(db)
+        if not conversation_id:
+            session = await complex_log.get_session_by_session_id(session_id, chat_id)
+            if session:
+                conversation_id = session.conversation_id
+        await complex_log.create_session(message_id,
+                                         chat_id=chat_id,
+                                         session_id=session_id,
+                                         chat_mode=chat_mode,
+                                         message_type=message_type,
+                                         content=query,
+                                         event_type=mode,
+                                         tenant_id=user_id,
+                                         conversation_id=conversation_id,
+                                         node_data=json.dumps(node_data),
+                                         query=json.dumps(query_data),
+                                         agent_type=agent_type)
+        return conversation_id, True
+
+    except Exception as e:
+        logger.error(e)
+        return conversation_id, False
+
+
+async def service_complex_chat(db, chat_id, mode, user_id, chat_request: ChatDataRequest):
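+    """Run one round of complex (deep-thinking) chat and yield SSE frames."""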
+    answer_event = ""
+    answer_agent = ""
+    answer_workflow = ""
+    download_url = ""
+    message_id = ""
+    task_id = ""
+    error = ""
+    files = []
+    node_list = []
+    token = await get_chat_token(db, chat_id)
+    chat, url = await get_chat_object(mode)
+    conversation_id, message = await add_complex_log(db, str(uuid.uuid4()), chat_id, chat_request.sessionId,
+                                                     chat_request.chatMode, chat_request.query, user_id, mode,
+                                                     DF_TYPE, 1, query_data=chat_request.to_dict())
+    if not message:
+        yield "data: " + json.dumps({"message": smart_message_error,
+                                     "error": "\n**ERROR**: 鍒涘缓浼氳瘽澶辫触锛�", "status": http_500},
+                                    ensure_ascii=False) + "\n\n"
+        return
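+    # is_deep is forwarded untouched to the workflow inputs; it presumably
+    # selects the deep-thinking branch this patch introduces.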
+    inputs = {"is_deep": chat_request.isDeep}
+    if chat_request.chatMode == complex_knowledge_chat:
+        inputs["query_json"] = json.dumps({"query": chat_request.query, "dataset_ids": chat_request.knowledgeId})
+
+    try:
+        request_body = await chat.complex_request_data(chat_request.query, conversation_id, str(user_id),
+                                                       files=chat_request.files, inputs=inputs)
+        async for ans in chat.chat_completions(url, request_body, await chat.get_headers(token)):
+            data = {}
+            status = http_200
+            conversation_id = ans.get("conversation_id")
+            task_id = ans.get("task_id")
+            if ans.get("event") == message_error:
+                error = ans.get("message", "鍙傛暟寮傚父锛�")
+                status = http_400
+                event = smart_message_error
+            elif ans.get("event") == message_agent:
+                data = {"answer": ans.get("answer", ""), "id": ans.get("message_id", "")}
+                answer_agent += ans.get("answer", "")
+                message_id = ans.get("message_id", "")
+                event = smart_message_stream
+            elif ans.get("event") == message_event:
+                data = {"answer": ans.get("answer", ""), "id": ans.get("message_id", "")}
+                answer_event += ans.get("answer", "")
+                message_id = ans.get("message_id", "")
+                event = smart_message_stream
+            elif ans.get("event") == message_file:
+                data = {"url": ans.get("url", ""), "id": ans.get("id", ""),
+                        "type": ans.get("type", "")}
+                files.append(data)
+                event = smart_message_file
+            elif ans.get("event") in [workflow_started, node_started, node_finished]:
+                data = ans.get("data", {})
+                data["inputs"] = await data_process(data.get("inputs", {}))
+                data["outputs"] = await data_process(data.get("outputs", {}))
+                data["files"] = await data_process(data.get("files", []))
+                data["process_data"] = ""
+                if data.get("status") == "failed":
+                    status = http_500
+                    error = data.get("error", "")
+                node_list.append(ans)
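+                # Map the Dify event name onto its smart_* counterpart by index.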
+                event = [smart_workflow_started, smart_node_started, smart_node_finished][
+                    [workflow_started, node_started, node_finished].index(ans.get("event"))]
+            elif ans.get("event") == workflow_finished:
+                data = ans.get("data", {})
+                answer_workflow = data.get("outputs", {}).get("output", data.get("outputs", {}).get("answer"))
+                download_url = data.get("outputs", {}).get("download_url")
+                event = smart_workflow_finished
+                if data.get("status") == "failed":
+                    status = http_500
+                    error = data.get("error", "")
+                node_list.append(ans)
+
+            elif ans.get("event") == message_end:
+                event = smart_message_end
+            else:
+                continue
+
+            yield "data: " + json.dumps(
+                {"event": event, "data": data, "error": error, "status": status, "task_id": task_id, "message_id":message_id,
+                 "session_id": chat_request.sessionId},
+                ensure_ascii=False) + "\n\n"
+
+    except Exception as e:
+        logger.error(e)
+        try:
+            yield "data: " + json.dumps({"message": smart_message_error,
+                                         "error": "\n**ERROR**: " + str(e), "status": http_500},
+                                        ensure_ascii=False) + "\n\n"
+        except:
+            ...
+    finally:
+        if message_id:
+            await add_complex_log(db, message_id, chat_id, chat_request.sessionId, chat_request.chatMode,
+                                  answer_event or answer_agent or answer_workflow or error, user_id, mode,
+                                  DF_TYPE, 2, conversation_id, node_data=node_list, query_data=chat_request.to_dict())
+
+
+async def service_complex_upload(db, chat_id, file, user_id):
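+    """Upload files to Dify for complex chat; returns a JSON string of upload results."""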
+    files = []
+    token = await get_chat_token(db, chat_id)
+    if not token:
+        return ""
+    url = settings.dify_base_url + DF_UPLOAD_FILE
+    chat = ChatBaseApply()
+    for f in file:
+        try:
+            file_content = await f.read()
+            file_upload = await chat.chat_upload(url, {"file": (f.filename, file_content)}, {"user": str(user_id)},
+                                                 {'Authorization': f'Bearer {token}'})
+            # try:
+            #     tokens = await read_file(file_content, f.filename, f.content_type)
+            #     file_upload["tokens"] = tokens
+            # except:
+            #     ...
+            files.append(file_upload)
+        except Exception as e:
+            logger.error(e)
+    return json.dumps(files) if files else ""
+
+
+if __name__ == "__main__":
+    # Minimal smoke test for service_chunk_retrieval against one dataset.
+    q = json.dumps({"query": "设备", "dataset_ids": ["fc68db52f43111efb94a0242ac120004"]})
+    knowledge_id = "fc68db52f43111efb94a0242ac120004"
+    top_k = 2
+    similarity_threshold = 0.5
+    api_key = "ragflow-Y4MGYwY2JlZjM2YjExZWY4ZWU5MDI0Mm"
+
+    async def main():
+        records = await service_chunk_retrieval(q, knowledge_id, top_k, similarity_threshold, api_key)
+        print(records)
+
+    asyncio.run(main())

--
Gitblit v1.8.0