From b9c7727dc6fbb3789f063c0616ef9392311fecb2 Mon Sep 17 00:00:00 2001
From: zhaoqingang <zhaoqg0118@163.com>
Date: Thu, 06 Feb 2025 10:58:14 +0800
Subject: [PATCH] merge master
---
app/config/env_conf/default_agent_conf.json | 21 +
app/models/session_model.py | 2
app/config/env_conf/api_key_conf.json | 3
app/service/label.py | 4
app/service/group.py | 5
app/api/chat.py | 397 +++++++++++++++++++++----
app/task/delete_execl_file.py | 21 +
app/service/user.py | 19
app/service/difyService.py | 5
app/config/env_conf/resource.json | 2
app/service/knowledge.py | 9
app/api/user.py | 10
alembic/versions/3091a16c34a6_sessions_add_index_update_time.py | 139 +++++++++
app/service/dialog.py | 25
app/config/const.py | 6
app/task/fetch_agent.py | 4
app/api/excel.py | 98 ++++-
main.py | 6
app/service/role.py | 3
app/utils/excelmerge/conformity.py | 15
app/models/dialog_model.py | 2
app/api/files.py | 44 +-
app/service/v2/initialize_data.py | 5
23 files changed, 674 insertions(+), 171 deletions(-)
diff --git a/alembic/versions/3091a16c34a6_sessions_add_index_update_time.py b/alembic/versions/3091a16c34a6_sessions_add_index_update_time.py
new file mode 100644
index 0000000..16eb4f3
--- /dev/null
+++ b/alembic/versions/3091a16c34a6_sessions_add_index_update_time.py
@@ -0,0 +1,139 @@
+"""sessions add index update_time
+
+Revision ID: 3091a16c34a6
+Revises:
+Create Date: 2025-01-15 18:07:00.151468
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+# revision identifiers, used by Alembic.
+revision: str = '3091a16c34a6'
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_table('knowledgebase')
+ op.drop_table('user_tenant')
+ op.drop_table('apps')
+ op.drop_table('flow')
+ op.create_index(op.f('ix_sessions_update_date'), 'sessions', ['update_date'], unique=False)
+ # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+ # ### commands auto generated by Alembic - please adjust! ###
+ op.drop_index(op.f('ix_sessions_update_date'), table_name='sessions')
+ op.add_column('dialogs', sa.Column('parameters', mysql.TEXT(), nullable=True))
+ op.create_table('flow',
+ sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('status', mysql.INTEGER(), autoincrement=False, nullable=False),
+ sa.Column('description', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('user_id', mysql.INTEGER(), autoincrement=False, nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_table('apps',
+ sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
+ sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('status', mysql.VARCHAR(length=16), nullable=False),
+ sa.Column('description', mysql.TEXT(), nullable=False),
+ sa.Column('tenant_id', mysql.VARCHAR(length=36), nullable=False),
+ sa.Column('mode', mysql.VARCHAR(length=36), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_table('user_tenant',
+ sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
+ sa.Column('tenant_id', mysql.VARCHAR(length=32), nullable=True),
+ sa.Column('user_id', mysql.VARCHAR(length=32), nullable=True),
+ sa.Column('role', mysql.VARCHAR(length=32), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_table('knowledgebase',
+ sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
+ sa.Column('name', mysql.VARCHAR(length=128), nullable=True),
+ sa.Column('permission', mysql.VARCHAR(length=32), nullable=True),
+ sa.Column('tenant_id', mysql.VARCHAR(length=32), nullable=True),
+ sa.Column('description', mysql.TEXT(), nullable=True),
+ sa.Column('status', mysql.VARCHAR(length=1), nullable=True),
+ sa.Column('doc_num', mysql.INTEGER(), autoincrement=False, nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_table('chat_sessions',
+ sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
+ sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
+ sa.Column('agent_id', mysql.VARCHAR(length=255), nullable=True),
+ sa.Column('agent_type', mysql.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('create_date', mysql.DATETIME(), nullable=True),
+ sa.Column('update_date', mysql.DATETIME(), nullable=True),
+ sa.Column('tenant_id', mysql.INTEGER(), autoincrement=False, nullable=True),
+ sa.Column('message', mysql.TEXT(), nullable=True),
+ sa.Column('reference', mysql.TEXT(), nullable=True),
+ sa.Column('conversation_id', mysql.VARCHAR(length=36), nullable=True),
+ sa.Column('event_type', mysql.VARCHAR(length=16), nullable=True),
+ sa.Column('session_type', mysql.VARCHAR(length=16), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_index('ix_chat_sessions_update_date', 'chat_sessions', ['update_date'], unique=False)
+ op.create_index('ix_chat_sessions_tenant_id', 'chat_sessions', ['tenant_id'], unique=False)
+ op.create_index('ix_chat_sessions_conversation_id', 'chat_sessions', ['conversation_id'], unique=False)
+ op.create_table('dialog',
+ sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('status', mysql.VARCHAR(length=1), nullable=False),
+ sa.Column('description', mysql.VARCHAR(length=255), nullable=False),
+ sa.Column('tenant_id', mysql.VARCHAR(length=36), nullable=False),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ op.create_table('chat_api_tokens',
+ sa.Column('id', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=36), nullable=False),
+ sa.Column('app_id', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=36), nullable=True),
+ sa.Column('type', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=16), nullable=True),
+ sa.Column('token', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=255), nullable=True),
+ sa.Column('created_at', mysql.DATETIME(), nullable=True),
+ sa.Column('last_used_at', mysql.DATETIME(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_default_charset='utf8mb3',
+ mysql_engine='InnoDB',
+ mysql_row_format='DYNAMIC'
+ )
+ op.create_index('ix_chat_api_tokens_app_id', 'chat_api_tokens', ['app_id'], unique=False)
+ op.create_table('user_token',
+ sa.Column('id', mysql.VARCHAR(length=16), nullable=False),
+ sa.Column('account', mysql.VARCHAR(length=255), nullable=True),
+ sa.Column('password', mysql.VARCHAR(length=255), nullable=True),
+ sa.Column('access_token', mysql.VARCHAR(length=1000), nullable=True),
+ sa.Column('refresh_token', mysql.VARCHAR(length=1000), nullable=True),
+ sa.Column('created_at', mysql.DATETIME(), nullable=True),
+ sa.Column('updated_at', mysql.DATETIME(), nullable=True),
+ sa.PrimaryKeyConstraint('id'),
+ mysql_collate='utf8mb4_0900_ai_ci',
+ mysql_default_charset='utf8mb4',
+ mysql_engine='InnoDB'
+ )
+ # ### end Alembic commands ###
diff --git a/app/api/chat.py b/app/api/chat.py
index a824acd..2241161 100644
--- a/app/api/chat.py
+++ b/app/api/chat.py
@@ -10,7 +10,8 @@
from Log import logger
from app.api import get_current_user_websocket
from app.config.config import settings
-from app.config.const import IMAGE_TO_TEXT, DOCUMENT_TO_REPORT, DOCUMENT_TO_CLEANING, DOCUMENT_IA_QUESTIONS
+from app.config.const import IMAGE_TO_TEXT, DOCUMENT_TO_REPORT, DOCUMENT_TO_CLEANING, DOCUMENT_IA_QUESTIONS, \
+ DOCUMENT_TO_REPORT_TITLE, DOCUMENT_TO_TITLE, DOCUMENT_TO_PAPER
from app.models import MenuCapacityModel
from app.models.agent_model import AgentModel, AgentType
from app.models.base_model import get_db
@@ -55,7 +56,8 @@
ret = {"message": "Chat ID not found", "type": "close"}
await websocket.send_json(ret)
return
-
+ # print(agent_type)
+ # print(chat_type)
if agent_type == AgentType.RAGFLOW:
ragflow_service = RagflowService(settings.fwr_base_url)
token = await get_ragflow_token(db, current_user.id)
@@ -359,7 +361,7 @@
"upload_file_id": upload_file_id
})
async for rag_response in dify_service.chat(token, current_user.id, question, files,
- conversation_id):
+ conversation_id, {}):
# print(rag_response)
try:
if rag_response[:5] == "data:":
@@ -437,9 +439,12 @@
upload_files = receive_message.get('upload_files', [])
title = receive_message.get('title', "")
workflow_type = receive_message.get('workflow', 1)
- if not upload_files:
- await websocket.send_json({"message": "Invalid request", "type": "error"})
- continue
+ sub_titles = receive_message.get('sub_titles', "")
+ title_number = receive_message.get('title_number', 8)
+ title_style = receive_message.get('title_style', "")
+ title_query = receive_message.get('title_query', "")
+ if upload_files:
+ title_query = "start"
try:
session = SessionService(db).create_session(
chat_id,
@@ -466,87 +471,178 @@
if workflow_type == 2:
inputs["file_list"] = files
inputs["Completion_of_main_indicators"] = title
- token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT)
+ inputs["sub_titles"] = sub_titles
+ token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT_TITLE)
if not token:
await websocket.send_json(
{"message": "Invalid token document_to_cleaning", "type": "error"})
+ elif workflow_type == 3:
+ inputs["file_list"] = files
+ inputs["number_of_title"] = title_number
+ inputs["title_style"] = title_style
+ token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_TITLE)
+ if not token:
+ await websocket.send_json(
+ {"message": "Invalid token document_to_title", "type": "error"})
+
complete_response = ""
- async for rag_response in dify_service.workflow(token, current_user.id, inputs):
- # print(rag_response)
- try:
- if rag_response[:5] == "data:":
- # If so, strip the first 5 characters and trim the leading/trailing whitespace
- complete_response = rag_response[5:].strip()
- elif "event: ping" in rag_response:
- continue
- else:
- # Otherwise, keep it as is
- complete_response += rag_response
+ if workflow_type == 1 or workflow_type == 2:
+ async for rag_response in dify_service.workflow(token, current_user.id, inputs):
+ # print(rag_response)
try:
- data = json.loads(complete_response)
- complete_response = ""
- if data.get("event") == "node_started" or data.get("event") == "node_finished": # "event": "message_end"
- if "data" not in data or not data["data"]: # 淇℃伅杩囨护
- logger.error("闈炴硶鏁版嵁--------------------")
- logger.error(data)
- continue
- else: # 姝e父杈撳嚭
+ if rag_response[:5] == "data:":
+ # If so, strip the first 5 characters and trim the leading/trailing whitespace
+ complete_response = rag_response[5:].strip()
+ elif "event: ping" in rag_response:
+ continue
+ else:
+ # Otherwise, keep it as is
+ complete_response += rag_response
+ try:
+ data = json.loads(complete_response)
+ complete_response = ""
+ if data.get("event") == "node_started" or data.get("event") == "node_finished": # "event": "message_end"
+ if "data" not in data or not data["data"]: # 淇℃伅杩囨护
+ logger.error("闈炴硶鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ else: # 姝e父杈撳嚭
+ answer = data.get("data", "")
+ if isinstance(answer, str):
+ logger.error("----------------鏈煡鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ elif isinstance(answer, dict):
+
+ message = answer.get("title", "")
+
+ result = {"message": message, "type": "system"}
+ elif data.get("event") == "workflow_finished":
answer = data.get("data", "")
if isinstance(answer, str):
logger.error("----------------鏈煡鏁版嵁--------------------")
logger.error(data)
- continue
+ result = {"message": "", "type": "close", "download_url": ""}
elif isinstance(answer, dict):
+ download_url = ""
+ outputs = answer.get("outputs", {})
+ if outputs:
+ message = outputs.get("output", "")
+ download_url = outputs.get("download_url", "")
+ else:
+ message = answer.get("error", "")
- message = answer.get("title", "")
+ result = {"message": message, "type": "message", "download_url": download_url}
+ try:
+ SessionService(db).update_session(chat_id,
+ message={"role": "assistant",
+ "content": {
+ "answer": message,
+ "download_url": download_url}},
+ conversation_id=data.get(
+ "conversation_id"))
+ except Exception as e:
+ logger.error("淇濆瓨dify鐨勪細璇濆紓甯革紒")
+ logger.error(e)
+ await websocket.send_json(result)
+ result = {"message": "", "type": "close", "download_url": ""}
- result = {"message": message, "type": "system"}
- elif data.get("event") == "workflow_finished":
- answer = data.get("data", "")
- if isinstance(answer, str):
- logger.error("----------------鏈煡鏁版嵁--------------------")
- logger.error(data)
- result = {"message": "", "type": "close", "download_url": ""}
- elif isinstance(answer, dict):
- download_url = ""
- outputs = answer.get("outputs", {})
- if outputs:
- message = outputs.get("output", "")
- download_url = outputs.get("download_url", "")
- else:
- message = answer.get("error", "")
- result = {"message": message, "type": "message", "download_url": download_url}
+ else:
+ continue
+ try:
+ await websocket.send_json(result)
+ except Exception as e:
+ logger.error(e)
+ logger.error("杩斿洖瀹㈡埛绔秷鎭紓甯�!")
+ complete_response = ""
+ except json.JSONDecodeError as e:
+ print(f"Error decoding JSON: {e}")
+ # print(f"Response text: {text}")
+ except Exception as e2:
+ result = {"message": f"鍐呴儴閿欒锛� {e2}", "type": "close"}
+ await websocket.send_json(result)
+ print(f"Error process message of ragflow: {e2}")
+ elif workflow_type == 3:
+ image_list = []
+ # print(inputs)
+ complete_response = ""
+ async for rag_response in dify_service.chat(token, current_user.id, title_query, [],
+ conversation_id, inputs):
+ print(rag_response)
+ try:
+ if rag_response[:5] == "data:":
+ # If so, strip the first 5 characters and trim the leading/trailing whitespace
+ complete_response = rag_response[5:].strip()
+ elif "event: ping" in rag_response:
+ continue
+ else:
+ # Otherwise, keep it as is
+ complete_response += rag_response
+ try:
+ data = json.loads(complete_response)
+ complete_response = ""
+ if data.get("event") == "node_started" or data.get(
+ "event") == "node_finished": # "event": "message_end"
+ if "data" not in data or not data["data"]: # 淇℃伅杩囨护
+ logger.error("闈炴硶鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ else: # 姝e父杈撳嚭
+ answer = data.get("data", "")
+ if isinstance(answer, str):
+ logger.error("----------------鏈煡鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ elif isinstance(answer, dict):
+
+ message = answer.get("title", "")
+
+ result = {"message": message, "type": "system"}
+ elif data.get("event") == "message":
+ message = data.get("answer", "")
+ # try:
+ # msg_dict = json.loads(answer)
+ # message = msg_dict.get("output", "")
+ # except Exception as e:
+ # print(e)
+ # continue
+ result = {"message": message, "type": "message",
+ "download_url": ""}
try:
SessionService(db).update_session(chat_id,
message={"role": "assistant",
"content": {
"answer": message,
- "download_url": download_url}},
+ "download_url": ""}},
conversation_id=data.get(
"conversation_id"))
except Exception as e:
logger.error("淇濆瓨dify鐨勪細璇濆紓甯革紒")
logger.error(e)
- await websocket.send_json(result)
+ # try:
+ # await websocket.send_json(result)
+ # except Exception as e:
+ # logger.error(e)
+ # logger.error("杩斿洖瀹㈡埛绔秷鎭紓甯�!")
+
+ elif data.get("event") == "message_end":
result = {"message": "", "type": "close", "download_url": ""}
-
-
- else:
- continue
- try:
- await websocket.send_json(result)
- except Exception as e:
- logger.error(e)
- logger.error("杩斿洖瀹㈡埛绔秷鎭紓甯�!")
- complete_response = ""
- except json.JSONDecodeError as e:
- print(f"Error decoding JSON: {e}")
- # print(f"Response text: {text}")
- except Exception as e2:
- result = {"message": f"鍐呴儴閿欒锛� {e2}", "type": "close"}
- await websocket.send_json(result)
- print(f"Error process message of ragflow: {e2}")
+ else:
+ continue
+ try:
+ await websocket.send_json(result)
+ except Exception as e:
+ logger.error(e)
+ logger.error("dify杩斿洖瀹㈡埛绔秷鎭紓甯�!")
+ complete_response = ""
+ except json.JSONDecodeError as e:
+ print(f"Error decoding JSON: {e}")
+ # print(f"Response text: {text}")
+ except Exception as e2:
+ result = {"message": f"鍐呴儴閿欒锛� {e2}", "type": "close"}
+ await websocket.send_json(result)
+ print(f"Error process message of ragflow: {e2}")
elif chat_type == "documentIa":
# print(122112)
token = DfTokenDao(db).get_token_by_id(DOCUMENT_IA_QUESTIONS)
@@ -588,8 +684,8 @@
answer_str = ""
complete_response = ""
async for rag_response in dify_service.chat(token, current_user.id, question, files,
- conversation_id):
- # print(rag_response)
+ conversation_id, {}):
+ print(rag_response)
try:
if rag_response[:5] == "data:":
 # If so, strip the first 5 characters and trim the leading/trailing whitespace
@@ -616,19 +712,24 @@
elif isinstance(answer, dict):
message = answer.get("title", "")
+ if answer.get("status") == "failed":
+ message = answer.get("error")
result = {"message": message, "type": "system"}
- continue
+ # continue
elif data.get("event") == "message": # "event": "message_end"
 # normal output
answer = data.get("answer", "")
result = {"message": answer, "type": "stream"}
+ elif data.get("event") == "error":
+ answer = data.get("message", "")
+ result = {"message": answer, "type": "system"}
elif data.get("event") == "workflow_finished":
answer = data.get("data", "")
if isinstance(answer, str):
logger.error("----------------鏈煡鏁版嵁--------------------")
logger.error(data)
- result = {"message": "", "type": "close", "download_url": ""}
+ # result = {"message": "", "type": "close", "download_url": ""}
elif isinstance(answer, dict):
download_url = ""
outputs = answer.get("outputs", {})
@@ -638,8 +739,8 @@
else:
message = answer.get("error", "")
- # result = {"message": message, "type": "message",
- # "download_url": download_url}
+ result = {"message": message, "type": "system",
+ "download_url": download_url}
try:
SessionService(db).update_session(chat_id,
message={"role": "assistant",
@@ -652,7 +753,7 @@
logger.error("淇濆瓨dify鐨勪細璇濆紓甯革紒")
logger.error(e)
# await websocket.send_json(result)
- continue
+ # continue
elif data.get("event") == "message_end":
result = {"message": "", "type": "close"}
@@ -671,6 +772,162 @@
result = {"message": f"鍐呴儴閿欒锛� {e2}", "type": "close"}
await websocket.send_json(result)
print(f"Error process message of ragflow: {e2}")
+ elif chat_type == "paperTalk":
+ token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_PAPER)
+ # print(token)
+ if not token:
+ await websocket.send_json({"message": "Invalid token", "type": "error"})
+
+ while True:
+ conversation_id = ""
+ inputs = {}
+ # print(4343)
+ receive_message = await websocket.receive_json()
+ print(f"Received from client {chat_id}: {receive_message}")
+ if "difficulty" in receive_message:
+ inputs["Question_Difficulty"] = receive_message["difficulty"]
+ if "is_paper" in receive_message:
+ inputs["Generate_test_paper"] = receive_message["is_paper"]
+ if "single_choice" in receive_message:
+ inputs["Multiple_choice_questions"] = receive_message["single_choice"]
+ if "gap_filling" in receive_message:
+ inputs["Fill_in_blank"] = receive_message["gap_filling"]
+ if "true_or_false" in receive_message:
+ inputs["true_or_false"] = receive_message["true_or_false"]
+ if "multiple_choice" in receive_message:
+ inputs["Multiple_Choice"] = receive_message["multiple_choice"]
+ if "easy_question" in receive_message:
+ inputs["Short_Answer_Questions"] = receive_message["easy_question"]
+ if "case_questions" in receive_message:
+ inputs["Case_Questions"] = receive_message["case_questions"]
+ if "key_words" in receive_message:
+ inputs["key_words"] = receive_message["key_words"]
+ upload_files = receive_message.get('upload_files', [])
+ question = receive_message.get('message', "")
+ session_log = SessionService(db).get_session_by_id(chat_id)
+ if not session_log and not upload_files:
+ await websocket.send_json({"message": "闇�瑕佷笂浼犳枃妗o紒", "type": "error"})
+ continue
+ try:
+ session = SessionService(db).create_session(
+ chat_id,
+ question if question else "开始出题",
+ agent_id,
+ AgentType.DIFY,
+ current_user.id
+ )
+ conversation_id = session.conversation_id
+ except Exception as e:
+ logger.error(e)
+ # complete_response = ""
+
+ files = []
+ for fileId in upload_files:
+ files.append({
+ "type": "document",
+ "transfer_method": "local_file",
+ "url": "",
+ "upload_file_id": fileId
+ })
+ if files:
+ inputs["upload_files"] = files
+ # print(inputs)
+ if not question and not inputs:
+ await websocket.send_json({"message": "Invalid request", "type": "error"})
+ continue
+
+ if not question:
+ question = "寮�濮嬪嚭棰�"
+ complete_response = ""
+ async for rag_response in dify_service.chat(token, current_user.id, question, files,
+ conversation_id, inputs):
+ # print(rag_response)
+ try:
+ if rag_response[:5] == "data:":
+ # If so, strip the first 5 characters and trim the leading/trailing whitespace
+ complete_response = rag_response[5:].strip()
+ elif "event: ping" in rag_response:
+ continue
+ else:
+ # Otherwise, keep it as is
+ complete_response += rag_response
+ try:
+ data = json.loads(complete_response)
+ # print(data)
+ if data.get("event") == "node_started" or data.get(
+ "event") == "node_finished": # "event": "message_end"
+ if "data" not in data or not data["data"]: # 淇℃伅杩囨护
+ logger.error("闈炴硶鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ else: # 姝e父杈撳嚭
+ answer = data.get("data", "")
+ if isinstance(answer, str):
+ logger.error("----------------鏈煡鏁版嵁--------------------")
+ logger.error(data)
+ continue
+ elif isinstance(answer, dict):
+
+ message = answer.get("title", "")
+
+ result = {"message": message, "type": "system"}
+ # continue
+ elif data.get("event") == "message": # "event": "message_end"
+ # normal output
+ answer = data.get("answer", "")
+ result = {"message": answer, "type": "stream"}
+ elif data.get("event") == "error":
+ answer = data.get("message", "")
+ result = {"message": answer, "type": "system"}
+ elif data.get("event") == "workflow_finished":
+ answer = data.get("data", "")
+ if isinstance(answer, str):
+ logger.error("----------------鏈煡鏁版嵁--------------------")
+ logger.error(data)
+ result = {"message": "", "type": "close", "download_url": ""}
+ elif isinstance(answer, dict):
+ download_url = ""
+ outputs = answer.get("outputs", {})
+ if outputs:
+ message = outputs.get("answer", "")
+ download_url = outputs.get("download_url", "")
+ else:
+ message = answer.get("error", "")
+
+ result = {"message": message, "type": "system",
+ "download_url": download_url}
+ try:
+ SessionService(db).update_session(chat_id,
+ message={"role": "assistant",
+ "content": {
+ "answer": message,
+ "download_url": download_url}},
+ conversation_id=data.get(
+ "conversation_id"))
+ except Exception as e:
+ logger.error("淇濆瓨dify鐨勪細璇濆紓甯革紒")
+ logger.error(e)
+ # await websocket.send_json(result)
+ # continue
+ elif data.get("event") == "message_end":
+ result = {"message": "", "type": "close"}
+
+ else:
+ continue
+ try:
+ await websocket.send_json(result)
+ except Exception as e:
+ logger.error(e)
+ logger.error("杩斿洖瀹㈡埛绔秷鎭紓甯�!")
+ complete_response = ""
+ except json.JSONDecodeError as e:
+ print(f"Error decoding JSON: {e}")
+ # print(f"Response text: {text}")
+ except Exception as e2:
+ result = {"message": f"鍐呴儴閿欒锛� {e2}", "type": "close"}
+ await websocket.send_json(result)
+ print(f"Error process message of ragflow: {e2}")
+
 # Start tasks to handle client messages
tasks = [
diff --git a/app/api/excel.py b/app/api/excel.py
index 7b8e792..e8cc6ec 100644
--- a/app/api/excel.py
+++ b/app/api/excel.py
@@ -1,9 +1,15 @@
-from fastapi import APIRouter, File, UploadFile, Form, BackgroundTasks, Depends
+import random
+import string
+
+from fastapi import APIRouter, File, UploadFile, Form, BackgroundTasks, Depends, Request
from fastapi.responses import JSONResponse, FileResponse
+from sqlalchemy.orm import Session
from starlette.websockets import WebSocket
-from app.api import get_current_user, get_current_user_websocket
-from app.models import UserModel
+from app.api import get_current_user, get_current_user_websocket, Response
+from app.models import UserModel, AgentType
+from app.models.base_model import get_db
+from app.service.session import SessionService
from app.utils.excelmerge.conformity import run_conformity
import shutil
import os
@@ -41,13 +47,30 @@
return os.path.join(path, userid)
-@router.post('/excel/upload')
+def generate_db_id(prefix: str = "me") -> str:
+ random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=13))
+ return prefix + random_part
+
+
+def db_create_session(db: Session, user_id: str):
+ db_id = generate_db_id()
+ session = SessionService(db).create_session(
+ db_id,
+ "鍚堝苟Excel",
+ "basic_excel_merge",
+ AgentType.BASIC,
+ int(user_id)
+ )
+ return session
+
+
+@router.post('/excel/upload', response_model=Response)
async def upload_file(files: list[UploadFile] = File(...), current_user: UserModel = Depends(get_current_user)):
user_id = str(current_user.id)
if not any(file.filename for file in files):
- return JSONResponse(content={"error": "娌℃湁鏂囦欢閮ㄥ垎"}, status_code=400)
+ return Response(code=400, msg="娌℃湁鏂囦欢閮ㄥ垎", data={})
if not user_id:
- return JSONResponse(content={"error": "缂哄皯鍙傛暟user_id"}, status_code=400)
+ return Response(code=400, msg="缂哄皯鍙傛暟user_id", data={})
user_source = user_file_path(user_id, SOURCE_FILES_PATH)
user_excel = EXCEL_FILES_PATH
@@ -57,21 +80,21 @@
save_path_list = []
for file in files:
- if file.filename == '':
- return JSONResponse(content={"error": "娌℃湁閫夋嫨鏂囦欢"}, status_code=400)
if file and allowed_file(file.filename):
save_path = os.path.join(user_source, file.filename)
with open(save_path, 'wb') as buffer:
shutil.copyfileobj(file.file, buffer)
save_path_list.append(save_path)
else:
- return JSONResponse(content={"error": "涓嶅厑璁哥殑鏂囦欢绫诲瀷"}, status_code=400)
- return JSONResponse(content={"code": 200, "msg": "", "data": {}}, status_code=200)
+ return Response(code=400, msg="涓嶅厑璁哥殑鏂囦欢绫诲瀷", data={})
+ return Response(code=200, msg="涓婁紶鎴愬姛", data={})
# ws://localhost:9201/api/document/ws/excel
@router.websocket("/ws/excel")
-async def ws_excel(websocket: WebSocket, current_user: UserModel = Depends(get_current_user_websocket)):
+async def ws_excel(websocket: WebSocket,
+ current_user: UserModel = Depends(get_current_user_websocket),
+ db: Session = Depends(get_db)):
await websocket.accept()
user_id = str(current_user.id)
@@ -88,41 +111,66 @@
if merge_file is not None:
await websocket.send_json({
- "message": "鏂囨。鍚堝苟鎴愬姛锛�",
"type": "stream",
- "file_name": f"{merge_file}.xlsx",
- "download_url": f"./api/document/download/{merge_file}.xlsx"
+ "files": [
+ {
+ "file_name": "Excel",
+ "file_url": f"./api/document/download/{merge_file}.xlsx?file_type=excel",
+ }
+ ]
})
await websocket.send_json({
- "message": "鏂囨。鍚堝苟鎴愬姛锛�",
+ "message": "鍚堝苟鎴愬姛",
"type": "close",
})
+ # Create a session record
+ session = db_create_session(db, user_id)
+ # Update the session record
+ if session:
+ session_id = session.id
+ new_message = {
+ "role": "assistant",
+ "content": {
+ "message": "\u5408\u5e76\u6210\u529f",
+ "type": "message",
+ "file_name": "Excel",
+ "file_url": f"/api/document/download/{merge_file}.xlsx?file_type=excel"
+ }
+ }
+ session_service = SessionService(db)
+ session_service.update_session(session_id, message=new_message)
else:
await websocket.send_json({"error": "鍚堝苟澶辫触", "type": "stream", "files": []})
+ await websocket.close()
else:
print(f"Received data: {data}")
await websocket.send_json({"error": "鏈煡鎸囦护", "data": str(data)})
+ await websocket.close()
except Exception as e:
await websocket.send_json({"error": str(e)})
await websocket.close()
@router.get("/download/{file_full_name}")
-async def download_file(background_tasks: BackgroundTasks, file_full_name: str):
+async def download_file(file_full_name: str):
file_name = os.path.basename(file_full_name)
user_excel = EXCEL_FILES_PATH
file_path = os.path.join(user_excel, file_full_name)
if not os.path.exists(file_path):
return JSONResponse(content={"error": "鏂囦欢涓嶅瓨鍦�"}, status_code=404)
-
- def delete_file():
- try:
- os.unlink(file_path)
- except OSError as e:
- print(f"Deleting file error")
+ return FileResponse(
+ path=file_path,
+ filename="Excel.xlsx",
+ media_type='application/octet-stream',
+ )
+ # def delete_file():
+ # try:
+ # os.unlink(file_path)
+ # except OSError as e:
+ # print(f"Deleting file error")
 # Delete the generated file after the download completes
- background_tasks.add_task(delete_file)
- return FileResponse(path=file_path, filename=file_name,
- media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
+ # background_tasks.add_task(delete_file)
+ # return FileResponse(path=file_path, filename=file_name,
+ # media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
diff --git a/app/api/files.py b/app/api/files.py
index a84a0b3..c9f0524 100644
--- a/app/api/files.py
+++ b/app/api/files.py
@@ -10,7 +10,9 @@
from app.api import Response, get_current_user, ResponseList
from app.config.config import settings
-from app.config.const import DOCUMENT_TO_REPORT, IMAGE_TO_TEXT
+from app.config.const import DOCUMENT_TO_REPORT, IMAGE_TO_TEXT, DOCUMENT_TO_REPORT_TITLE, DOCUMENT_IA_QUESTIONS, \
+ DOCUMENT_TO_PAPER
+from app.models import MenuCapacityModel
from app.models.agent_model import AgentType, AgentModel
from app.models.base_model import get_db
from app.models.user_model import UserModel
@@ -33,16 +35,17 @@
db: Session = Depends(get_db),
current_user: UserModel = Depends(get_current_user)
):
- agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first()
+ agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first()
if not agent:
- return Response(code=404, msg="Agent not found")
+ return ResponseList(code=404, msg="Agent not found")
+ agent_type = int(agent.capacity_type)
 # Check the agent type to determine whether multiple file uploads are allowed
- if agent.agent_type in [AgentType.RAGFLOW, AgentType.BISHENG]:
+ if agent_type in [AgentType.RAGFLOW, AgentType.BISHENG]:
if len(file) > 1:
return Response(code=400, msg="杩欎釜鏅鸿兘浣撳彧鏀寔浼犲崟涓枃浠�")
- if agent.agent_type == AgentType.RAGFLOW or agent.agent_type == AgentType.BISHENG:
+ if agent_type == AgentType.RAGFLOW or agent_type == AgentType.BISHENG:
file = file[0]
 # Read the content of the uploaded file
try:
@@ -50,7 +53,7 @@
except Exception as e:
return Response(code=400, msg=str(e))
- if agent.agent_type == AgentType.RAGFLOW:
+ if agent_type == AgentType.RAGFLOW:
token = await get_ragflow_token(db, current_user.id)
ragflow_service = RagflowService(base_url=settings.fwr_base_url)
 # Check whether the conversation exists; create it first if it does not
@@ -63,7 +66,7 @@
 # For multiple files, all doc_ids may need to be collected
return Response(code=200, msg="", data={"doc_ids": doc_ids, "file_name": file.filename})
- elif agent.agent_type == AgentType.BISHENG:
+ elif agent_type == AgentType.BISHENG:
bisheng_service = BishengService(base_url=settings.sgb_base_url)
try:
token = await get_bisheng_token(db, current_user.id)
@@ -73,7 +76,7 @@
result["file_name"] = file.filename
return Response(code=200, msg="", data=result)
- elif agent.agent_type == AgentType.BASIC:
+ elif agent_type == AgentType.BASIC:
if agent_id == "basic_excel_talk":
 # Handle the single-file case
file_list = file
@@ -97,9 +100,9 @@
# service = BasicService(base_url=settings.basic_paper_url)
# result = await service.paper_file_upload(chat_id, file.filename, file_content)
- elif agent.agent_type == AgentType.DIFY:
+ elif agent_type == AgentType.DIFY:
dify_service = DifyService(base_url=settings.dify_base_url)
- if agent.type == "imageTalk":
+ if agent.chat_type == "imageTalk":
token = DfTokenDao(db).get_token_by_id(IMAGE_TO_TEXT)
if not token:
raise HTTPException(status_code=500, detail="鑾峰彇token澶辫触锛宨mage_and_text_conversion锛�")
@@ -113,8 +116,13 @@
data = await dify_service.upload(token, file.filename, file_content, current_user.id)
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))
- elif agent.type == "reportWorkflow" or agent.type == "documentIa":
- token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT)
+ elif agent.chat_type == "reportWorkflow" or agent.chat_type == "documentIa" or agent.chat_type == "paperTalk":
+ token_dict = {
+ "reportWorkflow": DOCUMENT_TO_REPORT,
+ "documentIa": DOCUMENT_IA_QUESTIONS,
+ "paperTalk": DOCUMENT_TO_PAPER,
+ }
+ token = DfTokenDao(db).get_token_by_id(token_dict[agent.chat_type])
if not token:
raise HTTPException(status_code=500, detail="鑾峰彇token澶辫触锛宒ocument_to_report锛�")
result = []
@@ -143,22 +151,24 @@
file_type: Optional[str] = Query(None, description="Optional file type for basic agents"),
db: Session = Depends(get_db)
):
- agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first()
+ # agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first()
+ agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first()
+
if not agent:
return Response(code=404, msg="Agent not found")
-
- if agent.agent_type == AgentType.BISHENG:
+ agent_type = int(agent.capacity_type)
+ if agent_type == AgentType.BISHENG:
url = urllib.parse.unquote(url)
 # Extract the file name from the URL
parsed_url = urllib.parse.urlparse(url)
filename = urllib.parse.unquote(parsed_url.path.split('/')[-1])
url = url.replace("http://minio:9000", settings.sgb_base_url)
- elif agent.agent_type == AgentType.RAGFLOW:
+ elif agent_type == AgentType.RAGFLOW:
if not doc_id:
return Response(code=400, msg="doc_id is required")
url = f"{settings.fwr_base_url}/v1/document/get/{doc_id}"
filename = doc_name
- elif agent.agent_type == AgentType.BASIC:
+ elif agent_type == AgentType.BASIC:
if agent_id == "basic_excel_talk":
return await download_basic_file(file_id, file_type)
diff --git a/app/api/user.py b/app/api/user.py
index abd2180..57171ff 100644
--- a/app/api/user.py
+++ b/app/api/user.py
@@ -4,7 +4,7 @@
from app.models.base_model import get_db
from app.models.user import PageParameter, UserStatus, UserInfo, LoginData, UserPassword, UserDept
from app.models.user_model import UserModel
-from app.service.auth import is_valid_password, verify_password
+from app.service.auth import is_valid_password, verify_password, password_rsa
from app.service.user import get_user_list, edit_user_status, delete_user_data, create_user, edit_user_data, \
edit_user_pwd, get_user_info, get_user_routers, get_user_menus, get_user_permission, get_user_dept, change_user_pwd, \
user_data_service, edit_user_dept
@@ -87,11 +87,13 @@
user_info = db.query(UserModel).filter(UserModel.id==current_user.id).first()
if not user_info:
return Response(code=401, msg="Incorrect change password !")
- if not verify_password(user.oldPassword, user_info.hashed_password):
+ old_password = await password_rsa(user.oldPassword)
+ new_password = await password_rsa(user.newPassword)
+ if not verify_password(old_password, user_info.hashed_password):
return Response(code=400, msg="Incorrect password !")
- if not is_valid_password(user.newPassword):
+ if not is_valid_password(new_password):
return Response(code=400, msg="The password must be at least 8 and contain both numbers and letters")
- is_edit = await change_user_pwd(db, user_info.id, user.newPassword)
+ is_edit = await change_user_pwd(db, user_info.id, new_password)
if not is_edit:
return Response(code=500, msg="user pwd change failure", data={})
return Response(code=200, msg="user pwd change success", data={})
diff --git a/app/config/const.py b/app/config/const.py
index 48fa3ed..41d16eb 100644
--- a/app/config/const.py
+++ b/app/config/const.py
@@ -4,6 +4,12 @@
DOCUMENT_TO_REPORT = "document_to_report"
IMAGE_TO_TEXT = "image_and_text_conversion"
DOCUMENT_IA_QUESTIONS = "document_ia_questions"
+DOCUMENT_TO_REPORT_TITLE = "document_to_report_title"
+DOCUMENT_TO_TITLE = "document_to_title"
+DOCUMENT_TO_PAPER = "document_to_paper"
+
+
+### --------- file path------------------
ENV_CONF_PATH = "app/config/env_conf"
APP_SERVICE_PATH = "app/service/"
diff --git a/app/config/env_conf/api_key_conf.json b/app/config/env_conf/api_key_conf.json
index 8a5b98e..cf7de8c 100644
--- a/app/config/env_conf/api_key_conf.json
+++ b/app/config/env_conf/api_key_conf.json
@@ -1,5 +1,6 @@
{
"image_and_text_conversion": "",
"document_to_report": "",
- "document_to_cleaning": ""
+ "document_to_cleaning": "",
+ "document_to_paper": ""
}
\ No newline at end of file
diff --git a/app/config/env_conf/default_agent_conf.json b/app/config/env_conf/default_agent_conf.json
index 396fbb9..86d538b 100644
--- a/app/config/env_conf/default_agent_conf.json
+++ b/app/config/env_conf/default_agent_conf.json
@@ -34,7 +34,9 @@
"file_upload":{
"enabled": false
}
- }
+ },
+ "dialogType": "3",
+ "mode": "agent-basic"
},
{
"id": "basic_excel_talk",
@@ -78,7 +80,9 @@
"file_upload":{
"enabled": false
}
- }
+ },
+ "dialogType": "3",
+ "mode": "agent-basic"
},
{
"id": "basic_question_talk",
@@ -94,7 +98,18 @@
"file_upload":{
"enabled": false
}
- }
+ },
+ "dialogType": "3",
+ "mode": "agent-basic"
+ },
+ {
+ "id": "basic_paper_talk",
+ "name": "鏂囨。鍑哄嵎",
+ "description": "鏂囨。鍑哄嵎",
+ "icon": "intellFrame4",
+ "agentType": "paperTalk",
+ "dialogType": "3",
+ "mode": "agent-basic"
}
],
"bs": []
diff --git a/app/config/env_conf/resource.json b/app/config/env_conf/resource.json
index 88d0d9b..2530c6d 100644
--- a/app/config/env_conf/resource.json
+++ b/app/config/env_conf/resource.json
@@ -342,7 +342,7 @@
"seq": 0,
"target": null,
"canbdeeleted": null,
- "resource_type_id": "3",
+ "resource_type_id": "1",
"resource_id": "0642e4a6-3d48-4635-ba2a-bf4e39c351ed",
"status": "1",
"hidden": 0
diff --git a/app/models/dialog_model.py b/app/models/dialog_model.py
index 1ab181f..2abb060 100644
--- a/app/models/dialog_model.py
+++ b/app/models/dialog_model.py
@@ -50,7 +50,7 @@
 create_date = Column(DateTime) # creation time
 create_time = Column(BigInteger)
 update_date = Column(DateTime) # update time
- update_time = Column(BigInteger)
+ update_time = Column(BigInteger, index=True)
 tenant_id = Column(Integer) # creator
 dialog_id = Column(String(32))
 name = Column(String(255)) # name
diff --git a/app/models/session_model.py b/app/models/session_model.py
index b7fae97..87f63b7 100644
--- a/app/models/session_model.py
+++ b/app/models/session_model.py
@@ -20,7 +20,7 @@
agent_id = Column(String(255))
 agent_type = Column(SQLAlchemyEnum(AgentType), nullable=False) # Only BASIC sessions are stored here; ragflow and bisheng sessions are fetched via their APIs
 create_date = Column(DateTime, default=current_time) # creation time, defaults to the current local time
- update_date = Column(DateTime, default=current_time, onupdate=current_time) # update time, defaults to the current local time and refreshes automatically on update
+ update_date = Column(DateTime, default=current_time, onupdate=current_time, index=True) # update time, defaults to the current local time and refreshes automatically on update
 tenant_id = Column(Integer) # creator
 message = Column(TEXT) # description
conversation_id = Column(String(64))
diff --git a/app/service/dialog.py b/app/service/dialog.py
index 19f253c..81316ff 100644
--- a/app/service/dialog.py
+++ b/app/service/dialog.py
@@ -24,14 +24,15 @@
else:
query = query.filter(DialogModel.status != Dialog_STATSU_DELETE)
id_list = []
+ # if label:
+ # id_list = [i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id==label).all()]
+ if user.permission != "admin":
+ dia_list = [j.id for i in user.groups for j in i.dialogs]
+ query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list)))
+ # else:
if label:
id_list = [i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id == label).all()]
- if user.permission != "admin":
- dia_list = [j.id for i in user.groups for j in i.dialogs if not label or j.id in id_list]
- query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list)))
- else:
- if label:
- query = query.filter(or_(DialogModel.id.in_(id_list)))
+ query = query.filter(DialogModel.id.in_(id_list))
if keyword:
query = query.filter(DialogModel.name.like('%{}%'.format(keyword)))
@@ -179,16 +180,14 @@
if user is None:
return {"rows": []}
query = db.query(DialogModel).filter(DialogModel.status != Dialog_STATSU_DELETE)
- id_list = []
+ if user.permission != "admin":
+ dia_list = [j.id for i in user.groups for j in i.dialogs]
+ query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list)))
+
if label:
id_list = set(
[i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id.in_(label)).all()])
- if user.permission != "admin":
- dia_list = [j.id for i in user.groups for j in i.dialogs if not label or j.id in id_list]
- query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list)))
- else:
- if label:
- query = query.filter(or_(DialogModel.id.in_(id_list)))
+ query = query.filter(DialogModel.id.in_(id_list))
if keyword:
query = query.filter(DialogModel.name.like('%{}%'.format(keyword)))
diff --git a/app/service/difyService.py b/app/service/difyService.py
index 97de46b..3c67d09 100644
--- a/app/service/difyService.py
+++ b/app/service/difyService.py
@@ -127,11 +127,10 @@
- async def chat(self, token: str, user_id: int, message: str, files: [], conversation_id: str):
-
+ async def chat(self, token: str, user_id: int, message: str, files: [], conversation_id: str, inputs: dict):
target_url = f"{self.base_url}/v1/chat-messages"
data = {
- "inputs": {},
+ "inputs": inputs,
"query": message,
"response_mode": "streaming",
"conversation_id": conversation_id,
diff --git a/app/service/group.py b/app/service/group.py
index 474084b..c65dbd8 100644
--- a/app/service/group.py
+++ b/app/service/group.py
@@ -7,6 +7,7 @@
from app.service.ragflow import RagflowService
from app.service.service_token import get_ragflow_token
from Log import logger
+from sqlalchemy import and_
async def group_list(db, page_size: int, page_index: int, keyword: str):
@@ -61,7 +62,9 @@
not_group_user = []
in_group_user = []
user_list = [u.id for u in db.query(GroupModel).filter(GroupModel.id.__eq__(group_id)).first().users]
- for u in db.query(UserModel.id, UserModel.username,UserModel.permission).filter(UserModel.permission!="admin").order_by(UserModel.id.desc()).all():
+ for u in db.query(UserModel.id, UserModel.username, UserModel.permission).filter(
+ and_(UserModel.permission != "admin", UserModel.status != 2)
+ ).order_by(UserModel.id.desc()).all():
if u.id in user_list:
in_group_user.append({"userId": u.id, "userName": u.username})
else:
diff --git a/app/service/knowledge.py b/app/service/knowledge.py
index ffd6dcf..e175f87 100644
--- a/app/service/knowledge.py
+++ b/app/service/knowledge.py
@@ -15,9 +15,11 @@
query = db.query(KnowledgeModel).filter(KnowledgeModel.knowledge_type==knowledge)
if user.permission != "admin":
klg_list = [j.id for i in user.groups for j in i.knowledges]
- query = query.filter(or_(KnowledgeModel.tenant_id == user_id, KnowledgeModel.id.in_(klg_list)))
+ query = query.filter(or_(KnowledgeModel.id.in_(klg_list), KnowledgeModel.tenant_id == str(user_id)))
+
if location:
- query = query.filter(KnowledgeModel.permission == "team")
+ query = query.filter(or_(KnowledgeModel.permission == "team", KnowledgeModel.tenant_id == str(user_id)))
+
if keyword:
query = query.filter(KnowledgeModel.name.like('%{}%'.format(keyword)))
total = query.count()
@@ -28,7 +30,7 @@
async def create_knowledge_service(db, klg_id, name, description, icon, klg_type, user_id):
try:
- dialog_model = KnowledgeModel(id=klg_id,name=name, description=description,icon=icon, knowledge_type=klg_type, tenant_id=user_id)
+ dialog_model = KnowledgeModel(id=klg_id,name=name, description=description,icon=icon, knowledge_type=klg_type, tenant_id=user_id,update_date=datetime.now(),create_date=datetime.now())
db.add(dialog_model)
db.commit()
db.refresh(dialog_model)
@@ -50,6 +52,7 @@
knowledge.permission = app_knowledge["permission"]
knowledge.documents = app_knowledge["doc_num"]
knowledge.status = app_knowledge["status"]
+ knowledge.update_date = datetime.now()
db.add(knowledge)
db.commit()
db.refresh(knowledge)
diff --git a/app/service/label.py b/app/service/label.py
index c72d61e..f69234c 100644
--- a/app/service/label.py
+++ b/app/service/label.py
@@ -62,9 +62,11 @@
logger.error(e)
db.rollback()
# return False
+ print(delete_list)
for label_id in delete_list:
try:
- LabelWorkerModel(id=label_id).delete()
+ db.query(LabelWorkerModel).filter_by(id=label_id).delete()
+ db.commit()
except Exception as e:
logger.error(e)
db.rollback()
diff --git a/app/service/role.py b/app/service/role.py
index c7da9b6..ecc8c4a 100644
--- a/app/service/role.py
+++ b/app/service/role.py
@@ -1,6 +1,7 @@
import uuid
from Log import logger
+from app.config.const import DEPT_STATUS_ON
from app.models import OrganizationModel
from app.models.resource_model import ResourceModel
from app.models.role_model import RoleModel
@@ -16,7 +17,7 @@
total = query.count()
roles = query.order_by(RoleModel.created_at.desc()).limit(page_size).offset(
(page_index - 1) * page_size).all()
- return {"total": total, "rows": [role.to_json() for role in roles]}
+ return {"total": total, "rows": [role.to_json() for role in roles if role.status == DEPT_STATUS_ON]}
async def create_role(db, role_name: str, description: str, role_key, data_scope, user_id):
diff --git a/app/service/user.py b/app/service/user.py
index b6c916e..3536b89 100644
--- a/app/service/user.py
+++ b/app/service/user.py
@@ -337,8 +337,8 @@
knowledge_dict = {}
user = db.query(UserModel).filter_by(id=user_id).first()
parent_id = ""
- # print(111111111111111)
- async def role_resource(role_set, permissions, roles):
+
+ async def role_resource_type(role_set, permissions, roles, type):
nonlocal parent_id
for role in roles:
if role.id not in role_set:
@@ -346,20 +346,19 @@
for r in role.resources:
if r.resource_type_id != "1":
- # if not r.resource_id:
- # parent_id = r.id
- # continue
- permissions[r.id] = r.to_router_dict()
+ permission_dict = r.to_router_dict()
+ permission_dict["type"] = type
+ permissions[r.id] = permission_dict
- await role_resource(role_set, permissions, user.roles)
+ await role_resource_type(role_set, permissions, user.roles, "user")
for ogt in user.organizations:
if ogt.roles:
- await role_resource(role_set, permissions, ogt.roles)
+ await role_resource_type(role_set, permissions, ogt.roles, "org")
parent_ogt = ogt.parent
while parent_ogt:
if parent_ogt.id not in dept_set:
- await role_resource(role_set, permissions, parent_ogt.roles)
+ await role_resource_type(role_set, permissions, parent_ogt.roles, "org")
dept_set.add(parent_ogt.id)
parent_ogt = parent_ogt.parent
@@ -390,7 +389,7 @@
async def get_user_dept(db, user_id):
res = {}
user = db.query(UserModel).filter_by(id=user_id).first()
- res["rows"] = [i.to_dict() for i in user.organizations]
+ res["rows"] = [i.to_dict() for i in user.organizations if i.status == DEPT_STATUS_ON]
return res
diff --git a/app/service/v2/initialize_data.py b/app/service/v2/initialize_data.py
index 32e7020..3b92694 100644
--- a/app/service/v2/initialize_data.py
+++ b/app/service/v2/initialize_data.py
@@ -141,9 +141,8 @@
logger.error(e)
else:
try:
- dialog = DialogModel(id=agent["id"], name=agent["name"], description=agent["description"],
- icon=agent["icon"], tenant_id=user.id if user else "", dialog_type="3",
- parameters = json.dumps(agent["parameters"]))
+ dialog = DialogModel(id=agent["id"], name=agent["name"], description=agent["description"],
+ icon=agent["icon"], tenant_id=user.id if user else "", dialog_type=agent["dialogType"], mode=agent["mode"],parameters = json.dumps(agent["parameters"]))
db.add(dialog)
db.commit()
db.refresh(dialog)
diff --git a/app/task/delete_execl_file.py b/app/task/delete_execl_file.py
new file mode 100644
index 0000000..0ba45ea
--- /dev/null
+++ b/app/task/delete_execl_file.py
@@ -0,0 +1,21 @@
+import os.path
+from datetime import datetime, timedelta
+
+
+def delete_file_after_delay():
+ path = "data/output"
+ data_dir = os.path.dirname(path)
+ if not os.path.exists(data_dir):
+ return
+ if not os.path.exists(path):
+ return
+ now = datetime.now()
+ five_minutes_ago = now - timedelta(minutes=5)
+ for filename in os.listdir(path):
+ file_path = os.path.join(path, filename)
+ if os.path.isfile(file_path):
+ creation_time = os.path.getctime(file_path)
+ file_creation_datetime = datetime.fromtimestamp(creation_time)
+ if file_creation_datetime < five_minutes_ago:
+ os.remove(file_path)
+ print(f"瀹氭椂鍒犻櫎鏂囦欢: {file_path}")
diff --git a/app/task/fetch_agent.py b/app/task/fetch_agent.py
index f361265..f394dc2 100644
--- a/app/task/fetch_agent.py
+++ b/app/task/fetch_agent.py
@@ -163,7 +163,7 @@
 ('basic_excel_talk', 6, '智能数据', 'BASIC', 'excelTalk'),
 ('basic_question_talk', 7, '出题组卷', 'BASIC', 'questionTalk'),
 ('9d75142a-66eb-4e23-b7d4-03efe4584915', 8, '小数绘图', 'DIFY', 'imageTalk'),
- ('basic_paper_talk', 9, '文档出卷', 'BASIC', 'paperTalk'),
+ ('2f6ddf93-7ba6-4b2d-b991-d96421404600', 9, '文档出卷', 'DIFY', 'paperTalk'),
 ('basic_report_clean', 10, '文档报告', 'DIFY', 'reportWorkflow')
]
@@ -490,7 +490,7 @@
import_type_table(db, node)
print("add resourceType record successfully")
else:
- print("sync resources successfully")
+ print("sync resourcesType successfully")
if db.query(ResourceModel).count() == 0:
with open(os.path.join(ENV_CONF_PATH, "resource.json"), 'r', encoding='utf-8') as file:
json_data = json.load(file)
diff --git a/app/utils/excelmerge/conformity.py b/app/utils/excelmerge/conformity.py
index 3df2e86..fed5e19 100644
--- a/app/utils/excelmerge/conformity.py
+++ b/app/utils/excelmerge/conformity.py
@@ -2,8 +2,8 @@
import random
import shutil
import string
-from datetime import datetime
+from datetime import datetime
from openpyxl import load_workbook
@@ -65,17 +65,16 @@
template_excel.save(output_path)
template_excel.close()
- # Delete the leftover files after the merge completes
- for filename in os.listdir(file_path):
- file_path_full = os.path.join(file_path, filename)
- try:
+ try:
+ for filename in os.listdir(file_path):
+ file_path_full = os.path.join(file_path, filename)
if os.path.isfile(file_path_full) or os.path.islink(file_path_full):
os.unlink(file_path_full)
elif os.path.isdir(file_path_full):
shutil.rmtree(file_path_full)
- os.rmdir(file_path)
- except Exception as e:
- print(f"鍒犻櫎鏂囦欢鏃跺彂鐢熼敊璇�: {e}")
+ os.rmdir(file_path)
+ except Exception as e:
+ print(f"鍒犻櫎鏂囦欢鏃跺彂鐢熼敊璇�: {e}")
return file_name
except Exception as e:
diff --git a/main.py b/main.py
index fc24d9c..898348d 100644
--- a/main.py
+++ b/main.py
@@ -24,6 +24,7 @@
from app.api.group import group_router
from app.api.role import role_router
from app.models.base_model import init_db
+from app.task.delete_execl_file import delete_file_after_delay
# from app.models.base_model import init_db
from app.task.fetch_agent import sync_agents, initialize_agents, sync_agents_v2, sync_knowledge, \
sync_resources_from_json
@@ -42,9 +43,7 @@
# sync_agents()
-
await sync_default_data()
- #
sync_agents_v2()
sync_knowledge()
sync_resources_from_json()
@@ -63,7 +62,7 @@
 # Configure the CORS middleware
# app.add_middleware(
# CORSMiddleware,
-# allow_origins=["http://192.168.20.119:9303", "http://192.168.20.119:9301", "http://smartai.com:8293","http://localhost:9303", "http://127.0.0.1:9303","http://localhost:5173","http://192.168.20.158:5173"],
+# allow_origins=["*"],
# allow_credentials=True,
 # allow_methods=["*"], # allow all methods
 # allow_headers=["*"], # allow all headers
@@ -72,6 +71,7 @@
scheduler = BackgroundScheduler()
scheduler.add_job(sync_agents_v2, 'interval', minutes=60, id="sync_resource_data")
scheduler.add_job(sync_token, 'interval', minutes=5, id="sync_token_1")
+# scheduler.add_job(delete_file_after_delay, 'interval', minutes=10, id="delete_file_after_delay")
scheduler.start()
app.include_router(auth_router, prefix='/api/auth', tags=["auth"])
--
Gitblit v1.8.0