New file |
| | |
| | | """sessions add index update_time |
| | | |
| | | Revision ID: 3091a16c34a6 |
| | | Revises: |
| | | Create Date: 2025-01-15 18:07:00.151468 |
| | | |
| | | """ |
| | | from typing import Sequence, Union |
| | | |
| | | from alembic import op |
| | | import sqlalchemy as sa |
| | | from sqlalchemy.dialects import mysql |
| | | |
| | | # revision identifiers, used by Alembic. |
| | | revision: str = '3091a16c34a6' |
| | | down_revision: Union[str, None] = None |
| | | branch_labels: Union[str, Sequence[str], None] = None |
| | | depends_on: Union[str, Sequence[str], None] = None |
| | | |
| | | |
| | | def upgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.drop_table('knowledgebase') |
| | | op.drop_table('user_tenant') |
| | | op.drop_table('apps') |
| | | op.drop_table('flow') |
| | | op.create_index(op.f('ix_sessions_update_date'), 'sessions', ['update_date'], unique=False) |
| | | # ### end Alembic commands ### |
| | | |
| | | |
| | | def downgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.drop_index(op.f('ix_sessions_update_date'), table_name='sessions') |
| | | op.add_column('dialogs', sa.Column('parameters', mysql.TEXT(), nullable=True)) |
| | | op.create_table('flow', |
| | | sa.Column('id', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('name', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('status', mysql.INTEGER(), autoincrement=False, nullable=False), |
| | | sa.Column('description', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('user_id', mysql.INTEGER(), autoincrement=False, nullable=False), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_table('apps', |
| | | sa.Column('id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.Column('name', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('status', mysql.VARCHAR(length=16), nullable=False), |
| | | sa.Column('description', mysql.TEXT(), nullable=False), |
| | | sa.Column('tenant_id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.Column('mode', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_table('user_tenant', |
| | | sa.Column('id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.Column('tenant_id', mysql.VARCHAR(length=32), nullable=True), |
| | | sa.Column('user_id', mysql.VARCHAR(length=32), nullable=True), |
| | | sa.Column('role', mysql.VARCHAR(length=32), nullable=True), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_table('knowledgebase', |
| | | sa.Column('id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.Column('name', mysql.VARCHAR(length=128), nullable=True), |
| | | sa.Column('permission', mysql.VARCHAR(length=32), nullable=True), |
| | | sa.Column('tenant_id', mysql.VARCHAR(length=32), nullable=True), |
| | | sa.Column('description', mysql.TEXT(), nullable=True), |
| | | sa.Column('status', mysql.VARCHAR(length=1), nullable=True), |
| | | sa.Column('doc_num', mysql.INTEGER(), autoincrement=False, nullable=True), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_table('chat_sessions', |
| | | sa.Column('id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.Column('name', mysql.VARCHAR(length=255), nullable=True), |
| | | sa.Column('agent_id', mysql.VARCHAR(length=255), nullable=True), |
| | | sa.Column('agent_type', mysql.INTEGER(), autoincrement=False, nullable=True), |
| | | sa.Column('create_date', mysql.DATETIME(), nullable=True), |
| | | sa.Column('update_date', mysql.DATETIME(), nullable=True), |
| | | sa.Column('tenant_id', mysql.INTEGER(), autoincrement=False, nullable=True), |
| | | sa.Column('message', mysql.TEXT(), nullable=True), |
| | | sa.Column('reference', mysql.TEXT(), nullable=True), |
| | | sa.Column('conversation_id', mysql.VARCHAR(length=36), nullable=True), |
| | | sa.Column('event_type', mysql.VARCHAR(length=16), nullable=True), |
| | | sa.Column('session_type', mysql.VARCHAR(length=16), nullable=True), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_index('ix_chat_sessions_update_date', 'chat_sessions', ['update_date'], unique=False) |
| | | op.create_index('ix_chat_sessions_tenant_id', 'chat_sessions', ['tenant_id'], unique=False) |
| | | op.create_index('ix_chat_sessions_conversation_id', 'chat_sessions', ['conversation_id'], unique=False) |
| | | op.create_table('dialog', |
| | | sa.Column('id', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('name', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('status', mysql.VARCHAR(length=1), nullable=False), |
| | | sa.Column('description', mysql.VARCHAR(length=255), nullable=False), |
| | | sa.Column('tenant_id', mysql.VARCHAR(length=36), nullable=False), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | op.create_table('chat_api_tokens', |
| | | sa.Column('id', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=36), nullable=False), |
| | | sa.Column('app_id', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=36), nullable=True), |
| | | sa.Column('type', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=16), nullable=True), |
| | | sa.Column('token', mysql.VARCHAR(charset='utf8mb3', collation='utf8mb3_general_ci', length=255), nullable=True), |
| | | sa.Column('created_at', mysql.DATETIME(), nullable=True), |
| | | sa.Column('last_used_at', mysql.DATETIME(), nullable=True), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_default_charset='utf8mb3', |
| | | mysql_engine='InnoDB', |
| | | mysql_row_format='DYNAMIC' |
| | | ) |
| | | op.create_index('ix_chat_api_tokens_app_id', 'chat_api_tokens', ['app_id'], unique=False) |
| | | op.create_table('user_token', |
| | | sa.Column('id', mysql.VARCHAR(length=16), nullable=False), |
| | | sa.Column('account', mysql.VARCHAR(length=255), nullable=True), |
| | | sa.Column('password', mysql.VARCHAR(length=255), nullable=True), |
| | | sa.Column('access_token', mysql.VARCHAR(length=1000), nullable=True), |
| | | sa.Column('refresh_token', mysql.VARCHAR(length=1000), nullable=True), |
| | | sa.Column('created_at', mysql.DATETIME(), nullable=True), |
| | | sa.Column('updated_at', mysql.DATETIME(), nullable=True), |
| | | sa.PrimaryKeyConstraint('id'), |
| | | mysql_collate='utf8mb4_0900_ai_ci', |
| | | mysql_default_charset='utf8mb4', |
| | | mysql_engine='InnoDB' |
| | | ) |
| | | # ### end Alembic commands ### |
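# A minimal sketch (not part of the diff) of applying and rolling back the
# revision above through Alembic's Python API; it assumes a standard
# alembic.ini in the working directory.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")             # assumed path to the project's Alembic config
command.upgrade(cfg, "3091a16c34a6")    # runs upgrade(): drops the unused tables, adds ix_sessions_update_date
command.downgrade(cfg, "-1")            # runs downgrade() for the most recent revision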
| | |
| | | from Log import logger |
| | | from app.api import get_current_user_websocket |
| | | from app.config.config import settings |
| | | from app.config.const import IMAGE_TO_TEXT, DOCUMENT_TO_REPORT, DOCUMENT_TO_CLEANING, DOCUMENT_IA_QUESTIONS |
| | | from app.config.const import IMAGE_TO_TEXT, DOCUMENT_TO_REPORT, DOCUMENT_TO_CLEANING, DOCUMENT_IA_QUESTIONS, \ |
| | | DOCUMENT_TO_REPORT_TITLE, DOCUMENT_TO_TITLE, DOCUMENT_TO_PAPER |
| | | from app.models import MenuCapacityModel |
| | | from app.models.agent_model import AgentModel, AgentType |
| | | from app.models.base_model import get_db |
| | |
| | | ret = {"message": "Chat ID not found", "type": "close"} |
| | | await websocket.send_json(ret) |
| | | return |
| | | |
| | | # print(agent_type) |
| | | # print(chat_type) |
| | | if agent_type == AgentType.RAGFLOW: |
| | | ragflow_service = RagflowService(settings.fwr_base_url) |
| | | token = await get_ragflow_token(db, current_user.id) |
| | |
| | | "upload_file_id": upload_file_id |
| | | }) |
| | | async for rag_response in dify_service.chat(token, current_user.id, question, files, |
| | | conversation_id): |
| | | conversation_id, {}): |
| | | # print(rag_response) |
| | | try: |
| | | if rag_response[:5] == "data:": |
| | |
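# The chat handlers in this file parse Dify's SSE stream by hand: strip the
# leading "data:" from each chunk, skip "event: ping" keep-alives, and keep
# appending until the buffered payload parses as JSON. A minimal helper-style
# sketch of that pattern (hypothetical, not part of the diff):
import json
from typing import Optional, Tuple

def accumulate_sse_chunk(chunk: str, buffer: str) -> Tuple[str, Optional[dict]]:
    """Fold one streamed chunk into the buffer; return (new_buffer, event or None)."""
    if chunk.startswith("data:"):
        buffer = chunk[5:].strip()       # a new event payload starts here
    elif "event: ping" in chunk:
        return buffer, None              # keep-alive, nothing to parse
    else:
        buffer += chunk                  # continuation of a partial payload
    try:
        return "", json.loads(buffer)    # payload complete
    except json.JSONDecodeError:
        return buffer, None              # still incomplete, keep buffering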
| | | upload_files = receive_message.get('upload_files', []) |
| | | title = receive_message.get('title', "") |
| | | workflow_type = receive_message.get('workflow', 1) |
| | | if not upload_files: |
| | | await websocket.send_json({"message": "Invalid request", "type": "error"}) |
| | | continue |
| | | sub_titles = receive_message.get('sub_titles', "") |
| | | title_number = receive_message.get('title_number', 8) |
| | | title_style = receive_message.get('title_style', "") |
| | | title_query = receive_message.get('title_query', "") |
| | | if upload_files: |
| | | title_query = "start" |
| | | try: |
| | | session = SessionService(db).create_session( |
| | | chat_id, |
| | |
| | | if workflow_type == 2: |
| | | inputs["file_list"] = files |
| | | inputs["Completion_of_main_indicators"] = title |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT) |
| | | inputs["sub_titles"] = sub_titles |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT_TITLE) |
| | | if not token: |
| | | await websocket.send_json( |
| | | {"message": "Invalid token document_to_cleaning", "type": "error"}) |
| | | elif workflow_type == 3: |
| | | inputs["file_list"] = files |
| | | inputs["number_of_title"] = title_number |
| | | inputs["title_style"] = title_style |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_TITLE) |
| | | if not token: |
| | | await websocket.send_json( |
| | | {"message": "Invalid token document_to_title", "type": "error"}) |
| | | |
| | | complete_response = "" |
| | | async for rag_response in dify_service.workflow(token, current_user.id, inputs): |
| | | # print(rag_response) |
| | | try: |
| | | if rag_response[:5] == "data:": |
| | | # if so, strip the first 5 characters and trim surrounding whitespace |
| | | complete_response = rag_response[5:].strip() |
| | | elif "event: ping" in rag_response: |
| | | continue |
| | | else: |
| | | # otherwise, keep it as is |
| | | complete_response += rag_response |
| | | if workflow_type == 1 or workflow_type == 2: |
| | | async for rag_response in dify_service.workflow(token, current_user.id, inputs): |
| | | # print(rag_response) |
| | | try: |
| | | data = json.loads(complete_response) |
| | | complete_response = "" |
| | | if data.get("event") == "node_started" or data.get("event") == "node_finished": # "event": "message_end" |
| | | if "data" not in data or not data["data"]: # 信息过滤 |
| | | logger.error("非法数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | else: # normal output |
| | | if rag_response[:5] == "data:": |
| | | # if so, strip the first 5 characters and trim surrounding whitespace |
| | | complete_response = rag_response[5:].strip() |
| | | elif "event: ping" in rag_response: |
| | | continue |
| | | else: |
| | | # otherwise, keep it as is |
| | | complete_response += rag_response |
| | | try: |
| | | data = json.loads(complete_response) |
| | | complete_response = "" |
| | | if data.get("event") == "node_started" or data.get("event") == "node_finished": # "event": "message_end" |
| | | if "data" not in data or not data["data"]: # 信息过滤 |
| | | logger.error("非法数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | else: # normal output |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | elif isinstance(answer, dict): |
| | | |
| | | message = answer.get("title", "") |
| | | |
| | | result = {"message": message, "type": "system"} |
| | | elif data.get("event") == "workflow_finished": |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | elif isinstance(answer, dict): |
| | | download_url = "" |
| | | outputs = answer.get("outputs", {}) |
| | | if outputs: |
| | | message = outputs.get("output", "") |
| | | download_url = outputs.get("download_url", "") |
| | | else: |
| | | message = answer.get("error", "") |
| | | |
| | | message = answer.get("title", "") |
| | | result = {"message": message, "type": "message", "download_url": download_url} |
| | | try: |
| | | SessionService(db).update_session(chat_id, |
| | | message={"role": "assistant", |
| | | "content": { |
| | | "answer": message, |
| | | "download_url": download_url}}, |
| | | conversation_id=data.get( |
| | | "conversation_id")) |
| | | except Exception as e: |
| | | logger.error("保存dify的会话异常!") |
| | | logger.error(e) |
| | | await websocket.send_json(result) |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | |
| | | result = {"message": message, "type": "system"} |
| | | elif data.get("event") == "workflow_finished": |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | elif isinstance(answer, dict): |
| | | download_url = "" |
| | | outputs = answer.get("outputs", {}) |
| | | if outputs: |
| | | message = outputs.get("output", "") |
| | | download_url = outputs.get("download_url", "") |
| | | else: |
| | | message = answer.get("error", "") |
| | | |
| | | result = {"message": message, "type": "message", "download_url": download_url} |
| | | else: |
| | | continue |
| | | try: |
| | | await websocket.send_json(result) |
| | | except Exception as e: |
| | | logger.error(e) |
| | | logger.error("返回客户端消息异常!") |
| | | complete_response = "" |
| | | except json.JSONDecodeError as e: |
| | | print(f"Error decoding JSON: {e}") |
| | | # print(f"Response text: {text}") |
| | | except Exception as e2: |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | elif workflow_type == 3: |
| | | image_list = [] |
| | | # print(inputs) |
| | | complete_response = "" |
| | | async for rag_response in dify_service.chat(token, current_user.id, title_query, [], |
| | | conversation_id, inputs): |
| | | print(rag_response) |
| | | try: |
| | | if rag_response[:5] == "data:": |
| | | # if so, strip the first 5 characters and trim surrounding whitespace |
| | | complete_response = rag_response[5:].strip() |
| | | elif "event: ping" in rag_response: |
| | | continue |
| | | else: |
| | | # otherwise, keep it as is |
| | | complete_response += rag_response |
| | | try: |
| | | data = json.loads(complete_response) |
| | | complete_response = "" |
| | | if data.get("event") == "node_started" or data.get( |
| | | "event") == "node_finished": # "event": "message_end" |
| | | if "data" not in data or not data["data"]: # 信息过滤 |
| | | logger.error("非法数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | else: # normal output |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | elif isinstance(answer, dict): |
| | | |
| | | message = answer.get("title", "") |
| | | |
| | | result = {"message": message, "type": "system"} |
| | | elif data.get("event") == "message": |
| | | message = data.get("answer", "") |
| | | # try: |
| | | # msg_dict = json.loads(answer) |
| | | # message = msg_dict.get("output", "") |
| | | # except Exception as e: |
| | | # print(e) |
| | | # continue |
| | | result = {"message": message, "type": "message", |
| | | "download_url": ""} |
| | | try: |
| | | SessionService(db).update_session(chat_id, |
| | | message={"role": "assistant", |
| | | "content": { |
| | | "answer": message, |
| | | "download_url": download_url}}, |
| | | "download_url": ""}}, |
| | | conversation_id=data.get( |
| | | "conversation_id")) |
| | | except Exception as e: |
| | | logger.error("保存dify的会话异常!") |
| | | logger.error(e) |
| | | await websocket.send_json(result) |
| | | # try: |
| | | # await websocket.send_json(result) |
| | | # except Exception as e: |
| | | # logger.error(e) |
| | | # logger.error("返回客户端消息异常!") |
| | | |
| | | elif data.get("event") == "message_end": |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | |
| | | |
| | | else: |
| | | continue |
| | | try: |
| | | await websocket.send_json(result) |
| | | except Exception as e: |
| | | logger.error(e) |
| | | logger.error("返回客户端消息异常!") |
| | | complete_response = "" |
| | | except json.JSONDecodeError as e: |
| | | print(f"Error decoding JSON: {e}") |
| | | # print(f"Response text: {text}") |
| | | except Exception as e2: |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | else: |
| | | continue |
| | | try: |
| | | await websocket.send_json(result) |
| | | except Exception as e: |
| | | logger.error(e) |
| | | logger.error("dify返回客户端消息异常!") |
| | | complete_response = "" |
| | | except json.JSONDecodeError as e: |
| | | print(f"Error decoding JSON: {e}") |
| | | # print(f"Response text: {text}") |
| | | except Exception as e2: |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | elif chat_type == "documentIa": |
| | | # print(122112) |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_IA_QUESTIONS) |
| | |
| | | answer_str = "" |
| | | complete_response = "" |
| | | async for rag_response in dify_service.chat(token, current_user.id, question, files, |
| | | conversation_id): |
| | | # print(rag_response) |
| | | conversation_id, {}): |
| | | print(rag_response) |
| | | try: |
| | | if rag_response[:5] == "data:": |
| | | # if so, strip the first 5 characters and trim surrounding whitespace |
| | |
| | | elif isinstance(answer, dict): |
| | | |
| | | message = answer.get("title", "") |
| | | if answer.get("status") == "failed": |
| | | message = answer.get("error") |
| | | |
| | | result = {"message": message, "type": "system"} |
| | | continue |
| | | # continue |
| | | elif data.get("event") == "message": # "event": "message_end" |
| | | # normal output |
| | | answer = data.get("answer", "") |
| | | result = {"message": answer, "type": "stream"} |
| | | elif data.get("event") == "error": |
| | | answer = data.get("message", "") |
| | | result = {"message": answer, "type": "system"} |
| | | elif data.get("event") == "workflow_finished": |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | # result = {"message": "", "type": "close", "download_url": ""} |
| | | elif isinstance(answer, dict): |
| | | download_url = "" |
| | | outputs = answer.get("outputs", {}) |
| | |
| | | else: |
| | | message = answer.get("error", "") |
| | | |
| | | # result = {"message": message, "type": "message", |
| | | # "download_url": download_url} |
| | | result = {"message": message, "type": "system", |
| | | "download_url": download_url} |
| | | try: |
| | | SessionService(db).update_session(chat_id, |
| | | message={"role": "assistant", |
| | |
| | | logger.error("保存dify的会话异常!") |
| | | logger.error(e) |
| | | # await websocket.send_json(result) |
| | | continue |
| | | # continue |
| | | elif data.get("event") == "message_end": |
| | | result = {"message": "", "type": "close"} |
| | | |
| | |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | elif chat_type == "paperTalk": |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_PAPER) |
| | | # print(token) |
| | | if not token: |
| | | await websocket.send_json({"message": "Invalid token", "type": "error"}) |
| | | |
| | | while True: |
| | | conversation_id = "" |
| | | inputs = {} |
| | | # print(4343) |
| | | receive_message = await websocket.receive_json() |
| | | print(f"Received from client {chat_id}: {receive_message}") |
| | | if "difficulty" in receive_message: |
| | | inputs["Question_Difficulty"] = receive_message["difficulty"] |
| | | if "is_paper" in receive_message: |
| | | inputs["Generate_test_paper"] = receive_message["is_paper"] |
| | | if "single_choice" in receive_message: |
| | | inputs["Multiple_choice_questions"] = receive_message["single_choice"] |
| | | if "gap_filling" in receive_message: |
| | | inputs["Fill_in_blank"] = receive_message["gap_filling"] |
| | | if "true_or_false" in receive_message: |
| | | inputs["true_or_false"] = receive_message["true_or_false"] |
| | | if "multiple_choice" in receive_message: |
| | | inputs["Multiple_Choice"] = receive_message["multiple_choice"] |
| | | if "easy_question" in receive_message: |
| | | inputs["Short_Answer_Questions"] = receive_message["easy_question"] |
| | | if "case_questions" in receive_message: |
| | | inputs["Case_Questions"] = receive_message["case_questions"] |
| | | if "key_words" in receive_message: |
| | | inputs["key_words"] = receive_message["key_words"] |
| | | upload_files = receive_message.get('upload_files', []) |
| | | question = receive_message.get('message', "") |
| | | session_log = SessionService(db).get_session_by_id(chat_id) |
| | | if not session_log and not upload_files: |
| | | await websocket.send_json({"message": "需要上传文档!", "type": "error"}) |
| | | continue |
| | | try: |
| | | session = SessionService(db).create_session( |
| | | chat_id, |
| | | question if question else "开始出题", |
| | | agent_id, |
| | | AgentType.DIFY, |
| | | current_user.id |
| | | ) |
| | | conversation_id = session.conversation_id |
| | | except Exception as e: |
| | | logger.error(e) |
| | | # complete_response = "" |
| | | |
| | | files = [] |
| | | for fileId in upload_files: |
| | | files.append({ |
| | | "type": "document", |
| | | "transfer_method": "local_file", |
| | | "url": "", |
| | | "upload_file_id": fileId |
| | | }) |
| | | if files: |
| | | inputs["upload_files"] = files |
| | | # print(inputs) |
| | | if not question and not inputs: |
| | | await websocket.send_json({"message": "Invalid request", "type": "error"}) |
| | | continue |
| | | |
| | | if not question: |
| | | question = "开始出题" |
| | | complete_response = "" |
| | | async for rag_response in dify_service.chat(token, current_user.id, question, files, |
| | | conversation_id, inputs): |
| | | # print(rag_response) |
| | | try: |
| | | if rag_response[:5] == "data:": |
| | | # if so, strip the first 5 characters and trim surrounding whitespace |
| | | complete_response = rag_response[5:].strip() |
| | | elif "event: ping" in rag_response: |
| | | continue |
| | | else: |
| | | # otherwise, keep it as is |
| | | complete_response += rag_response |
| | | try: |
| | | data = json.loads(complete_response) |
| | | # print(data) |
| | | if data.get("event") == "node_started" or data.get( |
| | | "event") == "node_finished": # "event": "message_end" |
| | | if "data" not in data or not data["data"]: # 信息过滤 |
| | | logger.error("非法数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | else: # normal output |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | continue |
| | | elif isinstance(answer, dict): |
| | | |
| | | message = answer.get("title", "") |
| | | |
| | | result = {"message": message, "type": "system"} |
| | | # continue |
| | | elif data.get("event") == "message": # "event": "message_end" |
| | | # normal output |
| | | answer = data.get("answer", "") |
| | | result = {"message": answer, "type": "stream"} |
| | | elif data.get("event") == "error": |
| | | answer = data.get("message", "") |
| | | result = {"message": answer, "type": "system"} |
| | | elif data.get("event") == "workflow_finished": |
| | | answer = data.get("data", "") |
| | | if isinstance(answer, str): |
| | | logger.error("----------------未知数据--------------------") |
| | | logger.error(data) |
| | | result = {"message": "", "type": "close", "download_url": ""} |
| | | elif isinstance(answer, dict): |
| | | download_url = "" |
| | | outputs = answer.get("outputs", {}) |
| | | if outputs: |
| | | message = outputs.get("answer", "") |
| | | download_url = outputs.get("download_url", "") |
| | | else: |
| | | message = answer.get("error", "") |
| | | |
| | | result = {"message": message, "type": "system", |
| | | "download_url": download_url} |
| | | try: |
| | | SessionService(db).update_session(chat_id, |
| | | message={"role": "assistant", |
| | | "content": { |
| | | "answer": message, |
| | | "download_url": download_url}}, |
| | | conversation_id=data.get( |
| | | "conversation_id")) |
| | | except Exception as e: |
| | | logger.error("保存dify的会话异常!") |
| | | logger.error(e) |
| | | # await websocket.send_json(result) |
| | | # continue |
| | | elif data.get("event") == "message_end": |
| | | result = {"message": "", "type": "close"} |
| | | |
| | | else: |
| | | continue |
| | | try: |
| | | await websocket.send_json(result) |
| | | except Exception as e: |
| | | logger.error(e) |
| | | logger.error("返回客户端消息异常!") |
| | | complete_response = "" |
| | | except json.JSONDecodeError as e: |
| | | print(f"Error decoding JSON: {e}") |
| | | # print(f"Response text: {text}") |
| | | except Exception as e2: |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | |
| | | |
| | | # start the tasks that handle client messages |
| | | tasks = [ |
| | |
| | | from fastapi import APIRouter, File, UploadFile, Form, BackgroundTasks, Depends |
| | | import random |
| | | import string |
| | | |
| | | from fastapi import APIRouter, File, UploadFile, Form, BackgroundTasks, Depends, Request |
| | | from fastapi.responses import JSONResponse, FileResponse |
| | | from sqlalchemy.orm import Session |
| | | from starlette.websockets import WebSocket |
| | | |
| | | from app.api import get_current_user, get_current_user_websocket |
| | | from app.models import UserModel |
| | | from app.api import get_current_user, get_current_user_websocket, Response |
| | | from app.models import UserModel, AgentType |
| | | from app.models.base_model import get_db |
| | | from app.service.session import SessionService |
| | | from app.utils.excelmerge.conformity import run_conformity |
| | | import shutil |
| | | import os |
| | |
| | | return os.path.join(path, userid) |
| | | |
| | | |
| | | @router.post('/excel/upload') |
| | | def generate_db_id(prefix: str = "me") -> str: |
| | | random_part = ''.join(random.choices(string.ascii_letters + string.digits, k=13)) |
| | | return prefix + random_part |
| | | |
| | | |
| | | def db_create_session(db: Session, user_id: str): |
| | | db_id = generate_db_id() |
| | | session = SessionService(db).create_session( |
| | | db_id, |
| | | "合并Excel", |
| | | "basic_excel_merge", |
| | | AgentType.BASIC, |
| | | int(user_id) |
| | | ) |
| | | return session |
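# Illustrative only (a live SQLAlchemy session `db` and the current user are
# assumed to be in scope): generate_db_id() yields a 15-character id such as
# "meAb3dE5fG7hI9k", and db_create_session() records an Excel-merge session
# under such an id for the given user.
example_id = generate_db_id()                          # e.g. "meAb3dE5fG7hI9k"
session = db_create_session(db, str(current_user.id))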
| | | |
| | | |
| | | @router.post('/excel/upload', response_model=Response) |
| | | async def upload_file(files: list[UploadFile] = File(...), current_user: UserModel = Depends(get_current_user)): |
| | | user_id = str(current_user.id) |
| | | if not any(file.filename for file in files): |
| | | return JSONResponse(content={"error": "没有文件部分"}, status_code=400) |
| | | return Response(code=400, msg="没有文件部分", data={}) |
| | | if not user_id: |
| | | return JSONResponse(content={"error": "缺少参数user_id"}, status_code=400) |
| | | return Response(code=400, msg="缺少参数user_id", data={}) |
| | | user_source = user_file_path(user_id, SOURCE_FILES_PATH) |
| | | user_excel = EXCEL_FILES_PATH |
| | | |
| | |
| | | |
| | | save_path_list = [] |
| | | for file in files: |
| | | if file.filename == '': |
| | | return JSONResponse(content={"error": "没有选择文件"}, status_code=400) |
| | | if file and allowed_file(file.filename): |
| | | save_path = os.path.join(user_source, file.filename) |
| | | with open(save_path, 'wb') as buffer: |
| | | shutil.copyfileobj(file.file, buffer) |
| | | save_path_list.append(save_path) |
| | | else: |
| | | return JSONResponse(content={"error": "不允许的文件类型"}, status_code=400) |
| | | return JSONResponse(content={"code": 200, "msg": "", "data": {}}, status_code=200) |
| | | return Response(code=400, msg="不允许的文件类型", data={}) |
| | | return Response(code=200, msg="上传成功", data={}) |
| | | |
| | | |
| | | # ws://localhost:9201/api/document/ws/excel |
| | | @router.websocket("/ws/excel") |
| | | async def ws_excel(websocket: WebSocket, current_user: UserModel = Depends(get_current_user_websocket)): |
| | | async def ws_excel(websocket: WebSocket, |
| | | current_user: UserModel = Depends(get_current_user_websocket), |
| | | db: Session = Depends(get_db)): |
| | | await websocket.accept() |
| | | user_id = str(current_user.id) |
| | | |
| | |
| | | if merge_file is not None: |
| | | |
| | | await websocket.send_json({ |
| | | "message": "文档合并成功!", |
| | | "type": "stream", |
| | | "file_name": f"{merge_file}.xlsx", |
| | | "download_url": f"./api/document/download/{merge_file}.xlsx" |
| | | "files": [ |
| | | { |
| | | "file_name": "Excel", |
| | | "file_url": f"./api/document/download/{merge_file}.xlsx?file_type=excel", |
| | | } |
| | | ] |
| | | }) |
| | | await websocket.send_json({ |
| | | "message": "文档合并成功!", |
| | | "message": "合并成功", |
| | | "type": "close", |
| | | }) |
| | | # create the session record |
| | | session = db_create_session(db, user_id) |
| | | # update the session record |
| | | if session: |
| | | session_id = session.id |
| | | new_message = { |
| | | "role": "assistant", |
| | | "content": { |
| | | "message": "\u5408\u5e76\u6210\u529f", |
| | | "type": "message", |
| | | "file_name": "Excel", |
| | | "file_url": f"/api/document/download/{merge_file}.xlsx?file_type=excel" |
| | | } |
| | | } |
| | | session_service = SessionService(db) |
| | | session_service.update_session(session_id, message=new_message) |
| | | else: |
| | | await websocket.send_json({"error": "合并失败", "type": "stream", "files": []}) |
| | | await websocket.close() |
| | | else: |
| | | print(f"Received data: {data}") |
| | | await websocket.send_json({"error": "未知指令", "data": str(data)}) |
| | | await websocket.close() |
| | | except Exception as e: |
| | | await websocket.send_json({"error": str(e)}) |
| | | await websocket.close() |
| | | |
| | | |
| | | @router.get("/download/{file_full_name}") |
| | | async def download_file(background_tasks: BackgroundTasks, file_full_name: str): |
| | | async def download_file(file_full_name: str): |
| | | file_name = os.path.basename(file_full_name) |
| | | user_excel = EXCEL_FILES_PATH |
| | | file_path = os.path.join(user_excel, file_full_name) |
| | | |
| | | if not os.path.exists(file_path): |
| | | return JSONResponse(content={"error": "文件不存在"}, status_code=404) |
| | | |
| | | def delete_file(): |
| | | try: |
| | | os.unlink(file_path) |
| | | except OSError as e: |
| | | print(f"Deleting file error") |
| | | return FileResponse( |
| | | path=file_path, |
| | | filename="Excel.xlsx", |
| | | media_type='application/octet-stream', |
| | | ) |
| | | # def delete_file(): |
| | | # try: |
| | | # os.unlink(file_path) |
| | | # except OSError as e: |
| | | # print(f"Deleting file error") |
| | | |
| | | # delete the generated file once the download has completed |
| | | background_tasks.add_task(delete_file) |
| | | return FileResponse(path=file_path, filename=file_name, |
| | | media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") |
| | | # background_tasks.add_task(delete_file) |
| | | # return FileResponse(path=file_path, filename=file_name, |
| | | # media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") |
| | |
| | | |
| | | from app.api import Response, get_current_user, ResponseList |
| | | from app.config.config import settings |
| | | from app.config.const import DOCUMENT_TO_REPORT, IMAGE_TO_TEXT |
| | | from app.config.const import DOCUMENT_TO_REPORT, IMAGE_TO_TEXT, DOCUMENT_TO_REPORT_TITLE, DOCUMENT_IA_QUESTIONS, \ |
| | | DOCUMENT_TO_PAPER |
| | | from app.models import MenuCapacityModel |
| | | from app.models.agent_model import AgentType, AgentModel |
| | | from app.models.base_model import get_db |
| | | from app.models.user_model import UserModel |
| | |
| | | db: Session = Depends(get_db), |
| | | current_user: UserModel = Depends(get_current_user) |
| | | ): |
| | | agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first() |
| | | agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first() |
| | | if not agent: |
| | | return Response(code=404, msg="Agent not found") |
| | | return ResponseList(code=404, msg="Agent not found") |
| | | agent_type = int(agent.capacity_type) |
| | | |
| | | # check the agent type to decide whether multiple files may be uploaded |
| | | if agent.agent_type in [AgentType.RAGFLOW, AgentType.BISHENG]: |
| | | if agent_type in [AgentType.RAGFLOW, AgentType.BISHENG]: |
| | | if len(file) > 1: |
| | | return Response(code=400, msg="这个智能体只支持传单个文件") |
| | | |
| | | if agent.agent_type == AgentType.RAGFLOW or agent.agent_type == AgentType.BISHENG: |
| | | if agent_type == AgentType.RAGFLOW or agent_type == AgentType.BISHENG: |
| | | file = file[0] |
| | | # read the uploaded file content |
| | | try: |
| | |
| | | except Exception as e: |
| | | return Response(code=400, msg=str(e)) |
| | | |
| | | if agent.agent_type == AgentType.RAGFLOW: |
| | | if agent_type == AgentType.RAGFLOW: |
| | | token = await get_ragflow_token(db, current_user.id) |
| | | ragflow_service = RagflowService(base_url=settings.fwr_base_url) |
| | | # check whether the session exists; if not, create it first |
| | |
| | | # for multiple files, all doc_ids may need to be collected |
| | | return Response(code=200, msg="", data={"doc_ids": doc_ids, "file_name": file.filename}) |
| | | |
| | | elif agent.agent_type == AgentType.BISHENG: |
| | | elif agent_type == AgentType.BISHENG: |
| | | bisheng_service = BishengService(base_url=settings.sgb_base_url) |
| | | try: |
| | | token = await get_bisheng_token(db, current_user.id) |
| | |
| | | result["file_name"] = file.filename |
| | | return Response(code=200, msg="", data=result) |
| | | |
| | | elif agent.agent_type == AgentType.BASIC: |
| | | elif agent_type == AgentType.BASIC: |
| | | if agent_id == "basic_excel_talk": |
| | | # handle the single-file case |
| | | file_list = file |
| | |
| | | # service = BasicService(base_url=settings.basic_paper_url) |
| | | # result = await service.paper_file_upload(chat_id, file.filename, file_content) |
| | | |
| | | elif agent.agent_type == AgentType.DIFY: |
| | | elif agent_type == AgentType.DIFY: |
| | | dify_service = DifyService(base_url=settings.dify_base_url) |
| | | if agent.type == "imageTalk": |
| | | if agent.chat_type == "imageTalk": |
| | | token = DfTokenDao(db).get_token_by_id(IMAGE_TO_TEXT) |
| | | if not token: |
| | | raise HTTPException(status_code=500, detail="获取token失败,image_and_text_conversion!") |
| | |
| | | data = await dify_service.upload(token, file.filename, file_content, current_user.id) |
| | | except Exception as e: |
| | | raise HTTPException(status_code=500, detail=str(e)) |
| | | elif agent.type == "reportWorkflow" or agent.type == "documentIa": |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_REPORT) |
| | | elif agent.chat_type == "reportWorkflow" or agent.chat_type == "documentIa" or agent.chat_type == "paperTalk": |
| | | token_dict = { |
| | | "reportWorkflow": DOCUMENT_TO_REPORT, |
| | | "documentIa": DOCUMENT_IA_QUESTIONS, |
| | | "paperTalk": DOCUMENT_TO_PAPER, |
| | | } |
| | | token = DfTokenDao(db).get_token_by_id(token_dict[agent.chat_type]) |
| | | if not token: |
| | | raise HTTPException(status_code=500, detail="获取token失败,document_to_report!") |
| | | result = [] |
| | |
| | | file_type: Optional[str] = Query(None, description="Optional file type for basic agents"), |
| | | db: Session = Depends(get_db) |
| | | ): |
| | | agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first() |
| | | # agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first() |
| | | agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first() |
| | | |
| | | if not agent: |
| | | return Response(code=404, msg="Agent not found") |
| | | |
| | | if agent.agent_type == AgentType.BISHENG: |
| | | agent_type = int(agent.capacity_type) |
| | | if agent_type == AgentType.BISHENG: |
| | | url = urllib.parse.unquote(url) |
| | | # extract the file name from the URL |
| | | parsed_url = urllib.parse.urlparse(url) |
| | | filename = urllib.parse.unquote(parsed_url.path.split('/')[-1]) |
| | | url = url.replace("http://minio:9000", settings.sgb_base_url) |
| | | elif agent.agent_type == AgentType.RAGFLOW: |
| | | elif agent_type == AgentType.RAGFLOW: |
| | | if not doc_id: |
| | | return Response(code=400, msg="doc_id is required") |
| | | url = f"{settings.fwr_base_url}/v1/document/get/{doc_id}" |
| | | filename = doc_name |
| | | elif agent.agent_type == AgentType.BASIC: |
| | | elif agent_type == AgentType.BASIC: |
| | | if agent_id == "basic_excel_talk": |
| | | return await download_basic_file(file_id, file_type) |
| | | |
| | |
| | | from app.models.base_model import get_db |
| | | from app.models.user import PageParameter, UserStatus, UserInfo, LoginData, UserPassword, UserDept |
| | | from app.models.user_model import UserModel |
| | | from app.service.auth import is_valid_password, verify_password |
| | | from app.service.auth import is_valid_password, verify_password, password_rsa |
| | | from app.service.user import get_user_list, edit_user_status, delete_user_data, create_user, edit_user_data, \ |
| | | edit_user_pwd, get_user_info, get_user_routers, get_user_menus, get_user_permission, get_user_dept, change_user_pwd, \ |
| | | user_data_service, edit_user_dept |
| | |
| | | user_info = db.query(UserModel).filter(UserModel.id==current_user.id).first() |
| | | if not user_info: |
| | | return Response(code=401, msg="Incorrect change password !") |
| | | if not verify_password(user.oldPassword, user_info.hashed_password): |
| | | old_password = await password_rsa(user.oldPassword) |
| | | new_password = await password_rsa(user.newPassword) |
| | | if not verify_password(old_password, user_info.hashed_password): |
| | | return Response(code=400, msg="Incorrect password !") |
| | | if not is_valid_password(user.newPassword): |
| | | if not is_valid_password(new_password): |
| | | return Response(code=400, msg="The password must be at least 8 and contain both numbers and letters") |
| | | is_edit = await change_user_pwd(db, user_info.id, user.newPassword) |
| | | is_edit = await change_user_pwd(db, user_info.id, new_password) |
| | | if not is_edit: |
| | | return Response(code=500, msg="user pwd change failure", data={}) |
| | | return Response(code=200, msg="user pwd change success", data={}) |
| | |
| | | DOCUMENT_TO_REPORT = "document_to_report" |
| | | IMAGE_TO_TEXT = "image_and_text_conversion" |
| | | DOCUMENT_IA_QUESTIONS = "document_ia_questions" |
| | | DOCUMENT_TO_REPORT_TITLE = "document_to_report_title" |
| | | DOCUMENT_TO_TITLE = "document_to_title" |
| | | DOCUMENT_TO_PAPER = "document_to_paper" |
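# These keys identify the per-workflow Dify app tokens that the chat handlers
# look up through DfTokenDao, e.g. (a sketch of the pattern used above; the
# handlers themselves report a missing token over the websocket instead):
token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_PAPER)
if not token:
    raise RuntimeError("no Dify token configured for document_to_paper")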
| | | |
| | | |
| | | ### --------- file path------------------ |
| | | ENV_CONF_PATH = "app/config/env_conf" |
| | | APP_SERVICE_PATH = "app/service/" |
| | | |
| | |
| | | { |
| | | "image_and_text_conversion": "", |
| | | "document_to_report": "", |
| | | "document_to_cleaning": "" |
| | | "document_to_cleaning": "", |
| | | "document_to_paper": "" |
| | | } |
| | |
| | | "file_upload":{ |
| | | "enabled": false |
| | | } |
| | | } |
| | | }, |
| | | "dialogType": "3", |
| | | "mode": "agent-basic" |
| | | }, |
| | | { |
| | | "id": "basic_excel_talk", |
| | |
| | | "file_upload":{ |
| | | "enabled": false |
| | | } |
| | | } |
| | | }, |
| | | "dialogType": "3", |
| | | "mode": "agent-basic" |
| | | }, |
| | | { |
| | | "id": "basic_question_talk", |
| | |
| | | "file_upload":{ |
| | | "enabled": false |
| | | } |
| | | } |
| | | }, |
| | | "dialogType": "3", |
| | | "mode": "agent-basic" |
| | | }, |
| | | { |
| | | "id": "basic_paper_talk", |
| | | "name": "文档出卷", |
| | | "description": "文档出卷", |
| | | "icon": "intellFrame4", |
| | | "agentType": "paperTalk", |
| | | "dialogType": "3", |
| | | "mode": "agent-basic" |
| | | } |
| | | ], |
| | | "bs": [] |
| | |
| | | "seq": 0, |
| | | "target": null, |
| | | "canbdeeleted": null, |
| | | "resource_type_id": "3", |
| | | "resource_type_id": "1", |
| | | "resource_id": "0642e4a6-3d48-4635-ba2a-bf4e39c351ed", |
| | | "status": "1", |
| | | "hidden": 0 |
| | |
| | | create_date = Column(DateTime) # creation time |
| | | create_time = Column(BigInteger) |
| | | update_date = Column(DateTime) # update time |
| | | update_time = Column(BigInteger) |
| | | update_time = Column(BigInteger, index=True) |
| | | tenant_id = Column(Integer) # creator |
| | | dialog_id = Column(String(32)) |
| | | name = Column(String(255)) # name |
| | |
| | | agent_id = Column(String(255)) |
| | | agent_type = Column(SQLAlchemyEnum(AgentType), nullable=False) # currently only basic agents are stored; ragflow and bisheng agents are fetched via their APIs |
| | | create_date = Column(DateTime, default=current_time) # creation time, defaults to the current time in the local timezone |
| | | update_date = Column(DateTime, default=current_time, onupdate=current_time) # update time, defaults to the current local time and auto-updates on modification |
| | | update_date = Column(DateTime, default=current_time, onupdate=current_time, index=True) # update time, defaults to the current local time and auto-updates on modification |
| | | tenant_id = Column(Integer) # creator |
| | | message = Column(TEXT) # description |
| | | conversation_id = Column(String(64)) |
| | |
| | | else: |
| | | query = query.filter(DialogModel.status != Dialog_STATSU_DELETE) |
| | | id_list = [] |
| | | # if label: |
| | | # id_list = [i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id==label).all()] |
| | | if user.permission != "admin": |
| | | dia_list = [j.id for i in user.groups for j in i.dialogs] |
| | | query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list))) |
| | | # else: |
| | | if label: |
| | | id_list = [i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id == label).all()] |
| | | if user.permission != "admin": |
| | | dia_list = [j.id for i in user.groups for j in i.dialogs if not label or j.id in id_list] |
| | | query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list))) |
| | | else: |
| | | if label: |
| | | query = query.filter(or_(DialogModel.id.in_(id_list))) |
| | | query = query.filter(DialogModel.id.in_(id_list)) |
| | | |
| | | if keyword: |
| | | query = query.filter(DialogModel.name.like('%{}%'.format(keyword))) |
| | |
| | | if user is None: |
| | | return {"rows": []} |
| | | query = db.query(DialogModel).filter(DialogModel.status != Dialog_STATSU_DELETE) |
| | | id_list = [] |
| | | if user.permission != "admin": |
| | | dia_list = [j.id for i in user.groups for j in i.dialogs] |
| | | query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list))) |
| | | |
| | | if label: |
| | | id_list = set( |
| | | [i.object_id for i in db.query(LabelWorkerModel).filter(LabelWorkerModel.label_id.in_(label)).all()]) |
| | | if user.permission != "admin": |
| | | dia_list = [j.id for i in user.groups for j in i.dialogs if not label or j.id in id_list] |
| | | query = query.filter(or_(DialogModel.tenant_id == user_id, DialogModel.id.in_(dia_list))) |
| | | else: |
| | | if label: |
| | | query = query.filter(or_(DialogModel.id.in_(id_list))) |
| | | query = query.filter(DialogModel.id.in_(id_list)) |
| | | |
| | | if keyword: |
| | | query = query.filter(DialogModel.name.like('%{}%'.format(keyword))) |
| | |
| | | |
| | | |
| | | |
| | | async def chat(self, token: str, user_id: int, message: str, files: [], conversation_id: str): |
| | | |
| | | async def chat(self, token: str, user_id: int, message: str, files: [], conversation_id: str, inputs: dict): |
| | | target_url = f"{self.base_url}/v1/chat-messages" |
| | | data = { |
| | | "inputs": {}, |
| | | "inputs": inputs, |
| | | "query": message, |
| | | "response_mode": "streaming", |
| | | "conversation_id": conversation_id, |
| | |
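# chat() now forwards an `inputs` dict into the /v1/chat-messages payload.
# Hypothetical caller sketch (the field values are examples only): a plain
# Q&A chat passes an empty dict, while workflow-style chats pass the
# collected form fields.
plain_stream = dify_service.chat(token, current_user.id, question, files, conversation_id, {})
paper_stream = dify_service.chat(token, current_user.id, "开始出题", files, conversation_id,
                                 {"Question_Difficulty": "easy", "key_words": "algebra"})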
| | | from app.service.ragflow import RagflowService |
| | | from app.service.service_token import get_ragflow_token |
| | | from Log import logger |
| | | from sqlalchemy import and_ |
| | | |
| | | |
| | | async def group_list(db, page_size: int, page_index: int, keyword: str): |
| | |
| | | not_group_user = [] |
| | | in_group_user = [] |
| | | user_list = [u.id for u in db.query(GroupModel).filter(GroupModel.id.__eq__(group_id)).first().users] |
| | | for u in db.query(UserModel.id, UserModel.username,UserModel.permission).filter(UserModel.permission!="admin").order_by(UserModel.id.desc()).all(): |
| | | for u in db.query(UserModel.id, UserModel.username, UserModel.permission).filter( |
| | | and_(UserModel.permission != "admin", UserModel.status != 2) |
| | | ).order_by(UserModel.id.desc()).all(): |
| | | if u.id in user_list: |
| | | in_group_user.append({"userId": u.id, "userName": u.username}) |
| | | else: |
| | |
| | | query = db.query(KnowledgeModel).filter(KnowledgeModel.knowledge_type==knowledge) |
| | | if user.permission != "admin": |
| | | klg_list = [j.id for i in user.groups for j in i.knowledges] |
| | | query = query.filter(or_(KnowledgeModel.tenant_id == user_id, KnowledgeModel.id.in_(klg_list))) |
| | | query = query.filter(or_(KnowledgeModel.id.in_(klg_list), KnowledgeModel.tenant_id == str(user_id))) |
| | | |
| | | if location: |
| | | query = query.filter(KnowledgeModel.permission == "team") |
| | | query = query.filter(or_(KnowledgeModel.permission == "team", KnowledgeModel.tenant_id == str(user_id))) |
| | | |
| | | if keyword: |
| | | query = query.filter(KnowledgeModel.name.like('%{}%'.format(keyword))) |
| | | total = query.count() |
| | |
| | | |
| | | async def create_knowledge_service(db, klg_id, name, description, icon, klg_type, user_id): |
| | | try: |
| | | dialog_model = KnowledgeModel(id=klg_id,name=name, description=description,icon=icon, knowledge_type=klg_type, tenant_id=user_id) |
| | | dialog_model = KnowledgeModel(id=klg_id,name=name, description=description,icon=icon, knowledge_type=klg_type, tenant_id=user_id,update_date=datetime.now(),create_date=datetime.now()) |
| | | db.add(dialog_model) |
| | | db.commit() |
| | | db.refresh(dialog_model) |
| | |
| | | knowledge.permission = app_knowledge["permission"] |
| | | knowledge.documents = app_knowledge["doc_num"] |
| | | knowledge.status = app_knowledge["status"] |
| | | knowledge.update_date = datetime.now() |
| | | db.add(knowledge) |
| | | db.commit() |
| | | db.refresh(knowledge) |
| | |
| | | logger.error(e) |
| | | db.rollback() |
| | | # return False |
| | | print(delete_list) |
| | | for label_id in delete_list: |
| | | try: |
| | | LabelWorkerModel(id=label_id).delete() |
| | | db.query(LabelWorkerModel).filter_by(id=label_id).delete() |
| | | db.commit() |
| | | except Exception as e: |
| | | logger.error(e) |
| | | db.rollback() |
| | |
| | | import uuid |
| | | |
| | | from Log import logger |
| | | from app.config.const import DEPT_STATUS_ON |
| | | from app.models import OrganizationModel |
| | | from app.models.resource_model import ResourceModel |
| | | from app.models.role_model import RoleModel |
| | |
| | | total = query.count() |
| | | roles = query.order_by(RoleModel.created_at.desc()).limit(page_size).offset( |
| | | (page_index - 1) * page_size).all() |
| | | return {"total": total, "rows": [role.to_json() for role in roles]} |
| | | return {"total": total, "rows": [role.to_json() for role in roles if role.status == DEPT_STATUS_ON]} |
| | | |
| | | |
| | | async def create_role(db, role_name: str, description: str, role_key, data_scope, user_id): |
| | |
| | | knowledge_dict = {} |
| | | user = db.query(UserModel).filter_by(id=user_id).first() |
| | | parent_id = "" |
| | | # print(111111111111111) |
| | | async def role_resource(role_set, permissions, roles): |
| | | |
| | | async def role_resource_type(role_set, permissions, roles, type): |
| | | nonlocal parent_id |
| | | for role in roles: |
| | | if role.id not in role_set: |
| | |
| | | |
| | | for r in role.resources: |
| | | if r.resource_type_id != "1": |
| | | # if not r.resource_id: |
| | | # parent_id = r.id |
| | | # continue |
| | | permissions[r.id] = r.to_router_dict() |
| | | permission_dict = r.to_router_dict() |
| | | permission_dict["type"] = type |
| | | permissions[r.id] = permission_dict |
| | | |
| | | await role_resource(role_set, permissions, user.roles) |
| | | await role_resource_type(role_set, permissions, user.roles, "user") |
| | | for ogt in user.organizations: |
| | | if ogt.roles: |
| | | await role_resource(role_set, permissions, ogt.roles) |
| | | await role_resource_type(role_set, permissions, ogt.roles, "org") |
| | | parent_ogt = ogt.parent |
| | | while parent_ogt: |
| | | |
| | | if parent_ogt.id not in dept_set: |
| | | await role_resource(role_set, permissions, parent_ogt.roles) |
| | | await role_resource_type(role_set, permissions, parent_ogt.roles, "org") |
| | | dept_set.add(parent_ogt.id) |
| | | |
| | | parent_ogt = parent_ogt.parent |
| | |
| | | async def get_user_dept(db, user_id): |
| | | res = {} |
| | | user = db.query(UserModel).filter_by(id=user_id).first() |
| | | res["rows"] = [i.to_dict() for i in user.organizations] |
| | | res["rows"] = [i.to_dict() for i in user.organizations if i.status == DEPT_STATUS_ON] |
| | | return res |
| | | |
| | | |
| | |
| | | logger.error(e) |
| | | else: |
| | | try: |
| | | dialog = DialogModel(id=agent["id"], name=agent["name"], description=agent["description"], |
| | | icon=agent["icon"], tenant_id=user.id if user else "", dialog_type="3", |
| | | parameters = json.dumps(agent["parameters"])) |
| | | dialog = DialogModel(id=agent["id"], name=agent["name"], description=agent["description"], |
| | | icon=agent["icon"], tenant_id=user.id if user else "", dialog_type=agent["dialogType"], mode=agent["mode"],parameters = json.dumps(agent["parameters"])) |
| | | db.add(dialog) |
| | | db.commit() |
| | | db.refresh(dialog) |
New file |
| | |
| | | import os.path |
| | | from datetime import datetime, timedelta |
| | | |
| | | |
| | | def delete_file_after_delay(): |
| | | path = "data/output" |
| | | data_dir = os.path.dirname(path) |
| | | if not os.path.exists(data_dir): |
| | | return |
| | | if not os.path.exists(path): |
| | | return |
| | | now = datetime.now() |
| | | five_minutes_ago = now - timedelta(minutes=5) |
| | | for filename in os.listdir(path): |
| | | file_path = os.path.join(path, filename) |
| | | if os.path.isfile(file_path): |
| | | creation_time = os.path.getctime(file_path) |
| | | file_creation_datetime = datetime.fromtimestamp(creation_time) |
| | | if file_creation_datetime < five_minutes_ago: |
| | | os.remove(file_path) |
| | | print(f"定时删除文件: {file_path}") |
| | |
| | | ('basic_excel_talk', 6, '智能数据', 'BASIC', 'excelTalk'), |
| | | ('basic_question_talk', 7, '出题组卷', 'BASIC', 'questionTalk'), |
| | | ('9d75142a-66eb-4e23-b7d4-03efe4584915', 8, '小数绘图', 'DIFY', 'imageTalk'), |
| | | ('basic_paper_talk', 9, '文档出卷', 'BASIC', 'paperTalk'), |
| | | ('2f6ddf93-7ba6-4b2d-b991-d96421404600', 9, '文档出卷', 'DIFY', 'paperTalk'), |
| | | ('basic_report_clean', 10, '文档报告', 'DIFY', 'reportWorkflow') |
| | | ] |
| | | |
| | |
| | | import_type_table(db, node) |
| | | print("add resourceType record successfully") |
| | | else: |
| | | print("sync resources successfully") |
| | | print("sync resourcesType successfully") |
| | | if db.query(ResourceModel).count() == 0: |
| | | with open(os.path.join(ENV_CONF_PATH, "resource.json"), 'r', encoding='utf-8') as file: |
| | | json_data = json.load(file) |
| | |
| | | import random |
| | | import shutil |
| | | import string |
| | | from datetime import datetime |
| | | |
| | | from datetime import datetime |
| | | from openpyxl import load_workbook |
| | | |
| | | |
| | |
| | | template_excel.save(output_path) |
| | | template_excel.close() |
| | | |
| | | # after the merge completes, delete the files that are no longer needed |
| | | for filename in os.listdir(file_path): |
| | | file_path_full = os.path.join(file_path, filename) |
| | | try: |
| | | try: |
| | | for filename in os.listdir(file_path): |
| | | file_path_full = os.path.join(file_path, filename) |
| | | if os.path.isfile(file_path_full) or os.path.islink(file_path_full): |
| | | os.unlink(file_path_full) |
| | | elif os.path.isdir(file_path_full): |
| | | shutil.rmtree(file_path_full) |
| | | os.rmdir(file_path) |
| | | except Exception as e: |
| | | print(f"删除文件时发生错误: {e}") |
| | | os.rmdir(file_path) |
| | | except Exception as e: |
| | | print(f"删除文件时发生错误: {e}") |
| | | |
| | | return file_name |
| | | except Exception as e: |
| | |
| | | from app.api.group import group_router |
| | | from app.api.role import role_router |
| | | from app.models.base_model import init_db |
| | | from app.task.delete_execl_file import delete_file_after_delay |
| | | # from app.models.base_model import init_db |
| | | from app.task.fetch_agent import sync_agents, initialize_agents, sync_agents_v2, sync_knowledge, \ |
| | | sync_resources_from_json |
| | |
| | | # sync_agents() |
| | | |
| | | |
| | | |
| | | await sync_default_data() |
| | | # |
| | | sync_agents_v2() |
| | | sync_knowledge() |
| | | sync_resources_from_json() |
| | |
| | | # set up the CORS middleware |
| | | # app.add_middleware( |
| | | # CORSMiddleware, |
| | | # allow_origins=["http://192.168.20.119:9303", "http://192.168.20.119:9301", "http://smartai.com:8293","http://localhost:9303", "http://127.0.0.1:9303","http://localhost:5173","http://192.168.20.158:5173"], |
| | | # allow_origins=["*"], |
| | | # allow_credentials=True, |
| | | # allow_methods=["*"], # allow all methods |
| | | # allow_headers=["*"], # allow all headers |
| | |
| | | scheduler = BackgroundScheduler() |
| | | scheduler.add_job(sync_agents_v2, 'interval', minutes=60, id="sync_resource_data") |
| | | scheduler.add_job(sync_token, 'interval', minutes=5, id="sync_token_1") |
| | | # scheduler.add_job(delete_file_after_delay, 'interval', minutes=10, id="delete_file_after_delay") |
| | | scheduler.start() |
| | | |
| | | app.include_router(auth_router, prefix='/api/auth', tags=["auth"]) |