New file |
| | |
| | | """dialog type update 12-16 |
| | | |
| | | Revision ID: 16efd20cce44 |
| | | Revises: 5a1f335449b9 |
| | | Create Date: 2024-12-16 16:53:25.633589 |
| | | |
| | | """ |
| | | from typing import Sequence, Union |
| | | |
| | | from alembic import op |
| | | import sqlalchemy as sa |
| | | from sqlalchemy.dialects import mysql |
| | | |
| | | # revision identifiers, used by Alembic. |
| | | revision: str = '16efd20cce44' |
| | | down_revision: Union[str, None] = '5a1f335449b9' |
| | | branch_labels: Union[str, Sequence[str], None] = None |
| | | depends_on: Union[str, Sequence[str], None] = None |
| | | |
| | | |
| | | def upgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.alter_column('dialogs', 'tenant_id', |
| | | existing_type=mysql.VARCHAR(length=32), |
| | | type_=sa.String(length=36), |
| | | existing_nullable=True) |
| | | op.drop_column('dialogs', 'agent_id') |
| | | # ### end Alembic commands ### |
| | | |
| | | |
| | | def downgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.add_column('dialogs', sa.Column('agent_id', mysql.VARCHAR(length=36), nullable=True)) |
| | | op.alter_column('dialogs', 'tenant_id', |
| | | existing_type=sa.String(length=36), |
| | | type_=mysql.VARCHAR(length=32), |
| | | existing_nullable=True) |
| | | # ### end Alembic commands ### |
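Both revisions chain off one another (`down_revision` of the dialog-type migration points at the knowledge-permission revision). For reference, a minimal sketch of driving them from Python through Alembic's command API, assuming a standard `alembic.ini` at the project root configured for the same database:

```python
# Sketch only: assumes alembic.ini exists at the project root and already
# points at the project's script_location and database URL.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")

# Apply everything up to (and including) the dialog-type revision.
command.upgrade(cfg, "16efd20cce44")

# Roll back one step, i.e. back to the knowledge-permission revision 5a1f335449b9.
command.downgrade(cfg, "-1")
```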
New file |
| | |
| | | """knowledge add permission |
| | | |
| | | Revision ID: 5a1f335449b9 |
| | | Revises: ba24e02a6610 |
| | | Create Date: 2024-12-16 09:51:41.915994 |
| | | |
| | | """ |
| | | from typing import Sequence, Union |
| | | |
| | | from alembic import op |
| | | import sqlalchemy as sa |
| | | |
| | | |
| | | # revision identifiers, used by Alembic. |
| | | revision: str = '5a1f335449b9' |
| | | down_revision: Union[str, None] = 'ba24e02a6610' |
| | | branch_labels: Union[str, Sequence[str], None] = None |
| | | depends_on: Union[str, Sequence[str], None] = None |
| | | |
| | | |
| | | def upgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.add_column('knowledgebase', sa.Column('permission', sa.String(length=32), nullable=True)) |
| | | # ### end Alembic commands ### |
| | | |
| | | |
| | | def downgrade() -> None: |
| | | # ### commands auto generated by Alembic - please adjust! ### |
| | | op.drop_column('knowledgebase', 'permission') |
| | | # ### end Alembic commands ### |
| | |
| | | from app.api import get_current_user_websocket |
| | | from app.config.config import settings |
| | | from app.config.const import IMAGE_TO_TEXT, DOCUMENT_TO_REPORT, DOCUMENT_TO_CLEANING |
| | | from app.models import MenuCapacityModel |
| | | from app.models.agent_model import AgentModel, AgentType |
| | | from app.models.base_model import get_db |
| | | from app.models.user_model import UserModel |
| | |
| | | tasks = [] |
| | | await websocket.accept() |
| | | print(f"Client {agent_id} connected") |
| | | |
| | | agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first() |
| | | if not agent: |
| | |     # fall back to the agent table when no menu capacity entry matches |
| | |     agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first() |
| | |     if not agent: |
| | |         ret = {"message": "Agent not found", "type": "close"} |
| | |         await websocket.send_json(ret) |
| | |         return |
| | |     agent_type = agent.agent_type |
| | |     chat_type = agent.type |
| | | else: |
| | |     agent_type = agent.capacity_type |
| | |     chat_type = agent.chat_type |
| | | |
| | | if chat_id == "" or chat_id == "0": |
| | | ret = {"message": "Chat ID not found", "type": "close"} |
| | | await websocket.send_json(ret) |
| | | return |
| | | |
| | | if agent_type == AgentType.RAGFLOW: |
| | | ragflow_service = RagflowService(settings.fwr_base_url) |
| | | token = await get_ragflow_token(db, current_user.id) |
| | | try: |
| | |
| | | await websocket.send_json({"message": "Invalid request", "type": "error"}) |
| | | continue |
| | | logger.error(chat_type) |
| | | if chat_type == "questionTalk": |
| | | |
| | | try: |
| | | data = await service.questions_talk(question, chat_id) |
| | |
| | | # token = get_dify_token(db, current_user.id) |
| | | try: |
| | | async def forward_to_dify(): |
| | | if agent.type == "imageTalk": |
| | | if chat_type == "imageTalk": |
| | | token = DfTokenDao(db).get_token_by_id(IMAGE_TO_TEXT) |
| | | if not token: |
| | | await websocket.send_json({"message": "Invalid token", "type": "error"}) |
| | |
| | | result = {"message": f"内部错误: {e2}", "type": "close"} |
| | | await websocket.send_json(result) |
| | | print(f"Error process message of ragflow: {e2}") |
| | | elif chat_type == "reportWorkflow": |
| | | |
| | | token = DfTokenDao(db).get_token_by_id(DOCUMENT_TO_CLEANING) |
| | | if not token: |
| | |
| | | current: int, |
| | | pageSize: int, |
| | | keyword: str="", |
| | | status: str="", |
| | | knowledge: str="1", |
| | | location: str="", |
| | | current_user: UserModel = Depends(get_current_user), |
| | | db=Depends(get_db)): |
| | | if current and not pageSize: |
| | | return ResponseList(code=400, msg="Missing parameters") |
| | | getknowledgelist = await get_knowledge_list(db, current_user.id, keyword, pageSize, current, status, knowledge, location) |
| | | |
| | | return Response(code=200, msg="", data=getknowledgelist) |
| | | |
| | |
| | | from Log import logger |
| | | from app.api import get_current_user_websocket, ResponseList, get_current_user, format_file_url, process_files |
| | | from app.config.config import settings |
| | | from app.models import MenuCapacityModel |
| | | from app.models.agent_model import AgentModel, AgentType |
| | | from app.models.base_model import get_db |
| | | from app.models.user_model import UserModel |
| | |
| | | chat_id: str, |
| | | current_user: UserModel = Depends(get_current_user_websocket), |
| | | db: Session = Depends(get_db)): |
| | | agent = db.query(MenuCapacityModel).filter(MenuCapacityModel.chat_id == agent_id).first() |
| | | if not agent: |
| | |     # fall back to the agent table when no menu capacity entry matches |
| | |     agent = db.query(AgentModel).filter(AgentModel.id == agent_id).first() |
| | |     if not agent: |
| | |         ret = {"message": "Agent not found", "type": "close"} |
| | |         await websocket.send_json(ret) |
| | |         return |
| | |     agent_type = agent.agent_type |
| | |     chat_type = agent.type |
| | | else: |
| | |     agent_type = agent.capacity_type |
| | |     chat_type = agent.chat_type |
| | | if chat_id == "" or chat_id == "0": |
| | |     ret = {"message": "Chat ID not found", "type": "close"} |
| | |     await websocket.send_json(ret) |
| | |     return |
| | |
| | | dify_workflow_clean: str = '' |
| | | dify_workflow_report: str = '' |
| | | postgresql_database_url: str = '' |
| | | dify_database_url: str = '' |
| | | def __init__(self, **kwargs): |
| | | # replace the IP addresses in the configured URLs with the host IP |
| | | host_ip = os.getenv('HOST_IP', '127.0.0.1') |
| | |
| | | kwargs['fwr_db_url'] = kwargs.get('fwr_db_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['dify_base_url'] = kwargs.get('dify_base_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['basic_base_url'] = kwargs.get('basic_base_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['dify_database_url'] = kwargs.get('dify_database_url', '').replace('127.0.0.1', host_ip) |
| | | # Check if all required fields are provided and set them |
| | | for field in self.__annotations__.keys(): |
| | | if field not in kwargs: |
| | |
| | | dify_api_token: app-YmOAMDsPpDDlqryMHnc9TzTO |
| | | postgresql_database_url: postgresql+asyncpg://kong:kongpass@192.168.20.119:5432/kong |
| | | dify_workflow_clean: app-OpF0drPu0XcgqcekQpT4FA8a |
| | | dify_workflow_report: app-0MAkdFWqh9zxwmU69O0BFU1s |
| | | dify_database_url: postgresql+psycopg2://postgres:difyai123456@192.168.20.116:15432/dify |
| | |
| | | await app_register_sync(db) # registered applications |
| | | await basic_agent_sync(db) # in-house developed agents |
| | | # await default_role_sync(db) # default role resources |
| | | |
| | | |
| | |
| | | id = Column(String(36), primary_key=True) # primary key |
| | | create_date = Column(DateTime, default=datetime.now) # creation time (pass the callable, not datetime.now(), so it is evaluated per insert) |
| | | update_date = Column(DateTime, default=datetime.now, onupdate=datetime.now) # update time |
| | | tenant_id = Column(String(36)) # creator id |
| | | name = Column(String(255)) # name |
| | | description = Column(Text) # description |
| | | icon = Column(Text, default="intelligentFrame1") # icon |
| | | status = Column(String(1), default="1") # status |
| | | dialog_type = Column(String(1)) # source platform |
| | | # agent_id = Column(String(36)) |
| | | mode = Column(String(36)) |
| | | |
| | | def get_id(self): |
| | |
| | | 'icon': self.icon, |
| | | 'status': self.status, |
| | | 'agentType': self.dialog_type, |
| | | # 'agentId': self.agent_id, |
| | | 'mode': self.mode, |
| | | } |
| | | |
| | |
| | | |
| | | def to_json(self): |
| | | json = { |
| | | 'groupId': self.id, |
| | | 'createTime': self.created_at.strftime('%Y-%m-%d %H:%M:%S'), |
| | | 'updateTime': self.updated_at.strftime('%Y-%m-%d %H:%M:%S'), |
| | | 'groupName': self.group_name, |
| | | 'description': self.group_description, |
| | | 'status': self.status, |
| | | } |
| | | # if len(self.knowledges.all()) > 0: |
| | | json['knowledges'] = [knowledge.to_json() for knowledge in self.knowledges] |
| | | |
| | | # if len(self.dialogs.all()) > 0: |
| | | json['dialogs'] = [dialog.to_json() for dialog in self.dialogs] |
| | | |
| | | # if len(self.agents.all()) > 0: |
| | | # json['agents'] = [agent.to_json() for agent in self.agents] |
| | | |
| | | # if len(self.llms.all()) > 0: |
| | | json['llms'] = [llm.to_json() for llm in self.llms] |
| | | |
| | | json['users'] = [user.to_dict() for user in self.users] |
| | | |
| | | # if hasattr(self, 'flag'): |
| | | # json['flag'] = self.flag |
| | | |
| | | return json |
| | | |
| | |
| | | name = Column(String(128)) # name |
| | | create_date = Column(DateTime, default=datetime.now) # creation time (callable so it is evaluated per insert) |
| | | update_date = Column(DateTime, default=datetime.now, onupdate=datetime.now) # update time |
| | | # avatar = Column(Text) # icon |
| | | permission = Column(String(32), default="me") |
| | | tenant_id = Column(String(32)) # creator id |
| | | description = Column(Text) # description |
| | | status = Column(String(1)) # status |
| | |
| | | 'name': self.name, |
| | | 'create_time': self.create_date.strftime('%Y-%m-%d %H:%M:%S'), |
| | | 'update_time': self.update_date.strftime('%Y-%m-%d %H:%M:%S'), |
| | | # 'avatar': self.avatar, |
| | | 'permission': self.permission, |
| | | 'user_id': self.tenant_id, |
| | | 'description': self.description, |
| | | 'status': self.status, |
| | | 'documents': self.documents if self.documents else 0, |
| | | 'icon': self.icon |
| | | } |
| | | def __repr__(self): |
| | |
| | | address: Optional[str] = "" |
| | | parentId: Optional[str] = "" |
| | | status: str |
| | | roles: Optional[list] = [] |
| | | groups: Optional[list] = [] |
| | | |
| | | |
| | |
| | | from sqlalchemy import Column, Integer, DateTime, Text |
| | | from sqlalchemy.orm import Session |
| | | |
| | | # from Log import logger |
| | | from app.models.base_model import Base |
| | | |
| | | |
| | |
| | | db.refresh(db_token) |
| | | |
| | | except Exception as e: |
| | | # logger.error(e) |
| | | # error handling |
| | | db.rollback() # roll back the transaction |
| | | |
| | |
| | | |
| | | |
| | | def is_valid_password(password: str) -> bool: |
| | | if len(password) < 8: |
| | | return False |
| | | has_digit = re.search(r'[0-9]', password) |
| | | has_letter = re.search(r'[A-Za-z]', password) |
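The hunk cuts off before the validator returns. A minimal, self-contained sketch of the complete check implied by the visible lines (minimum length of 8 plus at least one digit and one letter); the actual file may continue differently beyond this point:

```python
import re

def is_valid_password(password: str) -> bool:
    # Reject passwords shorter than 8 characters.
    if len(password) < 8:
        return False
    has_digit = re.search(r'[0-9]', password)
    has_letter = re.search(r'[A-Za-z]', password)
    # Hypothetical tail: require both a digit and a letter.
    return bool(has_digit and has_letter)
```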
| | |
| | | items = query.order_by(GroupModel.id.desc()).limit(page_size).offset((page_index - 1) * page_size) |
| | | |
| | | return {"total": total, "items": [item.to_json() for item in items.all()]} |
| | | |
| | | |
| | | async def create_group(db, group_name: str, group_description: str, user_id): |
| | |
| | | from Log import logger |
| | | |
| | | |
| | | async def get_knowledge_list(db, user_id, keyword, page_size, page_index, status, knowledge, location): |
| | | user = db.query(UserModel).filter(UserModel.id == user_id).first() |
| | | if user is None: |
| | | return {"rows": []} |
| | | query = db.query(KnowledgeModel).filter(KnowledgeModel.knowledge_type == knowledge) |
| | | if user.permission != "admin": |
| | | klg_list = [j.id for i in user.groups for j in i.knowledges] |
| | | query = query.filter(or_(KnowledgeModel.tenant_id == user_id, KnowledgeModel.id.in_(klg_list))) |
| | | if location: |
| | | query = query.filter(KnowledgeModel.permission == "team") |
| | | if keyword: |
| | | query = query.filter(KnowledgeModel.name.like('%{}%'.format(keyword))) |
| | | total = query.count() |
| | |
| | | await ragflow_service.add_user_tenant(token, u.app_id, |
| | | user.email, |
| | | user.app_id) |
| | | |
| | | await ragflow_service.add_user_tenant(token, user.app_id, |
| | | u.email, |
| | | u.app_id) |
| | |
| | | from typing import Dict, List, Tuple |
| | | |
| | | from sqlalchemy import create_engine, Column, String, Integer, Text |
| | | from sqlalchemy.exc import IntegrityError |
| | | from sqlalchemy.orm import sessionmaker |
| | | |
| | | from app.config.config import settings |
| | | from app.config.const import RAGFLOW, BISHENG, DIFY |
| | | from app.models import DialogModel |
| | | from app.models.agent_model import AgentModel |
| | | from app.models.base_model import SessionLocal, Base |
| | | from app.service.v2.app_register import AppRegisterDao |
| | | |
| | | # create the database engines and session factories |
| | | engine_bisheng = create_engine(settings.sgb_db_url) |
| | | engine_ragflow = create_engine(settings.fwr_db_url) |
| | | engine_dify = create_engine(settings.dify_database_url) |
| | | |
| | | SessionBisheng = sessionmaker(autocommit=False, autoflush=False, bind=engine_bisheng) |
| | | SessionRagflow = sessionmaker(autocommit=False, autoflush=False, bind=engine_ragflow) |
| | | SessionDify = sessionmaker(autocommit=False, autoflush=False, bind=engine_dify) |
| | | |
| | | |
| | | class Flow(Base): |
| | |
| | | id = Column(String(255), primary_key=True) |
| | | name = Column(String(255), nullable=False) |
| | | status = Column(Integer, nullable=False) |
| | | description = Column(String(255), nullable=False) |
| | | user_id = Column(Integer, nullable=False) |
| | | |
| | | |
| | | class Dialog(Base): |
| | |
| | | id = Column(String(255), primary_key=True) |
| | | name = Column(String(255), nullable=False) |
| | | status = Column(String(1), nullable=False) |
| | | description = Column(String(255), nullable=False) |
| | | tenant_id = Column(String(36), nullable=False) |
| | | |
| | | |
| | | class DfApps(Base): |
| | | __tablename__ = 'apps' |
| | | id = Column(String(36), primary_key=True) |
| | | name = Column(String(255), nullable=False) |
| | | status = Column(String(16), nullable=False) |
| | | description = Column(Text, nullable=False) |
| | | tenant_id = Column(String(36), nullable=False) |
| | | |
| | | |
| | | # parse the names |
| | |
| | | print(f"Failed to sync agents: {str(e)}") |
| | | |
| | | |
| | | def update_ids_in_local(data: List[Tuple]): |
| | | db = SessionLocal() |
| | | try: |
| | | for row in data: |
| | | name = row[1] |
| | | new_id = row[0] |
| | | existing_agent = db.query(AgentModel).filter_by(name=name).first() |
| | | if existing_agent: |
| | | existing_agent.id = new_id |
| | | db.add(existing_agent) |
| | | db.commit() |
| | | except IntegrityError: |
| | | db.rollback() |
| | | raise |
| | | finally: |
| | | db.close() |
| | | |
| | | def get_data_from_bisheng_v2(names: List[str]) -> List[Dict]: |
| | | db = SessionBisheng() |
| | | try: |
| | | if names: |
| | | query = db.query(Flow.id, Flow.name, Flow.description, Flow.status, Flow.user_id) \ |
| | | .filter(Flow.name.in_(names), Flow.status==2) |
| | | else: |
| | | query = db.query(Flow.id, Flow.name, Flow.description, Flow.status, Flow.user_id).filter(Flow.status==2) |
| | | |
| | | results = query.all() |
| | | # print(f"Executing query: {query}") |
| | | # format the id as a dashed UUID |
| | | formatted_results = [{"id":format_uuid(row[0]), "name": row[1], "description": row[2], "status": str(row[3]-1), "user_id": str(row[4])} for row in results] |
| | | return formatted_results |
| | | finally: |
| | | db.close() |
| | | |
| | | def get_data_from_ragflow_v2(names: List[str]) -> List[Dict]: |
| | | db = SessionRagflow() |
| | | try: |
| | | if names: |
| | | query = db.query(Dialog.id, Dialog.name, Dialog.description, Dialog.status, Dialog.tenant_id) \ |
| | | .filter( Dialog.name.in_(names)) |
| | | else: |
| | | query = db.query(Dialog.id, Dialog.name, Dialog.description, Dialog.status, Dialog.tenant_id) |
| | | |
| | | results = query.all() |
| | | formatted_results = [ |
| | | {"id": format_uuid(row[0]), "name": row[1], "description": row[2], "status": str(row[3]), |
| | | "user_id": str(row[4])} for row in results] |
| | | return formatted_results |
| | | finally: |
| | | db.close() |
| | | |
| | | |
| | | def get_data_from_dify_v2(names: List[str]) -> List[Dict]: |
| | | db = SessionDify() |
| | | try: |
| | | if names: |
| | | query = db.query(DfApps.id, DfApps.name, DfApps.description, DfApps.status, DfApps.tenant_id) \ |
| | | .filter( DfApps.name.in_(names)) |
| | | else: |
| | | query = db.query(DfApps.id, DfApps.name, DfApps.description, DfApps.status, DfApps.tenant_id) |
| | | |
| | | results = query.all() |
| | | formatted_results = [ |
| | | {"id": str(row[0]), "name": row[1], "description": row[2], "status": "1", |
| | | "user_id": str(row[4])} for row in results] |
| | | return formatted_results |
| | | finally: |
| | | db.close() |
| | | |
| | | |
| | | |
| | | def update_ids_in_local_v2(data: List[Dict], dialog_type:str): |
| | | db = SessionLocal() |
| | | agent_id_list = [] |
| | | print("----------------------------------------") |
| | | print(data) |
| | | print("*********************************************") |
| | | try: |
| | | for row in data: |
| | | agent_id_list.append(row["id"]) |
| | | existing_agent = db.query(DialogModel).filter_by(id=row["id"]).first() |
| | | if existing_agent: |
| | | existing_agent.name = row["name"] |
| | | existing_agent.description = row["description"] |
| | | else: |
| | | existing = DialogModel(id=row["id"], name=row["name"], description=row["description"], tenant_id=row["user_id"], dialog_type=dialog_type) |
| | | db.add(existing) |
| | | db.commit() |
| | | for dialog in db.query(DialogModel).filter_by(dialog_type=dialog_type).all(): |
| | | if dialog.id not in agent_id_list: |
| | | db.query(DialogModel).filter_by(id=dialog.id).delete() |
| | | db.commit() |
| | | except IntegrityError: |
| | | db.rollback() |
| | | raise |
| | | finally: |
| | | db.close() |
| | | |
| | | |
| | | |
| | | def get_data_from_ragflow_knowledge(names: List[str]) -> List[Dict]: |
| | | ... |
| | | |
| | | def sync_agents_v2(): |
| | | db = SessionLocal() |
| | | |
| | | try: |
| | | app_register = AppRegisterDao(db).get_apps() |
| | | for app in app_register: |
| | | if app["id"] == RAGFLOW: |
| | | ragflow_data = get_data_from_ragflow_v2([]) |
| | | update_ids_in_local_v2(ragflow_data, "1") |
| | | elif app["id"] == BISHENG: |
| | | bisheng_data = get_data_from_bisheng_v2([]) |
| | | update_ids_in_local_v2(bisheng_data, "2") |
| | | elif app["id"] == DIFY: |
| | | dify_data = get_data_from_dify_v2([]) |
| | | update_ids_in_local_v2(dify_data, "4") |
| | | print("Agents synchronized successfully") |
| | | except Exception as e: |
| | | print(f"Failed to sync agents: {str(e)}") |
| | | |
| | | |
| | | |
| | | def sync_knowledge(): |
| | | db = SessionLocal() |
| | | |
| | | try: |
| | | app_register = AppRegisterDao(db).get_apps() |
| | | for app in app_register: |
| | | if app["id"] == RAGFLOW: |
| | | ragflow_data = get_data_from_ragflow_knowledge([]) |
| | | update_ids_in_local_v2(ragflow_data, "1") |
| | | # elif app["id"] == BISHENG: |
| | | # bisheng_data = get_data_from_bisheng_v2([]) |
| | | # update_ids_in_local_v2(bisheng_data, "2") |
| | | # elif app["id"] == DIFY: |
| | | # dify_data = get_data_from_dify_v2([]) |
| | | # update_ids_in_local_v2(dify_data, "4") |
| | | print("Agents synchronized successfully") |
| | | except Exception as e: |
| | | print(f"Failed to sync agents: {str(e)}") |
| | | |
| | | |
| | | if __name__ == "__main__": |
| | | a = get_data_from_dify_v2([]) |
| | | print(a) |
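The fetchers above normalise upstream ids through `format_uuid`, which is defined elsewhere in this module and not shown in this hunk. A minimal sketch of what such a helper could look like, under the assumption that it turns 32-character hex ids from ragflow/bisheng into the dashed 36-character form the local `String(36)` columns expect:

```python
import uuid

def format_uuid(raw_id) -> str:
    # Hypothetical sketch: normalise an id to dashed UUID form when possible.
    value = str(raw_id)
    try:
        # uuid.UUID accepts both dashed and undashed 32-char hex strings.
        return str(uuid.UUID(value))
    except ValueError:
        # Leave ids that are not valid UUID hex untouched.
        return value
```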
| | |
| | | from app.api.user import user_router |
| | | from app.api.group import group_router |
| | | from app.api.role import role_router |
| | | from app.task.fetch_agent import sync_agents, initialize_agents, sync_agents_v2 |
| | | from app.init_config.init_run_data import sync_default_data |
| | | |
| | | |
| | |
| | | # sync agents when the application starts |
| | | sync_agents() |
| | | await sync_default_data() |
| | | |
| | | sync_agents_v2() |
| | | yield |
| | | # perform cleanup when the application shuts down (if needed) |
| | | pass |
| | |
| | | ) |
| | | |
| | | # configure the CORS middleware |
| | | app.add_middleware( |
| | | CORSMiddleware, |
| | | allow_origins=["*"], # allow cross-origin requests from any origin |
| | | allow_credentials=True, |
| | | allow_methods=["*"], # allow all HTTP methods |
| | | allow_headers=["*"], # allow all request headers |
| | | ) |
| | | # app.add_middleware( |
| | | # CORSMiddleware, |
| | | # allow_origins=["http://192.168.20.119:9303", "http://192.168.20.119:9301", "http://smartai.com:8293","http://localhost:9303", "http://127.0.0.1:9303","http://localhost:5173","http://192.168.20.158:5173"], |
| | | # allow_credentials=True, |
| | | # allow_methods=["*"], # 允许所有方法 |
| | | # allow_headers=["*"], # 允许所有头部 |
| | | # ) |
| | | # create the scheduler |
| | | # scheduler = BackgroundScheduler() |
| | | # scheduler.add_job(sync_resource, 'interval', minutes=1, id="sync_resource_data") |