New file |
| | |
| | | # A generic, single database configuration. |
| | | |
| | | [alembic] |
| | | # path to migration scripts |
| | | # Use forward slashes (/) also on windows to provide an os agnostic path |
| | | script_location = alembic |
| | | |
| | | # template used to generate migration file names; The default value is %%(rev)s_%%(slug)s |
| | | # Uncomment the line below if you want the files to be prepended with date and time |
| | | # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file |
| | | # for all available tokens |
| | | # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s |
| | | |
| | | # sys.path path, will be prepended to sys.path if present. |
| | | # defaults to the current working directory. |
| | | prepend_sys_path = . |
| | | |
| | | # timezone to use when rendering the date within the migration file |
| | | # as well as the filename. |
| | | # If specified, requires the python>=3.9 or backports.zoneinfo library. |
# Any required deps can be installed by adding `alembic[tz]` to the pip requirements
| | | # string value is passed to ZoneInfo() |
| | | # leave blank for localtime |
| | | # timezone = |
| | | |
| | | # max length of characters to apply to the "slug" field |
| | | # truncate_slug_length = 40 |
| | | |
| | | # set to 'true' to run the environment during |
| | | # the 'revision' command, regardless of autogenerate |
| | | # revision_environment = false |
| | | |
| | | # set to 'true' to allow .pyc and .pyo files without |
| | | # a source .py file to be detected as revisions in the |
| | | # versions/ directory |
| | | # sourceless = false |
| | | |
| | | # version location specification; This defaults |
| | | # to alembic/versions. When using multiple version |
| | | # directories, initial revisions must be specified with --version-path. |
| | | # The path separator used here should be the separator specified by "version_path_separator" below. |
| | | # version_locations = %(here)s/bar:%(here)s/bat:alembic/versions |
| | | |
| | | # version path separator; As mentioned above, this is the character used to split |
| | | # version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. |
| | | # If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. |
| | | # Valid values for version_path_separator are: |
| | | # |
| | | # version_path_separator = : |
| | | # version_path_separator = ; |
| | | # version_path_separator = space |
| | | # version_path_separator = newline |
| | | version_path_separator = os # Use os.pathsep. Default configuration used for new projects. |
| | | |
| | | # set to 'true' to search source files recursively |
| | | # in each "version_locations" directory |
| | | # new in Alembic version 1.10 |
| | | # recursive_version_locations = false |
| | | |
| | | # the output encoding used when revision files |
| | | # are written from script.py.mako |
| | | # output_encoding = utf-8 |
| | | |
# NOTE(review): hardcoded DB credentials and host committed to version control —
# prefer sourcing this URL from an environment variable (e.g. override it in env.py).
sqlalchemy.url = mysql+pymysql://root:infini_rag_flow@192.168.20.119:5455/rag_basic
| | | |
| | | |
| | | [post_write_hooks] |
| | | # post_write_hooks defines scripts or Python functions that are run |
| | | # on newly generated revision scripts. See the documentation for further |
| | | # detail and examples |
| | | |
| | | # format using "black" - use the console_scripts runner, against the "black" entrypoint |
| | | # hooks = black |
| | | # black.type = console_scripts |
| | | # black.entrypoint = black |
| | | # black.options = -l 79 REVISION_SCRIPT_FILENAME |
| | | |
| | | # lint with attempts to fix using "ruff" - use the exec runner, execute a binary |
| | | # hooks = ruff |
| | | # ruff.type = exec |
| | | # ruff.executable = %(here)s/.venv/bin/ruff |
| | | # ruff.options = --fix REVISION_SCRIPT_FILENAME |
| | | |
| | | # Logging configuration |
| | | [loggers] |
| | | keys = root,sqlalchemy,alembic |
| | | |
| | | [handlers] |
| | | keys = console |
| | | |
| | | [formatters] |
| | | keys = generic |
| | | |
| | | [logger_root] |
| | | level = WARNING |
| | | handlers = console |
| | | qualname = |
| | | |
| | | [logger_sqlalchemy] |
| | | level = WARNING |
| | | handlers = |
| | | qualname = sqlalchemy.engine |
| | | |
| | | [logger_alembic] |
| | | level = INFO |
| | | handlers = |
| | | qualname = alembic |
| | | |
| | | [handler_console] |
| | | class = StreamHandler |
| | | args = (sys.stderr,) |
| | | level = NOTSET |
| | | formatter = generic |
| | | |
| | | [formatter_generic] |
| | | format = %(levelname)-5.5s [%(name)s] %(message)s |
| | | datefmt = %H:%M:%S |
New file |
| | |
| | | Generic single-database configuration. |
New file |
| | |
| | | from logging.config import fileConfig |
| | | |
| | | from sqlalchemy import engine_from_config |
| | | from sqlalchemy import pool |
| | | |
| | | from alembic import context |
| | | |
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# NOTE(review): the mid-file import is the conventional alembic env.py pattern
# (it must run after logging is configured); assumes the `app` package is
# importable from the working directory alembic is run in — confirm.
from app.models.base_model import Base
target_metadata = Base.metadata
| | | |
| | | # other values from the config, defined by the needs of env.py, |
| | | # can be acquired: |
| | | # my_important_option = config.get_main_option("my_important_option") |
| | | # ... etc. |
| | | |
| | | |
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the migration context with only the database URL from the
    ini file — no Engine (and therefore no DBAPI) is needed. Statements
    produced by context.execute() are emitted to the script output.
    """
    db_url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=db_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
| | | |
| | | |
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] section of the ini file and
    associates a live connection with the migration context.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
| | | |
| | | |
# Entry point: alembic imports this module once per command and selects
# offline (emit SQL script) vs online (apply against a live DB) mode.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
New file |
| | |
| | | """${message} |
| | | |
| | | Revision ID: ${up_revision} |
| | | Revises: ${down_revision | comma,n} |
| | | Create Date: ${create_date} |
| | | |
| | | """ |
| | | from typing import Sequence, Union |
| | | |
| | | from alembic import op |
| | | import sqlalchemy as sa |
| | | ${imports if imports else ""} |
| | | |
| | | # revision identifiers, used by Alembic. |
| | | revision: str = ${repr(up_revision)} |
| | | down_revision: Union[str, None] = ${repr(down_revision)} |
| | | branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} |
| | | depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} |
| | | |
| | | |
| | | def upgrade() -> None: |
| | | ${upgrades if upgrades else "pass"} |
| | | |
| | | |
| | | def downgrade() -> None: |
| | | ${downgrades if downgrades else "pass"} |
New file |
| | |
| | | """Initial migration |
| | | |
| | | Revision ID: 580e984b9882 |
| | | Revises: |
| | | Create Date: 2024-12-06 10:54:42.146182 |
| | | |
| | | """ |
| | | from typing import Sequence, Union |
| | | |
| | | from alembic import op |
| | | import sqlalchemy as sa |
| | | from sqlalchemy.dialects import mysql |
| | | |
# revision identifiers, used by Alembic.
revision: str = '580e984b9882'
down_revision: Union[str, None] = None  # None: this is the root of the migration chain
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
| | | |
| | | |
def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # WARNING(review): besides creating `user_test`, this autogenerated
    # migration DROPS many existing tables below (df_api_token, sessions,
    # flow_test, app_register, user_canvas, flow, group_info, group_agent,
    # organization_group, token, dialog). Their data is lost — downgrade()
    # only recreates empty tables. Confirm the drops are intended before
    # applying this migration.
    op.create_table('user_test',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('username', sa.String(length=255), nullable=True),
    sa.Column('hashed_password', sa.String(length=255), nullable=True),
    sa.Column('password', sa.String(length=255), nullable=True),
    sa.Column('compellation', sa.String(length=255), nullable=False),
    sa.Column('phone', sa.String(length=255), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
    sa.Column('description', sa.String(length=255), nullable=False),
    sa.Column('ragflow_id', sa.String(length=32), nullable=True),
    sa.Column('bisheng_id', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_user_test_id'), 'user_test', ['id'], unique=False)
    op.create_index(op.f('ix_user_test_username'), 'user_test', ['username'], unique=True)
    op.drop_table('df_api_token')
    op.drop_table('sessions')
    op.drop_table('flow_test')
    op.drop_table('app_register')
    op.drop_table('user_canvas')
    op.drop_table('flow')
    op.drop_index('ix_group_info_group_id', table_name='group_info')
    op.drop_index('ix_group_info_group_name', table_name='group_info')
    op.drop_table('group_info')
    op.drop_table('group_agent')
    op.drop_table('organization_group')
    op.drop_table('token')
    op.drop_table('dialog')
    op.drop_index('ix_agent_id', table_name='agent')
    # NOTE(review): 'updated_at11' looks like a typo or test artifact
    # (matches AgentModel.updated_at11 in the models) — confirm intentional.
    op.add_column('user', sa.Column('updated_at11', sa.Integer(), nullable=True))
    # Tighten nullability on existing `user` columns; existing NULLs in these
    # columns may make the ALTERs fail (MySQL strict mode) — backfill first.
    op.alter_column('user', 'compellation',
               existing_type=mysql.VARCHAR(length=255),
               nullable=False)
    op.alter_column('user', 'phone',
               existing_type=mysql.VARCHAR(length=255),
               nullable=False)
    op.alter_column('user', 'email',
               existing_type=mysql.VARCHAR(length=255),
               nullable=False)
    op.alter_column('user', 'description',
               existing_type=mysql.VARCHAR(length=255),
               nullable=False)
    op.alter_column('user', 'ragflow_id',
               existing_type=mysql.VARCHAR(length=32),
               nullable=True)
    op.alter_column('user', 'bisheng_id',
               existing_type=mysql.INTEGER(),
               nullable=True)
    # ### end Alembic commands ###
| | | |
| | | |
def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): this restores only the SCHEMA — the tables dropped in
    # upgrade() are recreated empty; their previous contents are not
    # recoverable by running this downgrade.
    op.alter_column('user', 'bisheng_id',
               existing_type=mysql.INTEGER(),
               nullable=False)
    op.alter_column('user', 'ragflow_id',
               existing_type=mysql.VARCHAR(length=32),
               nullable=False)
    op.alter_column('user', 'description',
               existing_type=mysql.VARCHAR(length=255),
               nullable=True)
    op.alter_column('user', 'email',
               existing_type=mysql.VARCHAR(length=255),
               nullable=True)
    op.alter_column('user', 'phone',
               existing_type=mysql.VARCHAR(length=255),
               nullable=True)
    op.alter_column('user', 'compellation',
               existing_type=mysql.VARCHAR(length=255),
               nullable=True)
    op.drop_column('user', 'updated_at11')
    op.create_index('ix_agent_id', 'agent', ['id'], unique=False)
    # Recreate the tables dropped in upgrade() (empty).
    op.create_table('dialog',
    sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('llm_id', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('status', mysql.VARCHAR(length=1), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('token',
    sa.Column('id', mysql.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('user_id', mysql.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('token', mysql.TEXT(), nullable=True),
    sa.Column('bisheng_token', mysql.TEXT(), nullable=True),
    sa.Column('ragflow_token', mysql.TEXT(), nullable=True),
    sa.Column('created_at', mysql.DATETIME(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    # NOTE(review): the FK targets below ('group', 'organization', 'canvas')
    # must still exist for these creates to succeed — confirm ordering.
    op.create_table('organization_group',
    sa.Column('group_id', mysql.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('organization_id', mysql.VARCHAR(length=36), nullable=True),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name='organization_group_ibfk_1'),
    sa.ForeignKeyConstraint(['organization_id'], ['organization.id'], name='organization_group_ibfk_2'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('group_agent',
    sa.Column('group_id', mysql.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('agent_id', mysql.VARCHAR(length=36), nullable=True),
    sa.ForeignKeyConstraint(['agent_id'], ['canvas.id'], name='group_agent_ibfk_2', ondelete='CASCADE'),
    sa.ForeignKeyConstraint(['group_id'], ['group.id'], name='group_agent_ibfk_1', ondelete='CASCADE'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('group_info',
    sa.Column('group_id', mysql.INTEGER(), autoincrement=True, nullable=False),
    sa.Column('group_name', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('group_description', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('group_status', mysql.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('created_at', mysql.DATETIME(), nullable=True),
    sa.Column('updated_at', mysql.DATETIME(), nullable=True),
    sa.PrimaryKeyConstraint('group_id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_index('ix_group_info_group_name', 'group_info', ['group_name'], unique=True)
    op.create_index('ix_group_info_group_id', 'group_info', ['group_id'], unique=False)
    op.create_table('flow',
    sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('status', mysql.INTEGER(), autoincrement=False, nullable=False),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('user_canvas',
    sa.Column('id', mysql.VARCHAR(length=32), nullable=False),
    sa.Column('create_date', mysql.DATETIME(), nullable=True),
    sa.Column('update_date', mysql.DATETIME(), nullable=True),
    sa.Column('avatar', mysql.TEXT(), nullable=True),
    sa.Column('user_id', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('title', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('description', mysql.TEXT(), nullable=True),
    sa.Column('canvas_type', mysql.VARCHAR(length=32), nullable=True),
    sa.Column('dsl', mysql.TEXT(), nullable=True),
    sa.Column('agent_type', mysql.VARCHAR(length=2), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('app_register',
    sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
    sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('status', mysql.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('created_at', mysql.DATETIME(), nullable=True),
    sa.Column('updated_at', mysql.DATETIME(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('flow_test',
    sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('name', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('status', mysql.INTEGER(), autoincrement=False, nullable=False),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('sessions',
    sa.Column('id', mysql.VARCHAR(length=255), nullable=False),
    sa.Column('name', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('agent_id', mysql.VARCHAR(length=255), nullable=True),
    sa.Column('agent_type', mysql.ENUM('RAGFLOW', 'BISHENG', 'BASIC', 'DIFY'), nullable=False),
    sa.Column('create_date', mysql.DATETIME(), nullable=True),
    sa.Column('update_date', mysql.DATETIME(), nullable=True),
    sa.Column('tenant_id', mysql.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('message', mysql.TEXT(), nullable=True),
    sa.Column('conversation_id', mysql.VARCHAR(length=64), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.create_table('df_api_token',
    sa.Column('id', mysql.VARCHAR(length=36), nullable=False),
    sa.Column('token', mysql.VARCHAR(length=36), nullable=True),
    sa.Column('created_at', mysql.DATETIME(), nullable=True),
    sa.Column('updated_at', mysql.DATETIME(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    mysql_collate='utf8mb4_0900_ai_ci',
    mysql_default_charset='utf8mb4',
    mysql_engine='InnoDB'
    )
    op.drop_index(op.f('ix_user_test_username'), table_name='user_test')
    op.drop_index(op.f('ix_user_test_id'), table_name='user_test')
    op.drop_table('user_test')
    # ### end Alembic commands ###
| | |
| | | await update_token(db, user.id, access_token, token_dict) |
| | | result = await pdb.execute(select(AppToken).where(AppToken.id == user.id)) |
| | | db_app_token = result.scalars().first() |
| | | if isinstance(access_token, bytes): |
| | | access_token = access_token.decode() |
| | | if not db_app_token: |
| | | app_token_str = json.dumps(token_dict) |
| | | # print(app_token_str) |
| | | app_token = AppToken(id=user.id, token=access_token.decode(), app_token=app_token_str) |
| | | app_token = AppToken(id=user.id, token=access_token, app_token=app_token_str) |
| | | pdb.add(app_token) |
| | | await pdb.commit() |
| | | await pdb.refresh(app_token) |
| | | else: |
| | | db_app_token.token = access_token.decode() |
| | | db_app_token.token = access_token |
| | | db_app_token.app_token = json.dumps(token_dict) |
| | | await pdb.commit() |
| | | await pdb.refresh(db_app_token) |
| | |
| | | kwargs['fwr_base_url'] = kwargs.get('fwr_base_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['sgb_db_url'] = kwargs.get('sgb_db_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['fwr_db_url'] = kwargs.get('fwr_db_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['dify_base_url'] = kwargs.get('dify_base_url', '').replace('127.0.0.1', host_ip) |
| | | kwargs['basic_base_url'] = kwargs.get('basic_base_url', '').replace('127.0.0.1', host_ip) |
| | | # Check if all required fields are provided and set them |
| | | for field in self.__annotations__.keys(): |
| | | if field not in kwargs: |
| | |
| | | BISHENG = 2 |
| | | BASIC = 3 |
| | | DIFY = 4 |
| | | OTHER = 5 |
| | | |
| | | |
| | | class AgentModel(Base): |
| | |
| | | age = Column(Integer) |
| | | created_at = Column(DateTime, default=datetime.now()) |
| | | updated_at = Column(DateTime, default=datetime.now(), onupdate=datetime.now()) |
| | | updated_at11 = Column(Integer) |
| | | |
| | | |
| | | |
| | | organizations = relationship('OrganizationModel', |
| | |
| | | return cipher_suite.encrypt(password.encode("utf-8")).decode("utf-8") |
| | | |
| | | def decrypted_password(self): |
| | | return cipher_suite.decrypt(self.password).decode("utf-8") |
| | | return cipher_suite.decrypt(self.password).decode("utf-8") |
| | | |
| | | |
| | | |
class UserModel(Base):
    """ORM model for application users (maps table `user_test`)."""
    __tablename__ = "user_test"
    id = Column(Integer, primary_key=True, index=True)
    # Login name; unique + indexed (see migration 580e984b9882).
    username = Column(String(255), unique=True, index=True)
    # NOTE(review): both `hashed_password` and `password` exist; `password`
    # appears to hold a cipher_suite-encrypted value (see decrypted_password)
    # — confirm plaintext is never stored here.
    hashed_password = Column(String(255))
    password = Column(String(255))
    compellation = Column(String(255), nullable=False, default="")
    phone = Column(String(255), nullable=False, default="")
    email = Column(String(255), nullable=False, default="")
    description = Column(String(255), nullable=False, default="")
    # Cross-service identifiers — presumably ids of the matching user in the
    # RAGFlow / Bisheng services; verify against callers.
    ragflow_id = Column(String(32))
    bisheng_id = Column(Integer)
| | |
| | | try: |
| | | count = db.query(AgentModel).count() |
| | | if count > 0: |
| | | return |
| | | result = db.query(AgentModel).delete() |
| | | db.commit() # 提交事务 |
| | | initial_agents = [ |
| | | ('80ee430a-e396-48c4-a12c-7c7cdf5eda51', 1, '报告生成', 'BISHENG', 'report'), |
| | | ('basic_excel_merge', 2, '报表合并', 'BASIC', 'excelMerge'), |