# Patch script: replace sync_metadata() in the target file with a
# checksum-based incremental version (plus its _compute_checksum helper).
import sys

if len(sys.argv) != 2:
    print('Usage: python <patch-script>.py <target-file>')
    sys.exit(1)

with open(sys.argv[1], 'r') as f:
    content = f.read()

# Locate the existing function definition; everything from here to EOF is replaced.
marker = 'def sync_metadata(db: Session, source_id: int, user_id: int) -> dict:'
idx = content.find(marker)
if idx == -1:
    print('Marker not found')
    sys.exit(1)

# NOTE: the replacement body relies on names already imported in the target
# module (Session, HTTPException, status, get_datasource, _decrypt_password,
# Database, DataTable, DataColumn).
new_func = '''def _compute_checksum(data: dict) -> str:
    import hashlib, json
    # Canonical JSON encoding so identical metadata always hashes identically;
    # keep the first 32 hex chars (128 bits) of the SHA-256 digest.
    payload = json.dumps(data, sort_keys=True, ensure_ascii=False, default=str)
    return hashlib.sha256(payload.encode()).hexdigest()[:32]


def sync_metadata(db: Session, source_id: int, user_id: int) -> dict:
    from sqlalchemy import create_engine, inspect, text
    from urllib.parse import quote_plus
    import json
    from datetime import datetime

    source = get_datasource(db, source_id)
    if not source:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Data source not found")

    # Map logical source types to SQLAlchemy driver prefixes.
    driver_map = {
        "mysql": "mysql+pymysql",
        "postgresql": "postgresql+psycopg2",
        "oracle": "oracle+cx_oracle",
        "sqlserver": "mssql+pymssql",
    }
    driver = driver_map.get(source.source_type, source.source_type)

    # DM (Dameng) has no driver configured here; report a simulated success.
    if source.source_type == "dm":
        return {"success": True, "message": "DM database synced successfully (simulated)", "databases": 0, "tables": 0, "columns": 0}

    password = ""
    if source.encrypted_password:
        try:
            password = _decrypt_password(source.encrypted_password)
        except Exception:
            pass

    try:
        # quote_plus() keeps special characters in the password from breaking the URL.
        url = f"{driver}://{source.username}:{quote_plus(password)}@{source.host}:{source.port}/{source.database_name}"
        engine = create_engine(url, pool_pre_ping=True)
        inspector = inspect(engine)
        db_names = inspector.get_schema_names() or [source.database_name]
        scan_time = datetime.utcnow()
        total_tables = 0
        total_columns = 0
        updated_tables = 0
        updated_columns = 0

        for db_name in db_names:
            db_checksum = _compute_checksum({"name": db_name})
            db_obj = db.query(Database).filter(
                Database.source_id == source.id,
                Database.name == db_name,
            ).first()
            if not db_obj:
                db_obj = Database(source_id=source.id, name=db_name, checksum=db_checksum, last_scanned_at=scan_time)
                db.add(db_obj)
                db.flush()  # assign db_obj.id before tables reference it
            else:
                db_obj.checksum = db_checksum
                db_obj.last_scanned_at = scan_time
                db_obj.is_deleted = False
                db_obj.deleted_at = None

            table_names = inspector.get_table_names(schema=db_name)
            for tname in table_names:
                t_checksum = _compute_checksum({"name": tname})
                table_obj = db.query(DataTable).filter(
                    DataTable.database_id == db_obj.id,
                    DataTable.name == tname,
                ).first()
                if not table_obj:
                    table_obj = DataTable(database_id=db_obj.id, name=tname, checksum=t_checksum, last_scanned_at=scan_time)
                    db.add(table_obj)
                    db.flush()  # assign table_obj.id before columns reference it
                else:
                    if table_obj.checksum != t_checksum:
                        table_obj.checksum = t_checksum
                        updated_tables += 1
                    table_obj.last_scanned_at = scan_time
                    table_obj.is_deleted = False
                    table_obj.deleted_at = None

                columns = inspector.get_columns(tname, schema=db_name)
                for col in columns:
                    # get_columns() returns no "max_length" key; string/binary
                    # types expose their length on the type object instead.
                    col_length = getattr(col.get("type"), "length", None)
                    col_checksum = _compute_checksum({
                        "name": col["name"],
                        "type": str(col.get("type", "")),
                        "max_length": col_length,
                        "comment": col.get("comment"),
                        "nullable": col.get("nullable", True),
                    })
                    col_obj = db.query(DataColumn).filter(
                        DataColumn.table_id == table_obj.id,
                        DataColumn.name == col["name"],
                    ).first()
                    if not col_obj:
                        sample = None
                        try:
                            with engine.connect() as conn:
                                # Double-quoted identifiers and LIMIT are dialect-dependent
                                # (MySQL needs ANSI_QUOTES); sampling failures are tolerated.
                                result = conn.execute(text(f'SELECT "{col["name"]}" FROM "{db_name}"."{tname}" LIMIT 5'))
                                samples = [str(r[0]) for r in result if r[0] is not None]
                                sample = json.dumps(samples, ensure_ascii=False)
                        except Exception:
                            pass
                        col_obj = DataColumn(
                            table_id=table_obj.id,
                            name=col["name"],
                            data_type=str(col.get("type", "")),
                            length=col_length,
                            comment=col.get("comment"),
                            is_nullable=col.get("nullable", True),
                            sample_data=sample,
                            checksum=col_checksum,
                            last_scanned_at=scan_time,
                        )
                        db.add(col_obj)
                    else:
                        if col_obj.checksum != col_checksum:
                            col_obj.checksum = col_checksum
                            col_obj.data_type = str(col.get("type", ""))
                            col_obj.length = col_length
                            col_obj.comment = col.get("comment")
                            col_obj.is_nullable = col.get("nullable", True)
                            updated_columns += 1
                        col_obj.last_scanned_at = scan_time
                        col_obj.is_deleted = False
                        col_obj.deleted_at = None
                    total_columns += 1  # count every column seen, mirroring total_tables
                total_tables += 1

        # Soft-delete objects not seen in this scan. Flush first so the
        # last_scanned_at updates above are visible to the bulk UPDATEs.
        db.flush()
        db.query(Database).filter(
            Database.source_id == source.id,
            Database.last_scanned_at < scan_time,
        ).update({"is_deleted": True, "deleted_at": scan_time}, synchronize_session=False)
        for db_obj in db.query(Database).filter(Database.source_id == source.id).all():
            db.query(DataTable).filter(
                DataTable.database_id == db_obj.id,
                DataTable.last_scanned_at < scan_time,
            ).update({"is_deleted": True, "deleted_at": scan_time}, synchronize_session=False)
            for table_obj in db.query(DataTable).filter(DataTable.database_id == db_obj.id).all():
                db.query(DataColumn).filter(
                    DataColumn.table_id == table_obj.id,
                    DataColumn.last_scanned_at < scan_time,
                ).update({"is_deleted": True, "deleted_at": scan_time}, synchronize_session=False)

        source.status = "active"
        db.commit()
        return {
            "success": True,
            "message": "Metadata synced successfully",
            "databases": len(db_names),
            "tables": total_tables,
            "columns": total_columns,
            "updated_tables": updated_tables,
            "updated_columns": updated_columns,
        }
    except Exception as e:
        db.rollback()  # discard the partial scan before recording the failure
        source.status = "error"
        db.commit()
        return {"success": False, "message": f"Sync failed: {str(e)}", "databases": 0, "tables": 0, "columns": 0}
'''

# Everything from the marker to EOF is replaced, so this assumes sync_metadata
# is the last definition in the target file.
new_content = content[:idx] + new_func
with open(sys.argv[1], 'w') as f:
    f.write(new_content)
print('Patched successfully')
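
# --- Usage sketch (illustrative; both file names below are hypothetical, ---
# --- substitute the module in your tree that defines sync_metadata)      ---
#
#   python patch_sync_metadata.py app/services/metadata_service.py
#
# A quick standard-library-only check that the patched file still parses:
#
#   python -c "import ast; ast.parse(open('app/services/metadata_service.py').read())"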