6d70520e79
- 新增后端模块:Alert、APIAsset、Compliance、Lineage、Masking、Risk、SchemaChange、Unstructured、Watermark - 新增前端模块页面与API接口 - 新增Alembic迁移脚本(002-014)覆盖全量业务表 - 新增测试数据生成脚本与集成测试脚本 - 修复metadata模型JSON类型导入缺失导致启动失败的问题 - 修复前端Alert/APIAsset页面request模块路径错误 - 更新docker-compose与开发计划文档
184 lines
6.8 KiB
Plaintext
184 lines
6.8 KiB
Plaintext
from typing import Optional, List, Tuple
|
|
from sqlalchemy.orm import Session
|
|
from fastapi import HTTPException, status
|
|
|
|
from app.models.metadata import DataSource, Database, DataTable, DataColumn
|
|
from app.services.datasource_service import get_datasource, _decrypt_password
|
|
|
|
|
|
def get_database(db: Session, db_id: int) -> Optional[Database]:
    """Look up a single Database record by primary key.

    Returns the matching ORM object, or None when no row exists.
    """
    query = db.query(Database).filter(Database.id == db_id)
    return query.first()
|
|
|
|
|
|
def get_table(db: Session, table_id: int) -> Optional[DataTable]:
    """Look up a single DataTable record by primary key.

    Returns the matching ORM object, or None when no row exists.
    """
    query = db.query(DataTable).filter(DataTable.id == table_id)
    return query.first()
|
|
|
|
|
|
def get_column(db: Session, column_id: int) -> Optional[DataColumn]:
    """Look up a single DataColumn record by primary key.

    Returns the matching ORM object, or None when no row exists.
    """
    query = db.query(DataColumn).filter(DataColumn.id == column_id)
    return query.first()
|
|
|
|
|
|
def list_databases(db: Session, source_id: Optional[int] = None) -> List[Database]:
    """List Database records, optionally restricted to one data source.

    Args:
        db: Active SQLAlchemy session.
        source_id: When provided, only databases belonging to this source
            are returned. Compared with ``is not None`` so that a
            legitimate id of 0 still filters (the previous truthiness
            check silently returned ALL databases for ``source_id=0``).

    Returns:
        List of matching Database ORM objects (possibly empty).
    """
    query = db.query(Database)
    if source_id is not None:
        query = query.filter(Database.source_id == source_id)
    return query.all()
|
|
|
|
|
|
def list_tables(db: Session, database_id: Optional[int] = None, keyword: Optional[str] = None) -> Tuple[List[DataTable], int]:
    """List DataTable records with optional filtering by database and keyword.

    Args:
        db: Active SQLAlchemy session.
        database_id: When provided, restrict to tables of this database.
            Compared with ``is not None`` so a legitimate id of 0 still
            filters (truthiness check previously skipped it).
        keyword: When non-empty, match substring against table name OR
            comment (SQL ``LIKE %keyword%`` via ``contains``).

    Returns:
        Tuple of (matching tables, total count). There is no pagination
        here, so the total is simply ``len(items)`` — computing it locally
        avoids the redundant second ``COUNT(*)`` round-trip the original
        ``query.count()`` issued.
    """
    query = db.query(DataTable)
    if database_id is not None:
        query = query.filter(DataTable.database_id == database_id)
    if keyword:
        query = query.filter(
            (DataTable.name.contains(keyword)) | (DataTable.comment.contains(keyword))
        )
    items = query.all()
    return items, len(items)
|
|
|
|
|
|
def list_columns(db: Session, table_id: Optional[int] = None, keyword: Optional[str] = None, page: int = 1, page_size: int = 50) -> Tuple[List[DataColumn], int]:
    """List DataColumn records with optional filtering and pagination.

    Args:
        db: Active SQLAlchemy session.
        table_id: When provided, restrict to columns of this table.
            Compared with ``is not None`` so a legitimate id of 0 still
            filters (truthiness check previously skipped it).
        keyword: When non-empty, match substring against column name OR
            comment.
        page: 1-based page index. Clamped to >= 1 — a value of 0 or less
            would produce a negative OFFSET, which most backends reject.
        page_size: Number of rows per page.

    Returns:
        Tuple of (page of columns, total count of all matches before
        pagination).
    """
    query = db.query(DataColumn)
    if table_id is not None:
        query = query.filter(DataColumn.table_id == table_id)
    if keyword:
        query = query.filter(
            (DataColumn.name.contains(keyword)) | (DataColumn.comment.contains(keyword))
        )
    # Count before applying OFFSET/LIMIT so callers get the full total.
    total = query.count()
    page = max(page, 1)
    items = query.offset((page - 1) * page_size).limit(page_size).all()
    return items, total
|
|
|
|
|
|
def build_tree(db: Session, source_id: Optional[int] = None) -> List[dict]:
    """Build the source -> database -> table navigation tree.

    Args:
        db: Active SQLAlchemy session.
        source_id: When provided, restrict the tree to this single data
            source. Compared with ``is not None`` so a legitimate id of 0
            still filters (truthiness check previously returned all
            sources).

    Returns:
        A list of nested node dicts. Every node carries ``id``, ``name``,
        ``type`` (``source``/``database``/``table``), ``children``, and a
        type-specific ``meta`` dict. Table nodes keep an empty
        ``children`` list so the front end can treat all nodes uniformly.

    Note:
        Walks ``s.databases`` / ``d.tables`` relationships, so this may
        trigger lazy loads per source/database on large catalogs.
    """
    def _table_node(t: DataTable) -> dict:
        # Leaf node; children kept empty for a uniform node shape.
        return {
            "id": t.id,
            "name": t.name,
            "type": "table",
            "children": [],
            "meta": {"comment": t.comment, "row_count": t.row_count, "column_count": t.column_count},
        }

    def _database_node(d: Database) -> dict:
        return {
            "id": d.id,
            "name": d.name,
            "type": "database",
            "children": [_table_node(t) for t in d.tables],
            "meta": {"charset": d.charset, "table_count": d.table_count},
        }

    def _source_node(s: DataSource) -> dict:
        return {
            "id": s.id,
            "name": s.name,
            "type": "source",
            "children": [_database_node(d) for d in s.databases],
            "meta": {"source_type": s.source_type, "status": s.status},
        }

    sources = db.query(DataSource)
    if source_id is not None:
        sources = sources.filter(DataSource.id == source_id)
    return [_source_node(s) for s in sources.all()]
|
|
|
|
|
|
def sync_metadata(db: Session, source_id: int, user_id: int) -> dict:
    """Connect to a registered data source and sync its metadata into the catalog.

    Introspects schemas, tables, and columns via a SQLAlchemy inspector and
    upserts (insert-if-missing) Database/DataTable/DataColumn rows. For each
    NEW column it also best-effort samples up to 5 values.

    Args:
        db: Active SQLAlchemy session for the catalog itself.
        source_id: Id of the DataSource to sync; 404 if not found.
        user_id: Accepted but currently unused in this function body.

    Returns:
        A result dict: ``{"success", "message", "databases", "tables",
        "columns"}``. On failure the source status is set to "error" and a
        success=False dict is returned instead of raising.
    """
    # Local imports keep the heavy engine/inspector machinery out of module
    # import time.
    from sqlalchemy import create_engine, inspect, text
    import json

    source = get_datasource(db, source_id)
    if not source:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="数据源不存在")

    # Map the stored source_type to a SQLAlchemy dialect+driver URL prefix;
    # unknown types fall through and are used verbatim as the dialect name.
    driver_map = {
        "mysql": "mysql+pymysql",
        "postgresql": "postgresql+psycopg2",
        "oracle": "oracle+cx_oracle",
        "sqlserver": "mssql+pymssql",
    }
    driver = driver_map.get(source.source_type, source.source_type)

    # Dameng (DM) has no driver configured here; return a simulated success
    # without touching the catalog.
    if source.source_type == "dm":
        return {"success": True, "message": "达梦数据库同步成功(模拟)", "databases": 0, "tables": 0, "columns": 0}

    # Best-effort password decryption: on failure we proceed with an empty
    # password rather than aborting the sync.
    # NOTE(review): the broad except hides misconfigured encryption keys —
    # consider logging here.
    password = ""
    if source.encrypted_password:
        try:
            password = _decrypt_password(source.encrypted_password)
        except Exception:
            pass

    try:
        url = f"{driver}://{source.username}:{password}@{source.host}:{source.port}/{source.database_name}"
        # pool_pre_ping validates connections before use to survive stale
        # pooled connections.
        engine = create_engine(url, pool_pre_ping=True)
        inspector = inspect(engine)

        # Fall back to the configured database name if the dialect reports
        # no schemas.
        db_names = inspector.get_schema_names() or [source.database_name]
        total_tables = 0
        total_columns = 0

        for db_name in db_names:
            # Upsert the Database row for this schema.
            db_obj = db.query(Database).filter(Database.source_id == source.id, Database.name == db_name).first()
            if not db_obj:
                db_obj = Database(source_id=source.id, name=db_name)
                db.add(db_obj)
                # Commit immediately so db_obj.id is assigned for FK use below.
                db.commit()
                db.refresh(db_obj)

            table_names = inspector.get_table_names(schema=db_name)
            for tname in table_names:
                # Upsert the DataTable row.
                table_obj = db.query(DataTable).filter(DataTable.database_id == db_obj.id, DataTable.name == tname).first()
                if not table_obj:
                    table_obj = DataTable(database_id=db_obj.id, name=tname)
                    db.add(table_obj)
                    db.commit()
                    db.refresh(table_obj)

                columns = inspector.get_columns(tname, schema=db_name)
                for col in columns:
                    # Columns are insert-only: existing rows are never
                    # updated with refreshed type/comment info.
                    col_obj = db.query(DataColumn).filter(DataColumn.table_id == table_obj.id, DataColumn.name == col["name"]).first()
                    if not col_obj:
                        # Best-effort sampling of up to 5 non-null values.
                        # NOTE(review): double-quoted identifiers + LIMIT
                        # only work on PostgreSQL-style dialects; on MySQL
                        # (backticks) or SQL Server (no LIMIT) this raises
                        # and is swallowed, leaving sample_data as None.
                        sample = None
                        try:
                            with engine.connect() as conn:
                                result = conn.execute(text(f'SELECT "{col["name"]}" FROM "{db_name}"."{tname}" LIMIT 5'))
                                samples = [str(r[0]) for r in result if r[0] is not None]
                                sample = json.dumps(samples, ensure_ascii=False)
                        except Exception:
                            pass

                        col_obj = DataColumn(
                            table_id=table_obj.id,
                            name=col["name"],
                            data_type=str(col.get("type", "")),
                            # NOTE(review): SQLAlchemy's inspector column
                            # dicts don't include a "max_length" key, so
                            # length is presumably always None here — length
                            # lives on col["type"]; verify intent.
                            length=col.get("max_length"),
                            comment=col.get("comment"),
                            is_nullable=col.get("nullable", True),
                            sample_data=sample,
                        )
                        db.add(col_obj)
                        total_columns += 1

                # Counts every table seen, new or pre-existing.
                total_tables += 1

            # Persist all new columns for this schema in one commit.
            db.commit()

        # Mark the source healthy after a full successful pass.
        source.status = "active"
        db.commit()

        return {
            "success": True,
            "message": "元数据同步成功",
            "databases": len(db_names),
            "tables": total_tables,
            "columns": total_columns,
        }
    except Exception as e:
        # Any connection/introspection failure flags the source as errored;
        # the error text is surfaced to the caller instead of re-raising.
        source.status = "error"
        db.commit()
        return {"success": False, "message": f"同步失败: {str(e)}", "databases": 0, "tables": 0, "columns": 0}
|