from datetime import datetime

from app import db


class DataTable(db.Model):
    """Model for dynamically created data tables."""

    __tablename__ = 'data_tables'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), unique=True, nullable=False)
    description = db.Column(db.Text)
    schema = db.Column(db.JSON)  # Store table schema as JSON
    created_at = db.Column(db.DateTime, default=datetime.utcnow)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
    is_active = db.Column(db.Boolean, default=True)


class DataPipeline(db.Model):
    """Model for data processing pipelines."""

    __tablename__ = 'data_pipelines'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100), unique=True, nullable=False)
    description = db.Column(db.Text)
    source_table = db.Column(db.String(100), nullable=False)
    target_table = db.Column(db.String(100))
    pipeline_config = db.Column(db.JSON)  # Store pipeline configuration
    yaml_config = db.Column(db.Text)  # Store original YAML configuration
    is_active = db.Column(db.Boolean, default=True)
    created_at = db.Column(db.DateTime, default=datetime.utcnow)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)


class DataRecord(db.Model):
    """Generic model for storing data records."""

    __tablename__ = 'data_records'

    id = db.Column(db.Integer, primary_key=True)
    table_name = db.Column(db.String(100), nullable=False, index=True)
    data = db.Column(db.JSON, nullable=False)  # Store actual data as JSON
    created_at = db.Column(db.DateTime, default=datetime.utcnow)
    updated_at = db.Column(db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)
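

# Example usage (illustrative sketch, not part of this module): it assumes
# `db` from `app` is a Flask-SQLAlchemy instance and that the Flask
# application object is importable as `app.app`; adjust to your project layout.
#
#     from app import app, db
#
#     with app.app_context():
#         db.create_all()
#         table = DataTable(name="sales", schema={"columns": ["amount"]})
#         record = DataRecord(table_name="sales", data={"amount": 100})
#         db.session.add_all([table, record])
#         db.session.commit()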