mirror of
https://github.com/ferdzo/iotDashboard.git
synced 2026-04-05 09:06:26 +00:00
Migration to UV, introducing db_write, mqtt_ingestion, db_migrations, half working prototype.
This commit is contained in:
1
db_migrations/.env.example
Normal file
1
db_migrations/.env.example
Normal file
@@ -0,0 +1 @@
|
||||
CONNECTION_STRING=postgresql://user:password@localhost:5432/iotdashboard
|
||||
5
db_migrations/.gitignore
vendored
Normal file
5
db_migrations/.gitignore
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.so
|
||||
.Python
|
||||
*.egg-info/
|
||||
111
db_migrations/README.md
Normal file
111
db_migrations/README.md
Normal file
@@ -0,0 +1,111 @@
|
||||
# Database Migrations
|
||||
|
||||
This directory contains all database schema definitions and migrations for the IoT Dashboard project.
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Install Dependencies
|
||||
|
||||
```bash
|
||||
pip install alembic sqlalchemy python-dotenv psycopg2-binary
|
||||
```
|
||||
|
||||
### 2. Configure Database
|
||||
|
||||
Set `CONNECTION_STRING` or `DATABASE_URL` in the root `.env` file:
|
||||
|
||||
```bash
|
||||
CONNECTION_STRING=postgresql://user:password@localhost:5432/iotdashboard
|
||||
```
|
||||
|
||||
### 3. Create Initial Migration
|
||||
|
||||
```bash
|
||||
chmod +x migrate.sh
|
||||
./migrate.sh create "initial schema"
|
||||
```
|
||||
|
||||
### 4. Review Migration
|
||||
|
||||
Check the generated file in `alembic/versions/`
|
||||
|
||||
### 5. Apply Migration
|
||||
|
||||
```bash
|
||||
./migrate.sh upgrade
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Create a New Migration
|
||||
|
||||
After editing `models.py`:
|
||||
|
||||
```bash
|
||||
./migrate.sh create "add new column"
|
||||
```
|
||||
|
||||
### Apply Migrations
|
||||
|
||||
```bash
|
||||
./migrate.sh upgrade
|
||||
```
|
||||
|
||||
### Check Current Version
|
||||
|
||||
```bash
|
||||
./migrate.sh current
|
||||
```
|
||||
|
||||
### View History
|
||||
|
||||
```bash
|
||||
./migrate.sh history
|
||||
```
|
||||
|
||||
### Rollback
|
||||
|
||||
```bash
|
||||
./migrate.sh downgrade 1
|
||||
```
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
db_migrations/
|
||||
├── models.py # SQLAlchemy models (schema definition)
|
||||
├── alembic.ini # Alembic configuration
|
||||
├── alembic/
|
||||
│ ├── env.py # Migration environment
|
||||
│ ├── script.py.mako # Migration template
|
||||
│ └── versions/ # Generated migrations
|
||||
├── migrate.sh # Helper script
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## Modifying Schema
|
||||
|
||||
1. Edit `models.py` to define your changes
|
||||
2. Run `./migrate.sh create "description"`
|
||||
3. Review the generated migration
|
||||
4. Run `./migrate.sh upgrade`
|
||||
|
||||
## Using Models in Services
|
||||
|
||||
Services can import models from here:
|
||||
|
||||
```python
|
||||
# In services/db_write/db_writer.py or any other service
|
||||
import sys
|
||||
sys.path.insert(0, '/path/to/db_migrations')
|
||||
from models import SensorReading
|
||||
```
|
||||
|
||||
Alternatively — and preferably — make `db_migrations` an importable package (add an `__init__.py` and install or reference it via the project's packaging) so services can use normal absolute imports instead of modifying `sys.path`.
|
||||
|
||||
## Notes
|
||||
|
||||
- `alembic/env.py` loads environment variables from `db_migrations/.env` (see `.env.example` in this directory)
|
||||
- Migrations are applied to whatever database is in `CONNECTION_STRING`
|
||||
- Always review generated migrations before applying
|
||||
- Keep `models.py` as the single source of truth for schema
|
||||
40
db_migrations/alembic.ini
Normal file
40
db_migrations/alembic.ini
Normal file
@@ -0,0 +1,40 @@
|
||||
# Alembic Configuration
|
||||
|
||||
[alembic]
|
||||
script_location = alembic
|
||||
file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s
|
||||
timezone = UTC
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
90
db_migrations/alembic/env.py
Normal file
90
db_migrations/alembic/env.py
Normal file
@@ -0,0 +1,90 @@
|
||||
from logging.config import fileConfig
from sqlalchemy import engine_from_config, pool
from alembic import context
import os
import sys

# Add the parent directory (db_migrations/) to sys.path so that
# `from models import Base` below resolves to db_migrations/models.py.
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

# Load environment variables before reading CONNECTION_STRING/DATABASE_URL.
# NOTE(review): this resolves to db_migrations/.env (one level above this
# alembic/ directory), NOT the repository root — the README says the .env
# lives in the project root; confirm which location is intended.
from dotenv import load_dotenv
load_dotenv(os.path.join(os.path.dirname(__file__), '../.env'))

# Import the declarative base whose metadata drives autogenerate.
from models import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Prefer CONNECTION_STRING, fall back to DATABASE_URL; if neither is set
# the (absent) sqlalchemy.url from alembic.ini is used and Alembic will
# fail later with its own error.
database_url = os.getenv('CONNECTION_STRING') or os.getenv('DATABASE_URL')
if database_url:
    config.set_main_option('sqlalchemy.url', database_url)

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL — no Engine
    is created, so no DBAPI needs to be installed. Each migration's SQL
    is emitted to the script output (via context.execute) instead of
    being executed against a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file's [alembic] section (including the
    sqlalchemy.url injected from the environment above) and applies the
    migrations over a live database connection.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    connectable = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot migration run; no pooling needed
    )

    with connectable.connect() as connection:
        context.configure(connection=connection, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
# Entry point: Alembic flips to offline mode for `--sql` invocations.
(run_migrations_offline if context.is_offline_mode() else run_migrations_online)()
28
db_migrations/alembic/script.py.mako
Normal file
28
db_migrations/alembic/script.py.mako
Normal file
@@ -0,0 +1,28 @@
|
||||
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

## Mako comment (stripped from rendered migration files): the identifiers
## below are substituted by Alembic when a new revision is generated and
## form the linked list of the migration history.
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    """Upgrade schema."""
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    """Downgrade schema."""
    ${downgrades if downgrades else "pass"}
@@ -0,0 +1,50 @@
|
||||
"""Initial

Revision ID: dae12d7f4ddf
Revises:
Create Date: 2025-10-28 22:06:45.637836+00:00

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
# NOTE: these are consumed by Alembic's version graph — do not edit by hand.
revision: str = 'dae12d7f4ddf'
down_revision: Union[str, Sequence[str], None] = None  # root migration
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
|
||||
def upgrade() -> None:
    """Create the initial schema: `devices` and the `telemetry` table.

    Matches models.py: devices has a Text primary key; telemetry uses a
    composite primary key (time, device_id, metric) with a FK to devices.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('devices',
    sa.Column('id', sa.Text(), nullable=False),
    sa.Column('name', sa.Text(), nullable=False),
    sa.Column('location', sa.Text(), nullable=True),
    # nullable: the model's default=True is client-side only, so rows
    # inserted outside SQLAlchemy may carry NULL here.
    sa.Column('is_active', sa.Boolean(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('telemetry',
    sa.Column('time', sa.DateTime(timezone=True), nullable=False),
    sa.Column('device_id', sa.Text(), nullable=False),
    sa.Column('metric', sa.Text(), nullable=False),
    sa.Column('value', sa.Float(), nullable=False),
    sa.Column('unit', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
    sa.PrimaryKeyConstraint('time', 'device_id', 'metric')
    )
    # Secondary index for per-device time-range queries.
    op.create_index('idx_telemetry_device_time', 'telemetry', ['device_id', 'time'], unique=False)
    # NOTE(review): models.py says telemetry "will be converted to a
    # TimescaleDB hypertable", but no create_hypertable call exists here —
    # confirm where that conversion is supposed to happen.
    # ### end Alembic commands ###
|
||||
|
||||
def downgrade() -> None:
    """Drop the initial schema in reverse dependency order.

    telemetry (and its index) must go before devices because of the
    device_id foreign key.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('idx_telemetry_device_time', table_name='telemetry')
    op.drop_table('telemetry')
    op.drop_table('devices')
    # ### end Alembic commands ###
48
db_migrations/models.py
Normal file
48
db_migrations/models.py
Normal file
@@ -0,0 +1,48 @@
|
||||
"""
Database models for the IoT Dashboard.

To modify schema:
1. Edit models here
2. Run: alembic revision --autogenerate -m "description"
3. Review the generated migration in alembic/versions/
4. Run: alembic upgrade head
"""
from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Index, Text
# Fix: declarative_base moved to sqlalchemy.orm in SQLAlchemy 1.4; importing
# it from sqlalchemy.ext.declarative is the deprecated legacy location and
# emits a deprecation warning on 2.0.
from sqlalchemy.orm import declarative_base

# Shared declarative base: every model below registers its table on
# Base.metadata, which alembic/env.py uses as the autogenerate target.
Base = declarative_base()
|
||||
|
||||
class Device(Base):
    """IoT devices registered in the system."""
    __tablename__ = 'devices'

    # Caller-supplied textual identifier (not an autoincrement integer).
    # NOTE(review): presumably the device id reported over MQTT — confirm.
    id = Column(Text, primary_key=True)
    name = Column(Text, nullable=False)
    # Optional human-readable placement of the device.
    location = Column(Text)
    # Client-side default only: rows inserted outside SQLAlchemy get NULL.
    is_active = Column(Boolean, default=True)

    def __repr__(self):
        return f"<Device(id={self.id}, name={self.name})>"
|
||||
|
||||
class Telemetry(Base):
    """
    Time-series telemetry data from devices.
    This will be converted to a TimescaleDB hypertable.
    """
    __tablename__ = 'telemetry'

    # Composite primary key (time, device_id, metric): one reading per
    # device/metric per timestamp, and includes `time` as TimescaleDB
    # requires for hypertable partitioning.
    time = Column(DateTime(timezone=True), primary_key=True, nullable=False)
    device_id = Column(Text, ForeignKey('devices.id'), primary_key=True, nullable=False)
    metric = Column(Text, primary_key=True, nullable=False)  # e.g., 'light', 'temperature'
    value = Column(Float, nullable=False)
    # Optional unit label for `value` (free-form text).
    unit = Column(Text)

    # Secondary index for per-device time-range queries.
    __table_args__ = (
        Index('idx_telemetry_device_time', 'device_id', 'time'),
    )

    def __repr__(self):
        return f"<Telemetry(device={self.device_id}, metric={self.metric}, value={self.value})>"
|
||||
Reference in New Issue
Block a user