Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Locations: DB Model update & migration to add location table #463

Draft
wants to merge 1 commit into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,97 @@
"""Create location table and migrate data from spool.

Revision ID: 4d0257b98228
Revises: 415a8f855e14
Create Date: 2024-08-27 18:42:54.365608
"""

from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
from sqlalchemy.types import String, Integer
from datetime import datetime
from typing import Dict

# revision identifiers, used by Alembic.
revision = "4d0257b98228"  # this migration's id
down_revision = "415a8f855e14"  # the migration this one builds on
branch_labels: str | None = None  # no branch labels for this revision
depends_on: str | None = None  # no cross-branch dependencies


def upgrade() -> None:
    """Perform the upgrade.

    Creates the ``location`` and ``location_field`` tables, adds a
    ``location_id`` foreign key to ``spool``, migrates each distinct
    string value from ``spool.location`` into a ``location`` row, points
    the spools at those rows, and finally drops the old free-text
    ``spool.location`` column.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "location",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("registered", sa.DateTime(), nullable=False, server_default=sa.func.now()),
        sa.Column("name", sa.String(length=64), nullable=False),
        sa.PrimaryKeyConstraint("id", name="pk_location"),
        sa.UniqueConstraint("name", name="uq_location_name"),
    )
    op.create_table(
        "location_field",
        sa.Column("location_id", sa.Integer(), nullable=False),
        sa.Column("key", sa.String(length=64), nullable=False),
        sa.Column("value", sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ["location_id"],
            ["location.id"],
            name="fk_location_field_location_id",
        ),
        sa.PrimaryKeyConstraint("location_id", "key", name="pk_location_field"),
    )
    op.create_index(op.f("ix_location_field_key"), "location_field", ["key"], unique=False)
    op.create_index(op.f("ix_location_field_location_id"), "location_field", ["location_id"], unique=False)

    # Batch mode so the ALTER also works on SQLite, which rebuilds the table.
    with op.batch_alter_table("spool") as batch_op:
        batch_op.add_column(sa.Column("location_id", sa.Integer(), nullable=True))
        batch_op.create_foreign_key("fk_spool_location_id", "location", ["location_id"], ["id"])

    # Lightweight table constructs used only for the data-migration queries.
    spool_table = table("spool", column("location", String), column("id", Integer), column("location_id", Integer))
    location_table = table("location", column("id", Integer), column("name", String), column("registered", sa.DateTime))

    connection = op.get_bind()
    result = connection.execute(select(spool_table.c.location).distinct())

    # Maps each distinct old location string to the id of its new location row.
    location_id_map: Dict[str, int] = {}

    for row in result:
        location_name = row[0]
        # Skip NULL/empty locations and names already inserted.
        if location_name and location_name not in location_id_map:
            connection.execute(
                location_table.insert().values(name=location_name, registered=datetime.utcnow())
            )
            # Fetch the generated id with a portable SELECT on the unique
            # name column instead of INSERT ... RETURNING, which is not
            # supported by MySQL (and MariaDB before 10.5).
            location_id = connection.execute(
                select(location_table.c.id).where(location_table.c.name == location_name)
            ).scalar()
            if location_id is not None:
                location_id_map[location_name] = location_id

    # Point each spool at its migrated location row.
    for location_name, location_id in location_id_map.items():
        connection.execute(
            spool_table.update().where(spool_table.c.location == location_name).values(location_id=location_id)
        )

    # Drop the old free-text column now that its data has been migrated.
    with op.batch_alter_table("spool") as batch_op:
        batch_op.drop_column("location")
    # ### end Alembic commands ###


def downgrade() -> None:
    """Revert the upgrade.

    Restores the free-text ``spool.location`` column, copies the location
    names back into it from the ``location`` table (so downgrading does
    not lose data), then drops the foreign key, the ``location_id``
    column, and the tables created by the upgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("spool") as batch_op:
        # String(64) matches the pre-migration Spool.location model column.
        batch_op.add_column(sa.Column("location", sa.String(length=64), nullable=True))

    # Copy the location names back before the link column is dropped.
    spool_table = table("spool", column("location", String), column("location_id", Integer))
    location_table = table("location", column("id", Integer), column("name", String))

    connection = op.get_bind()
    for location_id, location_name in connection.execute(
        select(location_table.c.id, location_table.c.name)
    ):
        connection.execute(
            spool_table.update()
            .where(spool_table.c.location_id == location_id)
            .values(location=location_name)
        )

    with op.batch_alter_table("spool") as batch_op:
        batch_op.drop_constraint("fk_spool_location_id", type_="foreignkey")
        batch_op.drop_column("location_id")

    op.drop_index(op.f("ix_location_field_location_id"), table_name="location_field")
    op.drop_index(op.f("ix_location_field_key"), table_name="location_field")
    op.drop_table("location_field")
    op.drop_table("location")
    # ### end Alembic commands ###
26 changes: 25 additions & 1 deletion spoolman/database/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ class Spool(Base):
initial_weight: Mapped[Optional[float]] = mapped_column()
spool_weight: Mapped[Optional[float]] = mapped_column()
used_weight: Mapped[float] = mapped_column()
location: Mapped[Optional[str]] = mapped_column(String(64))
location_id: Mapped[Optional[int]] = mapped_column(ForeignKey("location.id"))
location: Mapped[Optional["Location"]] = relationship("Location", back_populates="spools")
lot_nr: Mapped[Optional[str]] = mapped_column(String(64))
comment: Mapped[Optional[str]] = mapped_column(String(1024))
archived: Mapped[Optional[bool]] = mapped_column()
Expand All @@ -83,6 +84,20 @@ class Spool(Base):
)


class Location(Base):
    """A named storage location that spools can be assigned to."""

    __tablename__ = "location"

    # Surrogate primary key.
    id: Mapped[int] = mapped_column(primary_key=True, index=True)
    # Timestamp of when this location was registered.
    registered: Mapped[datetime] = mapped_column()
    # Human-readable location name; unique across all locations.
    name: Mapped[str] = mapped_column(String(64), unique=True, nullable=False)
    # Spools assigned to this location (inverse of Spool.location).
    spools: Mapped[list["Spool"]] = relationship("Spool", back_populates="location")
    # Extra key/value fields; removed together with the owning location
    # via the delete/delete-orphan cascade, eagerly loaded with a join.
    extra: Mapped[list["LocationField"]] = relationship(
        back_populates="location",
        cascade="save-update, merge, delete, delete-orphan",
        lazy="joined",
    )


class Setting(Base):
__tablename__ = "setting"

Expand Down Expand Up @@ -116,3 +131,12 @@ class SpoolField(Base):
spool: Mapped["Spool"] = relationship(back_populates="extra")
key: Mapped[str] = mapped_column(String(64), primary_key=True, index=True)
value: Mapped[str] = mapped_column(Text())


class LocationField(Base):
    """Extra user-defined key/value field attached to a Location."""

    __tablename__ = "location_field"

    # Owning location; part of the composite primary key.
    location_id: Mapped[int] = mapped_column(ForeignKey("location.id"), primary_key=True, index=True)
    # Inverse of Location.extra.
    location: Mapped["Location"] = relationship(back_populates="extra")
    # Field name; the (location_id, key) composite PK makes it unique per location.
    key: Mapped[str] = mapped_column(String(64), primary_key=True, index=True)
    value: Mapped[str] = mapped_column(Text())
Loading