summaryrefslogtreecommitdiffstats
path: root/alembic
diff options
context:
space:
mode:
Diffstat (limited to 'alembic')
-rw-r--r--alembic/README1
-rw-r--r--alembic/env.py79
-rw-r--r--alembic/script.py.mako24
-rw-r--r--alembic/versions/4a3773e332a0_use_utc_datetime_in_db.py.old110
4 files changed, 214 insertions, 0 deletions
diff --git a/alembic/README b/alembic/README
new file mode 100644
index 0000000..98e4f9c
--- /dev/null
+++ b/alembic/README
@@ -0,0 +1 @@
+Generic single-database configuration. \ No newline at end of file
diff --git a/alembic/env.py b/alembic/env.py
new file mode 100644
index 0000000..2e65ca4
--- /dev/null
+++ b/alembic/env.py
@@ -0,0 +1,79 @@
+from logging.config import fileConfig
+
+from sqlalchemy import engine_from_config
+from sqlalchemy import pool
+
+from alembic import context
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# Interpret the config file for Python logging.
+# This line sets up loggers basically.
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+from app import Base  # the project's declarative base (imported for autogenerate support)
+target_metadata = Base.metadata  # metadata that 'alembic revision --autogenerate' diffs against
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,  # no pooling needed for a one-shot migration run
+    )
+
+    with connectable.connect() as connection:
+        context.configure(
+            connection=connection, target_metadata=target_metadata
+        )
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
new file mode 100644
index 0000000..55df286
--- /dev/null
+++ b/alembic/script.py.mako
@@ -0,0 +1,24 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+ ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+ ${downgrades if downgrades else "pass"}
diff --git a/alembic/versions/4a3773e332a0_use_utc_datetime_in_db.py.old b/alembic/versions/4a3773e332a0_use_utc_datetime_in_db.py.old
new file mode 100644
index 0000000..e011a3a
--- /dev/null
+++ b/alembic/versions/4a3773e332a0_use_utc_datetime_in_db.py.old
@@ -0,0 +1,118 @@
+"""use UTC DateTime in DB
+
+Revision ID: 4a3773e332a0
+Revises:
+Create Date: 2022-11-15 17:35:11.717714
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '4a3773e332a0'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('borrows', sa.Column('purchase_utc', sa.DateTime(), nullable=True))
+    op.add_column('borrows', sa.Column('expiration_utc', sa.DateTime(), nullable=True))
+    op.add_column('borrows', sa.Column('purchase_timezone', sa.Integer(), nullable=True))
+    op.add_column('borrows', sa.Column('expiration_timezone', sa.Integer(), nullable=True))
+    # Lightweight table definition so the data migration does not depend on app models.
+    borrows = sa.Table(
+        "borrows",
+        sa.MetaData(),
+        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
+        sa.Column("purchase", sa.String, nullable=True),
+        sa.Column("expiration", sa.String, nullable=True),
+        sa.Column("purchase_utc", sa.DateTime, nullable=True),
+        sa.Column("expiration_utc", sa.DateTime, nullable=True),
+        sa.Column("purchase_timezone", sa.Integer, nullable=True),
+        sa.Column("expiration_timezone", sa.Integer, nullable=True)
+    )
+    connection = op.get_bind()
+    results = connection.execute(sa.select([
+        borrows.c.id,
+        borrows.c.purchase,
+        borrows.c.expiration,
+        borrows.c.purchase_utc,
+        borrows.c.expiration_utc,
+        borrows.c.purchase_timezone,
+        borrows.c.expiration_timezone
+    ])).fetchall()
+    from datetime import datetime, timezone
+    for id, purchase, expiration, purchase_utc, expiration_utc, purchase_timezone, expiration_timezone in results:
+        if id % 1000 == 0:
+            print(f"... obdelujem id {id}", end="\r")
+        # Skip rows missing either timestamp; strptime(None) would abort the migration.
+        if purchase is None or expiration is None:
+            print(f"at id {id} purchase or expiration is None")
+            continue
+        purchase_utc = datetime.strptime(purchase, "%Y-%m-%dT%H:%M:%S%z")
+        expiration_utc = datetime.strptime(expiration, "%Y-%m-%dT%H:%M:%S%z")
+        # BUG FIX: timedelta.seconds is always non-negative, so a negative UTC
+        # offset (e.g. -02:00) was stored as 79200; total_seconds() keeps the sign.
+        purchase_timezone = int(purchase_utc.utcoffset().total_seconds())
+        expiration_timezone = int(expiration_utc.utcoffset().total_seconds())
+        purchase_utc = purchase_utc.astimezone(timezone.utc).replace(tzinfo=None)
+        expiration_utc = expiration_utc.astimezone(timezone.utc).replace(tzinfo=None)
+        connection.execute(borrows.update().where(borrows.c.id == id).values(
+            purchase_utc = purchase_utc,
+            expiration_utc = expiration_utc,
+            purchase_timezone = purchase_timezone,
+            expiration_timezone = expiration_timezone
+        ))
+    op.drop_column('borrows', 'expiration')
+    op.drop_column('borrows', 'purchase')
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('borrows', sa.Column('purchase', sa.VARCHAR(), nullable=True))
+    op.add_column('borrows', sa.Column('expiration', sa.VARCHAR(), nullable=True))
+    borrows = sa.Table(
+        "borrows",
+        sa.MetaData(),
+        sa.Column("id", sa.Integer, primary_key=True, nullable=False),
+        sa.Column("purchase", sa.String, nullable=True),
+        sa.Column("expiration", sa.String, nullable=True),
+        sa.Column("purchase_utc", sa.DateTime, nullable=True),
+        sa.Column("expiration_utc", sa.DateTime, nullable=True),
+        sa.Column("purchase_timezone", sa.Integer, nullable=True),
+        sa.Column("expiration_timezone", sa.Integer, nullable=True)
+    )
+    connection = op.get_bind()
+    results = connection.execute(sa.select([
+        borrows.c.id,
+        borrows.c.purchase,
+        borrows.c.expiration,
+        borrows.c.purchase_utc,
+        borrows.c.expiration_utc,
+        borrows.c.purchase_timezone,
+        borrows.c.expiration_timezone
+    ])).fetchall()
+    from datetime import timezone, timedelta
+    for id, purchase, expiration, purchase_utc, expiration_utc, purchase_timezone, expiration_timezone in results:
+        if id % 1000 == 0:
+            print(f"... obdelujem id {id}", end="\r")
+        if purchase_utc is None or expiration_utc is None:
+            print(f"at id {id} purchase_utc is None")
+            continue
+        # BUG FIX: the stored datetimes are naive UTC; calling .astimezone() on a
+        # naive datetime assumes *system local* time, so attach UTC explicitly first.
+        purchase_tz = timezone(timedelta(seconds=purchase_timezone))
+        expiration_tz = timezone(timedelta(seconds=expiration_timezone))
+        connection.execute(borrows.update().where(borrows.c.id == id).values(
+            purchase = purchase_utc.replace(tzinfo=timezone.utc).astimezone(purchase_tz).isoformat(),
+            expiration = expiration_utc.replace(tzinfo=timezone.utc).astimezone(expiration_tz).isoformat()
+        ))
+    op.drop_column('borrows', 'expiration_timezone')
+    op.drop_column('borrows', 'purchase_timezone')
+    op.drop_column('borrows', 'expiration_utc')
+    op.drop_column('borrows', 'purchase_utc')
+    # ### end Alembic commands ###