Initial release of DosVault.

This commit is contained in:
2025-09-06 13:53:44 -04:00
commit b3e71456c8
41 changed files with 7391 additions and 0 deletions

92
migrations/env.py Normal file
View File

@@ -0,0 +1,92 @@
# Alembic migration environment for DosVault.
#
# Wires Alembic to the application's models and SQLite database. The
# sys.path manipulation must run before the `libs.*` imports below.
import sys
from pathlib import Path
# Add src to Python path so the project-local `libs` package resolves
# regardless of the directory Alembic is invoked from.
src_path = Path(__file__).parent.parent / "src"
sys.path.insert(0, str(src_path))
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# Import your models
from libs.database import Base
from libs.config import Config
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# Set the SQLAlchemy URL from our config.
# NOTE(review): this overrides any sqlalchemy.url value in alembic.ini.
app_config = Config()
config.set_main_option("sqlalchemy.url", f"sqlite:///{app_config.database_path}")
# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL and no Engine, so no DBAPI
    needs to be available; context.execute() calls emit the generated
    SQL to the script output instead of a live database.
    """
    offline_options = {
        "url": config.get_main_option("sqlalchemy.url"),
        "target_metadata": target_metadata,
        "literal_binds": True,
        "dialect_opts": {"paramstyle": "named"},
        # Batch mode lets SQLite emulate ALTER TABLE via copy-and-move.
        "render_as_batch": True,
    }
    context.configure(**offline_options)
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the Alembic config section and applies the
    migrations over a live database connection.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            # Batch mode lets SQLite emulate ALTER TABLE via copy-and-move.
            render_as_batch=True,
        )
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic imports this module and we dispatch on mode —
# offline emits SQL text, online executes against the database.
if not context.is_offline_mode():
    run_migrations_online()
else:
    run_migrations_offline()

24
migrations/script.py.mako Normal file
View File

@@ -0,0 +1,24 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,106 @@
"""Initial database schema
Revision ID: 001
Revises:
Create Date: 2024-01-01 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.types import TypeDecorator
from pathlib import Path
# revision identifiers, used by Alembic.
revision = '001'
down_revision = None
branch_labels = None
depends_on = None
# Define the PathType here since it's needed for the migration
class PathType(TypeDecorator):
impl = sa.String
cache_ok = True
def process_bind_param(self, value, dialect):
return None if value is None else str(value)
def process_result_value(self, value, dialect):
return None if value is None else Path(value)
def upgrade() -> None:
    """Create the initial DosVault schema.

    Tables: tags, genre, game, metadata, and the metadata_genres /
    metadata_tags association tables.
    """
    # This represents the initial schema from the original system.
    # The tables (tags, genre, game, metadata, metadata_genres, metadata_tags)
    # already exist in the database, so this migration is just for tracking.
    # If running on a fresh database, these would create the tables:
    # Create tags table
    op.create_table('tags',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_tags'))
    )
    op.create_index('ix_tags_name', 'tags', ['name'], unique=True)
    # Create genre table
    op.create_table('genre',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=30), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_genre'))
    )
    op.create_index('ix_genre_name', 'genre', ['name'], unique=True)
    # Create game table; `path` uses the custom PathType defined above
    # and must be unique per game.
    op.create_table('game',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=66), nullable=False),
        sa.Column('path', PathType(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_game')),
        sa.UniqueConstraint('path', name=op.f('uq_game_path'))
    )
    op.create_index('ix_game_title', 'game', ['title'])
    # Create metadata table: one optional metadata row per game
    # (uq_metadata_game_id), removed with its game via ON DELETE CASCADE.
    op.create_table('metadata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('game_id', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=66), nullable=False),
        sa.Column('description', sa.String(), nullable=True),
        sa.Column('year', sa.Integer(), nullable=True),
        sa.Column('developer', sa.String(length=255), nullable=True),
        sa.Column('publisher', sa.String(length=255), nullable=True),
        sa.Column('players', sa.Integer(), nullable=True),
        sa.Column('cover_image', sa.String(), nullable=True),
        sa.Column('screenshot', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['game_id'], ['game.id'], name=op.f('fk_metadata_game_id_game'), ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_metadata')),
        sa.UniqueConstraint('game_id', name=op.f('uq_metadata_game_id'))
    )
    # Create association tables (many-to-many: metadata <-> genre/tags).
    # NOTE(review): the UniqueConstraints below duplicate the composite
    # primary keys — harmless but redundant.
    op.create_table('metadata_genres',
        sa.Column('metadata_id', sa.Integer(), nullable=False),
        sa.Column('genre_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['genre_id'], ['genre.id'], name=op.f('fk_metadata_genres_genre_id_genre'), ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['metadata_id'], ['metadata.id'], name=op.f('fk_metadata_genres_metadata_id_metadata'), ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('metadata_id', 'genre_id', name=op.f('pk_metadata_genres')),
        sa.UniqueConstraint('metadata_id', 'genre_id', name=op.f('uq_metadata_genres_metadata_id'))
    )
    op.create_table('metadata_tags',
        sa.Column('metadata_id', sa.Integer(), nullable=False),
        sa.Column('tag_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['metadata_id'], ['metadata.id'], name=op.f('fk_metadata_tags_metadata_id_metadata'), ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['tag_id'], ['tags.id'], name=op.f('fk_metadata_tags_tag_id_tags'), ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('metadata_id', 'tag_id', name=op.f('pk_metadata_tags')),
        sa.UniqueConstraint('metadata_id', 'tag_id', name=op.f('uq_metadata_tags_metadata_id'))
    )
def downgrade() -> None:
    """Drop every table created by this revision.

    Children are dropped before parents so foreign-key references are
    removed before the tables they point at.
    """
    for table_name in (
        'metadata_tags',
        'metadata_genres',
        'metadata',
        'game',
        'genre',
        'tags',
    ):
        op.drop_table(table_name)

View File

@@ -0,0 +1,47 @@
"""Add user authentication system
Revision ID: 002
Revises: 001
Create Date: 2024-01-01 11:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the users table and the user_favorites association table."""
    # Create users table
    op.create_table('users',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('username', sa.String(length=50), nullable=False),
        sa.Column('email', sa.String(length=100), nullable=False),
        sa.Column('password_hash', sa.String(length=255), nullable=False),
        sa.Column('role', sa.String(length=20), nullable=False),
        sa.Column('is_active', sa.Boolean(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=False),
        sa.Column('last_login', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_users'))
    )
    # Usernames and emails are unique account identifiers.
    op.create_index('ix_users_email', 'users', ['email'], unique=True)
    op.create_index('ix_users_username', 'users', ['username'], unique=True)
    # Create user_favorites association table (many-to-many user <-> game);
    # rows are removed automatically when either side is deleted.
    # NOTE(review): the UniqueConstraint duplicates the composite PK.
    op.create_table('user_favorites',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('game_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['game_id'], ['game.id'], name=op.f('fk_user_favorites_game_id_game'), ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_user_favorites_user_id_users'), ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('user_id', 'game_id', name=op.f('pk_user_favorites')),
        sa.UniqueConstraint('user_id', 'game_id', name=op.f('uq_user_favorites_user_id'))
    )
def downgrade() -> None:
    """Remove the authentication tables, association table first."""
    for table_name in ('user_favorites', 'users'):
        op.drop_table(table_name)

View File

@@ -0,0 +1,33 @@
"""Example migration - add rating column to metadata
This is an example of how to create a migration.
To use this:
1. Remove the .example extension
2. Update the revision ID and down_revision
3. Run: migrate upgrade
Revision ID: 002
Revises: 001
Create Date: 2024-01-01 11:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '002'
down_revision = '001'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a nullable float `rating` column to the metadata table."""
    rating_column = sa.Column('rating', sa.Float(), nullable=True)
    # batch_alter_table so SQLite can apply the ALTER via copy-and-move.
    with op.batch_alter_table('metadata', schema=None) as batch_op:
        batch_op.add_column(rating_column)
def downgrade() -> None:
    """Drop the `rating` column added by upgrade()."""
    # batch_alter_table so SQLite can apply the ALTER via copy-and-move.
    with op.batch_alter_table('metadata', schema=None) as batch_op:
        batch_op.drop_column('rating')

View File

@@ -0,0 +1,38 @@
"""add local image path fields
Revision ID: 3e8f92662c04
Revises: 002
Create Date: 2025-09-06 01:18:21.497321
"""
from alembic import op
import sqlalchemy as sa
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent.parent / 'src'))
from libs.database import PathType
# revision identifiers, used by Alembic.
revision = '3e8f92662c04'
down_revision = '002'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add nullable local-path columns for cover art and screenshots."""
    # ### commands auto generated by Alembic - please adjust! ###
    new_columns = ('cover_image_path', 'screenshot_path')
    with op.batch_alter_table('metadata', schema=None) as batch_op:
        for column_name in new_columns:
            batch_op.add_column(sa.Column(column_name, PathType(), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Drop the local-path columns added by upgrade() (reverse order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('metadata', schema=None) as batch_op:
        for column_name in ('screenshot_path', 'cover_image_path'):
            batch_op.drop_column(column_name)
    # ### end Alembic commands ###