What's this, an ACTUAL WORKING DATABASE?

parent e5d867ec
# A generic, single database configuration.
[alembic]
# path to migration scripts
script_location = where/alembic
# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =
# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
sqlalchemy.url = sqlite:///db.sqlite3
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
Generic single-database configuration.
\ No newline at end of file
from logging.config import fileConfig
from sqlalchemy import engine_from_config
from sqlalchemy import pool
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
# Make the project package importable when alembic is run from the repo
# root; assumes the working directory contains the `where` package —
# TODO confirm this holds for how migrations are invoked in CI.
import sys, os
sys.path.append(os.getcwd())
from where.sa import Base
# Autogenerate diffs model metadata against the live database schema.
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL rather
    than an Engine, so no DBAPI needs to be installed; calls to
    context.execute() emit the SQL as script output instead of
    touching a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the ``sqlalchemy.*`` keys of the ini
    section and binds a live connection to the migration context.
    """
    ini_section = config.get_section(config.config_ini_section)
    engine = engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot connection; no pooling needed
    )

    with engine.connect() as conn:
        context.configure(connection=conn, target_metadata=target_metadata)

        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic executes this module and selects offline
# (SQL-script) vs. online (live connection) mode based on how the
# `alembic` command was invoked.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade():
${upgrades if upgrades else "pass"}
def downgrade():
${downgrades if downgrades else "pass"}
"""inital database models
Revision ID: 4af5fa1a7676
Revises:
Create Date: 2020-02-16 19:07:30.646828
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '4af5fa1a7676'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema: category, field, and point tables.

    Creation order matters: `category` must exist before `field` and
    `point`, which both reference it via foreign keys.
    """
    # A category groups points and carries unique, URL-friendly slugs.
    op.create_table('category',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('slug', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
        sa.UniqueConstraint('slug')
    )
    # A field is a typed attribute definition, optionally tied to a category.
    op.create_table('field',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('type', sa.Enum('STRING', 'FLOAT', 'INTEGER', 'BOOLEAN', 'RATING', name='fieldtype'), nullable=False),
        sa.Column('unit', sa.String(), nullable=True),
        sa.Column('category_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(('category_id',), ['category.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # A point is a named lat/lon location with JSON attributes; it may
    # nest under another point via the self-referential parent_id FK.
    op.create_table('point',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('lat', sa.Float(), nullable=False),
        sa.Column('lon', sa.Float(), nullable=False),
        sa.Column('attributes', sa.JSON(), nullable=False),
        sa.Column('category_id', sa.Integer(), nullable=False),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(('category_id',), ['category.id'], ),
        sa.ForeignKeyConstraint(('parent_id',), ['point.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
def downgrade():
    """Drop the initial schema, children before parents to satisfy FKs."""
    # Reverse of creation order: point and field reference category.
    for table_name in ('point', 'field', 'category'):
        op.drop_table(table_name)
from flask import Flask
import sa
app = Flask(__name__)
@app.route('/')
def index():
    """Serve the site root with a static HTML greeting."""
    return "<h1>Hello</h1>"
......
......@@ -15,3 +15,5 @@ class FieldType(enum.Enum):
if field not in data:
raise ValueError(f"Fields of type {self.name} need a {field} field")
self.value[field](data[field]) # This will throw ValueError if it fails to validate
if len(data) != len(self.value):
raise ValueError(f"Too many fields for field of type {self.name}")
from sqlalchemy import String, ARRAY, Table, ForeignKey, Enum, Integer, Float
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy import String, ForeignKey, Enum, Integer, Float, JSON
from sqlalchemy.ext.declarative import as_declarative
from sqlalchemy.orm import relationship, validates
from sqlalchemy.schema import Column
from .field_types import FieldType
# TODO declarative_base
@as_declarative()
class Base(object):
    """Declarative base for all models; supplies a surrogate integer PK."""
    # Earlier design used a UUID string primary key:
    # id = Column(UUID, primary_key=True, default=lambda: str(uuid.uuid4()))
    # Auto-incrementing integer key inherited by every mapped subclass.
    id = Column(Integer, primary_key=True, autoincrement=True)
# This table maintains the field <-> ObjectType links (many-to-many
# association, no mapped class of its own).
# NOTE(review): references an 'objecttype' table that is neither defined
# in this chunk nor created by the initial migration — confirm the
# ObjectType model still exists, or this FK will fail to resolve.
type_links = Table('type_links', Base.metadata,
Column('type_id', Integer, ForeignKey('objecttype.id')),
Column('field_id', Integer, ForeignKey('field.id')))
class Point(Base):
"""
TODO docstring
......@@ -26,7 +19,7 @@ class Point(Base):
name = Column(String, nullable=True)
lat = Column(Float, nullable=False)
lon = Column(Float, nullable=False)
attributes = Column(JSONB, nullable=False)
attributes = Column(JSON, nullable=False)
# Relationships
category_id = Column(Integer, ForeignKey('category.id'), nullable=False)
......@@ -80,8 +73,4 @@ class Field(Base):
"""
Verify that data is the correct type for this Field.
"""
if type(data) is not dict:
raise ValueError('Input "{}" for field {} is not of type dict'.format(data, self.name))
print(type(data))
print(self.type)
raise ValueError('Invalid input "{}" for field {}'.format(data, self.name))
self.type.validate(data)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment