diff --git a/TODO.md b/TODO.md index e15ee80c48278..336510e995012 100644 --- a/TODO.md +++ b/TODO.md @@ -6,3 +6,5 @@ * SQL: Find a way to manage granularity * Create ~/.panoramix/ to host DB and config, generate default config there * Add a per-datasource permission +* Reintroduce query and stopwatch +* Sort tooltip diff --git a/panoramix/bin/panoramix b/panoramix/bin/panoramix index 17bb22f7690fe..eed9b6073bd6c 100755 --- a/panoramix/bin/panoramix +++ b/panoramix/bin/panoramix @@ -10,6 +10,7 @@ from sqlalchemy import Column, Integer, String from panoramix import config, models import csv import gzip +import json manager = Manager(app) @@ -57,7 +58,6 @@ def load_examples(): pass Base.metadata.create_all(db.engine) session = db.session() - with gzip.open(config.basedir + '/data/birth_names.csv.gz') as f: bb_csv = csv.reader(f) for i, (state, year, name, gender, num) in enumerate(bb_csv): @@ -71,33 +71,203 @@ def load_examples(): state=state, year=year, ds=ds, name=name, num=num, gender=gender)) - if i % 5000 == 0: + if i % 1000 == 0: print("{} loaded out of 82527 rows".format(i)) session.commit() session.commit() + #if i>5000: break print("Done loading table!") + print("-" * 80) + print("Creating database reference") DB = models.Database dbobj = session.query(DB).filter_by(database_name='main').first() if not dbobj: - dbobj = DB() - dbobj.database_name = "main" + dbobj = DB(database_name="main") dbobj.sqlalchemy_uri = config.SQLALCHEMY_DATABASE_URI - session.merge(dbobj) + session.add(dbobj) session.commit() - + + print("Creating table reference") TBL = models.Table obj = session.query(TBL).filter_by(table_name='birth_names').first() if not obj: - obj = TBL() - obj.table_name = 'birth_names' + obj = TBL(table_name = 'birth_names') obj.main_dttm_col = 'ds' obj.default_endpoint = "/panoramix/datasource/table/1/?viz_type=table&granularity=one+day&since=100+years&until=now&row_limit=10&where=&flt_col_0=ds&flt_op_0=in&flt_eq_0=&flt_col_1=ds&flt_op_1=in&flt_eq_1=&slice_name=TEST&datasource_name=birth_names&datasource_id=1&datasource_type=table" obj.database = dbobj + obj.columns = [models.TableColumn( + column_name="num", sum=True, type="INTEGER")] obj.fetch_metadata() - session.merge(obj) + models.Table + session.add(obj) session.commit() + tbl = obj + + print("Creating some slices") + def get_slice_json( + slice_name, filter_value, viz_type="table", group_by=None, + granularity="all", filter_operator='in', + row_limit=config.ROW_LIMIT, flt_col_1="gender"): + group_by = group_by if group_by is not None else ["name"] + default_json = { + "compare_lag": "10", + "compare_suffix": "o10Y", + "datasource_id": "1", + "datasource_name": "birth_names", + "datasource_type": "table", + "limit": "25", + "flt_col_1": flt_col_1, + "flt_eq_1": filter_value, + "flt_op_1": filter_operator, + "granularity": granularity, + "groupby": group_by, + "metric": 'sum__num', + "metrics": [ + "sum__num" + ], + "row_limit": row_limit, + "since": "100 years", + "slice_name": slice_name, + "until": "now", + "viz_type": viz_type, + "where": "" + } + return json.dumps(default_json, indent=4, sort_keys=True) + Slice = models.Slice + slices = [] + + slice_name = "Girls" + slc = session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='table', + datasource_type='table', + table=tbl, + params=get_slice_json(slice_name, "girl", row_limit=50)) + session.add(slc) + slices.append(slc) + + slice_name = "Boys" + slc = 
session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='table', + datasource_type='table', + table=tbl, + params=get_slice_json(slice_name, "boy", row_limit=50)) + session.add(slc) + slices.append(slc) + + slice_name = "Participants" + slc = session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='big_number', + datasource_type='table', + table=tbl, + params=get_slice_json(slice_name, "", "big_number", [], "1 day")) + session.add(slc) + slices.append(slc) - session.close() + slice_name = "Genders" + slc = session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='pie', + datasource_type='table', + table=tbl, + params=get_slice_json(slice_name, "", "pie", ['gender'])) + session.add(slc) + slices.append(slc) + + slice_name = "States" + slc = session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='dist_bar', + datasource_type='table', + table=tbl, + params=get_slice_json( + slice_name, "other", "dist_bar", ['state'], + filter_operator='not in', flt_col_1='state')) + session.add(slc) + slices.append(slc) + + slice_name = "Trends" + slc = session.query(Slice).filter_by(slice_name=slice_name).first() + if not slc: + slc = Slice( + slice_name=slice_name, + viz_type='line', + datasource_type='table', + table=tbl, + params=get_slice_json(slice_name, "", "line", ['name'], '1 day')) + session.add(slc) + slices.append(slc) + + + print("Creating a dashboard") + Dash = models.Dashboard + dash = session.query(Dash).filter_by(dashboard_title="Births").first() + if not dash: + dash = Dash( + dashboard_title="Births", + position_json=""" + [ + { + "size_y": 5, + "size_x": 2, + "col": 5, + "slice_id": "1", + "row": 1 + }, + { + "size_y": 5, + "size_x": 2, + "col": 7, + "slice_id": "2", + "row": 1 + }, + { + "size_y": 2, + "size_x": 2, + "col": 1, + "slice_id": "3", + "row": 1 + }, + { + "size_y": 2, + "size_x": 2, + "col": 3, + "slice_id": "4", + "row": 1 + }, + { + "size_y": 3, + "size_x": 4, + "col": 1, + "slice_id": "5", + "row": 3 + }, + { + "size_y": 6, + "size_x": 8, + "col": 1, + "slice_id": "6", + "row": 6 + } + ] + """ + ) + session.add(dash) + for s in slices: + dash.slices.append(s) + session.commit() if __name__ == "__main__": diff --git a/panoramix/migrations/versions/4e6a06bad7a8_init.py b/panoramix/migrations/versions/4e6a06bad7a8_init.py new file mode 100644 index 0000000000000..3b8a9bff452d1 --- /dev/null +++ b/panoramix/migrations/versions/4e6a06bad7a8_init.py @@ -0,0 +1,215 @@ +"""Init + +Revision ID: 4e6a06bad7a8 +Revises: None +Create Date: 2015-09-21 17:30:38.442998 + +""" + +# revision identifiers, used by Alembic. +revision = '4e6a06bad7a8' +down_revision = None + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + ### commands auto generated by Alembic - please adjust! 
### + op.create_table('clusters', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('cluster_name', sa.String(length=250), nullable=True), + sa.Column('coordinator_host', sa.String(length=256), nullable=True), + sa.Column('coordinator_port', sa.Integer(), nullable=True), + sa.Column('coordinator_endpoint', sa.String(length=256), nullable=True), + sa.Column('broker_host', sa.String(length=256), nullable=True), + sa.Column('broker_port', sa.Integer(), nullable=True), + sa.Column('broker_endpoint', sa.String(length=256), nullable=True), + sa.Column('metadata_last_refreshed', sa.DateTime(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('cluster_name') + ) + op.create_table('dashboards', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('dashboard_title', sa.String(length=500), nullable=True), + sa.Column('position_json', sa.Text(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('dbs', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('database_name', sa.String(length=250), nullable=True), + sa.Column('sqlalchemy_uri', sa.String(length=1024), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('database_name') + ) + op.create_table('datasources', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('datasource_name', sa.String(length=250), nullable=True), + sa.Column('is_featured', sa.Boolean(), nullable=True), + sa.Column('is_hidden', sa.Boolean(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('default_endpoint', sa.Text(), nullable=True), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('cluster_name', sa.String(length=250), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=False), + sa.Column('created_by_fk', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['cluster_name'], ['clusters.cluster_name'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['ab_user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('datasource_name') + ) + op.create_table('tables', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', 
sa.Integer(), nullable=False), + sa.Column('table_name', sa.String(length=250), nullable=True), + sa.Column('main_dttm_col', sa.String(length=250), nullable=True), + sa.Column('default_endpoint', sa.Text(), nullable=True), + sa.Column('database_id', sa.Integer(), nullable=False), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['database_id'], ['dbs.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('table_name') + ) + op.create_table('columns', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('datasource_name', sa.String(length=250), nullable=True), + sa.Column('column_name', sa.String(length=256), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=True), + sa.Column('type', sa.String(length=32), nullable=True), + sa.Column('groupby', sa.Boolean(), nullable=True), + sa.Column('count_distinct', sa.Boolean(), nullable=True), + sa.Column('sum', sa.Boolean(), nullable=True), + sa.Column('max', sa.Boolean(), nullable=True), + sa.Column('min', sa.Boolean(), nullable=True), + sa.Column('filterable', sa.Boolean(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('metrics', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('metric_name', sa.String(length=512), nullable=True), + sa.Column('verbose_name', sa.String(length=1024), nullable=True), + sa.Column('metric_type', sa.String(length=32), nullable=True), + sa.Column('datasource_name', sa.String(length=250), nullable=True), + sa.Column('json', sa.Text(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['datasource_name'], ['datasources.datasource_name'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('slices', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('slice_name', sa.String(length=250), nullable=True), + sa.Column('druid_datasource_id', sa.Integer(), nullable=True), + sa.Column('table_id', sa.Integer(), nullable=True), + sa.Column('datasource_type', sa.String(length=200), nullable=True), + sa.Column('datasource_name', sa.String(length=2000), nullable=True), + sa.Column('viz_type', sa.String(length=250), nullable=True), + sa.Column('params', sa.Text(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['druid_datasource_id'], ['datasources.id'], ), + sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('sql_metrics', + sa.Column('created_on', sa.DateTime(), nullable=False), + 
sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('metric_name', sa.String(length=512), nullable=True), + sa.Column('verbose_name', sa.String(length=1024), nullable=True), + sa.Column('metric_type', sa.String(length=32), nullable=True), + sa.Column('table_id', sa.Integer(), nullable=True), + sa.Column('expression', sa.Text(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('table_columns', + sa.Column('created_on', sa.DateTime(), nullable=False), + sa.Column('changed_on', sa.DateTime(), nullable=False), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('table_id', sa.Integer(), nullable=True), + sa.Column('column_name', sa.String(length=256), nullable=True), + sa.Column('is_dttm', sa.Boolean(), nullable=True), + sa.Column('is_active', sa.Boolean(), nullable=True), + sa.Column('type', sa.String(length=32), nullable=True), + sa.Column('groupby', sa.Boolean(), nullable=True), + sa.Column('count_distinct', sa.Boolean(), nullable=True), + sa.Column('sum', sa.Boolean(), nullable=True), + sa.Column('max', sa.Boolean(), nullable=True), + sa.Column('min', sa.Boolean(), nullable=True), + sa.Column('filterable', sa.Boolean(), nullable=True), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('created_by_fk', sa.Integer(), nullable=True), + sa.Column('changed_by_fk', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['changed_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['created_by_fk'], ['ab_user.id'], ), + sa.ForeignKeyConstraint(['table_id'], ['tables.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('dashboard_slices', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('dashboard_id', sa.Integer(), nullable=True), + sa.Column('slice_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['dashboard_id'], ['dashboards.id'], ), + sa.ForeignKeyConstraint(['slice_id'], ['slices.id'], ), + sa.PrimaryKeyConstraint('id') + ) + ### end Alembic commands ### + + +def downgrade(): + ### commands auto generated by Alembic - please adjust! ### + op.drop_table('dashboard_slices') + op.drop_table('table_columns') + op.drop_table('sql_metrics') + op.drop_table('slices') + op.drop_table('metrics') + op.drop_table('columns') + op.drop_table('tables') + op.drop_table('datasources') + op.drop_table('dbs') + op.drop_table('dashboards') + op.drop_table('clusters') + ### end Alembic commands ### diff --git a/panoramix/views.py b/panoramix/views.py index 4300c1992ea43..c5148fbe25bcb 100644 --- a/panoramix/views.py +++ b/panoramix/views.py @@ -151,8 +151,6 @@ class SliceModelView(ModelView, DeleteMixin): edit_columns = [ 'slice_name', 'viz_type', 'druid_datasource', 'table', 'dashboards', 'params'] -for p in range(1000): - print SliceModelView.list_columns appbuilder.add_view( SliceModelView,
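As a quick reference for what the new get_slice_json() helper in bin/panoramix actually stores in Slice.params, here is a minimal standalone sketch. Assumptions: ROW_LIMIT is a hardcoded stand-in for config.ROW_LIMIT, and the sum__num metric referenced by every slice is presumably the aggregate that obj.fetch_metadata() derives from the "num" column flagged with sum=True earlier in the patch. The key set otherwise mirrors the defaults shown in the diff above.

import json

ROW_LIMIT = 10000  # assumption: placeholder for config.ROW_LIMIT

def get_slice_json(slice_name, filter_value, viz_type="table", group_by=None,
                   granularity="all", filter_operator="in",
                   row_limit=ROW_LIMIT, flt_col_1="gender"):
    # Mirrors the helper added in bin/panoramix: every example slice shares the
    # same datasource and metric; only the filter, grouping, granularity and
    # viz type vary between slices.
    group_by = group_by if group_by is not None else ["name"]
    params = {
        "compare_lag": "10",
        "compare_suffix": "o10Y",
        "datasource_id": "1",
        "datasource_name": "birth_names",
        "datasource_type": "table",
        "limit": "25",
        "flt_col_1": flt_col_1,
        "flt_eq_1": filter_value,
        "flt_op_1": filter_operator,
        "granularity": granularity,
        "groupby": group_by,
        "metric": "sum__num",
        "metrics": ["sum__num"],
        "row_limit": row_limit,
        "since": "100 years",
        "slice_name": slice_name,
        "until": "now",
        "viz_type": viz_type,
        "where": ""
    }
    return json.dumps(params, indent=4, sort_keys=True)

# e.g. the "Girls" slice: a table of the top 50 names filtered to gender == 'girl'
print(get_slice_json("Girls", "girl", row_limit=50))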
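The Births dashboard stores its layout in position_json as a flat list of grid cells, one per slice: slice_id identifies the slice, (col, row) is the top-left cell, and size_x / size_y give the width and height in grid units. A small sketch of reading that layout back; describe_layout is a hypothetical helper, not part of the diff, and the sample JSON is a trimmed copy of the layout above.

import json

position_json = """
[
    {"size_y": 5, "size_x": 2, "col": 5, "slice_id": "1", "row": 1},
    {"size_y": 6, "size_x": 8, "col": 1, "slice_id": "6", "row": 6}
]
"""

def describe_layout(position_json):
    # Each entry places one slice on the dashboard grid.
    for cell in json.loads(position_json):
        print("slice {slice_id}: col {col}, row {row}, {size_x}x{size_y}".format(**cell))

describe_layout(position_json)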
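The new 4e6a06bad7a8 revision is a standard auto-generated Alembic migration with no parent (down_revision = None), so it creates the full initial schema. The sketch below shows how such a revision could be applied or rolled back programmatically, assuming a conventional Alembic setup with an alembic.ini pointing at panoramix/migrations; the project may instead wire this through its own manager CLI, so treat this as illustrative only.

from alembic import command
from alembic.config import Config

# assumption: an alembic.ini configured for the panoramix/migrations directory
cfg = Config("alembic.ini")

# Run upgrade(): creates clusters, dashboards, dbs, datasources, tables, columns,
# metrics, slices, sql_metrics, table_columns and dashboard_slices.
command.upgrade(cfg, "4e6a06bad7a8")

# Run downgrade(): drops the same tables again in reverse dependency order.
# command.downgrade(cfg, "base")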