Merge pull request #70 from lyft/merge_apache_20190918b
Merge apache 20190918b
Beto Dealmeida authored Sep 19, 2019
2 parents 9678836 + 5271d05 commit 60f11fd
Showing 5 changed files with 21 additions and 68 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -20,7 +20,7 @@ Superset
 =========
 
 [![Build Status](https://travis-ci.org/apache/incubator-superset.svg?branch=master)](https://travis-ci.org/apache/incubator-superset)
-[![PyPI version](https://badge.fury.io/py/superset.svg)](https://badge.fury.io/py/superset)
+[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset)
 [![Coverage Status](https://codecov.io/github/apache/incubator-superset/coverage.svg?branch=master)](https://codecov.io/github/apache/incubator-superset)
 [![PyPI](https://img.shields.io/pypi/pyversions/superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/superset)
 [![Join the chat at https://gitter.im/airbnb/superset](https://badges.gitter.im/apache/incubator-superset.svg)](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
4 changes: 2 additions & 2 deletions docs/installation.rst
@@ -176,7 +176,7 @@ Superset installation and initialization
 Follow these few simple steps to install Superset.::
 
     # Install superset
-    pip install superset
+    pip install apache-superset
 
     # Initialize the database
     superset db upgrade
@@ -748,7 +748,7 @@ Upgrading
 
 Upgrading should be as straightforward as running::
 
-    pip install superset --upgrade
+    pip install apache-superset --upgrade
     superset db upgrade
     superset init
 
5 changes: 0 additions & 5 deletions superset/db_engine_specs/presto.py
@@ -926,11 +926,6 @@ def expand_data(
         if not is_feature_enabled("PRESTO_EXPAND_DATA"):
             return columns, data, []
 
-        # insert a custom column that tracks the original row
-        columns.insert(0, {"name": "__row_id", "type": "BIGINT"})
-        for i, row in enumerate(data):
-            row["__row_id"] = i
-
         # process each column, unnesting ARRAY types and expanding ROW types into new columns
         to_process = deque((column, 0) for column in columns)
         all_columns: List[dict] = []
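With this change, expand_data no longer prepends a synthetic __row_id column to tag each original row; the method still walks every column, unnesting ARRAY types and expanding ROW types into dotted child columns, which is also why the __row_id entries disappear from the test expectations further down. Below is a minimal, self-contained sketch of that expansion idea, not the Superset implementation; the helper name and the simplified inputs are hypothetical and only mirror the shapes used in the tests.

# A minimal sketch of the general idea behind expand_data, not the Superset
# implementation: a ROW-typed column whose values arrive as lists is expanded
# into dotted child columns. The helper name and inputs are hypothetical.
from typing import Any, Dict, List


def expand_row_column(
    parent: str, fields: List[str], data: List[Dict[str, Any]]
) -> List[Dict[str, Any]]:
    """Copy each nested value of `parent` into a dotted `parent.field` key."""
    for row in data:
        values = row.get(parent) or []
        for field, value in zip(fields, values):
            row[f"{parent}.{field}"] = value
    return data


rows = [{"row_column": ["a"]}, {"row_column": ["b"]}]
print(expand_row_column("row_column", ["nested_obj"], rows))
# [{'row_column': ['a'], 'row_column.nested_obj': 'a'},
#  {'row_column': ['b'], 'row_column.nested_obj': 'b'}]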
23 changes: 12 additions & 11 deletions superset/views/core.py
@@ -2626,19 +2626,10 @@ def sql_json_call(self, request):
         limit = limit or app.config.get("SQL_MAX_ROW")
 
         session = db.session()
-        mydb = session.query(models.Database).filter_by(id=database_id).first()
+        mydb = session.query(models.Database).filter_by(id=database_id).one_or_none()
 
         if not mydb:
-            json_error_response("Database with id {} is missing.".format(database_id))
-
-        rejected_tables = security_manager.rejected_tables(sql, mydb, schema)
-        if rejected_tables:
-            return json_error_response(
-                security_manager.get_table_access_error_msg(rejected_tables),
-                link=security_manager.get_table_access_link(rejected_tables),
-                status=403,
-            )
-        session.commit()
+            return json_error_response(f"Database with id {database_id} is missing.")
 
         select_as_cta = request.form.get("select_as_cta") == "true"
         tmp_table_name = request.form.get("tmp_table_name")
@@ -2667,6 +2658,16 @@ def sql_json_call(self, request):
             raise Exception(_("Query record was not created as expected."))
         logging.info("Triggering query_id: {}".format(query_id))
 
+        rejected_tables = security_manager.rejected_tables(sql, mydb, schema)
+        if rejected_tables:
+            query.status = QueryStatus.FAILED
+            session.commit()
+            return json_error_response(
+                security_manager.get_table_access_error_msg(rejected_tables),
+                link=security_manager.get_table_access_link(rejected_tables),
+                status=403,
+            )
+
         try:
             template_processor = get_template_processor(
                 database=query.database, query=query
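Taken together, these two hunks make the database lookup stricter, using SQLAlchemy's one_or_none() instead of first() and actually returning the error response when no database matches, and they defer the table-access check until after the Query record exists, so a rejected query can be marked FAILED and committed before the 403 response goes out. The standalone sketch below illustrates the first() versus one_or_none() difference; it is not Superset code, the Database model and in-memory SQLite engine are hypothetical, and it assumes SQLAlchemy 1.4 or newer.

# A standalone sketch (not Superset code) of .first() vs .one_or_none().
# The Database model and in-memory SQLite engine are hypothetical examples.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Database(Base):
    __tablename__ = "dbs"
    id = Column(Integer, primary_key=True)
    database_name = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Database(id=1, database_name="examples"))
    session.commit()

    # .first() quietly returns the first of however many rows match.
    mydb = session.query(Database).filter_by(id=1).first()

    # .one_or_none() returns the row or None, and raises MultipleResultsFound
    # if more than one row matches, so data problems surface instead of being
    # hidden by an arbitrary "first" pick.
    mydb = session.query(Database).filter_by(id=1).one_or_none()
    if not mydb:
        print("Database with id 1 is missing.")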
55 changes: 6 additions & 49 deletions tests/db_engine_specs_test.py
@@ -655,49 +655,18 @@ def test_presto_expand_data_with_simple_structural_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)"},
             {"name": "row_column.nested_obj", "type": "VARCHAR"},
             {"name": "array_column", "type": "ARRAY(BIGINT)"},
         ]
 
         expected_data = [
-            {
-                "__row_id": 0,
-                "array_column": 1,
-                "row_column": ["a"],
-                "row_column.nested_obj": "a",
-            },
-            {
-                "__row_id": "",
-                "array_column": 2,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": "",
-                "array_column": 3,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": 1,
-                "array_column": 4,
-                "row_column": ["b"],
-                "row_column.nested_obj": "b",
-            },
-            {
-                "__row_id": "",
-                "array_column": 5,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": "",
-                "array_column": 6,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
+            {"array_column": 1, "row_column": ["a"], "row_column.nested_obj": "a"},
+            {"array_column": 2, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 3, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 4, "row_column": ["b"], "row_column.nested_obj": "b"},
+            {"array_column": 5, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 6, "row_column": "", "row_column.nested_obj": ""},
         ]
 
         expected_expanded_cols = [{"name": "row_column.nested_obj", "type": "VARCHAR"}]
@@ -720,7 +689,6 @@ def test_presto_expand_data_with_complex_row_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {
                 "name": "row_column",
                 "type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))",
@@ -731,14 +699,12 @@ ]
         ]
         expected_data = [
             {
-                "__row_id": 0,
                 "row_column": ["a1", ["a2"]],
                 "row_column.nested_obj1": "a1",
                 "row_column.nested_row": ["a2"],
                 "row_column.nested_row.nested_obj2": "a2",
             },
             {
-                "__row_id": 1,
                 "row_column": ["b1", ["b2"]],
                 "row_column.nested_obj1": "b1",
                 "row_column.nested_row": ["b2"],
@@ -774,7 +740,6 @@ def test_presto_expand_data_with_complex_array_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {"name": "int_column", "type": "BIGINT"},
             {
                 "name": "array_column",
@@ -788,56 +753,48 @@ def test_presto_expand_data_with_complex_array_columns(self):
         ]
         expected_data = [
             {
-                "__row_id": 0,
                 "array_column": [[["a"], ["b"]]],
                 "array_column.nested_array": ["a"],
                 "array_column.nested_array.nested_obj": "a",
                 "int_column": 1,
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["b"],
                 "array_column.nested_array.nested_obj": "b",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": [[["c"], ["d"]]],
                 "array_column.nested_array": ["c"],
                 "array_column.nested_array.nested_obj": "c",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["d"],
                 "array_column.nested_array.nested_obj": "d",
                 "int_column": "",
             },
             {
-                "__row_id": 1,
                 "array_column": [[["e"], ["f"]]],
                 "array_column.nested_array": ["e"],
                 "array_column.nested_array.nested_obj": "e",
                 "int_column": 2,
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["f"],
                 "array_column.nested_array.nested_obj": "f",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": [[["g"], ["h"]]],
                 "array_column.nested_array": ["g"],
                 "array_column.nested_array.nested_obj": "g",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["h"],
                 "array_column.nested_array.nested_obj": "h",
