diff --git a/README.md b/README.md
index 194e7f0f19f8c..de557a18544ee 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ Superset
 =========
 
 [![Build Status](https://travis-ci.org/apache/incubator-superset.svg?branch=master)](https://travis-ci.org/apache/incubator-superset)
-[![PyPI version](https://badge.fury.io/py/superset.svg)](https://badge.fury.io/py/superset)
+[![PyPI version](https://badge.fury.io/py/apache-superset.svg)](https://badge.fury.io/py/apache-superset)
 [![Coverage Status](https://codecov.io/github/apache/incubator-superset/coverage.svg?branch=master)](https://codecov.io/github/apache/incubator-superset)
 [![PyPI](https://img.shields.io/pypi/pyversions/superset.svg?maxAge=2592000)](https://pypi.python.org/pypi/superset)
 [![Join the chat at https://gitter.im/airbnb/superset](https://badges.gitter.im/apache/incubator-superset.svg)](https://gitter.im/airbnb/superset?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
diff --git a/docs/installation.rst b/docs/installation.rst
index c8b007f211b99..2f5d0e1d29de3 100644
--- a/docs/installation.rst
+++ b/docs/installation.rst
@@ -176,7 +176,7 @@ Superset installation and initialization
 Follow these few simple steps to install Superset.::
 
     # Install superset
-    pip install superset
+    pip install apache-superset
 
     # Initialize the database
     superset db upgrade
@@ -748,7 +748,7 @@ Upgrading
 
 Upgrading should be as straightforward as running::
 
-    pip install superset --upgrade
+    pip install apache-superset --upgrade
     superset db upgrade
     superset init
 
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index ddc6442ac1937..65d5988687ce9 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -926,11 +926,6 @@ def expand_data(
         if not is_feature_enabled("PRESTO_EXPAND_DATA"):
             return columns, data, []
 
-        # insert a custom column that tracks the original row
-        columns.insert(0, {"name": "__row_id", "type": "BIGINT"})
-        for i, row in enumerate(data):
-            row["__row_id"] = i
-
         # process each column, unnesting ARRAY types and expanding ROW types into new columns
         to_process = deque((column, 0) for column in columns)
         all_columns: List[dict] = []
diff --git a/superset/views/core.py b/superset/views/core.py
index 35cdfebaa0cb2..a9f687761655b 100755
--- a/superset/views/core.py
+++ b/superset/views/core.py
@@ -2626,19 +2626,10 @@ def sql_json_call(self, request):
         limit = limit or app.config.get("SQL_MAX_ROW")
 
         session = db.session()
-        mydb = session.query(models.Database).filter_by(id=database_id).first()
+        mydb = session.query(models.Database).filter_by(id=database_id).one_or_none()
         if not mydb:
-            json_error_response("Database with id {} is missing.".format(database_id))
-
-        rejected_tables = security_manager.rejected_tables(sql, mydb, schema)
-        if rejected_tables:
-            return json_error_response(
-                security_manager.get_table_access_error_msg(rejected_tables),
-                link=security_manager.get_table_access_link(rejected_tables),
-                status=403,
-            )
-        session.commit()
+            return json_error_response(f"Database with id {database_id} is missing.")
 
         select_as_cta = request.form.get("select_as_cta") == "true"
         tmp_table_name = request.form.get("tmp_table_name")
 
@@ -2667,6 +2658,16 @@ def sql_json_call(self, request):
             raise Exception(_("Query record was not created as expected."))
         logging.info("Triggering query_id: {}".format(query_id))
+        rejected_tables = security_manager.rejected_tables(sql, mydb, schema)
+        if rejected_tables:
+            query.status = QueryStatus.FAILED
+            session.commit()
+            return json_error_response(
+                security_manager.get_table_access_error_msg(rejected_tables),
+                link=security_manager.get_table_access_link(rejected_tables),
+                status=403,
+            )
+
         try:
             template_processor = get_template_processor(
                 database=query.database, query=query
             )
diff --git a/tests/db_engine_specs_test.py b/tests/db_engine_specs_test.py
index ec6ff24f228aa..5975f954996cf 100644
--- a/tests/db_engine_specs_test.py
+++ b/tests/db_engine_specs_test.py
@@ -655,49 +655,18 @@ def test_presto_expand_data_with_simple_structural_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {"name": "row_column", "type": "ROW(NESTED_OBJ VARCHAR)"},
             {"name": "row_column.nested_obj", "type": "VARCHAR"},
             {"name": "array_column", "type": "ARRAY(BIGINT)"},
         ]
         expected_data = [
-            {
-                "__row_id": 0,
-                "array_column": 1,
-                "row_column": ["a"],
-                "row_column.nested_obj": "a",
-            },
-            {
-                "__row_id": "",
-                "array_column": 2,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": "",
-                "array_column": 3,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": 1,
-                "array_column": 4,
-                "row_column": ["b"],
-                "row_column.nested_obj": "b",
-            },
-            {
-                "__row_id": "",
-                "array_column": 5,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
-            {
-                "__row_id": "",
-                "array_column": 6,
-                "row_column": "",
-                "row_column.nested_obj": "",
-            },
+            {"array_column": 1, "row_column": ["a"], "row_column.nested_obj": "a"},
+            {"array_column": 2, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 3, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 4, "row_column": ["b"], "row_column.nested_obj": "b"},
+            {"array_column": 5, "row_column": "", "row_column.nested_obj": ""},
+            {"array_column": 6, "row_column": "", "row_column.nested_obj": ""},
         ]
         expected_expanded_cols = [{"name": "row_column.nested_obj", "type": "VARCHAR"}]
@@ -720,7 +689,6 @@ def test_presto_expand_data_with_complex_row_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {
                 "name": "row_column",
                 "type": "ROW(NESTED_OBJ1 VARCHAR, NESTED_ROW ROW(NESTED_OBJ2 VARCHAR))",
@@ -731,14 +699,12 @@
         ]
         expected_data = [
             {
-                "__row_id": 0,
                 "row_column": ["a1", ["a2"]],
                 "row_column.nested_obj1": "a1",
                 "row_column.nested_row": ["a2"],
                 "row_column.nested_row.nested_obj2": "a2",
             },
             {
-                "__row_id": 1,
                 "row_column": ["b1", ["b2"]],
                 "row_column.nested_obj1": "b1",
                 "row_column.nested_row": ["b2"],
@@ -774,7 +740,6 @@ def test_presto_expand_data_with_complex_array_columns(self):
             cols, data
         )
         expected_cols = [
-            {"name": "__row_id", "type": "BIGINT"},
             {"name": "int_column", "type": "BIGINT"},
             {
                 "name": "array_column",
@@ -788,56 +753,48 @@
         ]
         expected_data = [
             {
-                "__row_id": 0,
                 "array_column": [[["a"], ["b"]]],
                 "array_column.nested_array": ["a"],
                 "array_column.nested_array.nested_obj": "a",
                 "int_column": 1,
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["b"],
                 "array_column.nested_array.nested_obj": "b",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": [[["c"], ["d"]]],
                 "array_column.nested_array": ["c"],
                 "array_column.nested_array.nested_obj": "c",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["d"],
                 "array_column.nested_array.nested_obj": "d",
                 "int_column": "",
             },
             {
-                "__row_id": 1,
                 "array_column": [[["e"], ["f"]]],
                 "array_column.nested_array": ["e"],
                 "array_column.nested_array.nested_obj": "e",
                 "int_column": 2,
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["f"],
                 "array_column.nested_array.nested_obj": "f",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": [[["g"], ["h"]]],
                 "array_column.nested_array": ["g"],
                 "array_column.nested_array.nested_obj": "g",
                 "int_column": "",
             },
             {
-                "__row_id": "",
                 "array_column": "",
                 "array_column.nested_array": ["h"],
                 "array_column.nested_array.nested_obj": "h",