diff --git a/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java b/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java
index a33509b08deb..45910506b23f 100644
--- a/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java
+++ b/core/trino-main/src/main/java/io/trino/sql/rewrite/ShowQueriesRewrite.java
@@ -64,6 +64,7 @@
 import io.trino.sql.tree.Explain;
 import io.trino.sql.tree.ExplainAnalyze;
 import io.trino.sql.tree.Expression;
+import io.trino.sql.tree.FunctionCall;
 import io.trino.sql.tree.Identifier;
 import io.trino.sql.tree.LikePredicate;
 import io.trino.sql.tree.LongLiteral;
@@ -75,6 +76,7 @@
 import io.trino.sql.tree.QualifiedName;
 import io.trino.sql.tree.Query;
 import io.trino.sql.tree.Relation;
+import io.trino.sql.tree.Row;
 import io.trino.sql.tree.ShowCatalogs;
 import io.trino.sql.tree.ShowColumns;
 import io.trino.sql.tree.ShowCreate;
@@ -563,6 +565,12 @@ private static Expression toExpression(Object value)
                     .collect(toList()));
         }
 
+        if (value instanceof Map<?, ?> map) {
+            return new FunctionCall(QualifiedName.of("map_from_entries"), ImmutableList.of(new Array(map.entrySet().stream()
+                    .map(entry -> new Row(ImmutableList.of(toExpression(entry.getKey()), toExpression(entry.getValue()))))
+                    .collect(toImmutableList()))));
+        }
+
         throw new TrinoException(INVALID_TABLE_PROPERTY, format("Failed to convert object of type %s to expression: %s", value.getClass().getName(), value));
     }
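Note: the new `Map` branch above is what lets map-valued table properties round-trip through SHOW CREATE TABLE. A minimal standalone sketch of the AST it produces, assuming Trino's `io.trino.sql.tree` classes on the classpath (the property key and value here are illustrative, not from the patch):

```java
import com.google.common.collect.ImmutableList;
import io.trino.sql.tree.Array;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.FunctionCall;
import io.trino.sql.tree.QualifiedName;
import io.trino.sql.tree.Row;
import io.trino.sql.tree.StringLiteral;

public class MapPropertyRendering
{
    public static void main(String[] args)
    {
        // For {"extra.property": "true"} the rewrite emits the AST for:
        //   map_from_entries(ARRAY[ROW('extra.property', 'true')])
        Expression entry = new Row(ImmutableList.of(
                new StringLiteral("extra.property"),
                new StringLiteral("true")));
        Expression rendered = new FunctionCall(
                QualifiedName.of("map_from_entries"),
                ImmutableList.of(new Array(ImmutableList.of(entry))));
        System.out.println(rendered); // Node#toString is debug output, not SQL
    }
}
```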
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
index 395736172fd7..89f47be31a16 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveMetadata.java
@@ -222,6 +222,7 @@
 import static io.trino.plugin.hive.HiveTableProperties.CSV_QUOTE;
 import static io.trino.plugin.hive.HiveTableProperties.CSV_SEPARATOR;
 import static io.trino.plugin.hive.HiveTableProperties.EXTERNAL_LOCATION_PROPERTY;
+import static io.trino.plugin.hive.HiveTableProperties.EXTRA_PROPERTIES;
 import static io.trino.plugin.hive.HiveTableProperties.NULL_FORMAT_PROPERTY;
 import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_COLUMNS;
 import static io.trino.plugin.hive.HiveTableProperties.ORC_BLOOM_FILTER_FPP;
@@ -238,6 +239,7 @@
 import static io.trino.plugin.hive.HiveTableProperties.getAvroSchemaUrl;
 import static io.trino.plugin.hive.HiveTableProperties.getBucketProperty;
 import static io.trino.plugin.hive.HiveTableProperties.getExternalLocation;
+import static io.trino.plugin.hive.HiveTableProperties.getExtraProperties;
 import static io.trino.plugin.hive.HiveTableProperties.getFooterSkipCount;
 import static io.trino.plugin.hive.HiveTableProperties.getHeaderSkipCount;
 import static io.trino.plugin.hive.HiveTableProperties.getHiveStorageFormat;
@@ -375,6 +377,22 @@ public class HiveMetadata
 
     public static final String MODIFYING_NON_TRANSACTIONAL_TABLE_MESSAGE = "Modifying Hive table rows is only supported for transactional tables";
 
+    private static final Set<String> TABLE_PROPERTIES_TO_SKIP = ImmutableSet.of(
+            PRESTO_VERSION_NAME,
+            PRESTO_QUERY_ID_NAME,
+            BUCKETING_VERSION,
+            "EXTERNAL",
+            "numFiles",
+            "totalSize",
+            "last_modified_time",
+            "transient_lastDdlTime",
+            "last_modified_by",
+            "STATS_GENERATED_VIA_STATS_TASK",
+            "COLUMN_STATS_ACCURATE",
+            "numRows",
+            "rawDataSize",
+            "numFilesErasureCoded");
+
     private final CatalogName catalogName;
     private final SemiTransactionalHiveMetastore metastore;
     private final boolean autoCommit;
@@ -655,84 +673,91 @@ private ConnectorTableMetadata doGetTableMetadata(ConnectorSession session, SchemaTableName tableName)
             properties.put(SORTED_BY_PROPERTY, property.getSortedBy());
         });
 
+        TableParameterProvider tableParameterProvider = new TableParameterProvider(table);
+
         // Transactional properties
-        String transactionalProperty = table.getParameters().get(HiveMetadata.TRANSACTIONAL);
+        String transactionalProperty = tableParameterProvider.getParameter(HiveMetadata.TRANSACTIONAL);
         if (parseBoolean(transactionalProperty)) {
             properties.put(HiveTableProperties.TRANSACTIONAL, true);
         }
 
         // ORC format specific properties
-        String orcBloomFilterColumns = table.getParameters().get(ORC_BLOOM_FILTER_COLUMNS_KEY);
+        String orcBloomFilterColumns = tableParameterProvider.getParameter(ORC_BLOOM_FILTER_COLUMNS_KEY);
         if (orcBloomFilterColumns != null) {
             properties.put(ORC_BLOOM_FILTER_COLUMNS, Splitter.on(',').trimResults().omitEmptyStrings().splitToList(orcBloomFilterColumns));
         }
-        String orcBloomFilterFfp = table.getParameters().get(ORC_BLOOM_FILTER_FPP_KEY);
+        String orcBloomFilterFfp = tableParameterProvider.getParameter(ORC_BLOOM_FILTER_FPP_KEY);
         if (orcBloomFilterFfp != null) {
             properties.put(ORC_BLOOM_FILTER_FPP, Double.parseDouble(orcBloomFilterFfp));
         }
 
         // Avro specific property
-        String avroSchemaUrl = table.getParameters().get(AVRO_SCHEMA_URL_KEY);
+        String avroSchemaUrl = tableParameterProvider.getParameter(AVRO_SCHEMA_URL_KEY);
         if (avroSchemaUrl != null) {
             properties.put(AVRO_SCHEMA_URL, avroSchemaUrl);
         }
-        String avroSchemaLiteral = table.getParameters().get(AVRO_SCHEMA_LITERAL_KEY);
+        String avroSchemaLiteral = tableParameterProvider.getParameter(AVRO_SCHEMA_LITERAL_KEY);
        if (avroSchemaLiteral != null) {
             properties.put(AVRO_SCHEMA_LITERAL, avroSchemaLiteral);
         }
 
         // Textfile and CSV specific properties
-        getSerdeProperty(table, SKIP_HEADER_COUNT_KEY)
+        getSerdeProperty(tableParameterProvider, SKIP_HEADER_COUNT_KEY)
                 .ifPresent(skipHeaderCount -> properties.put(SKIP_HEADER_LINE_COUNT, Integer.valueOf(skipHeaderCount)));
-        getSerdeProperty(table, SKIP_FOOTER_COUNT_KEY)
+        getSerdeProperty(tableParameterProvider, SKIP_FOOTER_COUNT_KEY)
                 .ifPresent(skipFooterCount -> properties.put(SKIP_FOOTER_LINE_COUNT, Integer.valueOf(skipFooterCount)));
 
         // Multi-format property
-        getSerdeProperty(table, NULL_FORMAT_KEY)
+        getSerdeProperty(tableParameterProvider, NULL_FORMAT_KEY)
                 .ifPresent(nullFormat -> properties.put(NULL_FORMAT_PROPERTY, nullFormat));
 
         // Textfile specific properties
-        getSerdeProperty(table, TEXT_FIELD_SEPARATOR_KEY)
+        getSerdeProperty(tableParameterProvider, TEXT_FIELD_SEPARATOR_KEY)
                 .ifPresent(fieldSeparator -> properties.put(TEXTFILE_FIELD_SEPARATOR, fieldSeparator));
-        getSerdeProperty(table, TEXT_FIELD_SEPARATOR_ESCAPE_KEY)
+        getSerdeProperty(tableParameterProvider, TEXT_FIELD_SEPARATOR_ESCAPE_KEY)
                 .ifPresent(fieldEscape -> properties.put(TEXTFILE_FIELD_SEPARATOR_ESCAPE, fieldEscape));
 
         // CSV specific properties
-        getCsvSerdeProperty(table, CSV_SEPARATOR_KEY)
+        getCsvSerdeProperty(tableParameterProvider, CSV_SEPARATOR_KEY)
                 .ifPresent(csvSeparator -> properties.put(CSV_SEPARATOR, csvSeparator));
-        getCsvSerdeProperty(table, CSV_QUOTE_KEY)
+        getCsvSerdeProperty(tableParameterProvider, CSV_QUOTE_KEY)
                 .ifPresent(csvQuote -> properties.put(CSV_QUOTE, csvQuote));
-        getCsvSerdeProperty(table, CSV_ESCAPE_KEY)
+        getCsvSerdeProperty(tableParameterProvider, CSV_ESCAPE_KEY)
                 .ifPresent(csvEscape -> properties.put(CSV_ESCAPE, csvEscape));
 
         // REGEX specific properties
-        getSerdeProperty(table, REGEX_KEY)
+        getSerdeProperty(tableParameterProvider, REGEX_KEY)
                 .ifPresent(regex -> properties.put(REGEX_PATTERN, regex));
-        getSerdeProperty(table, REGEX_CASE_SENSITIVE_KEY)
+        getSerdeProperty(tableParameterProvider, REGEX_CASE_SENSITIVE_KEY)
                 .ifPresent(regexCaseInsensitive -> properties.put(REGEX_CASE_INSENSITIVE, parseBoolean(regexCaseInsensitive)));
 
-        Optional<String> comment = Optional.ofNullable(table.getParameters().get(TABLE_COMMENT));
+        Optional<String> comment = Optional.ofNullable(tableParameterProvider.getParameter(TABLE_COMMENT));
 
-        String autoPurgeProperty = table.getParameters().get(AUTO_PURGE_KEY);
+        String autoPurgeProperty = tableParameterProvider.getParameter(AUTO_PURGE_KEY);
         if (parseBoolean(autoPurgeProperty)) {
             properties.put(AUTO_PURGE, true);
         }
 
         // Partition Projection specific properties
-        properties.putAll(partitionProjectionService.getPartitionProjectionTrinoTableProperties(table));
+        properties.putAll(partitionProjectionService.getPartitionProjectionTrinoTableProperties(tableParameterProvider));
+
+        Map<String, String> remainingProperties = tableParameterProvider.getUnconsumedProperties();
+        if (!remainingProperties.isEmpty()) {
+            properties.put(EXTRA_PROPERTIES, remainingProperties);
+        }
 
         return new ConnectorTableMetadata(tableName, columns.build(), properties.buildOrThrow(), comment);
     }
 
-    private static Optional<String> getCsvSerdeProperty(Table table, String key)
+    private static Optional<String> getCsvSerdeProperty(TableParameterProvider tableParameterProvider, String key)
     {
-        return getSerdeProperty(table, key).map(csvSerdeProperty -> csvSerdeProperty.substring(0, 1));
+        return getSerdeProperty(tableParameterProvider, key).map(csvSerdeProperty -> csvSerdeProperty.substring(0, 1));
     }
 
-    private static Optional<String> getSerdeProperty(Table table, String key)
+    private static Optional<String> getSerdeProperty(TableParameterProvider tableParameterProvider, String key)
     {
-        String serdePropertyValue = table.getStorage().getSerdeParameters().get(key);
-        String tablePropertyValue = table.getParameters().get(key);
+        String serdePropertyValue = tableParameterProvider.getTable().getStorage().getSerdeParameters().get(key);
+        String tablePropertyValue = tableParameterProvider.getParameter(key);
         if (serdePropertyValue != null && tablePropertyValue != null && !tablePropertyValue.equals(serdePropertyValue)) {
             // in Hive one can set conflicting values for the same property, in such case it looks like table properties are used
             throw new TrinoException(
@@ -1157,6 +1182,27 @@ else if (avroSchemaLiteral != null) {
         tableProperties.put("numFiles", "-1");
         tableProperties.put("totalSize", "-1");
 
+        // Extra properties
+        getExtraProperties(tableMetadata.getProperties())
+                .ifPresent(extraProperties -> {
+                    Set<String> invalidProperties = ImmutableSet.<String>builder()
+                            .addAll(TABLE_PROPERTIES_TO_SKIP)
+                            .addAll(tableProperties.buildOrThrow().keySet())
+                            .build();
+
+                    if (invalidProperties.stream().anyMatch(extraProperties::containsKey)) {
+                        throw new TrinoException(
+                                INVALID_TABLE_PROPERTY,
+                                "Invalid keys in extra_properties: %s".formatted(
+                                        extraProperties.keySet().stream()
+                                                .filter(invalidProperties::contains)
+                                                .collect(joining(", "))));
+                    }
+                    for (Map.Entry<String, String> extraProperty : extraProperties.entrySet()) {
+                        tableProperties.put(extraProperty.getKey(), extraProperty.getValue());
+                    }
+                });
+
         // Table comment property
         tableMetadata.getComment().ifPresent(value -> tableProperties.put(TABLE_COMMENT, value));
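The hunk above rejects `extra_properties` keys that collide with properties the connector manages itself. A self-contained sketch of the same check, using a small stand-in key set rather than the connector's full TABLE_PROPERTIES_TO_SKIP list (and IllegalArgumentException in place of TrinoException):

```java
import java.util.Map;
import java.util.Set;

import static java.util.stream.Collectors.joining;

public class ExtraPropertiesCheck
{
    // Stand-in for TABLE_PROPERTIES_TO_SKIP plus the already-populated
    // tableProperties keys; the real connector builds this set per table.
    private static final Set<String> INVALID = Set.of("transactional", "EXTERNAL", "numFiles");

    static void validate(Map<String, String> extraProperties)
    {
        if (INVALID.stream().anyMatch(extraProperties::containsKey)) {
            throw new IllegalArgumentException("Invalid keys in extra_properties: " +
                    extraProperties.keySet().stream()
                            .filter(INVALID::contains)
                            .collect(joining(", ")));
        }
    }

    public static void main(String[] args)
    {
        validate(Map.of("extra.property", "true")); // accepted
        try {
            validate(Map.of("transactional", "true")); // rejected
        }
        catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Invalid keys in extra_properties: transactional
        }
    }
}
```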
"$partitions" and similar suffixes from table name while querying the metastore for the Table object TableNameSplitResult tableNameSplit = splitTableName(tableName.getTableName()); - Optional table = metastore.getTable(tableName.getSchemaName(), tableNameSplit.getBaseTableName()); + Optional
table = metastore.getTable(tableName.getSchemaName(), tableNameSplit.baseTableName()); if (table.isEmpty() || VIRTUAL_VIEW.name().equals(table.get().getTableType())) { return Optional.empty(); } @@ -3760,7 +3806,7 @@ public Optional redirectTable(ConnectorSession session, name.getCatalogName(), new SchemaTableName( name.getSchemaTableName().getSchemaName(), - name.getSchemaTableName().getTableName() + tableNameSplit.getSuffix().orElse("")))); + name.getSchemaTableName().getTableName() + tableNameSplit.suffix().orElse("")))); } private Optional redirectTableToIceberg(ConnectorSession session, Table table) @@ -3807,25 +3853,12 @@ private static TableNameSplitResult splitTableName(String tableName) new TableNameSplitResult(tableName.substring(0, metadataMarkerIndex), Optional.of(tableName.substring(metadataMarkerIndex))); } - private static class TableNameSplitResult + private record TableNameSplitResult(String baseTableName, Optional suffix) { - private final String baseTableName; - private final Optional suffix; - - public TableNameSplitResult(String baseTableName, Optional suffix) - { - this.baseTableName = requireNonNull(baseTableName, "baseTableName is null"); - this.suffix = requireNonNull(suffix, "suffix is null"); - } - - public String getBaseTableName() - { - return baseTableName; - } - - public Optional getSuffix() + private TableNameSplitResult { - return suffix; + requireNonNull(baseTableName, "baseTableName is null"); + requireNonNull(suffix, "suffix is null"); } } @@ -3859,4 +3892,33 @@ public boolean supportsReportingWrittenBytes(ConnectorSession session, SchemaTab { return true; } + + public static class TableParameterProvider + { + private final Table table; + private final Set consumedParameters = new HashSet<>(TABLE_PROPERTIES_TO_SKIP); + + public TableParameterProvider(Table table) + { + this.table = requireNonNull(table, "table is null"); + } + + public String getParameter(String key) + { + consumedParameters.add(key); + return table.getParameters().get(key); + } + + public Table getTable() + { + return table; + } + + public Map getUnconsumedProperties() + { + return table.getParameters().entrySet().stream() + .filter(entry -> !consumedParameters.contains(entry.getKey())) + .collect(toImmutableMap(Map.Entry::getKey, Map.Entry::getValue)); + } + } } diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java index 328f494cbbfc..c5bcd86d0bf9 100644 --- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java +++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java @@ -21,6 +21,8 @@ import io.trino.spi.TrinoException; import io.trino.spi.session.PropertyMetadata; import io.trino.spi.type.ArrayType; +import io.trino.spi.type.MapType; +import io.trino.spi.type.TypeManager; import javax.inject.Inject; @@ -69,13 +71,15 @@ public class HiveTableProperties public static final String REGEX_CASE_INSENSITIVE = "regex_case_insensitive"; public static final String TRANSACTIONAL = "transactional"; public static final String AUTO_PURGE = "auto_purge"; + public static final String EXTRA_PROPERTIES = "extra_properties"; private final List> tableProperties; @Inject public HiveTableProperties( HiveConfig config, - OrcWriterConfig orcWriterConfig) + OrcWriterConfig orcWriterConfig, + TypeManager typeManager) { tableProperties = ImmutableList.of( stringProperty( @@ -173,7 +177,25 @@ public HiveTableProperties( 
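TableParameterProvider (end of the HiveMetadata.java diff above) is the mechanism that decides what counts as "extra": every parameter the connector explicitly reads is marked consumed, and whatever remains is what doGetTableMetadata reports as `extra_properties`. A standalone model of the pattern, with illustrative names rather than the connector's own:

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public class ParameterTracker
{
    private final Map<String, String> parameters;
    private final Set<String> consumed = new HashSet<>();

    public ParameterTracker(Map<String, String> parameters, Set<String> alwaysSkipped)
    {
        this.parameters = parameters;
        consumed.addAll(alwaysSkipped); // e.g. numFiles, totalSize, ...
    }

    public String get(String key)
    {
        consumed.add(key); // reading a key consumes it
        return parameters.get(key);
    }

    public Map<String, String> unconsumed()
    {
        Map<String, String> rest = new HashMap<>(parameters);
        rest.keySet().removeAll(consumed);
        return rest;
    }

    public static void main(String[] args)
    {
        ParameterTracker tracker = new ParameterTracker(
                Map.of("transactional", "true", "extra.property", "x", "numFiles", "3"),
                Set.of("numFiles"));
        tracker.get("transactional");
        System.out.println(tracker.unconsumed()); // {extra.property=x}
    }
}
```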
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java
index 328f494cbbfc..c5bcd86d0bf9 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/HiveTableProperties.java
@@ -21,6 +21,8 @@
 import io.trino.spi.TrinoException;
 import io.trino.spi.session.PropertyMetadata;
 import io.trino.spi.type.ArrayType;
+import io.trino.spi.type.MapType;
+import io.trino.spi.type.TypeManager;
 
 import javax.inject.Inject;
 
@@ -69,13 +71,15 @@ public class HiveTableProperties
     public static final String REGEX_CASE_INSENSITIVE = "regex_case_insensitive";
     public static final String TRANSACTIONAL = "transactional";
     public static final String AUTO_PURGE = "auto_purge";
+    public static final String EXTRA_PROPERTIES = "extra_properties";
 
     private final List<PropertyMetadata<?>> tableProperties;
 
     @Inject
     public HiveTableProperties(
             HiveConfig config,
-            OrcWriterConfig orcWriterConfig)
+            OrcWriterConfig orcWriterConfig,
+            TypeManager typeManager)
     {
         tableProperties = ImmutableList.of(
                 stringProperty(
@@ -173,7 +177,25 @@ public HiveTableProperties(
                         PARTITION_PROJECTION_LOCATION_TEMPLATE,
                         "Partition projection location template",
                         null,
-                        false));
+                        false),
+                new PropertyMetadata<>(
+                        EXTRA_PROPERTIES,
+                        "Extra table properties",
+                        new MapType(VARCHAR, VARCHAR, typeManager.getTypeOperators()),
+                        Map.class,
+                        null,
+                        false,
+                        value -> {
+                            Map<String, String> extraProperties = (Map<String, String>) value;
+                            if (extraProperties.isEmpty()) {
+                                throw new TrinoException(INVALID_TABLE_PROPERTY, "Extra table properties cannot be empty");
+                            }
+                            if (extraProperties.containsValue(null)) {
+                                throw new TrinoException(INVALID_TABLE_PROPERTY, format("Extra table property value cannot be null '%s'", extraProperties));
+                            }
+                            return extraProperties;
+                        },
+                        value -> value));
     }
 
     public List<PropertyMetadata<?>> getTableProperties()
@@ -311,4 +333,9 @@ public static Optional<Boolean> isAutoPurge(Map<String, Object> tableProperties)
     {
         return Optional.ofNullable((Boolean) tableProperties.get(AUTO_PURGE));
     }
+
+    public static Optional<Map<String, String>> getExtraProperties(Map<String, Object> tableProperties)
+    {
+        return Optional.ofNullable((Map<String, String>) tableProperties.get(EXTRA_PROPERTIES));
+    }
 }
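The new PropertyMetadata above registers `extra_properties` as a `map(varchar, varchar)` property and validates it at decode time. A standalone stand-in for that decode function (IllegalArgumentException replaces TrinoException here; everything else mirrors the lambda in the hunk):

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

public class ExtraPropertiesDecoder
{
    // Reject empty maps and null values before the property is accepted.
    static final Function<Object, Map<String, String>> DECODER = value -> {
        @SuppressWarnings("unchecked")
        Map<String, String> extraProperties = (Map<String, String>) value;
        if (extraProperties.isEmpty()) {
            throw new IllegalArgumentException("Extra table properties cannot be empty");
        }
        if (extraProperties.containsValue(null)) {
            throw new IllegalArgumentException("Extra table property value cannot be null '" + extraProperties + "'");
        }
        return extraProperties;
    };

    public static void main(String[] args)
    {
        Map<String, String> properties = new HashMap<>();
        properties.put("extra.property", "true");
        System.out.println(DECODER.apply(properties)); // {extra.property=true}
    }
}
```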
diff --git a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/aws/athena/PartitionProjectionService.java b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/aws/athena/PartitionProjectionService.java
index 205ad0cd9d1b..a0bea4b0a112 100644
--- a/plugin/trino-hive/src/main/java/io/trino/plugin/hive/aws/athena/PartitionProjectionService.java
+++ b/plugin/trino-hive/src/main/java/io/trino/plugin/hive/aws/athena/PartitionProjectionService.java
@@ -18,6 +18,7 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
 import io.trino.plugin.hive.HiveConfig;
+import io.trino.plugin.hive.HiveMetadata;
 import io.trino.plugin.hive.aws.athena.projection.Projection;
 import io.trino.plugin.hive.aws.athena.projection.ProjectionFactory;
 import io.trino.plugin.hive.aws.athena.projection.ProjectionType;
@@ -90,22 +91,15 @@ public PartitionProjectionService(
         this.projectionFactories = ImmutableMap.copyOf(requireNonNull(projectionFactories, "projectionFactories is null"));
     }
 
-    public Map<String, Object> getPartitionProjectionTrinoTableProperties(Table table)
+    public Map<String, Object> getPartitionProjectionTrinoTableProperties(HiveMetadata.TableParameterProvider tableParameterProvider)
     {
-        Map<String, String> metastoreTableProperties = table.getParameters();
         ImmutableMap.Builder<String, Object> trinoTablePropertiesBuilder = ImmutableMap.builder();
-        rewriteProperty(metastoreTableProperties, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_IGNORE, PARTITION_PROJECTION_IGNORE, Boolean::valueOf);
-        rewriteProperty(metastoreTableProperties, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_ENABLED, PARTITION_PROJECTION_ENABLED, Boolean::valueOf);
-        rewriteProperty(metastoreTableProperties, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_LOCATION_TEMPLATE, PARTITION_PROJECTION_LOCATION_TEMPLATE, String::valueOf);
+        rewriteProperty(tableParameterProvider::getParameter, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_IGNORE, PARTITION_PROJECTION_IGNORE, Boolean::valueOf);
+        rewriteProperty(tableParameterProvider::getParameter, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_ENABLED, PARTITION_PROJECTION_ENABLED, Boolean::valueOf);
+        rewriteProperty(tableParameterProvider::getParameter, trinoTablePropertiesBuilder, METASTORE_PROPERTY_PROJECTION_LOCATION_TEMPLATE, PARTITION_PROJECTION_LOCATION_TEMPLATE, String::valueOf);
         return trinoTablePropertiesBuilder.buildOrThrow();
     }
 
-    public Map<String, Object> getPartitionProjectionTrinoColumnProperties(Table table, String columnName)
-    {
-        Map<String, String> metastoreTableProperties = table.getParameters();
-        return rewriteColumnProjectionProperties(metastoreTableProperties, columnName);
-    }
-
     public Map<String, String> getPartitionProjectionHiveTableProperties(ConnectorTableMetadata tableMetadata)
     {
         // If partition projection is globally disabled we don't allow defining its properties
@@ -356,14 +350,24 @@ private Projection parseColumnProjection(String columnName, Type columnType, Map<String, Object> columnProperties)
         return projectionFactory.create(columnName, columnType, columnProperties);
     }
 
-    private void rewriteProperty(
+    private static void rewriteProperty(
             Map<String, String> sourceProperties,
             ImmutableMap.Builder<String, Object> targetPropertiesBuilder,
             String sourcePropertyKey,
             String targetPropertyKey,
             Function<String, ?> valueMapper)
     {
-        Optional.ofNullable(sourceProperties.get(sourcePropertyKey))
+        rewriteProperty(sourceProperties::get, targetPropertiesBuilder, sourcePropertyKey, targetPropertyKey, valueMapper);
+    }
+
+    private static void rewriteProperty(
+            Function<String, String> sourcePropertyProvider,
+            ImmutableMap.Builder<String, Object> targetPropertiesBuilder,
+            String sourcePropertyKey,
+            String targetPropertyKey,
+            Function<String, ?> valueMapper)
+    {
+        Optional.ofNullable(sourcePropertyProvider.apply(sourcePropertyKey))
                 .ifPresent(value -> targetPropertiesBuilder.put(targetPropertyKey, valueMapper.apply(value)));
     }
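The second rewriteProperty overload reads through a `Function<String, String>` so callers can pass `tableParameterProvider::getParameter`, marking projection keys as consumed as a side effect of the lookup. A runnable sketch of the same shape, with hypothetical key names:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

public class RewritePropertyDemo
{
    // Same shape as the provider-based overload above; key names are made up.
    static void rewriteProperty(
            Function<String, String> sourcePropertyProvider,
            Map<String, Object> target,
            String sourceKey,
            String targetKey,
            Function<String, ?> valueMapper)
    {
        Optional.ofNullable(sourcePropertyProvider.apply(sourceKey))
                .ifPresent(value -> target.put(targetKey, valueMapper.apply(value)));
    }

    public static void main(String[] args)
    {
        Map<String, String> metastoreParameters = Map.of("projection.enabled", "true");
        Map<String, Object> trinoProperties = new HashMap<>();
        // metastoreParameters::get plays the role of tableParameterProvider::getParameter
        rewriteProperty(metastoreParameters::get, trinoProperties, "projection.enabled", "partition_projection_enabled", Boolean::valueOf);
        System.out.println(trinoProperties); // {partition_projection_enabled=true}
    }
}
```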
diff --git a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java
index 967ee918fd9d..1f3ca141042d 100644
--- a/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java
+++ b/plugin/trino-hive/src/test/java/io/trino/plugin/hive/BaseHiveConnectorTest.java
@@ -29,6 +29,7 @@
 import io.trino.metadata.QualifiedObjectName;
 import io.trino.metadata.TableHandle;
 import io.trino.metadata.TableMetadata;
+import io.trino.spi.QueryId;
 import io.trino.spi.connector.CatalogSchemaTableName;
 import io.trino.spi.connector.ColumnHandle;
 import io.trino.spi.connector.ColumnMetadata;
@@ -54,6 +55,7 @@
 import io.trino.testing.MaterializedResult;
 import io.trino.testing.MaterializedResultWithQueryId;
 import io.trino.testing.MaterializedRow;
+import io.trino.testing.QueryFailedException;
 import io.trino.testing.QueryRunner;
 import io.trino.testing.TestingConnectorBehavior;
 import io.trino.testing.sql.TestTable;
@@ -8510,6 +8512,128 @@ public void testCreateAcidTableUnsupported()
         assertQueryFails("CREATE TABLE acid_unsupported WITH (transactional = true) AS SELECT 123 x", "FileHiveMetastore does not support ACID tables");
     }
 
+    @Test
+    public void testExtraProperties()
+    {
+        String tableName = format("%s.%s.test_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get());
+        @Language("SQL") String createTableSql = format("""
+                CREATE TABLE %s (
+                    c1 integer)
+                WITH (
+                    extra_properties = MAP(ARRAY['extra.property'], ARRAY['true']),
+                    format = 'ORC'
+                )""",
+                tableName);
+        MaterializedResultWithQueryId result = getDistributedQueryRunner().executeWithQueryId(getSession(), createTableSql);
+        QueryId queryId = result.getQueryId();
+        String nodeVersion = (String) computeScalar("SELECT node_version FROM system.runtime.nodes WHERE coordinator");
+        assertQuery(
+                "SELECT * FROM \"test_extra_properties$properties\"",
+                "SELECT 'workaround for potential lack of HIVE-12730', 'false', 'true', '0', '0', '" + queryId + "', '" + nodeVersion + "', '0', '0', 'false'");
+
+        MaterializedResult actualResult = computeActual("SHOW CREATE TABLE " + tableName);
+        String expectedShowCreateTableSql = "CREATE TABLE hive.tpch.test_extra_properties (\n" +
+                "   c1 integer\n" +
+                ")\n" +
+                "WITH (\n" +
+                "   extra_properties = map_from_entries(ARRAY[ROW('extra.property', 'true')]),\n" +
+                "   format = 'ORC'\n" +
+                ")";
+        assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), expectedShowCreateTableSql);
+
+        assertUpdate("DROP TABLE " + tableName);
+    }
+
+    @Test
+    public void testMultipleExtraProperties()
+    {
+        String tableName = format("%s.%s.test_multiple_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get());
+        @Language("SQL") String createTableSql = format("""
+                CREATE TABLE %s (
+                    c1 integer)
+                WITH (
+                    extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']),
+                    format = 'ORC'
+                )""",
+                tableName);
+        MaterializedResultWithQueryId result = getDistributedQueryRunner().executeWithQueryId(getSession(), createTableSql);
+        QueryId queryId = result.getQueryId();
+        String nodeVersion = (String) computeScalar("SELECT node_version FROM system.runtime.nodes WHERE coordinator");
+        assertQuery(
+                "SELECT * FROM \"test_multiple_extra_properties$properties\"",
+                "SELECT 'workaround for potential lack of HIVE-12730', 'false', 'one', 'two', '0', '0', '" + queryId + "', '" + nodeVersion + "', '0', '0', 'false'");
+
+        MaterializedResult actualResult = computeActual("SHOW CREATE TABLE " + tableName);
+        String expectedShowCreateTableSql = "CREATE TABLE hive.tpch.test_multiple_extra_properties (\n" +
+                "   c1 integer\n" +
+                ")\n" +
+                "WITH (\n" +
+                "   extra_properties = map_from_entries(ARRAY[ROW('extra.property.one', 'one'),ROW('extra.property.two', 'two')]),\n" +
+                "   format = 'ORC'\n" +
+                ")";
+        assertEquals(getOnlyElement(actualResult.getOnlyColumnAsSet()), expectedShowCreateTableSql);
+
+        assertUpdate("DROP TABLE " + tableName);
+    }
+
+    @Test
+    public void testDuplicateExtraProperties()
+    {
+        String tableName = format("%s.%s.test_duplicate_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get());
+        @Language("SQL") String createTableSql = format("""
+                CREATE TABLE %s (
+                    c1 integer)
+                WITH (
+                    extra_properties = MAP(ARRAY['extra.property', 'extra.property'], ARRAY['true', 'false']),
+                    format = 'ORC'
+                )""",
+                tableName);
+        assertQueryFails(createTableSql, "Invalid value for catalog 'hive' table property 'extra_properties': Cannot convert.*");
+    }
+
+    @Test
+    public void testOverwriteExistingPropertyWithExtraProperties()
+    {
+        String tableName = format("%s.%s.test_overwrite_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get());
+        @Language("SQL") String createTableSql = format("""
+                CREATE TABLE %s (
+                    c1 integer)
+                WITH (
+                    extra_properties = MAP(ARRAY['transactional'], ARRAY['true']),
+                    format = 'ORC'
+                )""",
+                tableName);
+        assertThatThrownBy(() -> assertUpdate(createTableSql))
+                .isInstanceOf(QueryFailedException.class)
+                .hasMessage("Invalid keys in extra_properties: transactional");
+    }
+
+    @Test
+    public void testAddingNullProperty()
+    {
+        String tableName = format("%s.%s.test_duplicate_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get());
+        @Language("SQL") String createTableSql = format("""
+                CREATE TABLE %s (
+                    c1 integer)
+                WITH (
+                    extra_properties = MAP(ARRAY['null.property'], ARRAY[null]),
+                    format = 'ORC'
+                )""",
+                tableName);
+        assertQueryFails(createTableSql, ".*Extra table property value cannot be null '\\{null.property=null}'.*");
+    }
format("%s.%s.test_duplicate_extra_properties", getSession().getCatalog().get(), getSession().getSchema().get()); + @Language("SQL") String createTableSql = format(""" + CREATE TABLE %s ( + c1 integer) + WITH ( + extra_properties = MAP(), + format = 'ORC' + )""", + tableName); + assertQueryFails(createTableSql, ".*Extra table properties cannot be empty.*"); + } + private static final Set NAMED_COLUMN_ONLY_FORMATS = ImmutableSet.of(HiveStorageFormat.AVRO, HiveStorageFormat.JSON); @DataProvider diff --git a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveSparkCompatibility.java b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveSparkCompatibility.java index 11975c5f3664..8a43f19310df 100644 --- a/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveSparkCompatibility.java +++ b/testing/trino-product-tests/src/main/java/io/trino/tests/product/hive/TestHiveSparkCompatibility.java @@ -15,6 +15,7 @@ import com.google.common.collect.ImmutableList; import io.trino.tempto.ProductTest; +import org.assertj.core.api.Assertions; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; @@ -161,31 +162,30 @@ public void testReadSparkCreatedTable(String sparkTableFormat, String expectedTr onTrino().executeQuery("SET SESSION hive.timestamp_precision = 'NANOSECONDS'"); assertThat(onTrino().executeQuery("SELECT * FROM " + trinoTableName)).containsOnly(expected); - assertThat(onTrino().executeQuery("SHOW CREATE TABLE " + trinoTableName)) - .containsOnly(row(format( - "CREATE TABLE %s (\n" + - " a_boolean boolean,\n" + - " a_tinyint tinyint,\n" + - " a_smallint smallint,\n" + - " an_integer integer,\n" + - " a_bigint bigint,\n" + - " a_real real,\n" + - " a_double double,\n" + - " a_short_decimal decimal(11, 4),\n" + - " a_long_decimal decimal(26, 7),\n" + - " a_string varchar,\n" + - " a_date date,\n" + - " a_timestamp_seconds timestamp(9),\n" + - " a_timestamp_millis timestamp(9),\n" + - " a_timestamp_micros timestamp(9),\n" + - " a_timestamp_nanos timestamp(9),\n" + - " a_dummy varchar\n" + - ")\n" + - "WITH (\n" + - " format = '%s'\n" + - ")", - trinoTableName, - expectedTrinoTableFormat))); + Assertions.assertThat((String) onTrino().executeQuery("SHOW CREATE TABLE " + trinoTableName).getOnlyValue()) + .matches(""" + CREATE TABLE %s\\.default\\.%s \\( + a_boolean boolean, + a_tinyint tinyint, + a_smallint smallint, + an_integer integer, + a_bigint bigint, + a_real real, + a_double double, + a_short_decimal decimal\\(11, 4\\), + a_long_decimal decimal\\(26, 7\\), + a_string varchar, + a_date date, + a_timestamp_seconds timestamp\\(9\\), + a_timestamp_millis timestamp\\(9\\), + a_timestamp_micros timestamp\\(9\\), + a_timestamp_nanos timestamp\\(9\\), + a_dummy varchar + \\) + WITH \\( + extra_properties = map_from_entries\\(ARRAY.*\\), + format = '%s' + \\)""".formatted(TRINO_CATALOG, sparkTableName, expectedTrinoTableFormat)); onSpark().executeQuery("DROP TABLE " + sparkTableName); } @@ -627,20 +627,20 @@ public void testReadSparkBucketedTable() assertThat(onTrino().executeQuery("SELECT a_string, a_bigint, an_integer, a_real, a_double, a_boolean FROM " + trinoTableName)) .containsOnly(expected); - assertThat(onTrino().executeQuery("SHOW CREATE TABLE " + trinoTableName)) - .containsOnly(row(format( - "CREATE TABLE %s (\n" + - " a_string varchar,\n" + - " a_bigint bigint,\n" + - " an_integer integer,\n" + - " a_real real,\n" + - " a_double double,\n" + - " a_boolean boolean\n" + - ")\n" + - "WITH (\n" + 
- " format = 'ORC'\n" + - ")", - trinoTableName))); + Assertions.assertThat((String) onTrino().executeQuery("SHOW CREATE TABLE " + trinoTableName).getOnlyValue()) + .matches(""" + CREATE TABLE %s\\.default\\.%s \\( + a_string varchar, + a_bigint bigint, + an_integer integer, + a_real real, + a_double double, + a_boolean boolean + \\) + WITH \\( + extra_properties = map_from_entries\\(ARRAY.*\\), + format = 'ORC' + \\)""".formatted(TRINO_CATALOG, sparkTableName)); assertQueryFailure(() -> onTrino().executeQuery("SELECT a_string, a_bigint, an_integer, a_real, a_double, a_boolean, \"$bucket\" FROM " + trinoTableName)) .hasMessageContaining("Column '$bucket' cannot be resolved");