Skip to content

Commit

Permalink
Dummy commit
Browse files Browse the repository at this point in the history
  • Loading branch information
Priyansh121096 committed Oct 31, 2023
1 parent 39e4efa commit e6cb6cc
Show file tree
Hide file tree
Showing 4 changed files with 58 additions and 107 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -160,20 +160,7 @@
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
Expand Down Expand Up @@ -221,12 +208,7 @@
import static io.trino.plugin.iceberg.IcebergSessionProperties.isMergeManifestsOnWrite;
import static io.trino.plugin.iceberg.IcebergSessionProperties.isProjectionPushdownEnabled;
import static io.trino.plugin.iceberg.IcebergSessionProperties.isStatisticsEnabled;
import static io.trino.plugin.iceberg.IcebergTableProperties.EXTRA_PROPERTIES;
import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning;
import static io.trino.plugin.iceberg.IcebergTableProperties.*;
import static io.trino.plugin.iceberg.IcebergUtil.canEnforceColumnConstraintInSpecs;
import static io.trino.plugin.iceberg.IcebergUtil.commit;
import static io.trino.plugin.iceberg.IcebergUtil.deserializePartitionValue;
Expand Down Expand Up @@ -258,10 +240,7 @@
import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.EXPIRE_SNAPSHOTS;
import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.OPTIMIZE;
import static io.trino.plugin.iceberg.procedure.IcebergTableProcedureId.REMOVE_ORPHAN_FILES;
import static io.trino.spi.StandardErrorCode.COLUMN_ALREADY_EXISTS;
import static io.trino.spi.StandardErrorCode.INVALID_ANALYZE_PROPERTY;
import static io.trino.spi.StandardErrorCode.INVALID_ARGUMENTS;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.StandardErrorCode.*;
import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.FRESH;
import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.STALE;
import static io.trino.spi.connector.MaterializedViewFreshness.Freshness.UNKNOWN;
Expand Down Expand Up @@ -1661,12 +1640,6 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta
beginTransaction(icebergTable);
UpdateProperties updateProperties = transaction.updateProperties();

if (properties.containsKey(EXTRA_PROPERTIES)) {
Map<String, String> extraProperties = (Map<String, String>) properties.get(EXTRA_PROPERTIES)
.orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty"));
extraProperties.forEach(updateProperties::set);
}

if (properties.containsKey(FILE_FORMAT_PROPERTY)) {
IcebergFileFormat fileFormat = (IcebergFileFormat) properties.get(FILE_FORMAT_PROPERTY)
.orElseThrow(() -> new IllegalArgumentException("The format property cannot be empty"));
Expand Down Expand Up @@ -1708,6 +1681,19 @@ public void setTableProperties(ConnectorSession session, ConnectorTableHandle ta
}
}

if (properties.containsKey(EXTRA_PROPERTIES)) {
Map<String, String> extraProperties = (Map<String, String>) properties.get(EXTRA_PROPERTIES)
.orElseThrow(() -> new IllegalArgumentException("extra_properties property cannot be empty"));

Set<String> illegalExtraProperties = Sets.intersection(ILLEGAL_EXTRA_PROPERTIES, extraProperties.keySet());
if (!illegalExtraProperties.isEmpty()) {
throw new TrinoException(
INVALID_TABLE_PROPERTY,
"Illegal keys in extra_properties: " + illegalExtraProperties);
}
extraProperties.forEach(updateProperties::set);
}

try {
transaction.commitTransaction();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
package io.trino.plugin.iceberg;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.inject.Inject;
import io.trino.plugin.hive.orc.OrcWriterConfig;
import io.trino.spi.TrinoException;
Expand All @@ -25,6 +26,7 @@
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.plugin.iceberg.IcebergConfig.FORMAT_VERSION_SUPPORT_MAX;
Expand All @@ -48,6 +50,14 @@ public class IcebergTableProperties
public static final String ORC_BLOOM_FILTER_COLUMNS = "orc_bloom_filter_columns";
public static final String ORC_BLOOM_FILTER_FPP = "orc_bloom_filter_fpp";
public static final String EXTRA_PROPERTIES = "extra_properties";
public static final Set<String> ILLEGAL_EXTRA_PROPERTIES = ImmutableSet.of(
FILE_FORMAT_PROPERTY,
PARTITIONING_PROPERTY,
SORTED_BY_PROPERTY,
LOCATION_PROPERTY,
FORMAT_VERSION_PROPERTY,
ORC_BLOOM_FILTER_COLUMNS,
ORC_BLOOM_FILTER_FPP);

private final List<PropertyMetadata<?>> tableProperties;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,19 +103,7 @@
import static io.trino.plugin.iceberg.IcebergErrorCode.ICEBERG_INVALID_PARTITION_VALUE;
import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_COLUMNS_KEY;
import static io.trino.plugin.iceberg.IcebergMetadata.ORC_BLOOM_FILTER_FPP_KEY;
import static io.trino.plugin.iceberg.IcebergTableProperties.FILE_FORMAT_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.FORMAT_VERSION_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.LOCATION_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_COLUMNS;
import static io.trino.plugin.iceberg.IcebergTableProperties.ORC_BLOOM_FILTER_FPP;
import static io.trino.plugin.iceberg.IcebergTableProperties.PARTITIONING_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.SORTED_BY_PROPERTY;
import static io.trino.plugin.iceberg.IcebergTableProperties.getExtraProperties;
import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterColumns;
import static io.trino.plugin.iceberg.IcebergTableProperties.getOrcBloomFilterFpp;
import static io.trino.plugin.iceberg.IcebergTableProperties.getPartitioning;
import static io.trino.plugin.iceberg.IcebergTableProperties.getSortOrder;
import static io.trino.plugin.iceberg.IcebergTableProperties.getTableLocation;
import static io.trino.plugin.iceberg.IcebergTableProperties.*;
import static io.trino.plugin.iceberg.PartitionFields.parsePartitionFields;
import static io.trino.plugin.iceberg.PartitionFields.toPartitionFields;
import static io.trino.plugin.iceberg.SortFieldUtils.parseSortFields;
Expand Down Expand Up @@ -662,7 +650,7 @@ public static Transaction newCreateTableTransaction(TrinoCatalog catalog, Connec
// Add properties set via "extra_properties" table property.
Map<String, String> extraProperties = getExtraProperties(tableMetadata.getProperties())
.orElseGet(ImmutableMap::of);
Set<String> illegalExtraProperties = Sets.intersection(baseProperties.keySet(), extraProperties.keySet());
Set<String> illegalExtraProperties = Sets.intersection(ILLEGAL_EXTRA_PROPERTIES, extraProperties.keySet());
if (!illegalExtraProperties.isEmpty()) {
throw new TrinoException(
INVALID_TABLE_PROPERTY,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,25 +32,13 @@
import io.trino.plugin.hive.TestingHivePlugin;
import io.trino.plugin.iceberg.fileio.ForwardingFileIo;
import io.trino.spi.QueryId;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.Constraint;
import io.trino.spi.connector.ConstraintApplicationResult;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.connector.TableNotFoundException;
import io.trino.spi.connector.*;
import io.trino.spi.predicate.Domain;
import io.trino.spi.predicate.TupleDomain;
import io.trino.sql.planner.plan.FilterNode;
import io.trino.sql.planner.plan.OutputNode;
import io.trino.sql.planner.plan.ValuesNode;
import io.trino.testing.BaseConnectorTest;
import io.trino.testing.DataProviders;
import io.trino.testing.DistributedQueryRunner;
import io.trino.testing.MaterializedResult;
import io.trino.testing.MaterializedResultWithQueryId;
import io.trino.testing.MaterializedRow;
import io.trino.testing.QueryFailedException;
import io.trino.testing.QueryRunner;
import io.trino.testing.TestingConnectorBehavior;
import io.trino.testing.*;
import io.trino.testing.sql.TestTable;
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
Expand All @@ -62,6 +50,7 @@
import org.apache.iceberg.TableMetadataParser;
import org.apache.iceberg.io.FileIO;
import org.apache.iceberg.util.JsonUtil;
import org.assertj.core.api.Condition;
import org.intellij.lang.annotations.Language;
import org.testng.SkipException;
import org.testng.annotations.BeforeClass;
Expand All @@ -78,15 +67,7 @@
import java.time.Instant;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.regex.Matcher;
Expand All @@ -103,14 +84,9 @@
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.collect.MoreCollectors.onlyElement;
import static com.google.common.util.concurrent.Uninterruptibles.sleepUninterruptibly;
import static io.trino.SystemSessionProperties.SCALE_WRITERS;
import static io.trino.SystemSessionProperties.TASK_MAX_WRITER_COUNT;
import static io.trino.SystemSessionProperties.TASK_MIN_WRITER_COUNT;
import static io.trino.SystemSessionProperties.USE_PREFERRED_WRITE_PARTITIONING;
import static io.trino.SystemSessionProperties.*;
import static io.trino.plugin.hive.metastore.file.TestingFileHiveMetastore.createTestingFileHiveMetastore;
import static io.trino.plugin.iceberg.IcebergFileFormat.AVRO;
import static io.trino.plugin.iceberg.IcebergFileFormat.ORC;
import static io.trino.plugin.iceberg.IcebergFileFormat.PARQUET;
import static io.trino.plugin.iceberg.IcebergFileFormat.*;
import static io.trino.plugin.iceberg.IcebergQueryRunner.ICEBERG_CATALOG;
import static io.trino.plugin.iceberg.IcebergSessionProperties.COLLECT_EXTENDED_STATISTICS_ON_WRITE;
import static io.trino.plugin.iceberg.IcebergSessionProperties.EXTENDED_STATISTICS_ENABLED;
Expand Down Expand Up @@ -145,19 +121,13 @@
import static java.util.Collections.nCopies;
import static java.util.Objects.requireNonNull;
import static java.util.UUID.randomUUID;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.MINUTES;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.concurrent.TimeUnit.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
import static java.util.stream.IntStream.range;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.*;

public abstract class BaseIcebergConnectorTest
extends BaseConnectorTest
Expand Down Expand Up @@ -7352,13 +7322,13 @@ public void testExtraProperties()
assertQuery(
"SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName),
"SELECT 'one', 'two'");
assertThat(computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue())
.isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) +
" c1 integer\n" +
")\n" +
"WITH (\n" +
" format = 'ORC'\n" +
")");

// Assert that SHOW CREATE TABLE does not contain extra_properties
assertThat((String) computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue())
.satisfies(new Condition<>(
queryResult -> !queryResult.contains("extra_properties"), "noExtraProperties"
));

assertUpdate("DROP TABLE %s".formatted(tableName));
}

Expand All @@ -7371,13 +7341,12 @@ public void testExtraPropertiesWithCtas()
assertQuery(
"SELECT \"extra.property.one\", \"extra.property.two\" FROM \"%s$properties\"".formatted(tableName),
"SELECT 'one', 'two'");
assertThat(computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue())
.isEqualTo("CREATE TABLE iceberg.tpch.%s (\n".formatted(tableName) +
" c1 integer\n" +
")\n" +
"WITH (\n" +
" format = 'ORC'\n" +
")");

// Assert that SHOW CREATE TABLE does not contain extra_properties
assertThat((String) computeActual("SHOW CREATE TABLE %s".formatted(tableName)).getOnlyValue())
.satisfies(new Condition<>(
queryResult -> !queryResult.contains("extra_properties"), "noExtraProperties"
));

assertUpdate("DROP TABLE %s".formatted(tableName));
}
Expand All @@ -7388,13 +7357,11 @@ public void testShowCreateWithExtraProperties()
String tableName = format("%s.%s.show_create_table_with_extra_properties_%s", getSession().getCatalog().get(), getSession().getSchema().get(), randomNameSuffix());
assertUpdate("CREATE TABLE %s (c1 integer) WITH (extra_properties = MAP(ARRAY['extra.property.one', 'extra.property.two'], ARRAY['one', 'two']))".formatted(tableName));

assertThat(computeActual("SHOW CREATE TABLE " + tableName).getOnlyValue())
.isEqualTo("CREATE TABLE %s (\n".formatted(tableName) +
" c1 integer\n" +
")\n" +
"WITH (\n" +
" format = 'ORC'\n" +
")");
// Assert that SHOW CREATE TABLE does not contain extra_properties
assertThat((String) computeActual("SHOW CREATE TABLE " + tableName).getOnlyValue())
.satisfies(new Condition<>(
queryResult -> !queryResult.contains("extra_properties"), "noExtraProperties"
));

assertUpdate("DROP TABLE %s".formatted(tableName));
}
Expand All @@ -7413,13 +7380,13 @@ public void testDuplicateExtraProperties()
@Test
public void testOverwriteExistingPropertyWithExtraProperties()
{
assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_with_overwrite_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['transactional'], ARRAY['true']))"))
assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_with_overwrite_extra_properties (c1 integer) WITH (extra_properties = MAP(ARRAY['write.format.default'], ARRAY['foobar']))"))
.isInstanceOf(QueryFailedException.class)
.hasMessage("Illegal keys in extra_properties: [transactional]");
.hasMessage("Illegal keys in extra_properties: [write.format.default]");

assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['rawDataSize'], ARRAY['1'])) AS SELECT 1 as c1"))
assertThatThrownBy(() -> assertUpdate("CREATE TABLE create_table_as_select_with_extra_properties WITH (extra_properties = MAP(ARRAY['format-version'], ARRAY['10'])) AS SELECT 1 as c1"))
.isInstanceOf(QueryFailedException.class)
.hasMessage("Illegal keys in extra_properties: [rawDataSize]");
.hasMessage("Illegal keys in extra_properties: [format-version]");
}

@Test
Expand Down

0 comments on commit e6cb6cc

Please sign in to comment.