[Refactor] add getCatalogDbName and getCatalogTableName in Table (backport #53072) #53150

Merged: 2 commits, Nov 23, 2024
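This backport renames the per-connector accessors getDbName()/getTableName() (getRemoteDbName()/getRemoteTableName() in IcebergTable) to getCatalogDBName()/getCatalogTableName() and marks them, together with getTableLocation(), getDataColumnNames(), getResourceName() and getCatalogName(), as @Override, so HiveTable, HudiTable and OdpsTable no longer implement the HiveMetaStoreTable interface. The Table.java side of the change is not included in this excerpt; the sketch below is a hypothetical reconstruction of what the new base-class declarations could look like, and its default bodies are assumptions rather than the actual implementation.

```java
// Hypothetical sketch only: the Table.java change backported here is not shown in this
// excerpt and its real default bodies may differ. It illustrates the shape implied by the
// @Override annotations in the diff below: the catalog-level accessors now live on the
// Table base class and each connector table overrides them.
import java.util.Collections;
import java.util.List;

public abstract class Table {
    // ... existing fields, constructors, and methods elided ...

    /** Database name of the table in the external catalog (assumed default). */
    public String getCatalogDBName() {
        throw new UnsupportedOperationException("getCatalogDBName is not supported");
    }

    /** Table name in the external catalog (assumed default). */
    public String getCatalogTableName() {
        throw new UnsupportedOperationException("getCatalogTableName is not supported");
    }

    /** Storage location of the table, if any (assumed default). */
    public String getTableLocation() {
        throw new UnsupportedOperationException("getTableLocation is not supported");
    }

    /** Resource the table is mapped to, if any (assumed default). */
    public String getResourceName() {
        return null;
    }

    /** Non-partition column names (assumed default). */
    public List<String> getDataColumnNames() {
        return Collections.emptyList();
    }
}
```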
(file name not captured)
@@ -71,6 +71,7 @@ public boolean isSupported() {
return true;
}

@Override
public String getTableLocation() {
return tableLocation;
}
@@ -92,11 +93,13 @@ public String getCatalogName() {
return catalogName;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return dbName;
}

public String getTableName() {
@Override
public String getCatalogTableName() {
return tableName;
}

(file name not captured)
@@ -99,6 +99,7 @@ private void validate(Map<String, String> properties) throws DdlException {
fileProperties.put(AzureCloudConfigurationProvider.AZURE_PATH_KEY, path);
}

@Override
public String getTableLocation() {
return fileProperties.get(JSON_KEY_FILE_PATH);
}

(file name not captured)
This file was deleted.

9 changes: 6 additions & 3 deletions fe/fe-core/src/main/java/com/starrocks/catalog/HiveTable.java
@@ -76,7 +76,7 @@
import static com.starrocks.server.CatalogMgr.ResourceMappingCatalog.getResourceMappingCatalogName;
import static com.starrocks.server.CatalogMgr.ResourceMappingCatalog.isResourceMappingCatalog;

public class HiveTable extends Table implements HiveMetaStoreTable {
public class HiveTable extends Table {
public enum HiveTableType {
VIRTUAL_VIEW,
EXTERNAL_TABLE,
@@ -176,12 +176,13 @@ public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "hive") : catalogName;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return hiveDbName;
}

@Override
public String getTableName() {
public String getCatalogTableName() {
return hiveTableName;
}

@@ -210,6 +211,7 @@ public List<String> getPartitionColumnNames() {
return partColumnNames;
}

@Override
public List<String> getDataColumnNames() {
return dataColumnNames;
}
@@ -219,6 +221,7 @@ public boolean isUnPartitioned() {
return partColumnNames.size() == 0;
}

@Override
public String getTableLocation() {
return this.tableLocation;
}
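Because these accessors are now declared on Table itself, code that needs the external database or table name can work against the base class instead of the connector-specific HiveMetaStoreTable interface that HiveTable drops above. A minimal usage sketch follows; the class and method names in it are illustrative and not part of this PR.

```java
// Illustrative only: CatalogNames/qualified are hypothetical names, not code from this PR.
public final class CatalogNames {
    private CatalogNames() {
    }

    // Builds a "db.table" identifier for any connector table via the accessors that this
    // refactor exposes on the Table base class (getCatalogDBName/getCatalogTableName).
    public static String qualified(com.starrocks.catalog.Table table) {
        return table.getCatalogDBName() + "." + table.getCatalogTableName();
    }
}
```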
10 changes: 7 additions & 3 deletions fe/fe-core/src/main/java/com/starrocks/catalog/HudiTable.java
@@ -57,7 +57,7 @@
* Currently, we depend on Hive metastore to obtain table/partition path and statistics.
* This logic should be decoupled from metastore when the related interfaces are ready.
*/
public class HudiTable extends Table implements HiveMetaStoreTable {
public class HudiTable extends Table {
private static final Logger LOG = LogManager.getLogger(HudiTable.class);

private static final String JSON_KEY_HUDI_DB = "database";
@@ -124,10 +124,12 @@ public HudiTable(long id, String name, String catalogName, String hiveDbName, St
this.tableType = type;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return hiveDbName;
}

@Override
public String getResourceName() {
return resourceName;
}
@@ -141,6 +143,7 @@ public HoodieTableType getTableType() {
return HoodieTableType.valueOf(hudiProperties.get(HUDI_TABLE_TYPE));
}

@Override
public String getTableLocation() {
return hudiProperties.get(HUDI_BASE_PATH);
}
@@ -150,7 +153,7 @@ public String getHudiInputFormat() {
}

@Override
public String getTableName() {
public String getCatalogTableName() {
return hiveTableName;
}

@@ -175,6 +178,7 @@ public List<String> getPartitionColumnNames() {
return partColumnNames;
}

@Override
public List<String> getDataColumnNames() {
return dataColumnNames;
}
13 changes: 7 additions & 6 deletions fe/fe-core/src/main/java/com/starrocks/catalog/IcebergTable.java
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.


package com.starrocks.catalog;

import com.google.common.base.Joiner;
@@ -85,7 +84,6 @@ public class IcebergTable extends Table {

private final AtomicLong partitionIdGen = new AtomicLong(0L);


public IcebergTable() {
super(TableType.ICEBERG);
}
@@ -98,7 +96,7 @@ public IcebergTable(long id, String srTableName, String catalogName, String reso
this.resourceName = resourceName;
this.remoteDbName = remoteDbName;
this.remoteTableName = remoteTableName;
this.comment = comment;
this.comment = comment;
this.nativeTable = nativeTable;
this.icebergProperties = icebergProperties;
}
@@ -108,15 +106,18 @@ public String getCatalogName() {
return catalogName == null ? getResourceMappingCatalogName(resourceName, "iceberg") : catalogName;
}

@Override
public String getResourceName() {
return resourceName;
}

public String getRemoteDbName() {
@Override
public String getCatalogDBName() {
return remoteDbName;
}

public String getRemoteTableName() {
@Override
public String getCatalogTableName() {
return remoteTableName;
}

@@ -141,6 +142,7 @@ public List<Column> getPartitionColumns() {
}
return partitionColumns;
}

public List<Column> getPartitionColumnsIncludeTransformed() {
List<Column> allPartitionColumns = new ArrayList<>();
for (PartitionField field : getNativeTable().spec().fields()) {
@@ -429,7 +431,6 @@ public Builder setCatalogName(String catalogName) {
return this;
}


public Builder setComment(String comment) {
this.comment = comment;
return this;
(file name not captured)
@@ -82,6 +82,7 @@ public JDBCTable(long id, String name, List<Column> schema, List<Column> partiti
validate(properties);
}

@Override
public String getResourceName() {
return resourceName;
}
@@ -91,7 +92,8 @@ public String getCatalogName() {
return catalogName;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return dbName;
}

10 changes: 6 additions & 4 deletions fe/fe-core/src/main/java/com/starrocks/catalog/KuduTable.java
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.


package com.starrocks.catalog;

import com.google.common.collect.Sets;
@@ -33,7 +32,6 @@

import static com.starrocks.connector.ConnectorTableId.CONNECTOR_ID_GENERATOR;


public class KuduTable extends Table {
private static final Logger LOG = LogManager.getLogger(KuduTable.class);
public static final Set<String> KUDU_INPUT_FORMATS = Sets.newHashSet(
@@ -73,18 +71,22 @@ public static KuduTable fromMetastoreTable(org.apache.hadoop.hive.metastore.api.
public String getMasterAddresses() {
return masterAddresses;
}

@Override
public String getCatalogName() {
return catalogName;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return databaseName;
}

public String getTableName() {
@Override
public String getCatalogTableName() {
return tableName;
}

public Optional<String> getKuduTableName() {
return kuduTableName;
}
8 changes: 4 additions & 4 deletions fe/fe-core/src/main/java/com/starrocks/catalog/OdpsTable.java
@@ -31,7 +31,7 @@

import static com.starrocks.connector.ConnectorTableId.CONNECTOR_ID_GENERATOR;

public class OdpsTable extends Table implements HiveMetaStoreTable {
public class OdpsTable extends Table {
private static final Logger LOG = LogManager.getLogger(OdpsTable.class);
public static final String PARTITION_NULL_VALUE = "null";

@@ -74,12 +74,12 @@ public String getCatalogName() {
}

@Override
public String getDbName() {
public String getCatalogDBName() {
return dbName;
}

@Override
public String getTableName() {
public String getCatalogTableName() {
return tableName;
}

@@ -123,7 +123,7 @@ public String getUUID() {
@Override
public TTableDescriptor toThrift(List<DescriptorTable.ReferencedPartitionInfo> partitions) {
TTableDescriptor tTableDescriptor = new TTableDescriptor(getId(), TTableType.ODPS_TABLE,
fullSchema.size(), 0, getName(), getDbName());
fullSchema.size(), 0, getName(), getCatalogDBName());
THdfsTable hdfsTable = new THdfsTable();
hdfsTable.setColumns(getColumns().stream().map(Column::toThrift).collect(Collectors.toList()));
// for be, partition column is equals to data column
(file name not captured)
@@ -12,7 +12,6 @@
// See the License for the specific language governing permissions and
// limitations under the License.


package com.starrocks.catalog;

import com.starrocks.analysis.DescriptorTable;
@@ -33,7 +32,6 @@

import static com.starrocks.connector.ConnectorTableId.CONNECTOR_ID_GENERATOR;


public class PaimonTable extends Table {
private String catalogName;
private String databaseName;
@@ -66,11 +64,13 @@ public String getCatalogName() {
return catalogName;
}

public String getDbName() {
@Override
public String getCatalogDBName() {
return databaseName;
}

public String getTableName() {
@Override
public String getCatalogTableName() {
return tableName;
}
