Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/HIVE-28147' into HIVE-28147
Browse files Browse the repository at this point in the history
  • Loading branch information
slfan1989 committed Mar 28, 2024
2 parents 88634d9 + 96f86e9 commit 34da82f
Show file tree
Hide file tree
Showing 25 changed files with 672 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -151,8 +151,8 @@ public class HiveIcebergMetaHook implements HiveMetaHook {
AlterTableType.ADDCOLS, AlterTableType.REPLACE_COLUMNS, AlterTableType.RENAME_COLUMN,
AlterTableType.ADDPROPS, AlterTableType.DROPPROPS, AlterTableType.SETPARTITIONSPEC,
AlterTableType.UPDATE_COLUMNS, AlterTableType.RENAME, AlterTableType.EXECUTE, AlterTableType.CREATE_BRANCH,
AlterTableType.CREATE_TAG, AlterTableType.DROP_BRANCH, AlterTableType.DROPPARTITION, AlterTableType.DROP_TAG,
AlterTableType.COMPACT);
AlterTableType.CREATE_TAG, AlterTableType.DROP_BRANCH, AlterTableType.RENAME_BRANCH, AlterTableType.DROPPARTITION,
AlterTableType.DROP_TAG, AlterTableType.COMPACT);
private static final List<String> MIGRATION_ALLOWED_SOURCE_FORMATS = ImmutableList.of(
FileFormat.PARQUET.name().toLowerCase(),
FileFormat.ORC.name().toLowerCase(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1002,6 +1002,11 @@ public void alterTableSnapshotRefOperation(org.apache.hadoop.hive.ql.metadata.Ta
(AlterTableSnapshotRefSpec.DropSnapshotRefSpec) alterTableSnapshotRefSpec.getOperationParams();
IcebergBranchExec.dropBranch(icebergTable, dropBranchSpec);
break;
case RENAME_BRANCH:
AlterTableSnapshotRefSpec.RenameSnapshotrefSpec renameSnapshotrefSpec =
(AlterTableSnapshotRefSpec.RenameSnapshotrefSpec) alterTableSnapshotRefSpec.getOperationParams();
IcebergBranchExec.renameBranch(icebergTable, renameSnapshotrefSpec);
break;
case DROP_TAG:
AlterTableSnapshotRefSpec.DropSnapshotRefSpec dropTagSpec =
(AlterTableSnapshotRefSpec.DropSnapshotRefSpec) alterTableSnapshotRefSpec.getOperationParams();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -89,4 +89,12 @@ public static void dropBranch(Table table, AlterTableSnapshotRefSpec.DropSnapsho
table.manageSnapshots().removeBranch(branchName).commit();
}
}

/**
 * Renames an existing branch on the given Iceberg table.
 *
 * @param table the Iceberg table whose branch is renamed
 * @param renameSnapshotrefSpec carries the current (source) and new (target) branch names
 */
public static void renameBranch(Table table, AlterTableSnapshotRefSpec.RenameSnapshotrefSpec renameSnapshotrefSpec) {
  String from = renameSnapshotrefSpec.getSourceBranchName();
  String to = renameSnapshotrefSpec.getTargetBranchName();
  LOG.info("Renaming branch {} to {} on iceberg table {}", from, to, table.name());
  // Iceberg's snapshot-management API performs the rename as a single table commit.
  table.manageSnapshots().renameBranch(from, to).commit();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
-- SORT_QUERY_RESULTS
set hive.explain.user=false;
set hive.fetch.task.conversion=more;

create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2');

insert into ice01 values (1), (2), (3), (4);

select * from ice01;

-- create a branch named source
alter table ice01 create branch source;
select * from default.ice01.branch_source;

-- insert some data to branch
insert into ice01 values (5), (6);
select * from default.ice01.branch_source;

-- rename the branch
explain alter table ice01 rename branch source to target;
alter table ice01 rename branch source to target;

select name,type from default.ice01.refs;

-- read from the renamed branch
select * from default.ice01.branch_target;

Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
PREHOOK: query: create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2')
PREHOOK: type: CREATETABLE
PREHOOK: Output: database:default
PREHOOK: Output: default@ice01
POSTHOOK: query: create external table ice01(id int) stored by iceberg stored as orc tblproperties ('format-version'='2')
POSTHOOK: type: CREATETABLE
POSTHOOK: Output: database:default
POSTHOOK: Output: default@ice01
PREHOOK: query: insert into ice01 values (1), (2), (3), (4)
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
PREHOOK: Output: default@ice01
POSTHOOK: query: insert into ice01 values (1), (2), (3), (4)
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@ice01
PREHOOK: query: select * from ice01
PREHOOK: type: QUERY
PREHOOK: Input: default@ice01
PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select * from ice01
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice01
POSTHOOK: Output: hdfs://### HDFS PATH ###
1
2
3
4
PREHOOK: query: alter table ice01 create branch source
PREHOOK: type: ALTERTABLE_CREATEBRANCH
PREHOOK: Input: default@ice01
POSTHOOK: query: alter table ice01 create branch source
POSTHOOK: type: ALTERTABLE_CREATEBRANCH
POSTHOOK: Input: default@ice01
PREHOOK: query: select * from default.ice01.branch_source
PREHOOK: type: QUERY
PREHOOK: Input: default@ice01
PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select * from default.ice01.branch_source
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice01
POSTHOOK: Output: hdfs://### HDFS PATH ###
1
2
3
4
PREHOOK: query: insert into ice01 values (5), (6)
PREHOOK: type: QUERY
PREHOOK: Input: _dummy_database@_dummy_table
PREHOOK: Output: default@ice01
POSTHOOK: query: insert into ice01 values (5), (6)
POSTHOOK: type: QUERY
POSTHOOK: Input: _dummy_database@_dummy_table
POSTHOOK: Output: default@ice01
PREHOOK: query: select * from default.ice01.branch_source
PREHOOK: type: QUERY
PREHOOK: Input: default@ice01
PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select * from default.ice01.branch_source
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice01
POSTHOOK: Output: hdfs://### HDFS PATH ###
1
2
3
4
PREHOOK: query: explain alter table ice01 rename branch source to target
PREHOOK: type: ALTERTABLE_RENAMEBRANCH
PREHOOK: Input: default@ice01
POSTHOOK: query: explain alter table ice01 rename branch source to target
POSTHOOK: type: ALTERTABLE_RENAMEBRANCH
POSTHOOK: Input: default@ice01
STAGE DEPENDENCIES:
Stage-0 is a root stage

STAGE PLANS:
Stage: Stage-0
SnapshotRef Operation
table name: default.ice01
spec: AlterTableSnapshotRefSpec{operationType=RENAME_BRANCH, operationParams=RenameSnapshotrefSpec{sourceBranch=source, targetBranch=target}}

PREHOOK: query: alter table ice01 rename branch source to target
PREHOOK: type: ALTERTABLE_RENAMEBRANCH
PREHOOK: Input: default@ice01
POSTHOOK: query: alter table ice01 rename branch source to target
POSTHOOK: type: ALTERTABLE_RENAMEBRANCH
POSTHOOK: Input: default@ice01
PREHOOK: query: select name,type from default.ice01.refs
PREHOOK: type: QUERY
PREHOOK: Input: default@ice01
PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select name,type from default.ice01.refs
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice01
POSTHOOK: Output: hdfs://### HDFS PATH ###
main BRANCH
target BRANCH
PREHOOK: query: select * from default.ice01.branch_target
PREHOOK: type: QUERY
PREHOOK: Input: default@ice01
PREHOOK: Output: hdfs://### HDFS PATH ###
POSTHOOK: query: select * from default.ice01.branch_target
POSTHOOK: type: QUERY
POSTHOOK: Input: default@ice01
POSTHOOK: Output: hdfs://### HDFS PATH ###
1
2
3
4
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ alterTableStatementSuffix
| alterStatementSuffixCreateTag
| alterStatementSuffixDropTag
| alterStatementSuffixConvert
| alterStatementSuffixRenameBranch
;

alterTblPartitionStatementSuffix[boolean partition]
Expand Down Expand Up @@ -505,6 +506,13 @@ alterStatementSuffixExecute
-> ^(TOK_ALTERTABLE_EXECUTE KW_ORPHAN_FILES $timestamp?)
;

// Grammar for: ALTER TABLE <table> RENAME BRANCH <sourceBranch> TO <targetBranch>
// Rewrites to a TOK_ALTERTABLE_RENAME_BRANCH node carrying the two branch identifiers.
alterStatementSuffixRenameBranch
@init { gParent.pushMsg("alter table rename branch", state); }
@after { gParent.popMsg(state); }
    : KW_RENAME KW_BRANCH sourceBranch=identifier KW_TO targetBranch=identifier
    -> ^(TOK_ALTERTABLE_RENAME_BRANCH $sourceBranch $targetBranch)
    ;

alterStatementSuffixDropBranch
@init { gParent.pushMsg("alter table drop branch (if exists) branchName", state); }
@after { gParent.popMsg(state); }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -221,6 +221,7 @@ TOK_ALTERTABLE_SETPARTSPEC;
TOK_ALTERTABLE_EXECUTE;
TOK_ALTERTABLE_CREATE_BRANCH;
TOK_ALTERTABLE_DROP_BRANCH;
TOK_ALTERTABLE_RENAME_BRANCH;
TOK_ALTERTABLE_CREATE_TAG;
TOK_ALTERTABLE_DROP_TAG;
TOK_RETAIN;
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -217,7 +217,7 @@
<rs-api.version>2.0.1</rs-api.version>
<json-path.version>2.9.0</json-path.version>
<janino.version>3.0.11</janino.version>
<datasketches.version>1.1.0-incubating</datasketches.version>
<datasketches.version>1.2.0</datasketches.version>
<spotbugs.version>4.0.3</spotbugs.version>
<validation-api.version>1.1.0.Final</validation-api.version>
<aws-secretsmanager-caching.version>1.0.1</aws-secretsmanager-caching.version>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ public enum AlterTableType {
EXECUTE("execute"),
CREATE_BRANCH("create branch"),
DROP_BRANCH("drop branch"),
RENAME_BRANCH("rename branch"),
CREATE_TAG("create tag"),
DROP_TAG("drop tag"),
// constraint
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.ddl.table.snapshotref.branch.rename;

import java.util.Map;

import org.apache.hadoop.hive.common.TableName;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory;
import org.apache.hadoop.hive.ql.ddl.DDLUtils;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableAnalyzer;
import org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc;
import org.apache.hadoop.hive.ql.ddl.table.AlterTableType;
import org.apache.hadoop.hive.ql.ddl.table.snapshotref.AlterTableSnapshotRefDesc;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.AlterTableSnapshotRefSpec;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.SemanticException;

/**
 * Semantic analyzer for {@code ALTER TABLE ... RENAME BRANCH <source> TO <target>}.
 * Validates that the target is an Iceberg table, extracts the source/target branch
 * names from the AST and produces a DDL task carrying a
 * {@link AlterTableSnapshotRefSpec.RenameSnapshotrefSpec}.
 */
@DDLSemanticAnalyzerFactory.DDLType(types = HiveParser.TOK_ALTERTABLE_RENAME_BRANCH)
public class AlterTableRenameSnapshotRefAnalyzer extends AbstractAlterTableAnalyzer {

  protected AlterTableType alterTableType;

  public AlterTableRenameSnapshotRefAnalyzer(QueryState queryState) throws SemanticException {
    super(queryState);
    alterTableType = AlterTableType.RENAME_BRANCH;
  }

  @Override
  protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command)
      throws SemanticException {
    Table table = getTable(tableName);
    // Branch snapshot-ref operations are only defined for Iceberg tables.
    DDLUtils.validateTableIsIceberg(table);
    inputs.add(new ReadEntity(table));
    validateAlterTableType(table, alterTableType, false);
    // AST layout produced by the grammar rule: child(0) = source branch, child(1) = target branch.
    String sourceBranch = command.getChild(0).getText();
    String targetBranch = command.getChild(1).getText();

    AlterTableSnapshotRefSpec.RenameSnapshotrefSpec renameSnapshotrefSpec =
        new AlterTableSnapshotRefSpec.RenameSnapshotrefSpec(sourceBranch, targetBranch);
    // Diamond operator instead of the raw-type constructor call avoids an unchecked warning.
    AlterTableSnapshotRefSpec<AlterTableSnapshotRefSpec.RenameSnapshotrefSpec> alterTableSnapshotRefSpec =
        new AlterTableSnapshotRefSpec<>(alterTableType, renameSnapshotrefSpec);
    AbstractAlterTableDesc alterTableDesc =
        new AlterTableSnapshotRefDesc(alterTableType, tableName, alterTableSnapshotRefSpec);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableDesc)));
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -616,6 +616,7 @@ public final class FunctionRegistry {
system.registerGenericUDF("array_union", GenericUDFArrayUnion.class);
system.registerGenericUDF("array_remove", GenericUDFArrayRemove.class);
system.registerGenericUDF("array_position", GenericUDFArrayPosition.class);
system.registerGenericUDF("array_append", GenericUDFArrayAppend.class);
system.registerGenericUDF("deserialize", GenericUDFDeserialize.class);
system.registerGenericUDF("sentences", GenericUDFSentences.class);
system.registerGenericUDF("map_keys", GenericUDFMapKeys.class);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,29 @@ public String toString() {
return MoreObjects.toStringHelper(this).add("refName", refName).add("ifExists", ifExists).toString();
}
}

/**
 * Immutable parameter holder for an {@code ALTER TABLE ... RENAME BRANCH} operation:
 * the branch to rename ({@code sourceBranch}) and its new name ({@code targetBranch}).
 */
public static class RenameSnapshotrefSpec {

  private final String sourceBranch;
  private final String targetBranch;

  public RenameSnapshotrefSpec(String sourceBranch, String targetBranch) {
    this.sourceBranch = sourceBranch;
    this.targetBranch = targetBranch;
  }

  public String getSourceBranchName() {
    return sourceBranch;
  }

  public String getTargetBranchName() {
    return targetBranch;
  }

  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this).add("sourceBranch", sourceBranch).add("targetBranch", targetBranch)
        .toString();
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ public enum HiveOperation {
ALTERTABLE_CREATEBRANCH("ALTERTABLE_CREATEBRANCH", HiveParser.TOK_ALTERTABLE_CREATE_BRANCH, null, null),
ALTERTABLE_CREATETAG("ALTERTABLE_CREATETAG", HiveParser.TOK_ALTERTABLE_CREATE_TAG, null, null),
ALTERTABLE_DROPBRANCH("ALTERTABLE_DROPBRANCH", HiveParser.TOK_ALTERTABLE_DROP_BRANCH, null, null),
ALTERTABLE_RENAMEBRANCH("ALTERTABLE_RENAMEBRANCH", HiveParser.TOK_ALTERTABLE_RENAME_BRANCH, null, null),
ALTERTABLE_DROPTAG("ALTERTABLE_DROPTAG", HiveParser.TOK_ALTERTABLE_DROP_TAG, null, null),
ALTERTABLE_CONVERT("ALTERTABLE_CONVERT", HiveParser.TOK_ALTERTABLE_CONVERT, null, null),
ALTERTABLE_SERIALIZER("ALTERTABLE_SERIALIZER", HiveParser.TOK_ALTERTABLE_SERIALIZER,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,7 @@ public enum HiveOperationType {
ALTERTABLE_UPDATECOLUMNS,
ALTERTABLE_CREATEBRANCH,
ALTERTABLE_DROPBRANCH,
ALTERTABLE_RENAMEBRANCH,
ALTERTABLE_CREATETAG,
ALTERTABLE_DROPTAG,
SHOW_COMPACTIONS,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -246,6 +246,8 @@ public HivePrivilegeObjectType getObjectType() {
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
op2Priv.put(HiveOperationType.ALTERTABLE_DROPBRANCH,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
op2Priv.put(HiveOperationType.ALTERTABLE_RENAMEBRANCH,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));
op2Priv.put(HiveOperationType.ALTERTABLE_DROPTAG,
PrivRequirement.newIOPrivRequirement(OWNER_PRIV_AR, OWNER_PRIV_AR));

Expand Down
Loading

0 comments on commit 34da82f

Please sign in to comment.