diff --git a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
index 048ca59becb0..a8378d914356 100644
--- a/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
+++ b/beeline/src/test/org/apache/hive/beeline/cli/TestHiveCli.java
@@ -169,8 +169,11 @@ public void testSqlFromCmdWithDBName() {
 
   @Test
   public void testSqlFromCmdWithEmbeddedQuotes() {
+    // In Beeline.java, after upgrading the Maven SureFire plugin to 3.0.0-M5, InputStream inputStream = System.in
+    // no longer contains an EOT byte[]. This change causes an indefinite loop when calling
+    // beeLine.getConsoleReader().readLine(prompt.toString()). To resolve this, a delimiter has been added.
     verifyCMD(null, "hive", out,
-        new String[] { "-e", "select \"hive\"" }, ERRNO_OK, true);
+        new String[] { "-e", "select \"hive\";" }, ERRNO_OK, true);
   }
 
   @Test
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationMigrationTool.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationMigrationTool.java
index 4cd0541cf2f6..dccd5316bb0f 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationMigrationTool.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationMigrationTool.java
@@ -31,6 +31,7 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import java.io.ByteArrayOutputStream;
@@ -45,6 +46,8 @@
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+// Enable the test post fixing HIVE-28608
+@Ignore
 public class TestReplicationMigrationTool extends BaseReplicationAcrossInstances {
 
   String extraPrimaryDb;
diff --git a/llap-server/pom.xml b/llap-server/pom.xml
index 4a40a2e1e72d..3c7d32286330 100644
--- a/llap-server/pom.xml
+++ b/llap-server/pom.xml
@@ -371,6 +371,16 @@
       <version>${log4j2.version}</version>
       <classifier>tests</classifier>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.junit-pioneer</groupId>
+          <artifactId>junit-pioneer</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.junit.platform</groupId>
+          <artifactId>junit-platform-commons</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
@@ -386,11 +396,13 @@
     <dependency>
       <groupId>org.junit.jupiter</groupId>
       <artifactId>junit-jupiter-engine</artifactId>
+      <version>${junit.jupiter.version}</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.junit.vintage</groupId>
       <artifactId>junit-vintage-engine</artifactId>
+      <version>${junit.vintage.version}</version>
       <scope>test</scope>
     </dependency>
diff --git a/pom.xml b/pom.xml
index 279ddcb17ef3..4620ad3d8e89 100644
--- a/pom.xml
+++ b/pom.xml
@@ -99,7 +99,7 @@
     3.1.0
     2.16.0
     3.5.0
-    3.0.0-M4
+    3.5.1
     2.7.10
     2.3.0
@@ -166,8 +166,8 @@
     6.0.0
     1.8
     4.13.2
-    5.10.0
-    5.6.3
+    5.11.2
+    5.11.2
     2.5.0
     5.5.0
     1.11.9
@@ -1753,6 +1753,7 @@
         org.apache.maven.plugins
         maven-surefire-plugin
+        false
         **/TestSerDe.java
         **/TestHiveMetaStore.java
@@ -1764,8 +1765,8 @@
           true
           false
-          false
-          ${maven.test.jvm.args}
+          false
+          ${maven.test.jvm.args} -Xshare:off
           false
           ${test.conf.dir}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionAuthWithBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionAuthWithBatches.java
index 191d211d4b72..23f7c768f51e 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionAuthWithBatches.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionAuthWithBatches.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest;
 
@@ -67,7 +68,7 @@ public class TestGetPartitionAuthWithBatches {
 
   @BeforeClass
   public static void setupClass() throws HiveException {
-    hiveConf = new HiveConf(TestGetPartitionAuthWithBatches.class);
+    hiveConf = new HiveConfForTest(TestGetPartitionAuthWithBatches.class);
     hiveConf.set("hive.security.authorization.enabled", "true");
     hiveConf.set("hive.security.authorization.manager","org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider");
     hive = Hive.get();
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionInBatches.java
index d4afff716bf4..01768f3bc927 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionInBatches.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestGetPartitionInBatches.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesRequest;
 
@@ -64,9 +65,9 @@ public class TestGetPartitionInBatches {
 
   @BeforeClass
   public static void setupClass() throws HiveException {
-    hiveConf = new HiveConf(TestGetPartitionInBatches.class);
-    hive = Hive.get();
-    SessionState.start(hiveConf);
+    hiveConf = new HiveConfForTest(TestGetPartitionInBatches.class);
+    SessionState ss = SessionState.start(hiveConf);
+    hive = ss.getHiveDb();
     try {
       msc = new HiveMetaStoreClient(hiveConf);
     } catch (MetaException e) {
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
index e666b4d2d3d3..137530906d88 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.ql.exec;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.CheckResult.PartitionResult;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -62,7 +63,7 @@ public class TestMsckCreatePartitionsInBatches {
 
   @BeforeClass
   public static void setupClass() throws HiveException, MetaException {
-    hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
+    hiveConf = new HiveConfForTest(TestMsckCreatePartitionsInBatches.class);
     hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java
index a62816537016..e504f680f228 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckDropPartitionsInBatches.java
@@ -19,6 +19,7 @@
 
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.CheckResult.PartitionResult;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
@@ -65,7 +66,7 @@ public class TestMsckDropPartitionsInBatches {
 
   @BeforeClass
   public static void setupClass() throws Exception {
-    hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
+    hiveConf = new HiveConfForTest(TestMsckCreatePartitionsInBatches.class);
     hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
index e0dccc9f5834..00fc86857484 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
@@ -20,6 +20,7 @@
 import static org.junit.Assert.assertEquals;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
@@ -31,7 +32,7 @@ public class TestHooks {
 
   @BeforeClass
   public static void onetimeSetup() throws Exception {
-    HiveConf conf = new HiveConf(TestHooks.class);
+    HiveConf conf = new HiveConfForTest(TestHooks.class);
     conf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
@@ -41,7 +42,7 @@ public static void onetimeSetup() throws Exception {
 
   @AfterClass
   public static void onetimeTeardown() throws Exception {
-    HiveConf conf = new HiveConf(TestHooks.class);
+    HiveConf conf = new HiveConfForTest(TestHooks.class);
     Driver driver = createDriver(conf);
     driver.run("drop table t1");
   }
@@ -52,7 +53,7 @@ public void setup() {
 
   @Test
   public void testRedactLogString() throws Exception {
-    HiveConf conf = new HiveConf(TestHooks.class);
+    HiveConf conf = new HiveConfForTest(TestHooks.class);
     String str;
 
     HiveConf.setVar(conf, HiveConf.ConfVars.QUERY_REDACTOR_HOOKS, SimpleQueryRedactor.class.getName());
@@ -69,7 +70,7 @@ public void testRedactLogString() throws Exception {
 
   @Test
   public void testQueryRedactor() throws Exception {
-    HiveConf conf = new HiveConf(TestHooks.class);
+    HiveConf conf = new HiveConfForTest(TestHooks.class);
     HiveConf.setVar(conf, HiveConf.ConfVars.QUERY_REDACTOR_HOOKS,
         SimpleQueryRedactor.class.getName());
     conf
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java
index f9c10f5ffb5d..2ef7bfcbccdd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHiveCopyFiles.java
@@ -20,6 +20,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.BeforeClass;
 import org.junit.Rule;
@@ -62,7 +63,7 @@ public static List getParameters() throws Exception {
 
   @BeforeClass
   public static void setUp() {
-    hiveConf = new HiveConf(TestHiveCopyFiles.class);
+    hiveConf = new HiveConfForTest(TestHiveCopyFiles.class);
     SessionState.start(hiveConf);
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestTempAcidTable.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestTempAcidTable.java
index 5d7ee356c298..e38bc4b5b949 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestTempAcidTable.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestTempAcidTable.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.metadata;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
@@ -47,7 +48,7 @@ public class TestTempAcidTable {
   @BeforeClass
   public static void setUp() throws Exception {
     hive = Hive.get();
-    HiveConf hiveConf = hive.getConf();
+    HiveConf hiveConf = new HiveConfForTest(TestTempAcidTable.class);
     hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
     hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
index 305c17065616..9a71cd6a1b00 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestColumnAccess.java
@@ -26,6 +26,7 @@
 
 import org.junit.Assert;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -196,7 +197,7 @@ private Map> getColsFromReadEntity(Set inputs) {
   }
 
   private static Driver createDriver() {
-    HiveConf conf = new HiveConf(Driver.class);
+    HiveConf conf = new HiveConfForTest(TestColumnAccess.class);
     conf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
index d198830e9a35..9ac9f086dc55 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBCompact.java
@@ -23,6 +23,7 @@
 import java.util.Map;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
@@ -50,7 +51,7 @@ public class TestQBCompact {
   @BeforeClass
   public static void init() throws Exception {
     queryState = new QueryState.Builder().build();
-    conf = queryState.getConf();
+    conf = new HiveConfForTest(TestQBCompact.class);
     conf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
index 322fbbd7f324..ce19d9374537 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBJoinTreeApplyPredicate.java
@@ -23,6 +23,7 @@
 import org.junit.Assert;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Before;
@@ -39,8 +40,8 @@ public class TestQBJoinTreeApplyPredicate {
 
   @BeforeClass
   public static void initialize() {
     queryState =
-        new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
-    conf = queryState.getConf();
+        new QueryState.Builder().build();
+    conf = new HiveConfForTest(TestQBJoinTreeApplyPredicate.class);
     SessionState.start(conf);
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
index 5749fb29908f..a1cf8b2fd45f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestQBSubQuery.java
@@ -22,6 +22,7 @@
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
@@ -49,8 +50,8 @@ public class TestQBSubQuery {
   @BeforeClass
   public static void initialize() {
     queryState =
-        new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
-    conf = queryState.getConf();
+        new QueryState.Builder().build();
+    conf = new HiveConfForTest(TestQBSubQuery.class);
     SessionState.start(conf);
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
index 57508b318835..d4e8aaf42c5b 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestReadEntityDirect.java
@@ -27,6 +27,7 @@
 import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -174,7 +175,7 @@ public void testSelectEntityInDirectJoinAlias() throws ParseException {
    * Create driver with the test hook set in config
    */
   private static Driver createDriver() {
-    HiveConf conf = new HiveConf(Driver.class);
+    HiveConf conf = new HiveConfForTest(TestReadEntityDirect.class);
     conf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
index d3a3cd574068..b6c94b983e9c 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestViewEntity.java
@@ -24,6 +24,7 @@
 import java.util.Set;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
@@ -57,7 +58,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,
 
   @BeforeClass
   public static void onetimeSetup() throws Exception {
-    HiveConf conf = new HiveConf(Driver.class);
+    HiveConf conf = new HiveConfForTest(TestViewEntity.class);
     conf
         .setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
             "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
index 8a993686a690..bb8cf5356551 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/processors/TestSetProcessor.java
@@ -26,6 +26,7 @@
 
 import org.apache.hadoop.hive.common.io.SessionStream;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Assert;
@@ -49,7 +50,7 @@ public static void before() throws Exception {
     env.put(TEST_ENV_VAR_PASSWORD, TEST_ENV_VAR_PASSWORD_VALUE);
     setEnv(env);
     System.setProperty(TEST_SYSTEM_PROPERTY, TEST_SYSTEM_PROPERTY_VALUE);
-    HiveConf conf = new HiveConf();
+    HiveConf conf = new HiveConfForTest(TestSetProcessor.class);
     SessionState.start(conf);
     state = SessionState.get();
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
index 0f762da0abf0..66b6138a19ca 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHivePrivilegeObjectOwnerNameAndType.java
@@ -21,6 +21,7 @@
 import org.apache.commons.lang3.tuple.ImmutablePair;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
 import org.apache.hadoop.hive.ql.Driver;
@@ -68,7 +69,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC
 
   @BeforeClass
   public static void beforeTest() throws Exception {
    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("hive"));
-    conf = new HiveConf();
+    conf = new HiveConfForTest(TestHivePrivilegeObjectOwnerNameAndType.class);
     // Turn on mocked authorization
     conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
@@ -79,6 +80,8 @@ public static void beforeTest() throws Exception {
     conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
     conf.setVar(ConfVars.HIVE_MAPRED_MODE, "nonstrict");
     conf.setVar(ConfVars.DYNAMIC_PARTITIONING_MODE, "nonstrict");
+    // TODO: HIVE-28619: TestHivePrivilegeObjectOwnerNameAndType to run on Tez
+    conf.set("hive.execution.engine", "mr");
 
     TestTxnDbUtil.prepDb(conf);
     SessionState.start(conf);
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java
index bce6f6c048bc..b771250c4712 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDTFGetSQLSchema.java
@@ -22,6 +22,7 @@
 import java.util.List;
 
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConfForTest;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -39,9 +40,11 @@ public class TestGenericUDTFGetSQLSchema {
 
   private static SessionState sessionState;
 
+  public static HiveConf conf;
+
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
-    HiveConf conf = new HiveConf();
+    conf = new HiveConfForTest(TestGenericUDTFGetSQLSchema.class);
     conf.set("hive.security.authorization.manager",
         "org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider");
     sessionState = SessionState.start(conf);
@@ -84,6 +87,9 @@ public void testWithSimpleTypes() throws Exception {
 
   @Test
   public void testWithDDL() throws Exception {
+    // Set the execution engine to mr to avoid the NPE exception in stats flow
+    // TODO: HIVE-28618: TestGenericUDTFGetSQLSchema to run on Tez
+    conf.set("hive.execution.engine", "mr");
     invokeUDTFAndTest("show tables", new String[]{});
   }
diff --git a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerLDAP.java b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerLDAP.java
index cdcf98ca7f6f..8300519f7618 100644
--- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServerLDAP.java
+++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServerLDAP.java
@@ -61,7 +61,7 @@ public static void beforeTests() throws Exception {
     HiveConf hiveConf = new HiveConf();
     hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
     hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_PORT.varname, webUIPort.toString());
-    hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_WEBUI_AUTH_METHOD, true);
+    hiveConf.set(ConfVars.HIVE_SERVER2_WEBUI_AUTH_METHOD.varname, "LDAP");
     hiveConf.set(ConfVars.METASTORE_PWD.varname, METASTORE_PASSWD);
     hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
         "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
diff --git a/standalone-metastore/pom.xml b/standalone-metastore/pom.xml
index 10087ac3bd79..8d6198d01df1 100644
--- a/standalone-metastore/pom.xml
+++ b/standalone-metastore/pom.xml
@@ -57,7 +57,7 @@
     1.0b3
     2.17
     2.16.0
-    3.0.0-M4
+    3.5.1
     4.9.3
     1.5.7
@@ -85,8 +85,8 @@
     3.3
     5.5.1
     4.13.2
-    5.6.2
-    5.6.3
+    5.11.2
+    5.11.2
     0.9.3
     0.16.0
     2.18.0
@@ -516,7 +516,8 @@
         maven-surefire-plugin
         ${maven.surefire.plugin.version}
-          false
+          false
+          false
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 0c955c425053..03ba7bb2ffee 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -32,14 +32,14 @@
     22.0
     3.3.6
     4.13.2
-    5.6.3
-    5.6.3
+    5.11.2
+    5.11.2
    1.7.30
     2.17
     2.7.10
     ${basedir}/checkstyle/
     2.16.0
-    3.0.0-M4
+    3.5.1
     2024-01-01T00:00:00Z
@@ -201,7 +201,8 @@
           false
           -Xmx3g
-          false
+          false
+          false
           ${project.build.directory}/tmp