Skip to content

Commit

Permalink
HIVE-28519: Upgrade Maven SureFire Plugin to latest version 3.5.1 (#5530) (Indhumathi Muthumurugesh reviewed by Laszlo Bodor, Ayush Saxena)
Browse files Browse the repository at this point in the history
  • Loading branch information
Indhumathi27 authored Nov 12, 2024
1 parent 883d5df commit e48cb8e
Show file tree
Hide file tree
Showing 23 changed files with 82 additions and 38 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,11 @@ public void testSqlFromCmdWithDBName() {

@Test
public void testSqlFromCmdWithEmbeddedQuotes() {
// In Beeline.java, after upgrading the Maven SureFire plugin to 3.0.0-M5, InputStream inputStream = System.in
// no longer contains an EOT byte[]. This change causes an indefinite loop when calling
// beeLine.getConsoleReader().readLine(prompt.toString()). To resolve this, a delimiter has been added.
verifyCMD(null, "hive", out,
new String[] { "-e", "select \"hive\"" }, ERRNO_OK, true);
new String[] { "-e", "select \"hive\";" }, ERRNO_OK, true);
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;

import java.io.ByteArrayOutputStream;
Expand All @@ -45,6 +46,8 @@
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

// Enable the test post fixing HIVE-28608
@Ignore
public class TestReplicationMigrationTool extends BaseReplicationAcrossInstances {

String extraPrimaryDb;
Expand Down
12 changes: 12 additions & 0 deletions llap-server/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -371,6 +371,16 @@
<version>${log4j2.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.junit-pioneer</groupId>
<artifactId>junit-pioneer</artifactId>
</exclusion>
<exclusion>
<groupId>org.junit.platform</groupId>
<artifactId>junit-platform-commons</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
Expand All @@ -386,11 +396,13 @@
<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>${junit.jupiter.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.junit.vintage</groupId>
<artifactId>junit-vintage-engine</artifactId>
<version>${junit.vintage.version}</version>
<scope>test</scope>
</dependency>
<dependency>
Expand Down
11 changes: 6 additions & 5 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@
<maven.exec.plugin.version>3.1.0</maven.exec.plugin.version>
<maven.versions.plugin.version>2.16.0</maven.versions.plugin.version>
<maven.shade.plugin.version>3.5.0</maven.shade.plugin.version>
<maven.surefire.plugin.version>3.0.0-M4</maven.surefire.plugin.version>
<maven.surefire.plugin.version>3.5.1</maven.surefire.plugin.version>
<maven.cyclonedx.plugin.version>2.7.10</maven.cyclonedx.plugin.version>
<maven.license.plugin.version>2.3.0</maven.license.plugin.version>
<!-- Library Dependency Versions -->
Expand Down Expand Up @@ -166,8 +166,8 @@
<jodd.version>6.0.0</jodd.version>
<json.version>1.8</json.version>
<junit.version>4.13.2</junit.version>
<junit.jupiter.version>5.10.0</junit.jupiter.version>
<junit.vintage.version>5.6.3</junit.vintage.version>
<junit.jupiter.version>5.11.2</junit.jupiter.version>
<junit.vintage.version>5.11.2</junit.vintage.version>
<kafka.version>2.5.0</kafka.version>
<kryo.version>5.5.0</kryo.version>
<reflectasm.version>1.11.9</reflectasm.version>
Expand Down Expand Up @@ -1753,6 +1753,7 @@
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<enableOutErrElements>false</enableOutErrElements>
<excludes>
<exclude>**/TestSerDe.java</exclude>
<exclude>**/TestHiveMetaStore.java</exclude>
Expand All @@ -1764,8 +1765,8 @@
</excludes>
<redirectTestOutputToFile>true</redirectTestOutputToFile>
<reuseForks>false</reuseForks>
<failIfNoTests>false</failIfNoTests>
<argLine>${maven.test.jvm.args}</argLine>
<failIfNoSpecifiedTests>false</failIfNoSpecifiedTests>
<argLine>${maven.test.jvm.args} -Xshare:off</argLine>
<trimStackTrace>false</trimStackTrace>
<additionalClasspathElements>
<additionalClasspathElement>${test.conf.dir}</additionalClasspathElement>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.exec;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

import org.apache.hadoop.hive.metastore.api.GetPartitionsPsWithAuthRequest;
Expand Down Expand Up @@ -67,7 +68,7 @@ public class TestGetPartitionAuthWithBatches {

@BeforeClass
public static void setupClass() throws HiveException {
hiveConf = new HiveConf(TestGetPartitionAuthWithBatches.class);
hiveConf = new HiveConfForTest(TestGetPartitionAuthWithBatches.class);
hiveConf.set("hive.security.authorization.enabled", "true");
hiveConf.set("hive.security.authorization.manager","org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider");
hive = Hive.get();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.exec;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

import org.apache.hadoop.hive.metastore.api.GetPartitionsByNamesRequest;
Expand Down Expand Up @@ -64,9 +65,9 @@ public class TestGetPartitionInBatches {

@BeforeClass
public static void setupClass() throws HiveException {
hiveConf = new HiveConf(TestGetPartitionInBatches.class);
hive = Hive.get();
SessionState.start(hiveConf);
hiveConf = new HiveConfForTest(TestGetPartitionInBatches.class);
SessionState ss = SessionState.start(hiveConf);
hive = ss.getHiveDb();
try {
msc = new HiveMetaStoreClient(hiveConf);
} catch (MetaException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
package org.apache.hadoop.hive.ql.exec;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.CheckResult.PartitionResult;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
Expand Down Expand Up @@ -62,7 +63,7 @@ public class TestMsckCreatePartitionsInBatches {

@BeforeClass
public static void setupClass() throws HiveException, MetaException {
hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
hiveConf = new HiveConfForTest(TestMsckCreatePartitionsInBatches.class);
hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.CheckResult.PartitionResult;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
Expand Down Expand Up @@ -65,7 +66,7 @@ public class TestMsckDropPartitionsInBatches {

@BeforeClass
public static void setupClass() throws Exception {
hiveConf = new HiveConf(TestMsckCreatePartitionsInBatches.class);
hiveConf = new HiveConfForTest(TestMsckCreatePartitionsInBatches.class);
hiveConf.setIntVar(ConfVars.HIVE_MSCK_REPAIR_BATCH_SIZE, 5);
hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
9 changes: 5 additions & 4 deletions ql/src/test/org/apache/hadoop/hive/ql/hooks/TestHooks.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import static org.junit.Assert.assertEquals;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Before;
Expand All @@ -31,7 +32,7 @@ public class TestHooks {

@BeforeClass
public static void onetimeSetup() throws Exception {
HiveConf conf = new HiveConf(TestHooks.class);
HiveConf conf = new HiveConfForTest(TestHooks.class);
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand All @@ -41,7 +42,7 @@ public static void onetimeSetup() throws Exception {

@AfterClass
public static void onetimeTeardown() throws Exception {
HiveConf conf = new HiveConf(TestHooks.class);
HiveConf conf = new HiveConfForTest(TestHooks.class);
Driver driver = createDriver(conf);
driver.run("drop table t1");
}
Expand All @@ -52,7 +53,7 @@ public void setup() {

@Test
public void testRedactLogString() throws Exception {
HiveConf conf = new HiveConf(TestHooks.class);
HiveConf conf = new HiveConfForTest(TestHooks.class);
String str;

HiveConf.setVar(conf, HiveConf.ConfVars.QUERY_REDACTOR_HOOKS, SimpleQueryRedactor.class.getName());
Expand All @@ -69,7 +70,7 @@ public void testRedactLogString() throws Exception {

@Test
public void testQueryRedactor() throws Exception {
HiveConf conf = new HiveConf(TestHooks.class);
HiveConf conf = new HiveConfForTest(TestHooks.class);
HiveConf.setVar(conf, HiveConf.ConfVars.QUERY_REDACTOR_HOOKS,
SimpleQueryRedactor.class.getName());
conf
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.BeforeClass;
import org.junit.Rule;
Expand Down Expand Up @@ -62,7 +63,7 @@ public static List<Object[]> getParameters() throws Exception {

@BeforeClass
public static void setUp() {
hiveConf = new HiveConf(TestHiveCopyFiles.class);
hiveConf = new HiveConfForTest(TestHiveCopyFiles.class);
SessionState.start(hiveConf);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
package org.apache.hadoop.hive.ql.metadata;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
Expand Down Expand Up @@ -47,7 +48,7 @@ public class TestTempAcidTable {
@BeforeClass
public static void setUp() throws Exception {
hive = Hive.get();
HiveConf hiveConf = hive.getConf();
HiveConf hiveConf = new HiveConfForTest(TestTempAcidTable.class);
hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
hiveConf.setBoolVar(ConfVars.HIVE_IN_TEST, true);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import org.junit.Assert;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
Expand Down Expand Up @@ -196,7 +197,7 @@ private Map<String, List<String>> getColsFromReadEntity(Set<ReadEntity> inputs)
}

private static Driver createDriver() {
HiveConf conf = new HiveConf(Driver.class);
HiveConf conf = new HiveConfForTest(TestColumnAccess.class);
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryState;
Expand Down Expand Up @@ -50,7 +51,7 @@ public class TestQBCompact {
@BeforeClass
public static void init() throws Exception {
queryState = new QueryState.Builder().build();
conf = queryState.getConf();
conf = new HiveConfForTest(TestQBCompact.class);
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import org.junit.Assert;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Before;
Expand All @@ -39,8 +40,8 @@ public class TestQBJoinTreeApplyPredicate {
@BeforeClass
public static void initialize() {
queryState =
new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
conf = queryState.getConf();
new QueryState.Builder().build();
conf = new HiveConfForTest(TestQBJoinTreeApplyPredicate.class);
SessionState.start(conf);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import java.util.List;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Assert;
Expand Down Expand Up @@ -49,8 +50,8 @@ public class TestQBSubQuery {
@BeforeClass
public static void initialize() {
queryState =
new QueryState.Builder().withHiveConf(new HiveConf(SemanticAnalyzer.class)).build();
conf = queryState.getConf();
new QueryState.Builder().build();
conf = new HiveConfForTest(TestQBSubQuery.class);
SessionState.start(conf);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import java.util.Set;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
Expand Down Expand Up @@ -174,7 +175,7 @@ public void testSelectEntityInDirectJoinAlias() throws ParseException {
* Create driver with the test hook set in config
*/
private static Driver createDriver() {
HiveConf conf = new HiveConf(Driver.class);
HiveConf conf = new HiveConfForTest(TestReadEntityDirect.class);
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import java.util.Set;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
Expand Down Expand Up @@ -57,7 +58,7 @@ public void postAnalyze(HiveSemanticAnalyzerHookContext context,

@BeforeClass
public static void onetimeSetup() throws Exception {
HiveConf conf = new HiveConf(Driver.class);
HiveConf conf = new HiveConfForTest(TestViewEntity.class);
conf
.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
"org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@

import org.apache.hadoop.hive.common.io.SessionStream;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.conf.SystemVariables;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.junit.Assert;
Expand All @@ -49,7 +50,7 @@ public static void before() throws Exception {
env.put(TEST_ENV_VAR_PASSWORD, TEST_ENV_VAR_PASSWORD_VALUE);
setEnv(env);
System.setProperty(TEST_SYSTEM_PROPERTY, TEST_SYSTEM_PROPERTY_VALUE);
HiveConf conf = new HiveConf();
HiveConf conf = new HiveConfForTest(TestSetProcessor.class);
SessionState.start(conf);
state = SessionState.get();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConfForTest;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
import org.apache.hadoop.hive.ql.Driver;
Expand Down Expand Up @@ -68,7 +69,7 @@ public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreC
@BeforeClass
public static void beforeTest() throws Exception {
UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("hive"));
conf = new HiveConf();
conf = new HiveConfForTest(TestHivePrivilegeObjectOwnerNameAndType.class);

// Turn on mocked authorization
conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName());
Expand All @@ -79,6 +80,8 @@ public static void beforeTest() throws Exception {
conf.setVar(ConfVars.HIVE_TXN_MANAGER, DbTxnManager.class.getName());
conf.setVar(ConfVars.HIVE_MAPRED_MODE, "nonstrict");
conf.setVar(ConfVars.DYNAMIC_PARTITIONING_MODE, "nonstrict");
// TODO: HIVE-28619: TestHivePrivilegeObjectOwnerNameAndType to run on Tez
conf.set("hive.execution.engine", "mr");

TestTxnDbUtil.prepDb(conf);
SessionState.start(conf);
Expand Down
Loading

0 comments on commit e48cb8e

Please sign in to comment.