
Commit 2cfaff7

Merge branch 'apache:trunk' into HDFS-17769

2 parents: 25f49ca + f097d68
File tree: 16 files changed, +318 -107 lines

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java (+2 -2)

@@ -415,7 +415,7 @@ public void setResponse(Message message) {
       long deltaNanos = Time.monotonicNowNanos() - call.getStartHandleTimestampNanos();
       updateProcessingDetails(call, deltaNanos);
       call.setDeferredResponse(RpcWritable.wrap(message));
-      server.updateDeferredMetrics(call, methodName, deltaNanos);
+      server.updateDeferredMetrics(call, methodName);
     }

     @Override

@@ -424,7 +424,7 @@ public void error(Throwable t) {
       updateProcessingDetails(call, deltaNanos);
       call.setDeferredError(t);
       String detailedMetricsName = t.getClass().getSimpleName();
-      server.updateDeferredMetrics(call, detailedMetricsName, deltaNanos);
+      server.updateDeferredMetrics(call, detailedMetricsName);
     }
   }

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java (+2 -2)

@@ -447,7 +447,7 @@ public void setResponse(Message message) {
       long deltaNanos = Time.monotonicNowNanos() - call.getStartHandleTimestampNanos();
       updateProcessingDetails(call, deltaNanos);
       call.setDeferredResponse(RpcWritable.wrap(message));
-      server.updateDeferredMetrics(call, methodName, deltaNanos);
+      server.updateDeferredMetrics(call, methodName);
     }

     @Override

@@ -456,7 +456,7 @@ public void error(Throwable t) {
       updateProcessingDetails(call, deltaNanos);
       call.setDeferredError(t);
       String detailedMetricsName = t.getClass().getSimpleName();
-      server.updateDeferredMetrics(call, detailedMetricsName, deltaNanos);
+      server.updateDeferredMetrics(call, detailedMetricsName);
     }
   }
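Both Protobuf RPC engines make the same two-line change: the deferred-response and deferred-error callbacks stop forwarding deltaNanos into updateDeferredMetrics. As the Server.java hunk below shows, that method now derives the processing time from the call's own ProcessingDetails, so threading the value through the callbacks is no longer needed.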

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java (+3 -2)

@@ -673,9 +673,8 @@ void updateMetrics(Call call, long processingStartTimeNanos, boolean connDropped
    * Update rpc metrics for defered calls.
    * @param call The Rpc Call
    * @param name Rpc method name
-   * @param processingTime processing call in ms unit.
    */
-  void updateDeferredMetrics(Call call, String name, long processingTime) {
+  void updateDeferredMetrics(Call call, String name) {
     long completionTimeNanos = Time.monotonicNowNanos();
     long arrivalTimeNanos = call.timestampNanos;

@@ -684,6 +683,8 @@ void updateDeferredMetrics(Call call, String name, long processingTime) {
         details.get(Timing.LOCKWAIT, rpcMetrics.getMetricsTimeUnit());
     long responseTime =
         details.get(Timing.RESPONSE, rpcMetrics.getMetricsTimeUnit());
+    long processingTime =
+        details.get(Timing.PROCESSING, rpcMetrics.getMetricsTimeUnit());
     rpcMetrics.addRpcLockWaitTime(waitTime);
     rpcMetrics.addRpcProcessingTime(processingTime);
     rpcMetrics.addRpcResponseTime(responseTime);
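To make the refactoring concrete, here is a minimal, self-contained Java sketch of the pattern this hunk introduces. The enum, class, and method below are simplified stand-ins for org.apache.hadoop.ipc.ProcessingDetails and Server#updateDeferredMetrics, not the real Hadoop signatures; the point is only that the processing time is now read from the per-call timing details instead of being threaded through as a parameter.

import java.util.EnumMap;
import java.util.concurrent.TimeUnit;

public class DeferredMetricsSketch {

  // Hypothetical stand-in for org.apache.hadoop.ipc.ProcessingDetails.
  enum Timing { LOCKWAIT, PROCESSING, RESPONSE }

  static class ProcessingDetails {
    private final EnumMap<Timing, Long> nanos = new EnumMap<>(Timing.class);

    void set(Timing timing, long valueNanos) {
      nanos.put(timing, valueNanos);
    }

    // Convert the stored nanosecond value into the requested metrics unit.
    long get(Timing timing, TimeUnit unit) {
      return unit.convert(nanos.getOrDefault(timing, 0L), TimeUnit.NANOSECONDS);
    }
  }

  // After the change: no processingTime parameter; the value comes from the
  // call's own timing details, the same way LOCKWAIT and RESPONSE already did.
  static void updateDeferredMetrics(ProcessingDetails details, TimeUnit metricsUnit) {
    long waitTime = details.get(Timing.LOCKWAIT, metricsUnit);
    long responseTime = details.get(Timing.RESPONSE, metricsUnit);
    long processingTime = details.get(Timing.PROCESSING, metricsUnit);
    System.out.printf("lockwait=%d processing=%d response=%d%n",
        waitTime, processingTime, responseTime);
  }

  public static void main(String[] args) {
    ProcessingDetails details = new ProcessingDetails();
    details.set(Timing.PROCESSING, 2_000_000_000L); // a 2s handler, in nanos
    details.set(Timing.RESPONSE, 5_000_000L);       // 5ms to send the response
    updateDeferredMetrics(details, TimeUnit.MILLISECONDS);
  }
}

One consequence of this design is that the deferred path and the regular path now take processing time from the same source, which is exactly what the new test below asserts.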

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java (+21)

@@ -43,6 +43,7 @@
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounterGt;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;

 public class TestProtoBufRpcServerHandoff {

@@ -144,6 +145,26 @@ public void testHandoffMetrics() throws Exception {
     assertCounter("RpcProcessingTimeNumOps", 2L, rb);
   }

+  @Test
+  public void testUpdateDeferredMetrics() throws Exception {
+    final TestProtoBufRpcServerHandoffProtocol client = RPC.getProxy(
+        TestProtoBufRpcServerHandoffProtocol.class, 1, address, conf);
+
+    ExecutorService executorService = Executors.newFixedThreadPool(1);
+    CompletionService<ClientInvocationCallable> completionService =
+        new ExecutorCompletionService<ClientInvocationCallable>(
+            executorService);
+
+    completionService.submit(new ClientInvocationCallable(client, 2000L));
+    Future<ClientInvocationCallable> future1 = completionService.take();
+    ClientInvocationCallable callable1 = future1.get();
+
+    double deferredProcessingTime = server.getRpcMetrics().getDeferredRpcProcessingTime()
+        .lastStat().max();
+    double processingTime = server.getRpcMetrics().getRpcProcessingTime().lastStat().max();
+    assertEquals(deferredProcessingTime, processingTime);
+  }
+
   private static class ClientInvocationCallable
       implements Callable<ClientInvocationCallable> {
     final TestProtoBufRpcServerHandoffProtocol client;
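In effect, the new test drives a single deferred call through the handoff path and then checks that the maximum of the DeferredRpcProcessingTime stat equals the maximum of the RpcProcessingTime stat, confirming that the refactored updateDeferredMetrics records the same processing duration for deferred calls that the regular path records.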

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/pipes/TestPipeApplication.java (-5)

@@ -272,8 +272,6 @@ public void testSubmitter() throws Exception {
     Submitter.setJavaPartitioner(conf, partitioner.getClass());

     assertEquals(PipesPartitioner.class, (Submitter.getJavaPartitioner(conf)));
-    // test going to call main method with System.exit(). Change Security
-    SecurityManager securityManager = System.getSecurityManager();
     // store System.out
     PrintStream oldps = System.out;
     ByteArrayOutputStream out = new ByteArrayOutputStream();

@@ -330,8 +328,6 @@ public void testSubmitter() throws Exception {
         + "archives to be unarchived on the compute machines"));
     } finally {
       System.setOut(oldps);
-      // restore
-      System.setSecurityManager(securityManager);
       if (psw != null) {
         // remove password files
         for (File file : psw) {

@@ -381,7 +377,6 @@ public void testSubmitter() throws Exception {

     } finally {
       System.setOut(oldps);
-      System.setSecurityManager(securityManager);
     }

   }
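These deletions drop the test's use of java.lang.SecurityManager, presumably because it has been deprecated for removal since Java 17 (JEP 411); the test no longer saves and restores a security manager around its calls into Submitter.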

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractAnalyticsStreamVectoredRead.java (+17 -4)

@@ -23,12 +23,15 @@
 import org.apache.hadoop.fs.contract.AbstractFSContract;

 import static org.apache.hadoop.fs.s3a.S3ATestUtils.enableAnalyticsAccelerator;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipForAnyEncryptionExceptSSES3;

 /**
- * S3A contract tests for vectored reads with the Analytics stream. The analytics stream does
- * not explicitly implement the vectoredRead() method, or currently do and vectored-read specific
- * optimisations (such as range coalescing). However, this test ensures that the base implementation
- * of readVectored {@link org.apache.hadoop.fs.PositionedReadable} still works.
+ * S3A contract tests for vectored reads with the Analytics stream.
+ * The analytics stream does not explicitly implement the vectoredRead() method,
+ * or currently do and vectored-read specific optimisations
+ * (such as range coalescing). However, this test ensures that the base
+ * implementation of readVectored {@link org.apache.hadoop.fs.PositionedReadable}
+ * still works.
  */
 public class ITestS3AContractAnalyticsStreamVectoredRead extends AbstractContractVectoredReadTest {

@@ -44,6 +47,16 @@ public ITestS3AContractAnalyticsStreamVectoredRead(String bufferType) {
   protected Configuration createConfiguration() {
     Configuration conf = super.createConfiguration();
     enableAnalyticsAccelerator(conf);
+    // If encryption is set, some AAL tests will fail.
+    // This is because AAL caches the head request response, and uses
+    // the eTag when making a GET request. When using encryption, the eTag is
+    // no longer a hash of the object content, and is not always the same when
+    // the same object is created multiple times. This test creates the file
+    // vectored_file.txt before running each test, which will have a
+    // different eTag when using encryption, leading to preconditioned failures.
+    // This issue is tracked in:
+    // https://github.com/awslabs/analytics-accelerator-s3/issues/218
+    skipForAnyEncryptionExceptSSES3(conf);
     conf.set("fs.contract.vector-io-early-eof-check", "false");
     return conf;
   }
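The eTag problem described in the comment above can be illustrated with a plain conditional HTTP GET. The sketch below is hypothetical, not AAL code: the bucket URL and cached eTag are placeholders, and the point is only that a GET conditioned on a stale eTag via If-Match fails with 412 Precondition Failed, which is the "preconditioned failure" the comment refers to.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ConditionalGetSketch {
  public static void main(String[] args) throws Exception {
    // A previously cached eTag; under other encryption modes the object's
    // actual eTag may no longer match, even for identical content.
    String cachedETag = "\"d41d8cd98f00b204e9800998ecf8427e\"";

    HttpRequest request = HttpRequest.newBuilder()
        .uri(URI.create("https://example-bucket.s3.amazonaws.com/vectored_file.txt"))
        .header("If-Match", cachedETag) // only serve the body if the eTag still matches
        .GET()
        .build();

    HttpResponse<Void> response = HttpClient.newHttpClient()
        .send(request, HttpResponse.BodyHandlers.discarding());

    // A stale eTag yields 412 Precondition Failed instead of 200.
    System.out.println("HTTP status: " + response.statusCode());
  }
}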

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAnalyticsAcceleratorStreamReading.java (+1)

@@ -69,6 +69,7 @@ public class ITestS3AAnalyticsAcceleratorStreamReading extends AbstractS3ATestBa
   @Before
   public void setUp() throws Exception {
     super.setup();
+    skipIfClientSideEncryption();
     externalTestFile = getExternalData(getConfiguration());
   }

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java (+25)

@@ -106,6 +106,7 @@

 import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
 import static org.apache.hadoop.fs.impl.FlagSet.createFlagSet;
+import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_S3;
 import static org.apache.hadoop.fs.s3a.impl.streams.InputStreamType.Analytics;
 import static org.apache.hadoop.fs.s3a.impl.streams.InputStreamType.Prefetch;
 import static org.apache.hadoop.fs.s3a.impl.CallableSupplier.submit;

@@ -1720,6 +1721,30 @@ public static void skipIfEncryptionNotSet(Configuration configuration,
     }
   }

+  /**
+   * Skip a test if encryption algorithm is not empty, or if it is set to
+   * anything other than AES256.
+   *
+   * @param configuration configuration
+   */
+  public static void skipForAnyEncryptionExceptSSES3(Configuration configuration) {
+    String bucket = getTestBucketName(configuration);
+    try {
+      final EncryptionSecrets secrets = buildEncryptionSecrets(bucket, configuration);
+      S3AEncryptionMethods s3AEncryptionMethods = secrets.getEncryptionMethod();
+
+      if (s3AEncryptionMethods.getMethod().equals(SSE_S3.getMethod())
+          || s3AEncryptionMethods.getMethod().isEmpty()) {
+        return;
+      }
+
+      skip("Encryption method is set to " + s3AEncryptionMethods.getMethod());
+    } catch (IOException e) {
+      throw new UncheckedIOException(e);
+    }
+  }
+
   /**
    * Get the input stream statistics of an input stream.
    * Raises an exception if the inner stream is not an S3A input stream
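For reference, a stand-alone sketch of the decision this helper encodes (the class and constant below are illustrative, not the Hadoop API): tests proceed when the configured method is empty (an unencrypted bucket) or AES256 (SSE-S3), and are skipped for any other encryption method.

import java.util.List;

public class EncryptionSkipSketch {

  // "AES256" is the S3 algorithm name behind SSE-S3; an empty string
  // models an unencrypted test bucket.
  static final String SSE_S3_METHOD = "AES256";

  static boolean shouldSkip(String configuredMethod) {
    return !(configuredMethod.isEmpty()
        || configuredMethod.equals(SSE_S3_METHOD));
  }

  public static void main(String[] args) {
    // Illustrative method names; only "" and "AES256" should proceed.
    for (String method : List.of("", "AES256", "aws:kms", "SSE-C")) {
      System.out.printf("method=%-8s skip=%b%n", method, shouldSkip(method));
    }
  }
}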

hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/src/test/java/org/apache/hadoop/tools/dynamometer/blockgenerator/TestBlockGen.java (+6 -6)

@@ -21,13 +21,13 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;


 /** Tests for block generation via {@link GenerateBlockImagesDriver}. */

@@ -40,7 +40,7 @@ public class TestBlockGen {
   private static final String BLOCK_LIST_OUTPUT_DIR_NAME = "blockLists";
   private Path tmpPath;

-  @Before
+  @BeforeEach
   public void setup() throws Exception {
     Configuration conf = new Configuration();
     dfsCluster = new MiniDFSCluster.Builder(conf).build();

@@ -57,7 +57,7 @@ public void setup() throws Exception {
         new Path(tmpPath, FS_IMAGE_NAME));
   }

-  @After
+  @AfterEach
   public void cleanUp() {
     dfsCluster.shutdown();
   }

hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-blockgen/src/test/java/org/apache/hadoop/tools/dynamometer/blockgenerator/TestXMLParser.java (+2 -2)

@@ -19,8 +19,8 @@

 import java.util.HashMap;
 import java.util.Map;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;


 /** Tests for {@link XMLParser}. */

hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/test/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/TestWorkloadGenerator.java (+9 -9)

@@ -40,16 +40,16 @@
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ImpersonationProvider;
 import org.jline.utils.Log;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;


 /** Tests for {@link WorkloadDriver} and related classes. */

@@ -61,7 +61,7 @@ public class TestWorkloadGenerator {
   private MiniDFSCluster miniCluster;
   private FileSystem dfs;

-  @Before
+  @BeforeEach
   public void setup() throws Exception {
     conf = new Configuration();
     conf.setClass(HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,

@@ -74,7 +74,7 @@ public void setup() throws Exception {
     dfs.setOwner(new Path("/tmp"), "hdfs", "hdfs");
   }

-  @After
+  @AfterEach
   public void tearDown() throws Exception {
     if (miniCluster != null) {
       miniCluster.shutdown();

@@ -135,7 +135,7 @@ private void testAuditWorkloadWithOutput(String auditOutputPath)
     Job workloadJob = WorkloadDriver.getJobForSubmission(conf,
         dfs.getUri().toString(), workloadStartTime, AuditReplayMapper.class);
     boolean success = workloadJob.waitForCompletion(true);
-    assertTrue("workload job should succeed", success);
+    assertTrue(success, "workload job should succeed");
     Counters counters = workloadJob.getCounters();
     assertEquals(6,
         counters.findCounter(AuditReplayMapper.REPLAYCOUNTERS.TOTALCOMMANDS)
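The Dynamometer test changes here and in the next file are a mechanical JUnit 4 to JUnit 5 migration: lifecycle annotations move from org.junit to org.junit.jupiter.api (@Before becomes @BeforeEach, @After becomes @AfterEach), assertions move to org.junit.jupiter.api.Assertions, and the optional failure message moves from the first argument to the last. A minimal sketch of the pattern, runnable against junit-jupiter (the test class itself is invented for illustration):

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

class MigrationPatternTest {

  private StringBuilder fixture;

  @BeforeEach              // was @Before in JUnit 4
  void setup() {
    fixture = new StringBuilder("hadoop");
  }

  @AfterEach               // was @After in JUnit 4
  void tearDown() {
    fixture = null;
  }

  @Test
  void testMessageMovesLast() {
    // JUnit 4: assertTrue("fixture should not be empty", fixture.length() > 0);
    // JUnit 5: the condition comes first, the message last.
    assertTrue(fixture.length() > 0, "fixture should not be empty");
    assertEquals(6, fixture.length());
  }
}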

hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-workload/src/test/java/org/apache/hadoop/tools/dynamometer/workloadgenerator/audit/TestAuditLogDirectParser.java (+4 -4)

@@ -20,10 +20,10 @@
 import java.util.function.Function;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.Text;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;

-import static org.junit.Assert.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;

 /** Tests for {@link AuditLogDirectParser}. */
 public class TestAuditLogDirectParser {

@@ -32,7 +32,7 @@ public class TestAuditLogDirectParser {
   private AuditLogDirectParser parser;
   private Long sequence = 1L;

-  @Before
+  @BeforeEach
   public void setup() throws Exception {
     parser = new AuditLogDirectParser();
     Configuration conf = new Configuration();

0 commit comments

Comments
 (0)