Skip to content

Commit 20c1669

Browse files
committed
HADOOP-13614 apply to branch-2; test and verify that all appears well. Tune more tests, including ITestS3AMiniYarnCluster.
A big change here is fixing up the scale tests to work as subclasses of S3AScaleTestBase, an indirect subclass of AbstractFSContractTestBase, because that base class sets up the test timeout rule. Rather than have a field of the same name and hope that its timeout gets picked up, I've tuned how timeouts get set up, so the subclasses do it. All well and good, except that those subclass methods are being called during the initialization of the base class — that is, before the subclasses are fully initialized. I don't ever like doing that, though it is working here.
1 parent fbbea16 commit 20c1669

16 files changed

+144
-175
lines changed

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractDistCp.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
package org.apache.hadoop.fs.contract.s3a;
2020

2121
import static org.apache.hadoop.fs.s3a.Constants.*;
22-
import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT;
22+
import static org.apache.hadoop.fs.s3a.S3ATestConstants.SCALE_TEST_TIMEOUT_MILLIS;
2323

2424
import org.apache.hadoop.conf.Configuration;
2525
import org.apache.hadoop.tools.contract.AbstractContractDistCpTest;
@@ -35,7 +35,7 @@ public class ITestS3AContractDistCp extends AbstractContractDistCpTest {
3535

3636
@Override
3737
protected int getTestTimeoutMillis() {
38-
return SCALE_TEST_TIMEOUT;
38+
return SCALE_TEST_TIMEOUT_MILLIS;
3939
}
4040

4141
@Override

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3ATestBase.java

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@
2626
import org.apache.hadoop.fs.contract.s3a.S3AContract;
2727
import org.apache.hadoop.io.IOUtils;
2828
import org.junit.Before;
29+
import org.slf4j.Logger;
30+
import org.slf4j.LoggerFactory;
2931

3032
import java.io.IOException;
3133

@@ -38,6 +40,9 @@
3840
public abstract class AbstractS3ATestBase extends AbstractFSContractTestBase
3941
implements S3ATestConstants {
4042

43+
protected static final Logger LOG =
44+
LoggerFactory.getLogger(AbstractS3ATestBase.class);
45+
4146
@Override
4247
protected AbstractFSContract createContract(Configuration conf) {
4348
return new S3AContract(conf);
@@ -73,6 +78,17 @@ public S3AFileSystem getFileSystem() {
7378
return (S3AFileSystem) super.getFileSystem();
7479
}
7580

81+
/**
82+
* Describe a test in the logs.
83+
* @param text text to print
84+
* @param args arguments to format in the printing
85+
*/
86+
protected void describe(String text, Object... args) {
87+
LOG.info("\n\n{}: {}\n",
88+
methodName.getMethodName(),
89+
String.format(text, args));
90+
}
91+
7692
/**
7793
* Write a file, read it back, validate the dataset. Overwrites the file
7894
* if it is present

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlockingThreadPool.java

Lines changed: 0 additions & 56 deletions
This file was deleted.

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ABlocksize.java

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,10 @@
2525
import org.apache.hadoop.fs.contract.AbstractFSContract;
2626
import org.apache.hadoop.fs.contract.AbstractFSContractTestBase;
2727
import org.apache.hadoop.fs.contract.s3a.S3AContract;
28+
29+
import org.junit.Rule;
2830
import org.junit.Test;
31+
import org.junit.rules.Timeout;
2932
import org.slf4j.Logger;
3033
import org.slf4j.LoggerFactory;
3134

@@ -36,16 +39,11 @@
3639
/**
3740
* S3A tests for configuring block size.
3841
*/
39-
public class ITestS3ABlocksize extends AbstractFSContractTestBase {
42+
public class ITestS3ABlocksize extends AbstractS3ATestBase {
4043

4144
private static final Logger LOG =
4245
LoggerFactory.getLogger(ITestS3ABlocksize.class);
4346

44-
@Override
45-
protected AbstractFSContract createContract(Configuration conf) {
46-
return new S3AContract(conf);
47-
}
48-
4947
@Test
5048
@SuppressWarnings("deprecation")
5149
public void testBlockSize() throws Exception {

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -66,34 +66,37 @@ public class ITestS3AConfiguration {
6666
private static final Logger LOG =
6767
LoggerFactory.getLogger(ITestS3AConfiguration.class);
6868

69-
private static final String TEST_ENDPOINT = "test.fs.s3a.endpoint";
70-
7169
@Rule
72-
public Timeout testTimeout = new Timeout(30 * 60 * 1000);
70+
public Timeout testTimeout = new Timeout(
71+
S3ATestConstants.S3A_TEST_TIMEOUT
72+
);
7373

7474
@Rule
7575
public final TemporaryFolder tempDir = new TemporaryFolder();
7676

7777
/**
7878
* Test if custom endpoint is picked up.
7979
* <p>
80-
* The test expects TEST_ENDPOINT to be defined in the Configuration
80+
* The test expects {@link S3ATestConstants#CONFIGURATION_TEST_ENDPOINT}
81+
* to be defined in the Configuration
8182
* describing the endpoint of the bucket to which TEST_FS_S3A_NAME points
8283
* (i.e. "s3-eu-west-1.amazonaws.com" if the bucket is located in Ireland).
8384
* Evidently, the bucket has to be hosted in the region denoted by the
8485
* endpoint for the test to succeed.
8586
* <p>
8687
* More info and the list of endpoint identifiers:
87-
* http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
88+
* @see <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region">endpoint list</a>.
8889
*
8990
* @throws Exception
9091
*/
9192
@Test
9293
public void testEndpoint() throws Exception {
9394
conf = new Configuration();
94-
String endpoint = conf.getTrimmed(TEST_ENDPOINT, "");
95+
String endpoint = conf.getTrimmed(
96+
S3ATestConstants.CONFIGURATION_TEST_ENDPOINT, "");
9597
if (endpoint.isEmpty()) {
96-
LOG.warn("Custom endpoint test skipped as " + TEST_ENDPOINT + "config " +
98+
LOG.warn("Custom endpoint test skipped as " +
99+
S3ATestConstants.CONFIGURATION_TEST_ENDPOINT + "config " +
97100
"setting was not detected");
98101
} else {
99102
conf.set(Constants.ENDPOINT, endpoint);

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileOperationCost.java

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,7 +232,8 @@ public void testFakeDirectoryDeletion() throws Throwable {
232232

233233
int destDirDepth = directoriesInPath(destDir);
234234
directoriesCreated.assertDiffEquals(state, 1);
235-
/* TODO: uncomment once HADOOP-13222 is in
235+
/* TODO: uncomment once HADOOP-13222 "s3a.mkdirs() to delete empty fake parent directories"
236+
is in
236237
deleteRequests.assertDiffEquals(state,1);
237238
directoriesDeleted.assertDiffEquals(state,0);
238239
fakeDirectoriesDeleted.assertDiffEquals(state,destDirDepth);

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestConstants.java

Lines changed: 10 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -134,18 +134,11 @@ public interface S3ATestConstants {
134134
*/
135135
int DEFAULT_DIRECTORY_COUNT = 2;
136136

137-
/**
138-
* Default scale test timeout in seconds: {@value}.
139-
*/
140-
int DEFAULT_TEST_TIMEOUT = 30 * 60;
141-
142137
/**
143138
* Default policy on scale tests: {@value}.
144139
*/
145140
boolean DEFAULT_SCALE_TESTS_ENABLED = false;
146141

147-
String KEY_ENCRYPTION_TESTS = TEST_FS_S3A + "encryption.enabled";
148-
149142
/**
150143
* Fork ID passed down from maven if the test is running in parallel.
151144
*/
@@ -159,7 +152,15 @@ public interface S3ATestConstants {
159152
int S3A_TEST_TIMEOUT = 10 * 60 * 1000;
160153

161154
/**
162-
* Timeout in Milliseconds for Scale Tests: {@value}.
155+
* Timeout in Seconds for Scale Tests: {@value}.
156+
*/
157+
int SCALE_TEST_TIMEOUT_SECONDS = 30 * 60;
158+
159+
int SCALE_TEST_TIMEOUT_MILLIS = SCALE_TEST_TIMEOUT_SECONDS * 1000;
160+
/**
161+
* Optional custom endpoint for S3A configuration tests.
162+
* This does <i>not</i> set the endpoint for s3 access elsewhere.
163163
*/
164-
int SCALE_TEST_TIMEOUT = 30 * 60 * 1000;
164+
String CONFIGURATION_TEST_ENDPOINT =
165+
"test.fs.s3a.endpoint";
165166
}

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ public class S3ATestUtils {
5959
*/
6060
public static S3AFileSystem createTestFileSystem(Configuration conf)
6161
throws IOException {
62-
return createTestFileSystem(conf, true);
62+
return createTestFileSystem(conf, false);
6363
}
6464

6565
/**

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java

Lines changed: 13 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@
3838
import org.apache.hadoop.fs.StorageStatistics;
3939
import org.apache.hadoop.fs.contract.ContractTestUtils;
4040
import org.apache.hadoop.fs.s3a.S3AFileStatus;
41+
import org.apache.hadoop.fs.s3a.S3AFileSystem;
4142
import org.apache.hadoop.fs.s3a.Statistic;
4243
import org.apache.hadoop.util.Progressable;
4344

@@ -70,27 +71,22 @@ public abstract class AbstractSTestS3AHugeFiles extends S3AScaleTestBase {
7071
private int partitionSize;
7172

7273
@Override
73-
public void setUp() throws Exception {
74-
super.setUp();
75-
74+
public void setup() throws Exception {
75+
super.setup();
7676
final Path testPath = getTestPath();
7777
scaleTestDir = new Path(testPath, "scale");
7878
hugefile = new Path(scaleTestDir, "hugefile");
7979
hugefileRenamed = new Path(scaleTestDir, "hugefileRenamed");
8080
}
8181

82-
@Override
83-
public void tearDown() throws Exception {
84-
// do nothing. Specifically: do not delete the test dir
85-
}
8682

8783
/**
8884
* Note that this can get called before test setup.
8985
* @return the configuration to use.
9086
*/
9187
@Override
92-
protected Configuration createConfiguration() {
93-
Configuration conf = super.createConfiguration();
88+
protected Configuration createScaleConfiguration() {
89+
Configuration conf = super.createScaleConfiguration();
9490
partitionSize = (int)getTestPropertyBytes(conf,
9591
KEY_HUGE_PARTITION_SIZE,
9692
DEFAULT_PARTITION_SIZE);
@@ -155,6 +151,7 @@ public void test_010_CreateHugeFile() throws IOException {
155151
// perform the upload.
156152
// there's lots of logging here, so that a tail -f on the output log
157153
// can give a view of what is happening.
154+
S3AFileSystem fs = getFileSystem();
158155
StorageStatistics storageStatistics = fs.getStorageStatistics();
159156
String putRequests = Statistic.OBJECT_PUT_REQUESTS.getSymbol();
160157
String putBytes = Statistic.OBJECT_PUT_BYTES.getSymbol();
@@ -286,12 +283,13 @@ private void verifyNoFailures(String operation) {
286283
}
287284

288285
void assumeHugeFileExists() throws IOException {
286+
S3AFileSystem fs = getFileSystem();
289287
ContractTestUtils.assertPathExists(fs, "huge file not created", hugefile);
290288
ContractTestUtils.assertIsFile(fs, hugefile);
291289
}
292290

293291
private void logFSState() {
294-
LOG.info("File System state after operation:\n{}", fs);
292+
LOG.info("File System state after operation:\n{}", getFileSystem());
295293
}
296294

297295
@Test
@@ -305,6 +303,7 @@ public void test_040_PositionedReadHugeFile() throws Throwable {
305303
}
306304
String filetype = encrypted ? "encrypted file" : "file";
307305
describe("Positioned reads of %s %s", filetype, hugefile);
306+
S3AFileSystem fs = getFileSystem();
308307
S3AFileStatus status = fs.getFileStatus(hugefile);
309308
long filesize = status.getLen();
310309
int ops = 0;
@@ -344,6 +343,7 @@ public void test_040_PositionedReadHugeFile() throws Throwable {
344343
public void test_050_readHugeFile() throws Throwable {
345344
assumeHugeFileExists();
346345
describe("Reading %s", hugefile);
346+
S3AFileSystem fs = getFileSystem();
347347
S3AFileStatus status = fs.getFileStatus(hugefile);
348348
long filesize = status.getLen();
349349
long blocks = filesize / uploadBlockSize;
@@ -369,6 +369,7 @@ public void test_050_readHugeFile() throws Throwable {
369369
public void test_100_renameHugeFile() throws Throwable {
370370
assumeHugeFileExists();
371371
describe("renaming %s to %s", hugefile, hugefileRenamed);
372+
S3AFileSystem fs = getFileSystem();
372373
S3AFileStatus status = fs.getFileStatus(hugefile);
373374
long filesize = status.getLen();
374375
fs.delete(hugefileRenamed, false);
@@ -396,7 +397,7 @@ public void test_100_renameHugeFile() throws Throwable {
396397
public void test_999_DeleteHugeFiles() throws IOException {
397398
deleteHugeFile();
398399
ContractTestUtils.NanoTimer timer2 = new ContractTestUtils.NanoTimer();
399-
400+
S3AFileSystem fs = getFileSystem();
400401
fs.delete(hugefileRenamed, false);
401402
timer2.end("time to delete %s", hugefileRenamed);
402403
ContractTestUtils.rm(fs, getTestPath(), true, true);
@@ -405,7 +406,7 @@ public void test_999_DeleteHugeFiles() throws IOException {
405406
protected void deleteHugeFile() throws IOException {
406407
describe("Deleting %s", hugefile);
407408
NanoTimer timer = new NanoTimer();
408-
fs.delete(hugefile, false);
409+
getFileSystem().delete(hugefile, false);
409410
timer.end("time to delete %s", hugefile);
410411
}
411412

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteFilesOneByOne.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,8 @@
2727
public class ITestS3ADeleteFilesOneByOne extends ITestS3ADeleteManyFiles {
2828

2929
@Override
30-
protected Configuration createConfiguration() {
31-
Configuration configuration = super.createConfiguration();
30+
protected Configuration createScaleConfiguration() {
31+
Configuration configuration = super.createScaleConfiguration();
3232
configuration.setBoolean(Constants.ENABLE_MULTI_DELETE, false);
3333
return configuration;
3434
}

hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADeleteManyFiles.java

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@
2020

2121
import org.apache.hadoop.fs.Path;
2222
import org.apache.hadoop.fs.contract.ContractTestUtils;
23+
import org.apache.hadoop.fs.s3a.S3AFileSystem;
24+
2325
import org.junit.Test;
2426
import org.slf4j.Logger;
2527
import org.slf4j.LoggerFactory;
@@ -54,8 +56,8 @@ public void testBulkRenameAndDelete() throws Throwable {
5456
final Path srcDir = new Path(scaleTestDir, "src");
5557
final Path finalDir = new Path(scaleTestDir, "final");
5658
final long count = getOperationCount();
59+
final S3AFileSystem fs = getFileSystem();
5760
ContractTestUtils.rm(fs, scaleTestDir, true, false);
58-
5961
fs.mkdirs(srcDir);
6062
fs.mkdirs(finalDir);
6163

0 commit comments

Comments
 (0)