HADOOP-13257. Improve Azure Data Lake contract tests. Contributed by Vishwajeet Dusane

(cherry picked from commit 4113ec5fa5)
(cherry picked from commit 7fd0556b2b)
This commit is contained in:
Mingliang Liu 2016-12-02 15:54:57 -08:00 committed by Chris Douglas
parent b4b4ca9199
commit f1671e9519
21 changed files with 996 additions and 206 deletions

View File

@ -346,7 +346,6 @@ public FSDataOutputStream create(Path f, FsPermission permission,
* @see #setPermission(Path, FsPermission)
* @deprecated API only for 0.20-append
*/
@Deprecated
@Override
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
EnumSet<CreateFlag> flags, int bufferSize, short replication,
@ -471,6 +470,10 @@ public FileStatus[] listStatus(final Path f) throws IOException {
/**
 * Rename a file or directory within the ADL store.
 *
 * @param src source path
 * @param dst destination path
 * @return true if the rename succeeded; false when the source is the store
 *         root, which cannot be renamed
 * @throws IOException on an ADL service failure
 */
@Override
public boolean rename(final Path src, final Path dst) throws IOException {
  statistics.incrementWriteOps(1);
  // Compute the relative source path once (mirrors the relativePath local
  // used by delete()). Renaming the root is not supported and is reported
  // as a plain failure rather than an exception.
  String srcRelativePath = toRelativeFilePath(src);
  if (srcRelativePath.equals("/")) {
    return false;
  }
  return adlClient.rename(srcRelativePath, toRelativeFilePath(dst));
}
@ -522,9 +525,24 @@ public void concat(final Path trg, final Path[] srcs) throws IOException {
public boolean delete(final Path path, final boolean recursive)
throws IOException {
statistics.incrementWriteOps(1);
String relativePath = toRelativeFilePath(path);
// Delete on root directory not supported.
if (relativePath.equals("/")) {
// This is important check after recent commit
// HADOOP-12977 and HADOOP-13716 validates on root for
// 1. if root is empty and non recursive delete then return false.
// 2. if root is non empty and non recursive delete then throw exception.
if (!recursive
&& adlClient.enumerateDirectory(toRelativeFilePath(path), 1).size()
> 0) {
throw new IOException("Delete on root is not supported.");
}
return false;
}
return recursive ?
adlClient.deleteRecursive(toRelativeFilePath(path)) :
adlClient.delete(toRelativeFilePath(path));
adlClient.deleteRecursive(relativePath) :
adlClient.delete(relativePath);
}
/**

View File

@ -102,7 +102,7 @@ public void testEntireBytes() throws IOException, InterruptedException {
n += count;
}
Assert.assertEquals(testData.getActualData().length, expectedData.length);
Assert.assertEquals(expectedData.length, testData.getActualData().length);
Assert.assertArrayEquals(expectedData, testData.getActualData());
in.close();
if (testData.isCheckOfNoOfCalls()) {
@ -119,8 +119,8 @@ public void testSeekOperation() throws IOException, InterruptedException {
for (int i = 0; i < 1000; ++i) {
int position = random.nextInt(testData.getActualData().length);
in.seek(position);
Assert.assertEquals(in.getPos(), position);
Assert.assertEquals(in.read(), testData.getActualData()[position] & 0xFF);
Assert.assertEquals(position, in.getPos());
Assert.assertEquals(testData.getActualData()[position] & 0xFF, in.read());
}
in.close();
if (testData.isCheckOfNoOfCalls()) {

View File

@ -50,7 +50,7 @@ public void listStatusReturnsAsExpected() throws IOException {
.listStatus(new Path("/test1/test2"));
long endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
Assert.assertEquals(ls.length, 10);
Assert.assertEquals(10, ls.length);
getMockServer().enqueue(new MockResponse().setResponseCode(200)
.setBody(TestADLResponseData.getListFileStatusJSONResponse(200)));
@ -58,7 +58,7 @@ public void listStatusReturnsAsExpected() throws IOException {
ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
Assert.assertEquals(ls.length, 200);
Assert.assertEquals(200, ls.length);
getMockServer().enqueue(new MockResponse().setResponseCode(200)
.setBody(TestADLResponseData.getListFileStatusJSONResponse(2048)));
@ -66,7 +66,7 @@ public void listStatusReturnsAsExpected() throws IOException {
ls = getMockAdlFileSystem().listStatus(new Path("/test1/test2"));
endTime = Time.monotonicNow();
LOG.debug("Time : " + (endTime - startTime));
Assert.assertEquals(ls.length, 2048);
Assert.assertEquals(2048, ls.length);
}
@Test

View File

@ -23,11 +23,10 @@
import org.apache.hadoop.fs.contract.AbstractContractAppendTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Verify Adls APPEND semantics compliance with Hadoop.
* Test Append on Adl file system.
*/
public class TestAdlContractAppendLive extends AbstractContractAppendTest {
@ -42,12 +41,4 @@ public void testRenameFileBeingAppended() throws Throwable {
ContractTestUtils.unsupported("Skipping since renaming file in append "
+ "mode not supported in Adl");
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
}

View File

@ -20,14 +20,15 @@
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.AbstractContractConcatTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
/**
* Verify Adls CONCAT semantics compliance with Hadoop.
* Test concat on Adl file system.
*/
public class TestAdlContractConcatLive extends AbstractContractConcatTest {
@ -36,17 +37,13 @@ protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
@Test
public void testConcatMissingTarget() throws Throwable {
ContractTestUtils.unsupported("BUG : Adl to support expectation from "
+ "concat on missing targets.");
Path testPath = path("test");
Path zeroByteFile = new Path(testPath, "zero.txt");
Path target = new Path(testPath, "target");
touch(getFileSystem(), zeroByteFile);
// Concat on missing target is allowed on Adls file system.
getFileSystem().concat(target, new Path[] {zeroByteFile});
}
}

View File

@ -22,12 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractCreateTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Verify Adls CREATE semantics compliance with Hadoop.
* Test creating files, overwrite options.
*/
public class TestAdlContractCreateLive extends AbstractContractCreateTest {
@ -35,18 +32,4 @@ public class TestAdlContractCreateLive extends AbstractContractCreateTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
@Test
public void testOverwriteEmptyDirectory() throws Throwable {
ContractTestUtils
.unsupported("BUG : Adl to support override empty " + "directory.");
}
}

View File

@ -22,10 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractDeleteTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.junit.Before;
/**
* Verify Adls DELETE semantics compliance with Hadoop.
* Test delete contract test.
*/
public class TestAdlContractDeleteLive extends AbstractContractDeleteTest {
@ -33,12 +32,4 @@ public class TestAdlContractDeleteLive extends AbstractContractDeleteTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
}

View File

@ -0,0 +1,36 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractGetFileStatusTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
/**
 * Live contract-test suite exercising getFileStatus against an ADL store.
 */
public class TestAdlContractGetFileStatusLive extends
    AbstractContractGetFileStatusTest {

  /** Bind the shared contract tests to the ADL storage contract. */
  @Override
  protected AbstractFSContract createContract(Configuration conf) {
    return new AdlStorageContract(conf);
  }
}

View File

@ -22,34 +22,13 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractMkdirTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Verify Adls MKDIR semantics compliance with Hadoop.
* Test Mkdir contract on Adl storage file system.
*/
public class TestAdlContractMkdirLive extends AbstractContractMkdirTest {
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new AdlStorageContract(conf);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
@Test
public void testMkdirOverParentFile() throws Throwable {
ContractTestUtils.unsupported("Not supported by Adl");
}
@Test
public void testNoMkdirOverFile() throws Throwable {
ContractTestUtils.unsupported("Not supported by Adl");
}
}
}

View File

@ -22,10 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractOpenTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.junit.Before;
/**
* Verify Adls OPEN/READ semantics compliance with Hadoop.
* Test OPEN - read API.
*/
public class TestAdlContractOpenLive extends AbstractContractOpenTest {
@ -33,12 +32,4 @@ public class TestAdlContractOpenLive extends AbstractContractOpenTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
}

View File

@ -22,12 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractRenameTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Verify Adls RENAME semantics compliance with Hadoop.
* Test rename contract test cases on Adl file system.
*/
public class TestAdlContractRenameLive extends AbstractContractRenameTest {
@ -35,29 +32,4 @@ public class TestAdlContractRenameLive extends AbstractContractRenameTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
@Test
public void testRenameFileOverExistingFile() throws Throwable {
ContractTestUtils
.unsupported("BUG : Adl to support full complete POSIX" + "behaviour");
}
@Test
public void testRenameFileNonexistentDir() throws Throwable {
ContractTestUtils
.unsupported("BUG : Adl to support create dir is not " + "exist");
}
@Test
public void testRenameWithNonEmptySubDir() throws Throwable {
ContractTestUtils.unsupported("BUG : Adl to support non empty dir move.");
}
}

View File

@ -22,12 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractRootDirectoryTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.junit.Before;
import org.junit.Test;
/**
* Verify Adls root level operation support.
* Test operation on root level.
*/
public class TestAdlContractRootDirLive
extends AbstractContractRootDirectoryTest {
@ -35,18 +32,4 @@ public class TestAdlContractRootDirLive
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
@Test
public void testRmNonEmptyRootDirNonRecursive() throws Throwable {
ContractTestUtils.unsupported(
"BUG : Adl should throw exception instred " + "of returning false.");
}
}

View File

@ -22,10 +22,9 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.contract.AbstractContractSeekTest;
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.junit.Before;
/**
* Verify Adls OPEN/READ seek operation support.
* Test seek operation on Adl file system.
*/
public class TestAdlContractSeekLive extends AbstractContractSeekTest {
@ -33,12 +32,4 @@ public class TestAdlContractSeekLive extends AbstractContractSeekTest {
protected AbstractFSContract createContract(Configuration configuration) {
return new AdlStorageContract(configuration);
}
@Before
@Override
public void setup() throws Exception {
org.junit.Assume
.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
super.setup();
}
}

View File

@ -23,27 +23,63 @@
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.Parallelized;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Random;
import java.util.UUID;
import static org.apache.hadoop.fs.adl.AdlConfKeys.WRITE_BUFFER_SIZE_KEY;
/**
* Verify different data segment size writes ensure the integrity and
* order of the data.
* Verify data integrity with different data sizes with buffer size.
*/
@RunWith(Parallelized.class)
public class TestAdlDifferentSizeWritesLive {
private static Random rand = new Random();
private int totalSize;
private int chunkSize;
// Parameterized constructor: the Parameterized runner injects one
// {totalSize, chunkSize} row per test instance — the total payload size to
// write and the chunk size used while writing it.
public TestAdlDifferentSizeWritesLive(int totalSize, int chunkSize) {
this.totalSize = totalSize;
this.chunkSize = chunkSize;
}
/**
 * Produces a buffer of the given size filled with random bytes.
 *
 * @param size number of random bytes to generate
 * @return a new array of length {@code size} with random content
 */
public static byte[] getRandomByteArrayData(int size) {
  byte[] buffer = new byte[size];
  // Distinctly named local generator: the previous local "rand" shadowed
  // the class-level static field of the same name, leaving that field
  // effectively unused while appearing referenced.
  Random localRandom = new Random();
  localRandom.nextBytes(buffer);
  return buffer;
}
// Test-data table consumed by the Parameterized runner: each row is a
// {totalSize, chunkSize} pair in bytes.
// NOTE(review): the row {4 * 1024, 1 * 1024} appears twice — confirm the
// duplicate is intentional and not a copy/paste slip.
@Parameterized.Parameters(name = "{index}: Data Size [{0}] ; Chunk Size "
+ "[{1}]")
public static Collection testDataForIntegrityTest() {
return Arrays.asList(
new Object[][] {{4 * 1024, 1 * 1024}, {4 * 1024, 7 * 1024},
{4 * 1024, 10}, {2 * 1024, 10}, {1 * 1024, 10}, {100, 1},
{4 * 1024, 1 * 1024}, {7 * 1024, 2 * 1024}, {9 * 1024, 2 * 1024},
{10 * 1024, 3 * 1024}, {10 * 1024, 1 * 1024},
{10 * 1024, 8 * 1024}});
}
// One-time cleanup before the suite: recursively remove leftovers of the
// shared data-integrity directory so earlier (possibly failed) runs cannot
// interfere. Touches the live store only when the contract tests are
// enabled via configuration.
@BeforeClass
public static void cleanUpParent() throws IOException, URISyntaxException {
if (AdlStorageConfiguration.isContractTestEnabled()) {
Path path = new Path("/test/dataIntegrityCheck/");
FileSystem fs = AdlStorageConfiguration.createStorageConnector();
fs.delete(path, true);
}
}
@Before
public void setup() throws Exception {
org.junit.Assume
@ -51,32 +87,17 @@ public void setup() throws Exception {
}
@Test
public void testSmallDataWrites() throws IOException {
testDataIntegrity(4 * 1024 * 1024, 1 * 1024);
testDataIntegrity(4 * 1024 * 1024, 7 * 1024);
testDataIntegrity(4 * 1024 * 1024, 10);
testDataIntegrity(2 * 1024 * 1024, 10);
testDataIntegrity(1 * 1024 * 1024, 10);
testDataIntegrity(100, 1);
}
@Test
public void testMediumDataWrites() throws IOException {
testDataIntegrity(4 * 1024 * 1024, 1 * 1024 * 1024);
testDataIntegrity(7 * 1024 * 1024, 2 * 1024 * 1024);
testDataIntegrity(9 * 1024 * 1024, 2 * 1024 * 1024);
testDataIntegrity(10 * 1024 * 1024, 3 * 1024 * 1024);
}
private void testDataIntegrity(int totalSize, int chunkSize)
throws IOException {
Path path = new Path("/test/dataIntegrityCheck");
public void testDataIntegrity() throws IOException {
Path path = new Path(
"/test/dataIntegrityCheck/" + UUID.randomUUID().toString());
FileSystem fs = null;
AdlStorageConfiguration.getConfiguration()
.setInt(WRITE_BUFFER_SIZE_KEY, 4 * 1024);
try {
fs = AdlStorageConfiguration.createStorageConnector();
} catch (URISyntaxException e) {
throw new IllegalStateException("Can not initialize ADL FileSystem. "
+ "Please check fs.defaultFS property.", e);
+ "Please check test.fs.adl.name property.", e);
}
byte[] expectedData = getRandomByteArrayData(totalSize);

View File

@ -0,0 +1,67 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DelegateToFileSystem;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileContextCreateMkdirBaseTest;
import org.apache.hadoop.fs.FileContextTestHelper;
import org.apache.hadoop.fs.FileSystem;
import org.junit.Assume;
import org.junit.BeforeClass;
import java.net.URI;
import java.util.UUID;
/**
 * FileContext create/mkdir contract tests run against a live ADL store.
 */
public class TestAdlFileContextCreateMkdirLive
    extends FileContextCreateMkdirBaseTest {
  private static final String KEY_FILE_SYSTEM = "test.fs.adl.name";

  @BeforeClass
  public static void skipTestCheck() {
    // Only run when a live-contract configuration is available.
    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
  }

  @Override
  public void setUp() throws Exception {
    Configuration configuration = AdlStorageConfiguration.getConfiguration();
    String defaultFs = configuration.get(KEY_FILE_SYSTEM);
    if (defaultFs == null || defaultFs.trim().isEmpty()) {
      throw new Exception("Default file system not configured.");
    }

    URI storeUri = new URI(defaultFs);
    FileSystem connector = AdlStorageConfiguration.createStorageConnector();
    // The FileContext-based tests need an AbstractFileSystem; adapt the ADL
    // FileSystem through an anonymous DelegateToFileSystem.
    fc = FileContext.getFileContext(
        new DelegateToFileSystem(storeUri, connector, configuration,
            connector.getScheme(), false) {
        }, configuration);
    super.setUp();
  }

  @Override
  protected FileContextTestHelper createFileContextHelper() {
    // On Windows, root directory path is created from local running directory.
    // Adl does not support ':' as part of the path which results in failure.
    return new FileContextTestHelper(UUID.randomUUID().toString());
  }
}

View File

@ -0,0 +1,99 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.net.URI;
import java.util.UUID;
import static org.apache.hadoop.util.Shell.WINDOWS;
/**
 * Run collection of tests for the {@link FileContext} against a live ADL
 * store, skipping operations the store does not support.
 */
public class TestAdlFileContextMainOperationsLive
    extends FileContextMainOperationsBaseTest {

  private static final String KEY_FILE_SYSTEM = "test.fs.adl.name";

  @BeforeClass
  public static void skipTestCheck() {
    // Only run when a live-contract configuration is available.
    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
  }

  @Override
  public void setUp() throws Exception {
    Configuration conf = AdlStorageConfiguration.getConfiguration();
    String fileSystem = conf.get(KEY_FILE_SYSTEM);
    if (fileSystem == null || fileSystem.trim().length() == 0) {
      throw new Exception("Default file system not configured.");
    }
    URI uri = new URI(fileSystem);
    FileSystem fs = AdlStorageConfiguration.createStorageConnector();
    // The FileContext test base drives an AbstractFileSystem; adapt the ADL
    // FileSystem through an anonymous DelegateToFileSystem.
    fc = FileContext.getFileContext(
        new DelegateToFileSystem(uri, fs, conf, fs.getScheme(), false) {
        }, conf);
    super.setUp();
  }

  @Override
  protected FileContextTestHelper createFileContextHelper() {
    // On Windows, root directory path is created from local running directory.
    // Adl does not support ':' as part of the path which results in failure.
    return new FileContextTestHelper(UUID.randomUUID().toString());
  }

  @Override
  protected boolean listCorruptedBlocksSupported() {
    // Corrupted-block enumeration is not available on ADL.
    return false;
  }

  @Override
  public void testWorkingDirectory() throws Exception {
    if (WINDOWS) {
      // TODO :Fix is required in Hadoop shell to support windows permission
      // set.
      // The test is failing with NPE on windows platform only, with Linux
      // platform test passes.
      Assume.assumeTrue(false);
    } else {
      super.testWorkingDirectory();
    }
  }

  @Override
  public void testUnsupportedSymlink() throws IOException {
    // Symlinks are not supported on ADL; skip the base-class test.
    Assume.assumeTrue(false);
  }

  @Test
  public void testSetVerifyChecksum() throws IOException {
    // Checksum verification is not applicable to ADL; skip.
    Assume.assumeTrue(false);
  }
}

View File

@ -22,12 +22,13 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.apache.hadoop.fs.Path;
import org.junit.Test;
import org.junit.Assume;
import org.junit.Before;
import java.io.IOException;
/**
* Verify Adls adhere to Hadoop file system semantics.
* Test Base contract tests on Adl file system.
*/
public class TestAdlFileSystemContractLive extends FileSystemContractBaseTest {
private FileSystem adlStore;
@ -60,35 +61,8 @@ protected void runTest() throws Throwable {
}
}
public void testGetFileStatus() throws IOException {
if (!AdlStorageConfiguration.isContractTestEnabled()) {
return;
}
Path testPath = new Path("/test/adltest");
if (adlStore.exists(testPath)) {
adlStore.delete(testPath, false);
}
adlStore.create(testPath).close();
assertTrue(adlStore.delete(testPath, false));
@Before
public void skipTestCheck() {
Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
}
/**
* The following tests are failing on Azure Data Lake and the Azure Data Lake
* file system code needs to be modified to make them pass.
* A separate work item has been opened for this.
*/
@Test
@Override
public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
// BUG : Adl should return exception instead of false.
}
@Test
@Override
public void testMkdirsWithUmask() throws Exception {
// Support under implementation in Adl
}
}

View File

@ -0,0 +1,134 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.Parallelized;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.Collection;
import java.util.UUID;
/**
 * Test createNonRecursive API against a live ADL store. Each scenario is a
 * parameterized row describing pre-existing state (file/parent) and the
 * expected outcome.
 */
@RunWith(Parallelized.class)
public class TestAdlInternalCreateNonRecursive {
  private Path inputFileName;
  private FsPermission inputPermission;
  private boolean inputOverride;
  private boolean inputFileAlreadyExist;
  private boolean inputParentAlreadyExist;
  private Class<IOException> expectedExceptionType;
  private FileSystem adlStore;

  public TestAdlInternalCreateNonRecursive(String testScenario, String fileName,
      FsPermission permission, boolean override, boolean fileAlreadyExist,
      boolean parentAlreadyExist, Class<IOException> exceptionType) {
    // Random parent path for each test so that parallel execution does not fail
    // other running test.
    inputFileName = new Path(
        "/test/createNonRecursive/" + UUID.randomUUID().toString(), fileName);
    inputPermission = permission;
    inputFileAlreadyExist = fileAlreadyExist;
    inputOverride = override;
    inputParentAlreadyExist = parentAlreadyExist;
    expectedExceptionType = exceptionType;
  }

  @Parameterized.Parameters(name = "{0}")
  public static Collection adlCreateNonRecursiveTestData()
      throws UnsupportedEncodingException {
    /*
      Test Data
      File name, Permission, Override flag, File already exist, Parent
      already exist
      shouldCreateSucceed, expectedExceptionIfFileCreateFails

      File already exist and Parent already exist are mutually exclusive.
    */
    return Arrays.asList(new Object[][] {
        {"CNR - When file do not exist.", UUID.randomUUID().toString(),
            FsPermission.getFileDefault(), false, false, true, null},
        {"CNR - When file exist. Override false", UUID.randomUUID().toString(),
            FsPermission.getFileDefault(), false, true, true,
            FileAlreadyExistsException.class},
        {"CNR - When file exist. Override true", UUID.randomUUID().toString(),
            FsPermission.getFileDefault(), true, true, true, null},

        //TODO: This test is skipped till the fixes are not made it to prod.
        /*{ "CNR - When parent do no exist.", UUID.randomUUID().toString(),
            FsPermission.getFileDefault(), false, false, true, false,
            IOException.class }*/});
  }

  @Before
  public void setUp() throws Exception {
    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
    adlStore = AdlStorageConfiguration.createStorageConnector();
  }

  /**
   * Drives createNonRecursive for the configured scenario and verifies that
   * it either succeeds or fails with the expected exception type.
   */
  @Test
  public void testCreateNonRecursiveFunctionality() throws IOException {
    if (inputFileAlreadyExist) {
      // Close the returned stream immediately: the pre-created file is only
      // needed for existence, and leaving the stream open leaks a handle.
      FileSystem.create(adlStore, inputFileName, inputPermission).close();
    }

    // Mutually exclusive to inputFileAlreadyExist
    if (inputParentAlreadyExist) {
      adlStore.mkdirs(inputFileName.getParent());
    } else {
      adlStore.delete(inputFileName.getParent(), true);
    }

    try {
      // Close the stream on success; the test only validates creation
      // semantics, and the open stream would otherwise leak.
      adlStore.createNonRecursive(inputFileName, inputPermission, inputOverride,
          CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_DEFAULT,
          adlStore.getDefaultReplication(inputFileName),
          adlStore.getDefaultBlockSize(inputFileName), null).close();
    } catch (IOException e) {
      if (expectedExceptionType == null) {
        throw e;
      }
      Assert.assertEquals(expectedExceptionType, e.getClass());
      return;
    }

    if (expectedExceptionType != null) {
      Assert.fail("CreateNonRecursive should have failed with exception "
          + expectedExceptionType.getName());
    }
  }
}

View File

@ -0,0 +1,116 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.Parallelized;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
import org.junit.*;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.UUID;
/**
 * Verifies that requested permissions are applied verbatim to files and
 * folders on a live ADL store, across all group/other permission
 * combinations.
 */
@RunWith(Parallelized.class)
public class TestAdlPermissionLive {
  private static Path testRoot = new Path("/test");
  private FsPermission permission;
  private Path path;
  private FileSystem adlStore;

  public TestAdlPermissionLive(FsPermission testPermission) {
    permission = testPermission;
  }

  @Parameterized.Parameters(name = "{0}")
  public static Collection adlCreateNonRecursiveTestData()
      throws UnsupportedEncodingException {
    /*
      Test Data
      File/Folder name, User permission, Group permission, Other Permission,
      Parent already exist
      shouldCreateSucceed, expectedExceptionIfFileCreateFails
    */
    // Full cross-product of group x other actions, with user fixed at ALL.
    final Collection<Object[]> permutations = new ArrayList<>();
    for (FsAction group : FsAction.values()) {
      for (FsAction other : FsAction.values()) {
        permutations
            .add(new Object[] {new FsPermission(FsAction.ALL, group, other)});
      }
    }
    return permutations;
  }

  @AfterClass
  public static void cleanUp() throws IOException, URISyntaxException {
    // Remove the shared test root once the whole suite has finished.
    if (AdlStorageConfiguration.isContractTestEnabled()) {
      Assert.assertTrue(AdlStorageConfiguration.createStorageConnector()
          .delete(testRoot, true));
    }
  }

  @Before
  public void setUp() throws Exception {
    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
    adlStore = AdlStorageConfiguration.createStorageConnector();
  }

  // Shared fixture: zero the umask, create the parent, and strip its default
  // ACL so the permission under test is applied without modification.
  private void preparePermissionParent() throws IOException {
    adlStore.getConf()
        .set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "000");
    adlStore.mkdirs(path.getParent(),
        new FsPermission(FsAction.ALL, FsAction.WRITE, FsAction.NONE));
    adlStore.removeDefaultAcl(path.getParent());
  }

  @Test
  public void testFilePermission() throws IOException {
    path = new Path(testRoot, UUID.randomUUID().toString());
    preparePermissionParent();

    adlStore.create(path, permission, true, 1024, (short) 1, 1023, null);

    FileStatus status = adlStore.getFileStatus(path);
    Assert.assertEquals(permission, status.getPermission());
  }

  @Test
  public void testFolderPermission() throws IOException {
    path = new Path(testRoot, UUID.randomUUID().toString());
    preparePermissionParent();

    adlStore.mkdirs(path, permission);

    FileStatus status = adlStore.getFileStatus(path);
    Assert.assertEquals(permission, status.getPermission());
  }
}

View File

@ -0,0 +1,336 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.adl.common.Parallelized;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.util.*;
/**
 * Test supported ASCII, UTF-8 character set supported by Adl storage file
 * system on file/folder operation.
 */
@RunWith(Parallelized.class)
public class TestAdlSupportedCharsetInPath {
  // All test paths are created under this root; removed once in testReport().
  private static final String TEST_ROOT = "/test/";
  private static final Logger LOG = LoggerFactory
      .getLogger(TestAdlSupportedCharsetInPath.class);
  // File/folder name under test, injected per parameterized run.
  private String path;

  public TestAdlSupportedCharsetInPath(String filePath) {
    path = filePath;
  }

  /**
   * Builds the parameterized data set: printable ASCII special characters
   * placed at the start, end and middle of a name, plus a large set of
   * Unicode names from many scripts.
   *
   * @return one-element Object[] per file/folder name to exercise
   * @throws UnsupportedEncodingException declared but never thrown by the
   *         current implementation
   */
  @Parameterized.Parameters(name = "{0}")
  public static Collection<Object[]> adlCharTestData()
      throws UnsupportedEncodingException {
    ArrayList<String> filePathList = new ArrayList<>();
    // Walk the printable ASCII range, skipping alphanumerics so only
    // special characters remain.
    for (int i = 32; i < 127; ++i) {
      String specialChar = (char) i + "";
      if (i >= 48 && i <= 57) {
        // skip digits 0-9
        continue;
      }
      if (i >= 65 && i <= 90) {
        // skip uppercase A-Z
        continue;
      }
      if (i >= 97 && i <= 122) {
        // skip lowercase a-z
        continue;
      }
      // Special char at start of the path.
      // Excluded: backslash (92), colon (58), dot (46), slash (47).
      if (i != 92 && i != 58 && i != 46 && i != 47) {
        filePathList.add(specialChar + "");
      }
      // Special char at end of string.
      // Excluded: backslash (92), slash (47), colon (58).
      if (i != 92 && i != 47 && i != 58) {
        filePathList.add("file " + i + " " + specialChar);
      }
      // Special char in between string; same exclusions as above.
      if (i != 47 && i != 58 && i != 92) {
        filePathList.add("file " + i + " " + specialChar + "_name");
      }
    }
    // Edge cases: trailing space and consecutive dots inside a name.
    filePathList.add("a ");
    filePathList.add("a..b");
    fillUnicodes(filePathList);
    Collection<Object[]> result = new ArrayList<>();
    for (String item : filePathList) {
      result.add(new Object[] {item});
    }
    return result;
  }

  /**
   * Appends Unicode file names — "big data" translated into many languages,
   * followed by Latin names suffixed with Greek letters.
   *
   * @param filePathList list to append the Unicode names to
   */
  private static void fillUnicodes(ArrayList<String> filePathList) {
    // Unicode characters
    filePathList.add("البيانات الكبيرة"); // Arabic
    filePathList.add("Të dhënat i madh"); // Albanian
    filePathList.add("մեծ տվյալները"); // Armenian
    filePathList.add("böyük data"); // Azerbaijani
    filePathList.add("вялікія дадзеныя"); // Belarusian
    filePathList.add("বিগ ডেটা"); // Bengali
    filePathList.add("veliki podataka"); // Bosnian
    filePathList.add("голяма данни"); // Bulgarian
    filePathList.add("大数据"); // Chinese - Simplified
    filePathList.add("大數據"); // Chinese - Traditional
    filePathList.add("დიდი მონაცემთა"); // Georgian
    filePathList.add("große Daten"); // German
    filePathList.add("μεγάλο δεδομένα"); // Greek
    filePathList.add("મોટા માહિતી"); // Gujarati
    filePathList.add("נתונים גדולים"); // Hebrew
    filePathList.add("बड़ा डेटा"); // Hindi
    filePathList.add("stór gögn"); // Icelandic
    filePathList.add("sonraí mór"); // Irish
    filePathList.add("ビッグデータ"); // Japanese
    filePathList.add("үлкен деректер"); // Kazakh
    filePathList.add("ទិន្នន័យធំ"); // Khmer
    filePathList.add("빅 데이터"); // Korean
    filePathList.add("ຂໍ້ມູນ ຂະຫນາດໃຫຍ່"); // Lao
    filePathList.add("големи податоци"); // Macedonian
    filePathList.add("ठूलो डाटा"); // Nepali
    filePathList.add("വലിയ ഡാറ്റ"); // Malayalam
    filePathList.add("मोठे डेटा"); // Marathi
    filePathList.add("том мэдээлэл"); // Mongolian
    filePathList.add("اطلاعات بزرگ"); // Persian
    filePathList.add("ਵੱਡੇ ਡਾਟੇ ਨੂੰ"); // Punjabi
    filePathList.add("большие данные"); // Russian
    filePathList.add("Велики података"); // Serbian
    filePathList.add("විශාල දත්ත"); // Sinhala
    filePathList.add("big dát"); // Slovak
    filePathList.add("маълумоти калон"); // Tajik
    filePathList.add("பெரிய தரவு"); // Tamil
    filePathList.add("పెద్ద డేటా"); // Telugu
    filePathList.add("ข้อมูลใหญ่"); // Thai
    filePathList.add("büyük veri"); // Turkish
    filePathList.add("великі дані"); // Ukrainian
    filePathList.add("بڑے اعداد و شمار"); // Urdu
    filePathList.add("katta ma'lumotlar"); // Uzbek
    filePathList.add("dữ liệu lớn"); // Vietnamese
    filePathList.add("גרויס דאַטן"); // Yiddish
    filePathList.add("big idatha"); // Zulu
    // Latin names suffixed with Greek letters (mixed-script names).
    filePathList.add("rachelχ");
    filePathList.add("jessicaο");
    filePathList.add("sarahδ");
    filePathList.add("katieν");
    filePathList.add("wendyξ");
    filePathList.add("davidμ");
    filePathList.add("priscillaυ");
    filePathList.add("oscarθ");
    filePathList.add("xavierχ");
    filePathList.add("gabriellaθ");
    filePathList.add("davidυ");
    filePathList.add("ireneμ");
    filePathList.add("fredρ");
    filePathList.add("davidτ");
    filePathList.add("ulyssesν");
    filePathList.add("gabriellaμ");
    filePathList.add("zachζ");
    filePathList.add("gabriellaλ");
    filePathList.add("ulyssesφ");
    filePathList.add("davidχ");
    filePathList.add("sarahσ");
    filePathList.add("hollyψ");
    filePathList.add("nickα");
    filePathList.add("ulyssesι");
    filePathList.add("mikeβ");
    filePathList.add("priscillaκ");
    filePathList.add("wendyθ");
    filePathList.add("jessicaς");
    filePathList.add("fredχ");
    filePathList.add("fredζ");
    filePathList.add("sarahκ");
    filePathList.add("calvinη");
    filePathList.add("xavierχ");
    filePathList.add("yuriχ");
    filePathList.add("ethanλ");
    filePathList.add("hollyε");
    filePathList.add("xavierσ");
    filePathList.add("victorτ");
    filePathList.add("wendyβ");
    filePathList.add("jessicaς");
    filePathList.add("quinnφ");
    filePathList.add("xavierυ");
    filePathList.add("nickι");
    filePathList.add("rachelφ");
    filePathList.add("oscarξ");
    filePathList.add("zachδ");
    filePathList.add("zachλ");
    filePathList.add("rachelα");
    filePathList.add("jessicaφ");
    filePathList.add("lukeφ");
    filePathList.add("tomζ");
    filePathList.add("nickξ");
    filePathList.add("nickκ");
    filePathList.add("ethanδ");
    filePathList.add("fredχ");
    filePathList.add("priscillaθ");
    filePathList.add("zachξ");
    filePathList.add("xavierξ");
    filePathList.add("zachψ");
    filePathList.add("ethanα");
    filePathList.add("oscarι");
    filePathList.add("ireneδ");
    filePathList.add("ireneζ");
    filePathList.add("victorο");
    filePathList.add("wendyβ");
    filePathList.add("mikeσ");
    filePathList.add("fredο");
    filePathList.add("mikeη");
    filePathList.add("sarahρ");
    filePathList.add("quinnβ");
    filePathList.add("mikeυ");
    filePathList.add("nickζ");
    filePathList.add("nickο");
    filePathList.add("tomκ");
    filePathList.add("bobλ");
    filePathList.add("yuriπ");
    filePathList.add("davidτ");
    filePathList.add("quinnπ");
    filePathList.add("mikeλ");
    filePathList.add("davidη");
    filePathList.add("ethanτ");
    filePathList.add("nickφ");
    filePathList.add("yuriο");
    filePathList.add("ethanυ");
    filePathList.add("bobθ");
    filePathList.add("davidλ");
    filePathList.add("priscillaξ");
    filePathList.add("nickγ");
    filePathList.add("lukeυ");
    filePathList.add("ireneλ");
    filePathList.add("xavierο");
    filePathList.add("fredυ");
    filePathList.add("ulyssesμ");
    filePathList.add("wendyγ");
    filePathList.add("zachλ");
    filePathList.add("rachelς");
    filePathList.add("sarahπ");
    filePathList.add("aliceψ");
    filePathList.add("bobτ");
  }

  /**
   * Despite its name this is class-level cleanup, not a test: it deletes
   * the shared test root once all parameterized runs finish.
   */
  @AfterClass
  public static void testReport() throws IOException, URISyntaxException {
    if (!AdlStorageConfiguration.isContractTestEnabled()) {
      return;
    }
    FileSystem fs = AdlStorageConfiguration.createStorageConnector();
    fs.delete(new Path(TEST_ROOT), true);
  }

  /**
   * Verifies mkdir/exists/listStatus/delete round-trip for a directory
   * whose name contains the character(s) under test.
   */
  @Test
  public void testAllowedSpecialCharactersMkdir()
      throws IOException, URISyntaxException {
    Path parentPath = new Path(TEST_ROOT, UUID.randomUUID().toString() + "/");
    Path specialFile = new Path(parentPath, path);
    FileSystem fs = AdlStorageConfiguration.createStorageConnector();

    Assert.assertTrue("Mkdir failed : " + specialFile, fs.mkdirs(specialFile));
    Assert.assertTrue("File not Found after Mkdir success" + specialFile,
        fs.exists(specialFile));
    Assert.assertTrue("Not listed under parent " + parentPath,
        contains(fs.listStatus(parentPath),
            fs.makeQualified(specialFile).toString()));
    Assert.assertTrue("Delete failed : " + specialFile,
        fs.delete(specialFile, true));
    Assert.assertFalse("File still exist after delete " + specialFile,
        fs.exists(specialFile));
  }

  /**
   * Returns true when {@code remotePath} is among the fully-qualified paths
   * in {@code statuses}; on a miss, logs every listed path to aid debugging.
   */
  private boolean contains(FileStatus[] statuses, String remotePath) {
    for (FileStatus status : statuses) {
      if (status.getPath().toString().equals(remotePath)) {
        return true;
      }
    }
    // Not found: dump the listing so the failing assertion is diagnosable.
    for (FileStatus status : statuses) {
      LOG.info(status.getPath().toString());
    }
    return false;
  }

  @Before
  public void setup() throws Exception {
    // Skip the whole class unless live contract testing is configured.
    org.junit.Assume
        .assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
  }

  /**
   * Verifies rename round-trip (special name -> plain name -> special name)
   * for a file whose name contains the character(s) under test, checking
   * existence and parent listing after each rename.
   */
  @Test
  public void testAllowedSpecialCharactersRename()
      throws IOException, URISyntaxException {
    String parentPath = TEST_ROOT + UUID.randomUUID().toString() + "/";
    Path specialFile = new Path(parentPath + path);
    Path anotherLocation = new Path(parentPath + UUID.randomUUID().toString());
    FileSystem fs = AdlStorageConfiguration.createStorageConnector();

    Assert.assertTrue("Could not create " + specialFile.toString(),
        fs.createNewFile(specialFile));
    Assert.assertTrue(
        "Failed to rename " + specialFile.toString() + " --> " + anotherLocation
            .toString(), fs.rename(specialFile, anotherLocation));
    Assert.assertFalse("File should not be present after successful rename : "
        + specialFile.toString(), fs.exists(specialFile));
    Assert.assertTrue("File should be present after successful rename : "
        + anotherLocation.toString(), fs.exists(anotherLocation));
    Assert.assertFalse(
        "Listed under parent whereas expected not listed : " + parentPath,
        contains(fs.listStatus(new Path(parentPath)),
            fs.makeQualified(specialFile).toString()));
    Assert.assertTrue(
        "Failed to rename " + anotherLocation.toString() + " --> " + specialFile
            .toString(), fs.rename(anotherLocation, specialFile));
    Assert.assertTrue(
        "File should be present after successful rename : " + "" + specialFile
            .toString(), fs.exists(specialFile));
    Assert.assertFalse("File should not be present after successful rename : "
        + anotherLocation.toString(), fs.exists(anotherLocation));
    Assert.assertTrue("Not listed under parent " + parentPath,
        contains(fs.listStatus(new Path(parentPath)),
            fs.makeQualified(specialFile).toString()));
    Assert.assertTrue("Failed to delete " + parentPath,
        fs.delete(new Path(parentPath), true));
  }
}

View File

@ -0,0 +1,111 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.adl.live;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.OutputStream;
import java.util.UUID;
/**
 * This class is responsible for testing ContentSummary, ListStatus on
 * file/folder.
 */
public class TestMetadata {
  private FileSystem adlStore;
  private Path parent;

  public TestMetadata() {
    parent = new Path("test");
  }

  @Before
  public void setUp() throws Exception {
    // Skip the whole class unless live contract testing is configured.
    Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled());
    adlStore = AdlStorageConfiguration.createStorageConnector();
  }

  @After
  public void cleanUp() throws Exception {
    if (AdlStorageConfiguration.isContractTestEnabled()) {
      adlStore.delete(parent, true);
    }
  }

  /**
   * Creates {@code target} containing {@code size} bytes of the letter 'a'.
   * try-with-resources guarantees the stream is closed even when a write
   * fails (the original code leaked the stream on failure), and a single
   * bulk write replaces the byte-at-a-time loop duplicated in two tests.
   *
   * @param target path of the file to create
   * @param size   number of bytes to write
   * @throws IOException on create/write failure
   */
  private void createTestFile(Path target, int size) throws IOException {
    byte[] data = new byte[size];
    for (int i = 0; i < data.length; ++i) {
      data[i] = 'a'; // same content as the original write(97) loop
    }
    try (OutputStream out = adlStore.create(target)) {
      out.write(data);
    }
  }

  @Test
  public void testContentSummaryOnFile() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);
    createTestFile(testFile, 1024);

    Assert.assertTrue(adlStore.isFile(testFile));
    ContentSummary summary = adlStore.getContentSummary(testFile);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(0, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
  }

  @Test
  public void testContentSummaryOnFolder() throws IOException {
    Path child = new Path(UUID.randomUUID().toString());
    Path testFile = new Path(parent, child);
    createTestFile(testFile, 1024);

    Assert.assertTrue(adlStore.isFile(testFile));
    // Summary taken on the parent: one file plus the parent directory itself.
    ContentSummary summary = adlStore.getContentSummary(parent);
    Assert.assertEquals(1024, summary.getSpaceConsumed());
    Assert.assertEquals(1, summary.getFileCount());
    Assert.assertEquals(1, summary.getDirectoryCount());
    Assert.assertEquals(1024, summary.getLength());
  }

  @Test
  public void listStatusOnFile() throws IOException {
    Path path = new Path(parent, "a.txt");
    FileSystem fs = adlStore;
    fs.createNewFile(path);
    Assert.assertTrue(fs.isFile(path));
    // listStatus on a plain file returns a single entry for the file itself.
    FileStatus[] statuses = fs.listStatus(path);
    Assert
        .assertEquals(path.makeQualified(fs.getUri(), fs.getWorkingDirectory()),
            statuses[0].getPath());
  }
}