+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode;
+
+/**
+ * Exception to wrap invalid Azure service error responses.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InvalidAbfsRestOperationException extends AbfsRestOperationException {
+ /**
+ * Wraps the given exception as a REST operation failure with the
+ * {@code AzureServiceErrorCode.UNKNOWN} status and error code.
+ *
+ * @param innerException the underlying cause of the invalid response
+ */
+ public InvalidAbfsRestOperationException(
+ final Exception innerException) {
+ super(
+ AzureServiceErrorCode.UNKNOWN.getStatusCode(),
+ AzureServiceErrorCode.UNKNOWN.getErrorCode(),
+ "InvalidAbfsRestOperationException",
+ innerException);
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAclOperationException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAclOperationException.java
new file mode 100644
index 00000000000..9c186baab9d
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAclOperationException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when there is an attempt to perform an invalid operation on an ACL.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public final class InvalidAclOperationException extends AzureBlobFileSystemException {
+ /**
+ * @param message description of the invalid ACL operation
+ */
+ public InvalidAclOperationException(String message) {
+ super(message);
+ }
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidConfigurationValueException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidConfigurationValueException.java
new file mode 100644
index 00000000000..7591bac59e2
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidConfigurationValueException.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when a configuration value is invalid.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class InvalidConfigurationValueException extends AzureBlobFileSystemException {
+ /**
+ * @param configKey the configuration key whose value is invalid
+ * @param innerException the underlying cause of the failure
+ */
+ public InvalidConfigurationValueException(String configKey, Exception innerException) {
+ super("Invalid configuration value detected for " + configKey, innerException);
+ }
+
+ /**
+ * @param configKey the configuration key whose value is invalid
+ */
+ public InvalidConfigurationValueException(String configKey) {
+ super("Invalid configuration value detected for " + configKey);
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidFileSystemPropertyException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidFileSystemPropertyException.java
new file mode 100644
index 00000000000..5823fd2c589
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidFileSystemPropertyException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when a file system property is invalid.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public final class InvalidFileSystemPropertyException extends AzureBlobFileSystemException {
+ /**
+ * @param property the name of the invalid file system property; it is
+ * embedded in the exception message
+ */
+ public InvalidFileSystemPropertyException(String property) {
+ super(String.format("%s is invalid.", property));
+ }
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriAuthorityException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriAuthorityException.java
new file mode 100644
index 00000000000..7aa319c90c8
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriAuthorityException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when URI authority is invalid.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public final class InvalidUriAuthorityException extends AzureBlobFileSystemException {
+ /**
+ * @param url the URL whose authority component is invalid; it is
+ * embedded in the exception message
+ */
+ public InvalidUriAuthorityException(String url) {
+ super(String.format("%s has invalid authority.", url));
+ }
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriException.java
new file mode 100644
index 00000000000..4fa01509779
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidUriException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when URI is invalid.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public final class InvalidUriException extends AzureBlobFileSystemException {
+ /**
+ * @param url the invalid URI string; it is embedded in the exception message
+ */
+ public InvalidUriException(String url) {
+ super(String.format("Invalid URI %s", url));
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/KeyProviderException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/KeyProviderException.java
new file mode 100644
index 00000000000..6723d699f56
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/KeyProviderException.java
@@ -0,0 +1,42 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Thrown if there is a problem instantiating a KeyProvider or retrieving a key
+ * using a KeyProvider object.
+ */
+@InterfaceAudience.Private
+public class KeyProviderException extends AzureBlobFileSystemException {
+ private static final long serialVersionUID = 1L;
+
+ /**
+ * @param message description of the key provider failure
+ */
+ public KeyProviderException(String message) {
+ super(message);
+ }
+
+ /**
+ * @param message description of the key provider failure
+ * @param cause the underlying cause, preserved for diagnostics
+ */
+ public KeyProviderException(String message, Throwable cause) {
+ super(message);
+ // Previously the cause was silently dropped, hiding the root failure from
+ // stack traces. Attach it here; assumes the single-arg super leaves the
+ // cause unset (initCause throws IllegalStateException otherwise) — TODO confirm.
+ initCause(cause);
+ }
+
+ /**
+ * @param t the underlying cause; its message becomes this exception's message
+ */
+ public KeyProviderException(Throwable t) {
+ super(t.getMessage());
+ // Keep the original throwable in the cause chain rather than only its message.
+ initCause(t);
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TimeoutException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TimeoutException.java
new file mode 100644
index 00000000000..8dd5d71d683
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TimeoutException.java
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Thrown when a timeout happens.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public final class TimeoutException extends AzureBlobFileSystemException {
+ /**
+ * @param message description of the operation that timed out
+ */
+ public TimeoutException(String message) {
+ super(message);
+ }
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TokenAccessProviderException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TokenAccessProviderException.java
new file mode 100644
index 00000000000..b40b34ac13e
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/TokenAccessProviderException.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Thrown if there is a problem instantiating a TokenAccessProvider or retrieving a configuration
+ * using a TokenAccessProvider object.
+ */
+@InterfaceAudience.Private
+public class TokenAccessProviderException extends AzureBlobFileSystemException {
+
+ /**
+ * @param message description of the token access provider failure
+ */
+ public TokenAccessProviderException(String message) {
+ super(message);
+ }
+
+ /**
+ * @param message description of the token access provider failure
+ * @param cause the underlying cause, preserved for diagnostics
+ */
+ public TokenAccessProviderException(String message, Throwable cause) {
+ super(message);
+ // Previously the cause was silently dropped, hiding the root failure from
+ // stack traces. Attach it here; assumes the single-arg super leaves the
+ // cause unset (initCause throws IllegalStateException otherwise) — TODO confirm.
+ initCause(cause);
+ }
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/package-info.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/package-info.java
new file mode 100644
index 00000000000..e4c75f460f9
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+package org.apache.hadoop.fs.azurebfs.contracts.exceptions;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/package-info.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/package-info.java
new file mode 100644
index 00000000000..67f5633c3a7
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+package org.apache.hadoop.fs.azurebfs.contracts;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/AzureServiceErrorCode.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/AzureServiceErrorCode.java
new file mode 100644
index 00000000000..60e7f92d270
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/AzureServiceErrorCode.java
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.services;
+
+import java.net.HttpURLConnection;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * Azure service error codes.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public enum AzureServiceErrorCode {
+ FILE_SYSTEM_ALREADY_EXISTS("FilesystemAlreadyExists", HttpURLConnection.HTTP_CONFLICT, null),
+ PATH_ALREADY_EXISTS("PathAlreadyExists", HttpURLConnection.HTTP_CONFLICT, null),
+ INTERNAL_OPERATION_ABORT("InternalOperationAbortError", HttpURLConnection.HTTP_CONFLICT, null),
+ PATH_CONFLICT("PathConflict", HttpURLConnection.HTTP_CONFLICT, null),
+ FILE_SYSTEM_NOT_FOUND("FilesystemNotFound", HttpURLConnection.HTTP_NOT_FOUND, null),
+ PATH_NOT_FOUND("PathNotFound", HttpURLConnection.HTTP_NOT_FOUND, null),
+ PRE_CONDITION_FAILED("PreconditionFailed", HttpURLConnection.HTTP_PRECON_FAILED, null),
+ SOURCE_PATH_NOT_FOUND("SourcePathNotFound", HttpURLConnection.HTTP_NOT_FOUND, null),
+ INVALID_SOURCE_OR_DESTINATION_RESOURCE_TYPE("InvalidSourceOrDestinationResourceType", HttpURLConnection.HTTP_CONFLICT, null),
+ RENAME_DESTINATION_PARENT_PATH_NOT_FOUND("RenameDestinationParentPathNotFound", HttpURLConnection.HTTP_NOT_FOUND, null),
+ INVALID_RENAME_SOURCE_PATH("InvalidRenameSourcePath", HttpURLConnection.HTTP_CONFLICT, null),
+ INGRESS_OVER_ACCOUNT_LIMIT(null, HttpURLConnection.HTTP_UNAVAILABLE, "Ingress is over the account limit."),
+ EGRESS_OVER_ACCOUNT_LIMIT(null, HttpURLConnection.HTTP_UNAVAILABLE, "Egress is over the account limit."),
+ INVALID_QUERY_PARAMETER_VALUE("InvalidQueryParameterValue", HttpURLConnection.HTTP_BAD_REQUEST, null),
+ AUTHORIZATION_PERMISSION_MISS_MATCH("AuthorizationPermissionMismatch", HttpURLConnection.HTTP_FORBIDDEN, null),
+ UNKNOWN(null, -1, null);
+
+ private final String errorCode;
+ private final int httpStatusCode;
+ private final String errorMessage;
+ AzureServiceErrorCode(String errorCode, int httpStatusCodes, String errorMessage) {
+ this.errorCode = errorCode;
+ this.httpStatusCode = httpStatusCodes;
+ this.errorMessage = errorMessage;
+ }
+
+ public int getStatusCode() {
+ return this.httpStatusCode;
+ }
+
+ public String getErrorCode() {
+ return this.errorCode;
+ }
+
+ public static List
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.services;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * The ListResultEntrySchema model.
+ */
+@InterfaceStability.Evolving
+public class ListResultEntrySchema {
+ /**
+ * The name property.
+ */
+ @JsonProperty(value = "name")
+ private String name;
+
+ /**
+ * The isDirectory property.
+ */
+ @JsonProperty(value = "isDirectory")
+ private Boolean isDirectory;
+
+ /**
+ * The lastModified property.
+ */
+ @JsonProperty(value = "lastModified")
+ private String lastModified;
+
+ /**
+ * The eTag property.
+ */
+ @JsonProperty(value = "etag")
+ private String eTag;
+
+ /**
+ * The contentLength property.
+ */
+ @JsonProperty(value = "contentLength")
+ private Long contentLength;
+
+ /**
+ * The owner property.
+ */
+ @JsonProperty(value = "owner")
+ private String owner;
+
+ /**
+ * The group property.
+ */
+ @JsonProperty(value = "group")
+ private String group;
+
+ /**
+ * The permissions property.
+ */
+ @JsonProperty(value = "permissions")
+ private String permissions;
+
+ /**
+ * Get the name value.
+ *
+ * @return the name value
+ */
+ public String name() {
+ return name;
+ }
+
+ /**
+ * Set the name value.
+ *
+ * @param name the name value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ /**
+ * Get the isDirectory value.
+ *
+ * @return the isDirectory value
+ */
+ public Boolean isDirectory() {
+ return isDirectory;
+ }
+
+ /**
+ * Set the isDirectory value.
+ *
+ * @param isDirectory the isDirectory value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withIsDirectory(final Boolean isDirectory) {
+ this.isDirectory = isDirectory;
+ return this;
+ }
+
+ /**
+ * Get the lastModified value.
+ *
+ * @return the lastModified value
+ */
+ public String lastModified() {
+ return lastModified;
+ }
+
+ /**
+ * Set the lastModified value.
+ *
+ * @param lastModified the lastModified value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withLastModified(String lastModified) {
+ this.lastModified = lastModified;
+ return this;
+ }
+
+ /**
+ * Get the etag value.
+ *
+ * @return the etag value
+ */
+ public String eTag() {
+ return eTag;
+ }
+
+ /**
+ * Set the eTag value.
+ *
+ * @param eTag the eTag value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withETag(final String eTag) {
+ this.eTag = eTag;
+ return this;
+ }
+
+ /**
+ * Get the contentLength value.
+ *
+ * @return the contentLength value
+ */
+ public Long contentLength() {
+ return contentLength;
+ }
+
+ /**
+ * Set the contentLength value.
+ *
+ * @param contentLength the contentLength value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withContentLength(final Long contentLength) {
+ this.contentLength = contentLength;
+ return this;
+ }
+
+ /**
+ * Get the owner value.
+ *
+ * @return the owner value
+ */
+ public String owner() {
+ return owner;
+ }
+
+ /**
+ * Set the owner value.
+ *
+ * @param owner the owner value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withOwner(final String owner) {
+ this.owner = owner;
+ return this;
+ }
+
+ /**
+ * Get the group value.
+ *
+ * @return the group value
+ */
+ public String group() {
+ return group;
+ }
+
+ /**
+ * Set the group value.
+ *
+ * @param group the group value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withGroup(final String group) {
+ this.group = group;
+ return this;
+ }
+
+ /**
+ * Get the permissions value.
+ *
+ * @return the permissions value
+ */
+ public String permissions() {
+ return permissions;
+ }
+
+ /**
+ * Set the permissions value.
+ *
+ * @param permissions the permissions value to set
+ * @return the ListEntrySchema object itself.
+ */
+ public ListResultEntrySchema withPermissions(final String permissions) {
+ this.permissions = permissions;
+ return this;
+ }
+
+}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/ListResultSchema.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/ListResultSchema.java
new file mode 100644
index 00000000000..32597423c86
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/services/ListResultSchema.java
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.contracts.services;
+
+import java.util.List;
+
+import org.codehaus.jackson.annotate.JsonProperty;
+
+import org.apache.hadoop.classification.InterfaceStability;
+
+/**
+ * The ListResultSchema model.
+ */
+@InterfaceStability.Evolving
+public class ListResultSchema {
+ /**
+ * The paths property.
+ */
+ @JsonProperty(value = "paths")
+ private List
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.diagnostics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
+
+/**
+ * Boolean configuration value validator.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class BooleanConfigurationBasicValidator extends ConfigurationBasicValidator
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.diagnostics;
+
+import org.apache.hadoop.fs.azurebfs.contracts.diagnostics.ConfigurationValidator;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
+
+/**
+ * ConfigurationBasicValidator covers the base case of missing user defined configuration value
+ * @param
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.diagnostics;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.azurebfs.contracts.diagnostics.ConfigurationValidator;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
+
+/**
+ * Integer configuration value Validator.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class IntegerConfigurationBasicValidator extends ConfigurationBasicValidator
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.azurebfs.diagnostics;
+
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.azurebfs.contracts.diagnostics.ConfigurationValidator;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
+
+/**
+ * Long configuration value Validator.
+ */
+@InterfaceStability.Evolving
+public class LongConfigurationBasicValidator extends ConfigurationBasicValidator
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.azurebfs.oauth2;
+
+import java.io.UnsupportedEncodingException;
+import java.net.URLEncoder;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Utilities class http query parameters.
+ */
+public class QueryParams {
+ private Map
+A distributed implementation of {@link
+org.apache.hadoop.fs.FileSystem} for reading and writing files on
+Azure Storage.
+This implementation stores files on Azure in their native form for
+interoperability with other Azure tools.
+
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidAclOperationException;
+import org.apache.hadoop.fs.permission.FsAction;
+
+/**
+ * AbfsAclHelper provides convenience methods to implement modifyAclEntries / removeAclEntries / removeAcl / removeDefaultAcl
+ * from setAcl and getAcl.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+public final class AbfsAclHelper {
+
+ private AbfsAclHelper() {
+ // not called
+ }
+
+ public static Map
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.io.IOException;
+import java.io.UnsupportedEncodingException;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.net.URLEncoder;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.fs.azurebfs.utils.SSLSocketFactoryEx;
+import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
+import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
+import org.apache.hadoop.fs.azurebfs.constants.HttpQueryParams;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidUriException;
+import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
+import org.apache.hadoop.fs.azurebfs.oauth2.AccessTokenProvider;
+
+import static org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants.*;
+import static org.apache.hadoop.fs.azurebfs.constants.FileSystemUriSchemes.HTTPS_SCHEME;
+import static org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations.*;
+import static org.apache.hadoop.fs.azurebfs.constants.HttpQueryParams.*;
+
+/**
+ * AbfsClient.
+ */
+public class AbfsClient {
+ public static final Logger LOG = LoggerFactory.getLogger(AbfsClient.class);
+ private final URL baseUrl;
+ private final SharedKeyCredentials sharedKeyCredentials;
+ private final String xMsVersion = "2018-11-09";
+ private final ExponentialRetryPolicy retryPolicy;
+ private final String filesystem;
+ private final AbfsConfiguration abfsConfiguration;
+ private final String userAgent;
+
+ private final AccessTokenProvider tokenProvider;
+
+
+ /**
+ * Creates an ABFS REST client bound to a single filesystem.
+ *
+ * @param baseUrl base URL of the filesystem; the last path segment is taken as the filesystem name
+ * @param sharedKeyCredentials credentials for SharedKey auth (presumably may be null when a token provider is used — TODO confirm)
+ * @param abfsConfiguration the ABFS configuration source
+ * @param exponentialRetryPolicy retry policy applied to failed operations
+ * @param tokenProvider OAuth access token provider (presumably may be null when SharedKey is used — TODO confirm)
+ */
+ public AbfsClient(final URL baseUrl, final SharedKeyCredentials sharedKeyCredentials,
+ final AbfsConfiguration abfsConfiguration,
+ final ExponentialRetryPolicy exponentialRetryPolicy,
+ final AccessTokenProvider tokenProvider) {
+ this.baseUrl = baseUrl;
+ this.sharedKeyCredentials = sharedKeyCredentials;
+ String baseUrlString = baseUrl.toString();
+ // Filesystem name is everything after the last '/' of the base URL.
+ this.filesystem = baseUrlString.substring(baseUrlString.lastIndexOf(FORWARD_SLASH) + 1);
+ this.abfsConfiguration = abfsConfiguration;
+ this.retryPolicy = exponentialRetryPolicy;
+
+ String sslProviderName = null;
+
+ // Only initialize the custom SSL socket factory for https endpoints.
+ if (this.baseUrl.toString().startsWith(HTTPS_SCHEME)) {
+ try {
+ SSLSocketFactoryEx.initializeDefaultFactory(this.abfsConfiguration.getPreferredSSLFactoryOption());
+ sslProviderName = SSLSocketFactoryEx.getDefaultFactory().getProviderName();
+ } catch (IOException e) {
+ // Suppress exception. Failure to init SSLSocketFactoryEx would have only performance impact.
+ }
+ }
+
+ // The SSL provider name (possibly null) is embedded in the user agent string.
+ this.userAgent = initializeUserAgent(abfsConfiguration, sslProviderName);
+ this.tokenProvider = tokenProvider;
+ }
+
+ /** @return the filesystem name parsed from the base URL. */
+ public String getFileSystem() {
+ return filesystem;
+ }
+
+ /** @return the retry policy used for failed operations (package-private for tests/services). */
+ ExponentialRetryPolicy getRetryPolicy() {
+ return retryPolicy;
+ }
+
+ /** @return the SharedKey credentials this client signs requests with (package-private). */
+ SharedKeyCredentials getSharedKeyCredentials() {
+ return sharedKeyCredentials;
+ }
+
+ List
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+/**
+ * The Http Request / Response Headers for Rest AbfsClient.
+ */
+public class AbfsHttpHeader {
+ // Immutable name/value pair representing one HTTP header.
+ private final String name;
+ private final String value;
+
+ /**
+ * Creates a header with the given name and value.
+ *
+ * @param name the header name
+ * @param value the header value
+ */
+ public AbfsHttpHeader(final String name, final String value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ /** @return the header name. */
+ public String getName() {
+ return name;
+ }
+
+ /** @return the header value. */
+ public String getValue() {
+ return value;
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
new file mode 100644
index 00000000000..de38b347248
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
@@ -0,0 +1,446 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.List;
+import java.util.UUID;
+
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+
+import org.apache.hadoop.fs.azurebfs.utils.SSLSocketFactoryEx;
+import org.codehaus.jackson.JsonFactory;
+import org.codehaus.jackson.JsonParser;
+import org.codehaus.jackson.JsonToken;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
+import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
+import org.apache.hadoop.fs.azurebfs.contracts.services.ListResultSchema;
+
+/**
+ * Represents an HTTP operation.
+ */
+public class AbfsHttpOperation {
+ private static final Logger LOG = LoggerFactory.getLogger(AbfsHttpOperation.class);
+
+ private static final int CONNECT_TIMEOUT = 30 * 1000;
+ private static final int READ_TIMEOUT = 30 * 1000;
+
+ private static final int CLEAN_UP_BUFFER_SIZE = 64 * 1024;
+
+ private static final int ONE_THOUSAND = 1000;
+ private static final int ONE_MILLION = ONE_THOUSAND * ONE_THOUSAND;
+
+ private final String method;
+ private final URL url;
+
+ private HttpURLConnection connection;
+ private int statusCode;
+ private String statusDescription;
+ private String storageErrorCode = "";
+ private String storageErrorMessage = "";
+ private String clientRequestId = "";
+ private String requestId = "";
+ private ListResultSchema listResultSchema = null;
+
+ // metrics
+ private int bytesSent;
+ private long bytesReceived;
+
+ // optional trace enabled metrics
+ private final boolean isTraceEnabled;
+ private long connectionTimeMs;
+ private long sendRequestTimeMs;
+ private long recvResponseTimeMs;
+
+ // Simple accessors for request/response state captured by this operation.
+ protected HttpURLConnection getConnection() {
+ return connection;
+ }
+
+ public String getMethod() {
+ return method;
+ }
+
+ public URL getUrl() {
+ return url;
+ }
+
+ public int getStatusCode() {
+ return statusCode;
+ }
+
+ public String getStatusDescription() {
+ return statusDescription;
+ }
+
+ // Storage error code/message are parsed from the service's error response
+ // body; they default to "" until a failure response is processed.
+ public String getStorageErrorCode() {
+ return storageErrorCode;
+ }
+
+ public String getStorageErrorMessage() {
+ return storageErrorMessage;
+ }
+
+ public String getClientRequestId() {
+ return clientRequestId;
+ }
+
+ public String getRequestId() {
+ return requestId;
+ }
+
+ public int getBytesSent() {
+ return bytesSent;
+ }
+
+ public long getBytesReceived() {
+ return bytesReceived;
+ }
+
+ public ListResultSchema getListResultSchema() {
+ return listResultSchema;
+ }
+
+ /** Reads a response header directly from the underlying connection. */
+ public String getResponseHeader(String httpHeader) {
+ return connection.getHeaderField(httpHeader);
+ }
+
+ /**
+ * Returns a single-line trace message for the request: status code,
+ * storage error code, client/request ids, optional timing metrics
+ * (only when isTraceEnabled), byte counts, HTTP method and URL.
+ */
+ @Override
+ public String toString() {
+ final String urlStr = url.toString();
+ final StringBuilder sb = new StringBuilder();
+ sb.append(statusCode);
+ sb.append(",");
+ sb.append(storageErrorCode);
+ sb.append(",cid=");
+ sb.append(clientRequestId);
+ sb.append(",rid=");
+ sb.append(requestId);
+ if (isTraceEnabled) {
+ sb.append(",connMs=");
+ sb.append(connectionTimeMs);
+ sb.append(",sendMs=");
+ sb.append(sendRequestTimeMs);
+ sb.append(",recvMs=");
+ sb.append(recvResponseTimeMs);
+ }
+ sb.append(",sent=");
+ sb.append(bytesSent);
+ sb.append(",recv=");
+ sb.append(bytesReceived);
+ sb.append(",");
+ sb.append(method);
+ sb.append(",");
+ sb.append(urlStr);
+ return sb.toString();
+ }
+
+ /**
+ * Initializes a new HTTP request and opens the connection.
+ *
+ * @param url The full URL including query string parameters.
+ * @param method The HTTP method (PUT, PATCH, POST, GET, HEAD, or DELETE).
+ * @param requestHeaders The HTTP request headers.READ_TIMEOUT
+ *
+ * @throws IOException if an error occurs.
+ */
+ public AbfsHttpOperation(final URL url, final String method, final List
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.AzureBlobFileSystemException;
+
+/**
+ * The UrlQueryBuilder for Rest AbfsClient.
+ */
+public class AbfsUriQueryBuilder {
+ private Map
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fs.azurebfs.services;
+
+import org.apache.hadoop.fs.azurebfs.contracts.services.ReadBufferStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.Queue;
+import java.util.Stack;
+import java.util.concurrent.CountDownLatch;
+
+/**
+ * The Read Buffer Manager for Rest AbfsClient.
+ */
+final class ReadBufferManager {
+ private static final Logger LOGGER = LoggerFactory.getLogger(ReadBufferManager.class);
+
+ private static final int NUM_BUFFERS = 16;
+ private static final int BLOCK_SIZE = 4 * 1024 * 1024;
+ private static final int NUM_THREADS = 8;
+ private static final int THRESHOLD_AGE_MILLISECONDS = 3000; // have to see if 3 seconds is a good threshold
+
+ private Thread[] threads = new Thread[NUM_THREADS];
+ private byte[][] buffers; // array of byte[] buffers, to hold the data that is read
+ private Stack
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import javax.crypto.Mac;
+import javax.crypto.spec.SecretKeySpec;
+import java.io.UnsupportedEncodingException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.hadoop.fs.azurebfs.constants.AbfsHttpConstants;
+import org.apache.hadoop.fs.azurebfs.constants.HttpHeaderConfigurations;
+import org.apache.hadoop.fs.azurebfs.utils.Base64;
+
+/**
+ * Represents the shared key credentials used to access an Azure Storage
+ * account.
+ */
+public class SharedKeyCredentials {
+ private static final int EXPECTED_BLOB_QUEUE_CANONICALIZED_STRING_LENGTH = 300;
+ private static final Pattern CRLF = Pattern.compile("\r\n", Pattern.LITERAL);
+ private static final String HMAC_SHA256 = "HmacSHA256";
+ /**
+ * Stores a reference to the RFC1123 date/time pattern.
+ */
+ private static final String RFC1123_PATTERN = "EEE, dd MMM yyyy HH:mm:ss z";
+
+
+ private String accountName;
+ private byte[] accountKey;
+ private Mac hmacSha256;
+
+ /**
+ * Initializes shared-key credentials for the given storage account.
+ *
+ * @param accountName the storage account name; must be non-empty
+ * @param accountKey the Base64-encoded account key; must be non-empty
+ * @throws IllegalArgumentException if either argument is null or empty
+ */
+ public SharedKeyCredentials(final String accountName,
+ final String accountKey) {
+ if (accountName == null || accountName.isEmpty()) {
+ throw new IllegalArgumentException("Invalid account name.");
+ }
+ if (accountKey == null || accountKey.isEmpty()) {
+ throw new IllegalArgumentException("Invalid account key.");
+ }
+ this.accountName = accountName;
+ // The key is stored decoded (raw bytes) for HMAC signing.
+ this.accountKey = Base64.decode(accountKey);
+ initializeMac();
+ }
+
+ /**
+ * Signs the request: sets the x-ms-date header, canonicalizes the request,
+ * and sets the SharedKey Authorization header from the HMAC-SHA256 of the
+ * canonicalized string.
+ *
+ * @param connection the connection to sign
+ * @param contentLength length of the request content in bytes, -1 if unknown
+ * @throws UnsupportedEncodingException if canonicalization fails to decode the URL
+ */
+ public void signRequest(HttpURLConnection connection, final long contentLength) throws UnsupportedEncodingException {
+
+ connection.setRequestProperty(HttpHeaderConfigurations.X_MS_DATE, getGMTTime());
+
+ final String stringToSign = canonicalize(connection, accountName, contentLength);
+
+ final String computedBase64Signature = computeHmac256(stringToSign);
+
+ connection.setRequestProperty(HttpHeaderConfigurations.AUTHORIZATION,
+ String.format("%s %s:%s", "SharedKey", accountName, computedBase64Signature));
+ }
+
+ /**
+ * Computes the Base64-encoded HMAC-SHA256 of the given string's UTF-8 bytes.
+ *
+ * @param stringToSign the canonicalized string to sign
+ * @return the Base64-encoded signature
+ */
+ private String computeHmac256(final String stringToSign) {
+ byte[] utf8Bytes;
+ try {
+ utf8Bytes = stringToSign.getBytes(AbfsHttpConstants.UTF_8);
+ } catch (final UnsupportedEncodingException e) {
+ throw new IllegalArgumentException(e);
+ }
+ byte[] hmac;
+ // synchronized: the hmacSha256 instance is shared across calls and
+ // doFinal mutates its internal state.
+ synchronized (this) {
+ hmac = hmacSha256.doFinal(utf8Bytes);
+ }
+ return Base64.encode(hmac);
+ }
+
+ /**
+ * Add x-ms- prefixed headers in a fixed order.
+ *
+ * @param conn the HttpURLConnection for the operation
+ * @param canonicalizedString the canonicalized string to add the canonicalized headerst to.
+ */
+ private static void addCanonicalizedHeaders(final HttpURLConnection conn, final StringBuilder canonicalizedString) {
+ // Look for header names that start with
+ // HeaderNames.PrefixForStorageHeader
+ // Then sort them in case-insensitive manner.
+
+ final Map
+ * If a storage service error occurred.
+ */
+ /**
+ * URL-decodes a string while preserving literal '+' characters
+ * (URLDecoder.decode would turn '+' into a space). Segments between '+'
+ * characters are decoded individually and the '+' characters re-appended.
+ *
+ * @param stringToDecode the string to decode, may be null
+ * @return the decoded string, or null if the input was null
+ */
+ private static String safeDecode(final String stringToDecode) throws UnsupportedEncodingException {
+ if (stringToDecode == null) {
+ return null;
+ }
+
+ if (stringToDecode.length() == 0) {
+ return "";
+ }
+
+ if (stringToDecode.contains(AbfsHttpConstants.PLUS)) {
+ final StringBuilder outBuilder = new StringBuilder();
+
+ int startDex = 0;
+ for (int m = 0; m < stringToDecode.length(); m++) {
+ if (stringToDecode.charAt(m) == '+') {
+ // Decode the run of characters before this '+', then keep the '+'.
+ if (m > startDex) {
+ outBuilder.append(URLDecoder.decode(stringToDecode.substring(startDex, m),
+ AbfsHttpConstants.UTF_8));
+ }
+
+ outBuilder.append(AbfsHttpConstants.PLUS);
+ startDex = m + 1;
+ }
+ }
+
+ // Decode any trailing segment after the last '+'.
+ if (startDex != stringToDecode.length()) {
+ outBuilder.append(URLDecoder.decode(stringToDecode.substring(startDex, stringToDecode.length()),
+ AbfsHttpConstants.UTF_8));
+ }
+
+ return outBuilder.toString();
+ } else {
+ return URLDecoder.decode(stringToDecode, AbfsHttpConstants.UTF_8);
+ }
+ }
+
+ /**
+ * Removes leading space characters (U+0020 only) from the given string.
+ *
+ * @param value the string to trim
+ * @return the string with leading spaces removed
+ */
+ private static String trimStart(final String value) {
+ int spaceDex = 0;
+ while (spaceDex < value.length() && value.charAt(spaceDex) == ' ') {
+ spaceDex++;
+ }
+
+ return value.substring(spaceDex);
+ }
+
+ /** Returns the connection's request property for headerName, or defaultValue if unset. */
+ private static String getHeaderValue(final HttpURLConnection conn, final String headerName, final String defaultValue) {
+ final String headerValue = conn.getRequestProperty(headerName);
+ return headerValue == null ? defaultValue : headerValue;
+ }
+
+
+ /**
+ * Constructs a canonicalized string for signing a request.
+ *
+ * @param conn the HttpURLConnection to canonicalize
+ * @param accountName the account name associated with the request
+ * @param contentLength the length of the content written to the outputstream in bytes,
+ * -1 if unknown
+ * @return a canonicalized string.
+ */
+ private String canonicalize(final HttpURLConnection conn,
+ final String accountName,
+ final Long contentLength) throws UnsupportedEncodingException {
+
+ // NOTE(review): contentLength is auto-unboxed here; a null value would
+ // throw NullPointerException — confirm callers always pass a value.
+ if (contentLength < -1) {
+ throw new IllegalArgumentException(
+ "The Content-Length header must be greater than or equal to -1.");
+ }
+
+ String contentType = getHeaderValue(conn, HttpHeaderConfigurations.CONTENT_TYPE, "");
+
+ return canonicalizeHttpRequest(conn.getURL(), accountName,
+ conn.getRequestMethod(), contentType, contentLength, null, conn);
+ }
+
+ /**
+ * Thread local for storing GMT date format.
+ */
+ private static ThreadLocalAbfsClientThrottlingAnalyzer
class with
+ * the specified name.
+ *
+ * @param name a name used to identify this instance.
+ * @throws IllegalArgumentException if name is null or empty.
+ */
+ AbfsClientThrottlingAnalyzer(String name) throws IllegalArgumentException {
+ this(name, DEFAULT_ANALYSIS_PERIOD_MS);
+ }
+
+ /**
+ * Creates an instance of the {@code AbfsClientThrottlingAnalyzer} class with
+ * the specified name and period.
+ *
+ * @param name A name used to identify this instance.
+ * @param period The frequency, in milliseconds, at which metrics are
+ * analyzed.
+ * @throws IllegalArgumentException If name is null or empty.
+ * If period is less than 1000 or greater than 30000 milliseconds.
+ */
+ AbfsClientThrottlingAnalyzer(String name, int period)
+ throws IllegalArgumentException {
+ Preconditions.checkArgument(
+ StringUtils.isNotEmpty(name),
+ "The argument 'name' cannot be null or empty.");
+ Preconditions.checkArgument(
+ period >= MIN_ANALYSIS_PERIOD_MS && period <= MAX_ANALYSIS_PERIOD_MS,
+ "The argument 'period' must be between 1000 and 30000.");
+ this.name = name;
+ this.analysisPeriodMs = period;
+ this.blobMetrics = new AtomicReference+
character, rather
+ * than replacing it with a space character.
+ *
+ * @param stringToDecode A {@code String} that represents the string to decode.
+ * @return A {@code String} that represents the decoded string.
+ * @return A {@code String} that represents the current GMT date/time using the RFC1123 pattern.
+ */
+ static String getGMTTime() {
+ return getGMTTime(new Date());
+ }
+
+ /**
+ * Returns the GMT date/time String for the specified value using the RFC1123 pattern.
+ *
+ * @param date a {@code Date} object that represents the date to convert to
+ * GMT date/time in the RFC1123 pattern
+ * @return a {@code String} that represents the GMT date/time for the
+ * specified value using the RFC1123 pattern
+ */
+ static String getGMTTime(final Date date) {
+ return rfc1123GmtDateTimeFormatter.get().format(date);
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ShellDecryptionKeyProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ShellDecryptionKeyProvider.java
new file mode 100644
index 00000000000..bdac922fb3a
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ShellDecryptionKeyProvider.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
+import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.KeyProviderException;
+import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Shell decryption key provider which invokes an external script that will
+ * perform the key decryption.
+ */
+public class ShellDecryptionKeyProvider extends SimpleKeyProvider {
+ private static final Logger LOG = LoggerFactory.getLogger(ShellDecryptionKeyProvider.class);
+
+ /**
+ * Retrieves the encrypted key envelope via the parent provider, then runs
+ * the configured external script with the envelope appended as the last
+ * argument; the script's stdout (trimmed) is returned as the decrypted key.
+ *
+ * @param accountName name of the storage account
+ * @param rawConfig the raw Hadoop configuration
+ * @return the decrypted storage account key
+ * @throws KeyProviderException if the configuration cannot be initialized,
+ * the script path is not configured, or the script fails
+ */
+ @Override
+ public String getStorageAccountKey(String accountName, Configuration rawConfig)
+ throws KeyProviderException {
+ String envelope = super.getStorageAccountKey(accountName, rawConfig);
+
+ AbfsConfiguration abfsConfig;
+ try {
+ abfsConfig = new AbfsConfiguration(rawConfig, accountName);
+ } catch(IllegalAccessException | IOException e) {
+ throw new KeyProviderException("Unable to get key from credential providers.", e);
+ }
+
+ final String command = abfsConfig.get(ConfigurationKeys.AZURE_KEY_ACCOUNT_SHELLKEYPROVIDER_SCRIPT);
+ if (command == null) {
+ throw new KeyProviderException(
+ "Script path is not specified via fs.azure.shellkeyprovider.script");
+ }
+
+ // NOTE(review): naive split on single spaces — a script path containing
+ // spaces will be broken into multiple arguments; confirm acceptable.
+ String[] cmd = command.split(" ");
+ String[] cmdWithEnvelope = Arrays.copyOf(cmd, cmd.length + 1);
+ cmdWithEnvelope[cmdWithEnvelope.length - 1] = envelope;
+
+ String decryptedKey = null;
+ try {
+ decryptedKey = Shell.execCommand(cmdWithEnvelope);
+ } catch (IOException ex) {
+ throw new KeyProviderException(ex);
+ }
+
+ // trim any whitespace
+ return decryptedKey.trim();
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SimpleKeyProvider.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SimpleKeyProvider.java
new file mode 100644
index 00000000000..727e1b3fd3f
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/SimpleKeyProvider.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.services;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.azurebfs.AbfsConfiguration;
+import org.apache.hadoop.fs.azurebfs.constants.ConfigurationKeys;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.KeyProviderException;
+import org.apache.hadoop.fs.azurebfs.contracts.exceptions.InvalidConfigurationValueException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Key provider that simply returns the storage account key from the
+ * configuration as plaintext.
+ */
+public class SimpleKeyProvider implements KeyProvider {
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleKeyProvider.class);
+
+ /**
+ * Looks up the storage account key from the configuration (including any
+ * configured credential providers) and returns it as plaintext.
+ *
+ * @param accountName name of the storage account
+ * @param rawConfig the raw Hadoop configuration to read the key from
+ * @return the plaintext account key, or null if a credential provider
+ * lookup failed with an IOException
+ * @throws KeyProviderException if the ABFS configuration cannot be initialized
+ */
+ @Override
+ public String getStorageAccountKey(String accountName, Configuration rawConfig)
+ throws KeyProviderException {
+ String key = null;
+
+ try {
+ AbfsConfiguration abfsConfig = new AbfsConfiguration(rawConfig, accountName);
+ key = abfsConfig.getPasswordString(ConfigurationKeys.FS_AZURE_ACCOUNT_KEY_PROPERTY_NAME);
+ } catch(IllegalAccessException | InvalidConfigurationValueException e) {
+ throw new KeyProviderException("Failure to initialize configuration", e);
+ } catch(IOException ioe) {
+ // Pass the exception as SLF4J's dedicated throwable argument (no "{}"
+ // placeholder) so the full stack trace is logged, not just toString().
+ LOG.warn("Unable to get key from credential providers.", ioe);
+ }
+
+ return key;
+ }
+}
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/package-info.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/package-info.java
new file mode 100644
index 00000000000..97c1d71251f
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/package-info.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Evolving
+package org.apache.hadoop.fs.azurebfs.services;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/Base64.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/Base64.java
new file mode 100644
index 00000000000..c1910060420
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/Base64.java
@@ -0,0 +1,329 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.utils;
+
+/**
+ * Base64
+ */
/**
 * Minimal Base64 codec (standard RFC 4648 alphabet with '=' padding),
 * kept local to avoid a dependency on an external codec library.
 */
public final class Base64 {
  /**
   * The 64 characters of the Base64 alphabet, indexed by 6-bit value.
   */
  private static final String BASE_64_CHARS =
      "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

  /**
   * Decoded values for the ASCII range; -1 marks an invalid character and
   * -2 marks the '=' pad character.
   */
  private static final byte[] DECODE_64 = {
      -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, //   0- 15
      -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, //  16- 31
      -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, //  32- 47
      52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, -2, -1, -1, //  48- 63
      -1,  0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, //  64- 79
      15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, //  80- 95
      -1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, //  96-111
      41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1  // 112-127
  };

  /** Shared message for every malformed-input rejection. */
  private static final String INVALID_INPUT_MESSAGE =
      "The data parameter is not a valid base64-encoded string.";

  /**
   * Maps one character to its 6-bit value.
   *
   * @param c the character to decode.
   * @return the 6-bit value, or -2 for the '=' pad character.
   * @throws IllegalArgumentException for any character outside the Base64
   *         alphabet. Characters above 127 are rejected here explicitly;
   *         previously the (byte) cast produced a negative index and an
   *         ArrayIndexOutOfBoundsException.
   */
  private static int decodeChar(final char c) {
    final int value = (c < DECODE_64.length) ? DECODE_64[c] : -1;
    if (value == -1) {
      throw new IllegalArgumentException(INVALID_INPUT_MESSAGE);
    }
    return value;
  }

  /**
   * Decodes a given Base64 string into its corresponding byte array.
   *
   * @param data the Base64 string, as a {@code String} object, to decode
   * @return the corresponding decoded byte array
   * @throws IllegalArgumentException if the string is not a valid
   *         base64-encoded string (null, length not a multiple of 4,
   *         invalid characters, or misplaced padding)
   */
  public static byte[] decode(final String data) {
    // Reject non-quad lengths up front; the loop below reads 4 chars at a
    // time and would otherwise run past the end of the string.
    if (data == null || data.length() % 4 != 0) {
      throw new IllegalArgumentException(INVALID_INPUT_MESSAGE);
    }

    int byteArrayLength = 3 * data.length() / 4;
    if (data.endsWith("==")) {
      byteArrayLength -= 2;
    } else if (data.endsWith("=")) {
      byteArrayLength -= 1;
    }

    final byte[] retArray = new byte[byteArrayLength];
    int byteDex = 0;

    for (int charDex = 0; charDex < data.length(); charDex += 4) {
      // Decode 4 characters into up to 3 bytes.
      final int char1 = decodeChar(data.charAt(charDex));
      final int char2 = decodeChar(data.charAt(charDex + 1));
      final int char3 = decodeChar(data.charAt(charDex + 2));
      final int char4 = decodeChar(data.charAt(charDex + 3));

      // Padding is only legal in the last one or two positions of the final
      // quad. (Interior padding used to be silently accepted, leaving
      // zero-padded garbage at the end of the result array.)
      final boolean lastQuad = charDex + 4 == data.length();
      if (char1 == -2 || char2 == -2
          || (char3 == -2 && (char4 != -2 || !lastQuad))
          || (char4 == -2 && char3 != -2 && !lastQuad)) {
        throw new IllegalArgumentException(INVALID_INPUT_MESSAGE);
      }

      int tVal = char1 << 18;
      tVal += char2 << 12;
      tVal += (char3 & 0xFF) << 6;
      tVal += char4 & 0xFF;

      if (char3 == -2) {
        // "==" pad: only bits 16-23 carry data -> 1 byte.
        retArray[byteDex++] = (byte) (tVal >> 16 & 0xFF);
      } else if (char4 == -2) {
        // "=" pad: bits 8-23 carry data -> 2 bytes.
        retArray[byteDex++] = (byte) (tVal >> 16 & 0xFF);
        retArray[byteDex++] = (byte) (tVal >> 8 & 0xFF);
      } else {
        // No pad: all 24 bits carry data -> 3 bytes.
        retArray[byteDex++] = (byte) (tVal >> 16 & 0xFF);
        retArray[byteDex++] = (byte) (tVal >> 8 & 0xFF);
        retArray[byteDex++] = (byte) (tVal & 0xFF);
      }
    }
    return retArray;
  }

  /**
   * Decodes a given Base64 string into its corresponding boxed byte array.
   * Delegates to {@link #decode(String)}; previously this was a verbatim
   * copy of that method's body.
   *
   * @param data the Base64 string, as a {@code String} object, to decode
   * @return the corresponding decoded {@code Byte[]} array
   * @throws IllegalArgumentException if the string is not a valid
   *         base64-encoded string
   */
  public static Byte[] decodeAsByteObjectArray(final String data) {
    final byte[] decoded = decode(data);
    final Byte[] boxed = new Byte[decoded.length];
    for (int i = 0; i < decoded.length; i++) {
      boxed[i] = decoded[i];
    }
    return boxed;
  }

  /**
   * Encodes a byte array as a Base64 string.
   *
   * @param data the byte array to encode
   * @return the Base64-encoded string, as a {@code String} object
   */
  public static String encode(final byte[] data) {
    // Exact output size: 4 chars per started 3-byte group.
    final StringBuilder builder = new StringBuilder(4 * ((data.length + 2) / 3));
    final int fullGroupEnd = data.length - data.length % 3;

    int j = 0;
    for (; j < fullGroupEnd; j += 3) {
      final int n = ((data[j] & 0xFF) << 16)
          | ((data[j + 1] & 0xFF) << 8)
          | (data[j + 2] & 0xFF);
      appendQuad(builder, n);
    }

    final int remainder = data.length - j;
    if (remainder == 1) {
      // One trailing byte encodes to 2 chars plus "==" padding.
      final int n = (data[j] & 0xFF) << 16;
      builder.append(BASE_64_CHARS.charAt(n >>> 18 & 0x3F));
      builder.append(BASE_64_CHARS.charAt(n >>> 12 & 0x3F));
      builder.append("==");
    } else if (remainder == 2) {
      // Two trailing bytes encode to 3 chars plus "=" padding.
      final int n = ((data[j] & 0xFF) << 16) | ((data[j + 1] & 0xFF) << 8);
      builder.append(BASE_64_CHARS.charAt(n >>> 18 & 0x3F));
      builder.append(BASE_64_CHARS.charAt(n >>> 12 & 0x3F));
      builder.append(BASE_64_CHARS.charAt(n >>> 6 & 0x3F));
      builder.append('=');
    }
    return builder.toString();
  }

  /** Appends the four Base64 characters encoding one 24-bit group. */
  private static void appendQuad(final StringBuilder builder, final int n) {
    builder.append(BASE_64_CHARS.charAt(n >>> 18 & 0x3F));
    builder.append(BASE_64_CHARS.charAt(n >>> 12 & 0x3F));
    builder.append(BASE_64_CHARS.charAt(n >>> 6 & 0x3F));
    builder.append(BASE_64_CHARS.charAt(n & 0x3F));
  }

  /**
   * Encodes a boxed byte array as a Base64 string. Delegates to
   * {@link #encode(byte[])}; previously this was a verbatim copy of that
   * method's body. A null element still raises NullPointerException on
   * unboxing, exactly as the inline {@code data[j] & 0xFF} did.
   *
   * @param data the byte array to encode
   * @return the Base64-encoded string, as a {@code String} object
   */
  public static String encode(final Byte[] data) {
    final byte[] bytes = new byte[data.length];
    for (int i = 0; i < data.length; i++) {
      bytes[i] = data[i];
    }
    return encode(bytes);
  }

  /**
   * Determines whether the given string contains only Base64 characters,
   * with padding (if any) confined to the last two positions.
   *
   * @param data the string, as a {@code String} object, to validate
   * @return {@code true} if {@code data} is a valid Base64 string,
   *         otherwise {@code false}
   */
  public static boolean validateIsBase64String(final String data) {
    if (data == null || data.length() % 4 != 0) {
      return false;
    }

    for (int m = 0; m < data.length(); m++) {
      final char c = data.charAt(m);
      // Bounds-check before indexing: chars above 127 previously produced a
      // negative (byte) index and an ArrayIndexOutOfBoundsException.
      final int value = (c < DECODE_64.length) ? DECODE_64[c] : -1;

      if (value == -1) {
        return false;
      }
      if (value == -2) {
        // Pad may only be the last char, or the last two chars.
        if (m < data.length() - 2) {
          return false;
        }
        if (m == data.length() - 2 && data.charAt(m + 1) != '=') {
          return false;
        }
      }
    }

    return true;
  }

  /**
   * Private default ctor: this is a static utility class.
   */
  private Base64() {
    // No op
  }
}
\ No newline at end of file
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
new file mode 100644
index 00000000000..1fc81e207ab
--- /dev/null
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/utils/SSLSocketFactoryEx.java
@@ -0,0 +1,241 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.azurebfs.utils;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.Socket;
+import java.net.SocketException;
+import java.security.KeyManagementException;
+import java.security.NoSuchAlgorithmException;
+import java.util.ArrayList;
+
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.SSLSocket;
+import javax.net.ssl.SSLSocketFactory;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+/*
+import org.wildfly.openssl.OpenSSLProvider;
+*/
+
+/**
+ * Extension to use native OpenSSL library instead of JSSE for better
+ * performance.
+ *
+ */
+public final class SSLSocketFactoryEx extends SSLSocketFactory {
+
  /**
   * Supported SSL channel implementations.
   *
   * {@code Default} prefers OpenSSL and falls back to {@code Default_JSSE}
   * if OpenSSL fails to load; {@code OpenSSL} and {@code Default_JSSE}
   * force the respective implementation (see initializeSSLContext).
   */
  public enum SSLChannelMode {
    OpenSSL,
    Default,
    Default_JSSE
  }
+
+ private static SSLSocketFactoryEx instance = null;
+ private static final Logger LOG = LoggerFactory.getLogger(
+ SSLSocketFactoryEx.class);
+ private String providerName;
+ private SSLContext ctx;
+ private String[] ciphers;
+ private SSLChannelMode channelMode;
+
+ /**
+ * Initialize a singleton SSL socket factory.
+ *
+ * @param preferredMode applicable only if the instance is not initialized.
+ * @throws IOException if an error occurs.
+ */
+ public static synchronized void initializeDefaultFactory(
+ SSLChannelMode preferredMode) throws IOException {
+ if (instance == null) {
+ instance = new SSLSocketFactoryEx(preferredMode);
+ }
+ }
+
  /**
   * Singleton instance of the SSLSocketFactory.
   *
   * SSLSocketFactory must be initialized with appropriate SSLChannelMode
   * using initializeDefaultFactory method.
   *
   * @return instance of the SSLSocketFactory; {@code null} until
   *         initializeDefaultFactory has been called.
   */
  public static SSLSocketFactoryEx getDefaultFactory() {
    return instance;
  }
+/*
+ static {
+ OpenSSLProvider.register();
+ }
+*/
+ private SSLSocketFactoryEx(SSLChannelMode preferredChannelMode)
+ throws IOException {
+ try {
+ initializeSSLContext(preferredChannelMode);
+ } catch (NoSuchAlgorithmException e) {
+ throw new IOException(e);
+ } catch (KeyManagementException e) {
+ throw new IOException(e);
+ }
+
+ // Get list of supported cipher suits from the SSL factory.
+ SSLSocketFactory factory = ctx.getSocketFactory();
+ String[] defaultCiphers = factory.getSupportedCipherSuites();
+ String version = System.getProperty("java.version");
+
+ ciphers = (channelMode == SSLChannelMode.Default_JSSE
+ && version.startsWith("1.8"))
+ ? alterCipherList(defaultCiphers) : defaultCiphers;
+
+ providerName = ctx.getProvider().getName() + "-"
+ + ctx.getProvider().getVersion();
+ }
+
  /**
   * Creates the SSLContext for the preferred channel mode and records the
   * mode actually selected in {@code channelMode}.
   *
   * Default: try OpenSSL first, fall back to the JSSE default on failure.
   * OpenSSL: require OpenSSL; failure propagates to the caller.
   * Default_JSSE: always use the JSSE default context.
   *
   * @param preferredChannelMode the requested channel mode.
   * @throws NoSuchAlgorithmException if the required context is unavailable.
   * @throws KeyManagementException if context initialization fails.
   */
  private void initializeSSLContext(SSLChannelMode preferredChannelMode)
      throws NoSuchAlgorithmException, KeyManagementException {
    switch (preferredChannelMode) {
    case Default:
      try {
        // "openssl.TLS" is expected from an OpenSSL JSSE provider
        // (registration currently commented out above -- see the static
        // block referencing OpenSSLProvider).
        ctx = SSLContext.getInstance("openssl.TLS");
        ctx.init(null, null, null);
        channelMode = SSLChannelMode.OpenSSL;
      } catch (NoSuchAlgorithmException e) {
        LOG.warn("Failed to load OpenSSL. Falling back to the JSSE default.");
        ctx = SSLContext.getDefault();
        channelMode = SSLChannelMode.Default_JSSE;
      }
      break;
    case OpenSSL:
      ctx = SSLContext.getInstance("openssl.TLS");
      ctx.init(null, null, null);
      channelMode = SSLChannelMode.OpenSSL;
      break;
    case Default_JSSE:
      ctx = SSLContext.getDefault();
      channelMode = SSLChannelMode.Default_JSSE;
      break;
    default:
      // Unreachable unless a new enum constant is added without a case.
      throw new AssertionError("Unknown channel mode: "
          + preferredChannelMode);
    }
  }
+
  /**
   * @return the SSL provider name and version ("name-version") captured
   *         at construction time.
   */
  public String getProviderName() {
    return providerName;
  }

  /** @return a defensive copy of the cipher suites enabled on new sockets. */
  @Override
  public String[] getDefaultCipherSuites() {
    return ciphers.clone();
  }

  /** @return a defensive copy of the cipher suites enabled on new sockets. */
  @Override
  public String[] getSupportedCipherSuites() {
    return ciphers.clone();
  }
+
+ public Socket createSocket() throws IOException {
+ SSLSocketFactory factory = ctx.getSocketFactory();
+ SSLSocket ss = (SSLSocket) factory.createSocket();
+ configureSocket(ss);
+ return ss;
+ }
+
  /**
   * Layers an SSL socket over an existing socket, restricted to the
   * configured cipher suites.
   *
   * @param s the existing socket.
   * @param host the server host.
   * @param port the server port.
   * @param autoClose whether closing this socket closes the underlying one.
   * @return the layered SSL socket.
   * @throws IOException if the socket cannot be created.
   */
  @Override
  public Socket createSocket(Socket s, String host, int port,
      boolean autoClose) throws IOException {
    SSLSocketFactory factory = ctx.getSocketFactory();
    SSLSocket ss = (SSLSocket) factory.createSocket(s, host, port, autoClose);

    configureSocket(ss);
    return ss;
  }

  /**
   * Creates an SSL socket bound to the given local address/port and
   * connected to the given remote address/port, restricted to the
   * configured cipher suites.
   *
   * @param address the remote address.
   * @param port the remote port.
   * @param localAddress the local address to bind to.
   * @param localPort the local port to bind to.
   * @return the connected SSL socket.
   * @throws IOException if the socket cannot be created.
   */
  @Override
  public Socket createSocket(InetAddress address, int port,
      InetAddress localAddress, int localPort)
      throws IOException {
    SSLSocketFactory factory = ctx.getSocketFactory();
    SSLSocket ss = (SSLSocket) factory
        .createSocket(address, port, localAddress, localPort);

    configureSocket(ss);
    return ss;
  }

  /**
   * Creates an SSL socket bound to the given local host/port and connected
   * to the named remote host/port, restricted to the configured cipher
   * suites.
   *
   * @param host the remote host name.
   * @param port the remote port.
   * @param localHost the local address to bind to.
   * @param localPort the local port to bind to.
   * @return the connected SSL socket.
   * @throws IOException if the socket cannot be created.
   */
  @Override
  public Socket createSocket(String host, int port, InetAddress localHost,
      int localPort) throws IOException {
    SSLSocketFactory factory = ctx.getSocketFactory();
    SSLSocket ss = (SSLSocket) factory
        .createSocket(host, port, localHost, localPort);

    configureSocket(ss);

    return ss;
  }

  /**
   * Creates an SSL socket connected to the given remote address/port,
   * restricted to the configured cipher suites.
   *
   * @param host the remote address.
   * @param port the remote port.
   * @return the connected SSL socket.
   * @throws IOException if the socket cannot be created.
   */
  @Override
  public Socket createSocket(InetAddress host, int port) throws IOException {
    SSLSocketFactory factory = ctx.getSocketFactory();
    SSLSocket ss = (SSLSocket) factory.createSocket(host, port);

    configureSocket(ss);

    return ss;
  }

  /**
   * Creates an SSL socket connected to the named remote host/port,
   * restricted to the configured cipher suites.
   *
   * @param host the remote host name.
   * @param port the remote port.
   * @return the connected SSL socket.
   * @throws IOException if the socket cannot be created.
   */
  @Override
  public Socket createSocket(String host, int port) throws IOException {
    SSLSocketFactory factory = ctx.getSocketFactory();
    SSLSocket ss = (SSLSocket) factory.createSocket(host, port);

    configureSocket(ss);

    return ss;
  }
+
  /**
   * Restricts a newly created socket to the cipher suites selected for
   * this factory.
   *
   * @param ss the socket to configure.
   * @throws SocketException if the socket cannot be configured.
   */
  private void configureSocket(SSLSocket ss) throws SocketException {
    ss.setEnabledCipherSuites(ciphers);
  }
+
+ private String[] alterCipherList(String[] defaultCiphers) {
+
+ ArrayList