From b75ced1e5d589b01da02afdb769aa84d89cb8702 Mon Sep 17 00:00:00 2001
From: Steve Loughran
Date: Wed, 18 Aug 2021 11:39:17 +0100
Subject: [PATCH] HADOOP-17836. Improve logging on ABFS error reporting (#3281)

Contributed by Steve Loughran.
---
 .../exceptions/InvalidAbfsRestOperationException.java  | 7 +++++--
 .../hadoop/fs/azurebfs/services/AbfsHttpOperation.java | 4 +++-
 .../hadoop/fs/azurebfs/services/AbfsRestOperation.java | 6 +++---
 .../hadoop/fs/azurebfs/services/ReadBufferManager.java | 4 ++--
 .../hadoop/fs/azurebfs/services/ReadBufferWorker.java  | 6 +++++-
 5 files changed, 18 insertions(+), 9 deletions(-)

diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAbfsRestOperationException.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAbfsRestOperationException.java
index aba1d8c1efa..19620212134 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAbfsRestOperationException.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/contracts/exceptions/InvalidAbfsRestOperationException.java
@@ -24,7 +24,8 @@
 import org.apache.hadoop.fs.azurebfs.contracts.services.AzureServiceErrorCode;
 
 /**
- * Exception to wrap invalid Azure service error responses.
+ * Exception to wrap invalid Azure service error responses and exceptions
+ * raised on network IO.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
@@ -34,7 +35,9 @@ public InvalidAbfsRestOperationException(
     super(
         AzureServiceErrorCode.UNKNOWN.getStatusCode(),
         AzureServiceErrorCode.UNKNOWN.getErrorCode(),
-        "InvalidAbfsRestOperationException",
+        innerException != null
+            ? innerException.toString()
+            : "InvalidAbfsRestOperationException",
         innerException);
   }
 }
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
index cd8cfa2147a..6f7ee2e1622 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsHttpOperation.java
@@ -409,7 +409,9 @@ public void processResponse(final byte[] buffer, final int offset, final int len
           }
         }
       } catch (IOException ex) {
-        LOG.error("UnexpectedError: ", ex);
+        LOG.warn("IO/Network error: {} {}: {}",
+            method, getMaskedUrl(), ex.getMessage());
+        LOG.debug("IO Error: ", ex);
         throw ex;
       } finally {
         if (this.isTraceEnabled) {
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java
index 2c669e9e101..00da9b66013 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/AbfsRestOperation.java
@@ -303,7 +303,7 @@ private boolean executeHttpOperation(final int retryCount,
     } catch (UnknownHostException ex) {
       String hostname = null;
       hostname = httpOperation.getHost();
-      LOG.warn("Unknown host name: %s. Retrying to resolve the host name...",
+      LOG.warn("Unknown host name: {}. Retrying to resolve the host name...",
          hostname);
       if (!client.getRetryPolicy().shouldRetry(retryCount, -1)) {
         throw new InvalidAbfsRestOperationException(ex);
@@ -311,7 +311,7 @@ private boolean executeHttpOperation(final int retryCount,
       return false;
     } catch (IOException ex) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("HttpRequestFailure: {}, {}", httpOperation.toString(), ex);
+        LOG.debug("HttpRequestFailure: {}, {}", httpOperation, ex);
       }
 
       if (!client.getRetryPolicy().shouldRetry(retryCount, -1)) {
@@ -323,7 +323,7 @@ private boolean executeHttpOperation(final int retryCount,
       intercept.updateMetrics(operationType, httpOperation);
     }
 
-    LOG.debug("HttpRequest: {}: {}", operationType, httpOperation.toString());
+    LOG.debug("HttpRequest: {}: {}", operationType, httpOperation);
 
     if (client.getRetryPolicy().shouldRetry(retryCount, httpOperation.getStatusCode())) {
       return false;
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferManager.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferManager.java
index 0f91afe0982..6f194270e21 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferManager.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferManager.java
@@ -455,8 +455,8 @@ ReadBuffer getNextBlockToRead() throws InterruptedException {
    */
   void doneReading(final ReadBuffer buffer, final ReadBufferStatus result, final int bytesActuallyRead) {
     if (LOGGER.isTraceEnabled()) {
-      LOGGER.trace("ReadBufferWorker completed file {} for offset {} bytes {}",
-          buffer.getStream().getPath(), buffer.getOffset(), bytesActuallyRead);
+      LOGGER.trace("ReadBufferWorker completed read file {} for offset {} outcome {} bytes {}",
+          buffer.getStream().getPath(), buffer.getOffset(), result, bytesActuallyRead);
     }
     synchronized (this) {
       // If this buffer has already been purged during
diff --git a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferWorker.java b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferWorker.java
index 21c9d1be766..a30f06261ef 100644
--- a/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferWorker.java
+++ b/hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azurebfs/services/ReadBufferWorker.java
@@ -21,6 +21,7 @@
 import java.io.IOException;
 import java.util.concurrent.CountDownLatch;
 
+import org.apache.hadoop.fs.PathIOException;
 import org.apache.hadoop.fs.azurebfs.contracts.services.ReadBufferStatus;
 
 class ReadBufferWorker implements Runnable {
@@ -73,8 +74,11 @@ public void run() {
               buffer.getTracingContext());
 
           bufferManager.doneReading(buffer, ReadBufferStatus.AVAILABLE, bytesRead);  // post result back to ReadBufferManager
+        } catch (IOException ex) {
+          buffer.setErrException(ex);
+          bufferManager.doneReading(buffer, ReadBufferStatus.READ_FAILED, 0);
         } catch (Exception ex) {
-          buffer.setErrException(new IOException(ex));
+          buffer.setErrException(new PathIOException(buffer.getStream().getPath(), ex));
           bufferManager.doneReading(buffer, ReadBufferStatus.READ_FAILED, 0);
         }
       }