HDFS-16453. Upgrade okhttp from 2.7.5 to 4.9.3 (#4229)

Co-authored-by: Ashutosh Gupta <ashugpt@amazon.com>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
(cherry picked from commit fb910bd906)

 Conflicts:
	hadoop-project/pom.xml
Author: Ashutosh Gupta
Date: 2022-05-20 18:53:14 +01:00
Committed by: Akira Ajisaka
Commit: 57fe613299
Parent: 90ec4418c7
9 changed files with 130 additions and 83 deletions

@@ -243,7 +243,7 @@ com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
 com.google.j2objc:j2objc-annotations:1.3
 com.microsoft.azure:azure-storage:7.0.1
 com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp:okhttp:2.7.5
+com.squareup.okhttp3:okhttp:4.9.3
 com.squareup.okio:okio:1.6.0
 com.yammer.metrics:metrics-core:2.2.0
 com.zaxxer:HikariCP-java7:2.4.12

@@ -114,6 +114,18 @@
           <groupId>org.eclipse.jetty</groupId>
           <artifactId>jetty-server</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.jetbrains.kotlin</groupId>
+          <artifactId>kotlin-stdlib</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.jetbrains.kotlin</groupId>
+          <artifactId>kotlin-stdlib-common</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.squareup.okhttp3</groupId>
+          <artifactId>okhttp</artifactId>
+        </exclusion>
         <exclusion>
           <groupId>com.sun.jersey</groupId>
           <artifactId>jersey-core</artifactId>

@@ -93,4 +93,17 @@
     <Bug pattern="EI_EXPOSE_REP" />
   </Match>
+  <!-- okhttp classes from Kotlin are not analysed for NP check. -->
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.web.oauth2.ConfRefreshTokenBasedAccessTokenProvider" />
+    <Method name="refresh" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.web.oauth2.CredentialBasedAccessTokenProvider" />
+    <Method name="refresh" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+  </Match>
 </FindBugsFilter>
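
The two filter entries above are needed because okhttp 4.x is written in Kotlin and declares Response.body() as nullable, so SpotBugs reports NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE when refresh() dereferences it directly. An alternative to suppressing the warning is an explicit null check; the following is only an illustrative sketch (class and method names are made up), not code from the patch:

import java.io.IOException;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.ResponseBody;

public final class ResponseBodyExample {
  // Hypothetical helper: read a response body without tripping the NP check.
  static String readBody(OkHttpClient client, Request request) throws IOException {
    try (Response response = client.newCall(request).execute()) {
      ResponseBody body = response.body();  // declared nullable in okhttp 4.x
      if (body == null) {
        throw new IOException("Empty response body from " + request.url());
      }
      return body.string();                 // safe after the explicit null check
    }
  }
}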

@@ -35,9 +35,17 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
   <dependencies>
     <dependency>
-      <groupId>com.squareup.okhttp</groupId>
+      <groupId>com.squareup.okhttp3</groupId>
       <artifactId>okhttp</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.jetbrains.kotlin</groupId>
+      <artifactId>kotlin-stdlib</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.jetbrains.kotlin</groupId>
+      <artifactId>kotlin-stdlib-common</artifactId>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>

@@ -18,10 +18,15 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
 import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -102,37 +103,34 @@ public synchronized String getAccessToken() throws IOException {
   }
 
   void refresh() throws IOException {
-    try {
-      OkHttpClient client = new OkHttpClient();
-      client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-      client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-
-      String bodyString = Utils.postBody(GRANT_TYPE, REFRESH_TOKEN,
-          REFRESH_TOKEN, refreshToken,
-          CLIENT_ID, clientId);
-
-      RequestBody body = RequestBody.create(URLENCODED, bodyString);
-
-      Request request = new Request.Builder()
-          .url(refreshURL)
-          .post(body)
-          .build();
-      Response responseBody = client.newCall(request).execute();
-
-      if (responseBody.code() != HttpStatus.SC_OK) {
-        throw new IllegalArgumentException("Received invalid http response: "
-            + responseBody.code() + ", text = " + responseBody.toString());
+    OkHttpClient client =
+        new OkHttpClient.Builder().connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+            TimeUnit.MILLISECONDS)
+            .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+            .build();
+
+    String bodyString =
+        Utils.postBody(GRANT_TYPE, REFRESH_TOKEN, REFRESH_TOKEN, refreshToken, CLIENT_ID, clientId);
+
+    RequestBody body = RequestBody.create(bodyString, URLENCODED);
+
+    Request request = new Request.Builder().url(refreshURL).post(body).build();
+    try (Response response = client.newCall(request).execute()) {
+      if (!response.isSuccessful()) {
+        throw new IOException("Unexpected code " + response);
+      }
+      if (response.code() != HttpStatus.SC_OK) {
+        throw new IllegalArgumentException(
+            "Received invalid http response: " + response.code() + ", text = "
+                + response.toString());
       }
 
-      Map<?, ?> response = JsonSerialization.mapReader().readValue(
-          responseBody.body().string());
+      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(response.body().string());
 
-      String newExpiresIn = response.get(EXPIRES_IN).toString();
+      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
       accessTokenTimer.setExpiresIn(newExpiresIn);
 
-      accessToken = response.get(ACCESS_TOKEN).toString();
+      accessToken = responseBody.get(ACCESS_TOKEN).toString();
     } catch (Exception e) {
       throw new IOException("Exception while refreshing access token", e);
     }
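
The rewrite above boils down to three okhttp 2.x to 4.x API changes: timeouts are configured on OkHttpClient.Builder instead of setters on the client, RequestBody.create() now takes the content before the MediaType, and Response implements Closeable so the call can sit in a try-with-resources. A minimal, self-contained sketch of the same pattern (the URL, form body, and timeout value are placeholders, not values from the patch):

import java.io.IOException;
import java.util.concurrent.TimeUnit;

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public final class OkHttp4PostSketch {
  private static final MediaType URLENCODED =
      MediaType.parse("application/x-www-form-urlencoded; charset=utf-8");

  public static void main(String[] args) throws IOException {
    // okhttp 4.x: timeouts are set on the Builder, not via setters on the client.
    OkHttpClient client = new OkHttpClient.Builder()
        .connectTimeout(60_000, TimeUnit.MILLISECONDS)
        .readTimeout(60_000, TimeUnit.MILLISECONDS)
        .build();

    // okhttp 4.x: the (content, MediaType) argument order replaces (MediaType, content).
    RequestBody body = RequestBody.create("grant_type=refresh_token", URLENCODED);

    Request request = new Request.Builder()
        .url("https://example.invalid/oauth2/token")  // placeholder URL
        .post(body)
        .build();

    // Response is Closeable in okhttp 4.x, so try-with-resources releases the connection.
    try (Response response = client.newCall(request).execute()) {
      if (!response.isSuccessful()) {
        throw new IOException("Unexpected code " + response);
      }
      System.out.println(response.body().string());
    }
  }
}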

@@ -18,10 +18,15 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@
 import org.apache.hadoop.util.Timer;
 import org.apache.http.HttpStatus;
 
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
 import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
 import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -96,38 +97,38 @@ public synchronized String getAccessToken() throws IOException {
   }
 
   void refresh() throws IOException {
-    try {
-      OkHttpClient client = new OkHttpClient();
-      client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
-      client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
-          TimeUnit.MILLISECONDS);
+    OkHttpClient client = new OkHttpClient.Builder()
+        .connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+        .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+        .build();
 
-      String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
-          GRANT_TYPE, CLIENT_CREDENTIALS,
-          CLIENT_ID, clientId);
+    String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
+        GRANT_TYPE, CLIENT_CREDENTIALS,
+        CLIENT_ID, clientId);
 
-      RequestBody body = RequestBody.create(URLENCODED, bodyString);
+    RequestBody body = RequestBody.create(bodyString, URLENCODED);
 
-      Request request = new Request.Builder()
-          .url(refreshURL)
-          .post(body)
-          .build();
-      Response responseBody = client.newCall(request).execute();
-
-      if (responseBody.code() != HttpStatus.SC_OK) {
-        throw new IllegalArgumentException("Received invalid http response: "
-            + responseBody.code() + ", text = " + responseBody.toString());
+    Request request = new Request.Builder()
+        .url(refreshURL)
+        .post(body)
+        .build();
+
+    try (Response response = client.newCall(request).execute()) {
+      if (!response.isSuccessful()) {
+        throw new IOException("Unexpected code " + response);
       }
 
-      Map<?, ?> response = JsonSerialization.mapReader().readValue(
-          responseBody.body().string());
+      if (response.code() != HttpStatus.SC_OK) {
+        throw new IllegalArgumentException("Received invalid http response: "
+            + response.code() + ", text = " + response.toString());
+      }
 
-      String newExpiresIn = response.get(EXPIRES_IN).toString();
+      Map<?, ?> responseBody = JsonSerialization.mapReader().readValue(
+          response.body().string());
+
+      String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
       timer.setExpiresIn(newExpiresIn);
 
-      accessToken = response.get(ACCESS_TOKEN).toString();
+      accessToken = responseBody.get(ACCESS_TOKEN).toString();
     } catch (Exception e) {
       throw new IOException("Unable to obtain access token from credential", e);
     }
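
A small semantic detail in both refresh() rewrites: response.isSuccessful() accepts any 2xx status, while the retained HttpStatus.SC_OK comparison still requires exactly 200, so the second check is the stricter of the two. A tiny illustration using a synthetic response (assumed values, no network involved, not code from the patch):

import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;

public final class IsSuccessfulSketch {
  public static void main(String[] args) {
    // Build a synthetic 204 response to compare the two checks.
    Response r = new Response.Builder()
        .request(new Request.Builder().url("https://example.invalid/").build())
        .protocol(Protocol.HTTP_1_1)
        .code(204)
        .message("No Content")
        .build();
    System.out.println(r.isSuccessful()); // true: 204 is in the 2xx range
    System.out.println(r.code() == 200);  // false: the SC_OK check would still reject it
  }
}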

@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hdfs.web.oauth2;
 
-import com.squareup.okhttp.MediaType;
+import okhttp3.MediaType;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
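
Only the import changes here because okhttp 4.x still exposes the same Java-facing MediaType factory. The URLENCODED constant used by the providers above can therefore keep a declaration along these lines (a sketch of the presumed form, not copied from the file):

import okhttp3.MediaType;

public final class MediaTypeSketch {
  // MediaType.parse still exists in okhttp 4.x; it returns null for malformed input,
  // which is another source of the nullability warnings the findbugs filter suppresses.
  public static final MediaType URLENCODED =
      MediaType.parse("application/x-www-form-urlencoded; charset=utf-8");

  private MediaTypeSketch() {
  }
}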

@@ -135,7 +135,9 @@
     <ehcache.version>3.3.1</ehcache.version>
     <hikari.version>2.4.12</hikari.version>
     <mssql.version>6.2.1.jre7</mssql.version>
-    <okhttp.version>2.7.5</okhttp.version>
+    <okhttp3.version>4.9.3</okhttp3.version>
+    <kotlin-stdlib.verion>1.4.10</kotlin-stdlib.verion>
+    <kotlin-stdlib-common.version>1.4.10</kotlin-stdlib-common.version>
     <jdom.version>1.1</jdom.version>
     <jna.version>5.2.0</jna.version>
     <grizzly.version>2.2.21</grizzly.version>
@@ -221,14 +223,40 @@
   <dependencyManagement>
     <dependencies>
       <dependency>
-        <groupId>com.squareup.okhttp</groupId>
+        <groupId>com.squareup.okhttp3</groupId>
         <artifactId>okhttp</artifactId>
-        <version>${okhttp.version}</version>
+        <version>${okhttp3.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-stdlib</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jetbrains.kotlin</groupId>
+            <artifactId>kotlin-stdlib-common</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.jetbrains.kotlin</groupId>
+        <artifactId>kotlin-stdlib</artifactId>
+        <version>${kotlin-stdlib.verion}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>org.jetbrains</groupId>
+            <artifactId>annotations</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.jetbrains.kotlin</groupId>
+        <artifactId>kotlin-stdlib-common</artifactId>
+        <version>${kotlin-stdlib-common.version}</version>
       </dependency>
       <dependency>
         <groupId>com.squareup.okhttp3</groupId>
         <artifactId>mockwebserver</artifactId>
-        <version>3.7.0</version>
+        <version>4.9.3</version>
         <scope>test</scope>
       </dependency>
       <dependency>
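
With the managed mockwebserver artifact bumped to the matching 4.9.3, tests can stub an OAuth2 refresh endpoint against the okhttp 4 client. A hedged sketch of such a test double (class name, path, and JSON payload are illustrative only; mockwebserver 4.x expects JUnit 4 on the test classpath):

import java.io.IOException;

import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;

public final class MockRefreshEndpointSketch {
  public static void main(String[] args) throws IOException {
    MockWebServer server = new MockWebServer();
    // Stub a token payload similar in shape to what the providers parse.
    server.enqueue(new MockResponse()
        .setResponseCode(200)
        .setBody("{\"access_token\":\"abc\",\"expires_in\":\"60\"}"));
    server.start();
    try {
      OkHttpClient client = new OkHttpClient();
      Request request = new Request.Builder().url(server.url("/refresh")).build();
      try (Response response = client.newCall(request).execute()) {
        System.out.println(response.body().string());
      }
    } finally {
      server.shutdown();
    }
  }
}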

@@ -29,7 +29,6 @@
   </description>
   <packaging>jar</packaging>
   <properties>
-    <okHttpVersion>${okhttp.version}</okHttpVersion>
     <minimalJsonVersion>0.9.1</minimalJsonVersion>
     <file.encoding>UTF-8</file.encoding>
     <downloadSources>true</downloadSources>
@@ -118,12 +117,6 @@
       <artifactId>hadoop-common</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>com.squareup.okhttp</groupId>
-      <artifactId>okhttp</artifactId>
-      <version>${okhttp.version}</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
@@ -141,12 +134,6 @@
       <scope>test</scope>
       <type>test-jar</type>
     </dependency>
-    <dependency>
-      <groupId>com.squareup.okhttp</groupId>
-      <artifactId>mockwebserver</artifactId>
-      <version>${okhttp.version}</version>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>