diff --git a/LICENSE-binary b/LICENSE-binary
index fe60ac3609..980b9c7f2b 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -241,7 +241,7 @@ com.google.guava:guava:27.0-jre
com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
com.microsoft.azure:azure-storage:7.0.0
com.nimbusds:nimbus-jose-jwt:9.8.1
-com.squareup.okhttp:okhttp:2.7.5
+com.squareup.okhttp3:okhttp:4.9.3
com.squareup.okio:okio:1.6.0
com.zaxxer:HikariCP:4.0.3
commons-beanutils:commons-beanutils:1.9.3
diff --git a/hadoop-client-modules/hadoop-client/pom.xml b/hadoop-client-modules/hadoop-client/pom.xml
index b48a221bdf..5299c9e871 100644
--- a/hadoop-client-modules/hadoop-client/pom.xml
+++ b/hadoop-client-modules/hadoop-client/pom.xml
@@ -114,6 +114,18 @@
org.eclipse.jetty
jetty-server
+
+ org.jetbrains.kotlin
+ kotlin-stdlib
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-common
+
+
+ com.squareup.okhttp3
+ okhttp
+
com.sun.jersey
jersey-core
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
index c96b3a99bd..508388aa48 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
@@ -94,4 +94,17 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
index c4e65ef811..9bb0932d32 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/pom.xml
@@ -35,9 +35,17 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
- com.squareup.okhttp
+ com.squareup.okhttp3
okhttp
+
+ org.jetbrains.kotlin
+ kotlin-stdlib
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-common
+
org.apache.hadoop
hadoop-common
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
index 3e3fbfbd91..e944e8c1c8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/ConfRefreshTokenBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@
import org.apache.hadoop.util.Timer;
import org.apache.http.HttpStatus;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -102,37 +103,34 @@ public synchronized String getAccessToken() throws IOException {
}
void refresh() throws IOException {
- try {
- OkHttpClient client = new OkHttpClient();
- client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
- client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
+ OkHttpClient client =
+ new OkHttpClient.Builder().connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+ TimeUnit.MILLISECONDS)
+ .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .build();
- String bodyString = Utils.postBody(GRANT_TYPE, REFRESH_TOKEN,
- REFRESH_TOKEN, refreshToken,
- CLIENT_ID, clientId);
+ String bodyString =
+ Utils.postBody(GRANT_TYPE, REFRESH_TOKEN, REFRESH_TOKEN, refreshToken, CLIENT_ID, clientId);
- RequestBody body = RequestBody.create(URLENCODED, bodyString);
+ RequestBody body = RequestBody.create(bodyString, URLENCODED);
- Request request = new Request.Builder()
- .url(refreshURL)
- .post(body)
- .build();
- Response responseBody = client.newCall(request).execute();
-
- if (responseBody.code() != HttpStatus.SC_OK) {
- throw new IllegalArgumentException("Received invalid http response: "
- + responseBody.code() + ", text = " + responseBody.toString());
+ Request request = new Request.Builder().url(refreshURL).post(body).build();
+ try (Response response = client.newCall(request).execute()) {
+ if (!response.isSuccessful()) {
+ throw new IOException("Unexpected code " + response);
+ }
+ if (response.code() != HttpStatus.SC_OK) {
+ throw new IllegalArgumentException(
+ "Received invalid http response: " + response.code() + ", text = "
+ + response.toString());
}
- Map, ?> response = JsonSerialization.mapReader().readValue(
- responseBody.body().string());
+ Map, ?> responseBody = JsonSerialization.mapReader().readValue(response.body().string());
- String newExpiresIn = response.get(EXPIRES_IN).toString();
+ String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
accessTokenTimer.setExpiresIn(newExpiresIn);
- accessToken = response.get(ACCESS_TOKEN).toString();
+ accessToken = responseBody.get(ACCESS_TOKEN).toString();
} catch (Exception e) {
throw new IOException("Exception while refreshing access token", e);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
index bfd7055990..25ceb88460 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/CredentialBasedAccessTokenProvider.java
@@ -18,10 +18,15 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.OkHttpClient;
-import com.squareup.okhttp.Request;
-import com.squareup.okhttp.RequestBody;
-import com.squareup.okhttp.Response;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import okhttp3.OkHttpClient;
+import okhttp3.Request;
+import okhttp3.RequestBody;
+import okhttp3.Response;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@@ -30,10 +35,6 @@
import org.apache.hadoop.util.Timer;
import org.apache.http.HttpStatus;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_CLIENT_ID_KEY;
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.OAUTH_REFRESH_URL_KEY;
import static org.apache.hadoop.hdfs.web.oauth2.OAuth2Constants.ACCESS_TOKEN;
@@ -96,38 +97,38 @@ public synchronized String getAccessToken() throws IOException {
}
void refresh() throws IOException {
- try {
- OkHttpClient client = new OkHttpClient();
- client.setConnectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
- client.setReadTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
- TimeUnit.MILLISECONDS);
+ OkHttpClient client = new OkHttpClient.Builder()
+ .connectTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .readTimeout(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, TimeUnit.MILLISECONDS)
+ .build();
- String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
- GRANT_TYPE, CLIENT_CREDENTIALS,
- CLIENT_ID, clientId);
+ String bodyString = Utils.postBody(CLIENT_SECRET, getCredential(),
+ GRANT_TYPE, CLIENT_CREDENTIALS,
+ CLIENT_ID, clientId);
- RequestBody body = RequestBody.create(URLENCODED, bodyString);
+ RequestBody body = RequestBody.create(bodyString, URLENCODED);
- Request request = new Request.Builder()
- .url(refreshURL)
- .post(body)
- .build();
- Response responseBody = client.newCall(request).execute();
-
- if (responseBody.code() != HttpStatus.SC_OK) {
- throw new IllegalArgumentException("Received invalid http response: "
- + responseBody.code() + ", text = " + responseBody.toString());
+ Request request = new Request.Builder()
+ .url(refreshURL)
+ .post(body)
+ .build();
+ try (Response response = client.newCall(request).execute()) {
+ if (!response.isSuccessful()) {
+ throw new IOException("Unexpected code " + response);
}
- Map, ?> response = JsonSerialization.mapReader().readValue(
- responseBody.body().string());
+ if (response.code() != HttpStatus.SC_OK) {
+ throw new IllegalArgumentException("Received invalid http response: "
+ + response.code() + ", text = " + response.toString());
+ }
- String newExpiresIn = response.get(EXPIRES_IN).toString();
+ Map, ?> responseBody = JsonSerialization.mapReader().readValue(
+ response.body().string());
+
+ String newExpiresIn = responseBody.get(EXPIRES_IN).toString();
timer.setExpiresIn(newExpiresIn);
- accessToken = response.get(ACCESS_TOKEN).toString();
-
+ accessToken = responseBody.get(ACCESS_TOKEN).toString();
} catch (Exception e) {
throw new IOException("Unable to obtain access token from credential", e);
}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
index 3f091c2945..2f28b65e40 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/oauth2/OAuth2Constants.java
@@ -18,7 +18,7 @@
*/
package org.apache.hadoop.hdfs.web.oauth2;
-import com.squareup.okhttp.MediaType;
+import okhttp3.MediaType;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 85df7ca7df..6831609661 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -135,6 +135,9 @@
4.0.3
6.2.1.jre7
2.7.5
+ 4.9.3
+ 1.4.10
+ 1.4.10
2.0.6.1
5.2.0
2.2.21
@@ -222,14 +225,40 @@
- com.squareup.okhttp
+ com.squareup.okhttp3
okhttp
- ${okhttp.version}
+ ${okhttp3.version}
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-common
+
+
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib
+ ${kotlin-stdlib.version}
+
+
+ org.jetbrains
+ annotations
+
+
+
+
+ org.jetbrains.kotlin
+ kotlin-stdlib-common
+ ${kotlin-stdlib-common.version}
com.squareup.okhttp3
mockwebserver
- 3.7.0
+ 4.9.3
test
diff --git a/hadoop-tools/hadoop-azure-datalake/pom.xml b/hadoop-tools/hadoop-azure-datalake/pom.xml
index 446e2957b6..cec050d2c1 100644
--- a/hadoop-tools/hadoop-azure-datalake/pom.xml
+++ b/hadoop-tools/hadoop-azure-datalake/pom.xml
@@ -29,7 +29,6 @@
jar
- ${okhttp.version}
0.9.1
UTF-8
true
@@ -118,12 +117,6 @@
hadoop-common
provided
-
- com.squareup.okhttp
- okhttp
- ${okhttp.version}
- test
-
junit
junit
@@ -141,12 +134,6 @@
test
test-jar
-
- com.squareup.okhttp
- mockwebserver
- ${okhttp.version}
- test
-
org.apache.hadoop