Skip to content

Commit 72d7304

Browse files
committed
Update bundled JDK to JDK-24
Signed-off-by: Andriy Redko <[email protected]>
1 parent 5baf5d8 commit 72d7304

File tree

26 files changed

+178
-39
lines changed

26 files changed

+178
-39
lines changed

DEVELOPER_GUIDE.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ Fork [opensearch-project/OpenSearch](https://github.com/opensearch-project/OpenS
7676

7777
#### JDK
7878

79-
OpenSearch recommends building with the [Temurin/Adoptium](https://adoptium.net/temurin/releases/) distribution. JDK 11 is the minimum supported, and JDK-23 is the newest supported. You must have a supported JDK installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-21`.
79+
OpenSearch recommends building with the [Temurin/Adoptium](https://adoptium.net/temurin/releases/) distribution. JDK 11 is the minimum supported, and JDK 24 is the newest supported. You must have a supported JDK installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-21`.
8080

8181
Download Java 11 from [here](https://adoptium.net/releases.html?variant=openjdk11).
8282

@@ -87,11 +87,11 @@ In addition, certain backward compatibility tests check out and compile the prev
8787
./gradlew check -Dorg.gradle.warning.mode=none
8888
```
8989

90-
By default, the test tasks use bundled JDK runtime, configured in version catalog [gradle/libs.versions.toml](gradle/libs.versions.toml), and set to JDK 23 (non-LTS).
90+
By default, the test tasks use the bundled JDK runtime, configured in the version catalog [gradle/libs.versions.toml](gradle/libs.versions.toml), and set to JDK 24 (non-LTS).
9191

9292
```
9393
bundled_jdk_vendor = adoptium
94-
bundled_jdk = 23.0.1+11
94+
bundled_jdk = 24.0.1+9
9595
```
9696

9797
#### Custom Runtime JDK

distribution/tools/plugin-cli/build.gradle

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,7 @@ base {
3737
dependencies {
3838
compileOnly project(":server")
3939
compileOnly project(":libs:opensearch-cli")
40+
api project(":libs:agent-sm:agent-policy")
4041
api "org.bouncycastle:bc-fips:${versions.bouncycastle_jce}"
4142
api "org.bouncycastle:bcpg-fips:${versions.bouncycastle_pg}"
4243
testImplementation project(":test:framework")

distribution/tools/plugin-cli/src/main/java/org/opensearch/tools/cli/plugin/PluginSecurity.java

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -37,16 +37,15 @@
3737
import org.opensearch.cli.Terminal.Verbosity;
3838
import org.opensearch.cli.UserException;
3939
import org.opensearch.common.util.io.IOUtils;
40+
import org.opensearch.secure_sm.policy.PolicyFile;
4041

4142
import java.io.IOException;
4243
import java.nio.file.Files;
4344
import java.nio.file.Path;
44-
import java.security.NoSuchAlgorithmException;
4545
import java.security.Permission;
4646
import java.security.PermissionCollection;
4747
import java.security.Permissions;
4848
import java.security.Policy;
49-
import java.security.URIParameter;
5049
import java.security.UnresolvedPermission;
5150
import java.util.ArrayList;
5251
import java.util.Collections;
@@ -143,22 +142,12 @@ static Set<String> parsePermissions(Path file, Path tmpDir) throws IOException {
143142
// 2. read permission to the code itself (e.g. jar file of the code)
144143

145144
Path emptyPolicyFile = Files.createTempFile(tmpDir, "empty", "tmp");
146-
final Policy emptyPolicy;
147-
try {
148-
emptyPolicy = Policy.getInstance("JavaPolicy", new URIParameter(emptyPolicyFile.toUri()));
149-
} catch (NoSuchAlgorithmException e) {
150-
throw new RuntimeException(e);
151-
}
145+
final Policy emptyPolicy = new PolicyFile(emptyPolicyFile.toUri().toURL());
152146
IOUtils.rm(emptyPolicyFile);
153147

154148
// parse the plugin's policy file into a set of permissions
155-
final Policy policy;
156-
try {
157-
policy = Policy.getInstance("JavaPolicy", new URIParameter(file.toUri()));
158-
} catch (NoSuchAlgorithmException e) {
159-
throw new RuntimeException(e);
160-
}
161-
PermissionCollection permissions = policy.getPermissions(PluginSecurity.class.getProtectionDomain());
149+
final Policy policy = new PolicyFile(file.toUri().toURL());
150+
final PermissionCollection permissions = policy.getPermissions(PluginSecurity.class.getProtectionDomain());
162151
// this method is supported with the specific implementation we use, but just check for safety.
163152
if (permissions == Policy.UNSUPPORTED_EMPTY_COLLECTION) {
164153
throw new UnsupportedOperationException("JavaPolicy implementation does not support retrieving permissions");

gradle/code-coverage.gradle

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ repositories {
1919

2020
allprojects {
2121
plugins.withId('jacoco') {
22-
jacoco.toolVersion = '0.8.12'
22+
jacoco.toolVersion = '0.8.13'
2323
}
2424
}
2525

gradle/libs.versions.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@ opensearch = "3.1.0"
33
lucene = "10.1.0"
44

55
bundled_jdk_vendor = "adoptium"
6-
bundled_jdk = "21.0.7+6"
6+
bundled_jdk = "24.0.1+9"
77

88
# optional dependencies
99
spatial4j = "0.7"
@@ -56,6 +56,7 @@ commonsio = "2.16.0"
5656
aws = "2.30.31"
5757
awscrt = "0.35.0"
5858
reactivestreams = "1.0.4"
59+
hadoop3 = "3.4.1"
5960

6061
# when updating this version, you need to ensure compatibility with:
6162
# - plugins/ingest-attachment (transitive dependency, check the upstream POM)

libs/agent-sm/agent-policy/src/main/java/org/opensearch/secure_sm/policy/PolicyFile.java

Lines changed: 25 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -125,8 +125,32 @@ private void addGrantEntry(GrantEntry grantEntry, List<PolicyEntry> entries) thr
125125
entries.add(new PolicyEntry(codesource, permissions));
126126
}
127127

128+
/**
129+
* Expands known system properties like ${java.home} and ${user.home} to their absolute
130+
* path equivalents.
131+
*/
132+
private static String expandKnownSystemProperty(final String property, final String value) {
133+
final int index = value.indexOf("${" + property + "}/");
134+
final String path = System.getProperty(property);
135+
if (path.endsWith(File.pathSeparator)) {
136+
return path + value.substring(index + property.length() + 4 /* replace the path separator */);
137+
} else {
138+
return path + value.substring(index + property.length() + 3 /* keep the path separator */);
139+
}
140+
}
141+
128142
private static PermissionEntry expandPermissionName(PermissionEntry pe) {
129-
if (pe.name() == null || !pe.name().contains("${{")) {
143+
if (pe.name() == null) {
144+
return pe;
145+
}
146+
147+
if (pe.name().startsWith("${java.home}")) {
148+
return new PermissionEntry(pe.permission(), expandKnownSystemProperty("java.home", pe.name()), pe.action());
149+
} else if (pe.name().startsWith("${user.home}")) {
150+
return new PermissionEntry(pe.permission(), expandKnownSystemProperty("user.home", pe.name()), pe.action());
151+
}
152+
153+
if (!pe.name().contains("${{")) {
130154
return pe;
131155
}
132156

libs/agent-sm/agent/src/test/java/org/opensearch/javaagent/FileInterceptorIntegTests.java

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@
2222
import java.nio.file.Files;
2323
import java.nio.file.Path;
2424
import java.nio.file.StandardOpenOption;
25+
import java.security.Permission;
2526
import java.security.PermissionCollection;
2627
import java.security.Permissions;
2728
import java.security.Policy;
@@ -56,6 +57,17 @@ public PermissionCollection getPermissions(ProtectionDomain domain) {
5657
permissions.add(new FilePermission(System.getProperty("user.dir") + "/-", "read,write,delete"));
5758
return permissions;
5859
}
60+
61+
@Override
62+
public boolean implies(ProtectionDomain domain, Permission permission) {
63+
final PermissionCollection pc = getPermissions(domain);
64+
65+
if (pc == null) {
66+
return false;
67+
}
68+
69+
return pc.implies(permission);
70+
}
5971
};
6072
AgentPolicy.setPolicy(policy);
6173
Files.createDirectories(getTestDir());

plugins/repository-hdfs/build.gradle

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,6 @@ opensearchplugin {
4747
classname = 'org.opensearch.repositories.hdfs.HdfsPlugin'
4848
}
4949

50-
versions << [
51-
'hadoop3': '3.3.6'
52-
]
53-
5450
testFixtures.useFixture ":test:fixtures:krb5kdc-fixture", "hdfs"
5551

5652
configurations {
@@ -138,7 +134,8 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture',
138134
executable = "${BuildParams.runtimeJavaHome}/bin/java"
139135
env 'CLASSPATH', "${-> configurations.hdfsFixture.asPath}"
140136
maxWaitInSeconds = 60
141-
onlyIf { BuildParams.inFipsJvm == false }
137+
// See please https://issues.apache.org/jira/browse/HADOOP-19486
138+
onlyIf { BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_24 }
142139
waitCondition = { fixture, ant ->
143140
// the hdfs.MiniHDFS fixture writes the ports file when
144141
// it's ready, so we can just wait for the file to exist
@@ -197,7 +194,8 @@ for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSec
197194
}
198195
}
199196

200-
onlyIf { BuildParams.inFipsJvm == false }
197+
// See please https://issues.apache.org/jira/browse/HADOOP-19486
198+
onlyIf { BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_24 }
201199
if (integTestTaskName.contains("Ha")) {
202200
Path portsFile
203201
File portsFileDir = file("${workingDir}/hdfsFixture")
@@ -271,7 +269,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) {
271269
}
272270
}
273271
} else {
274-
fixtureSupported = true
272+
// See please https://issues.apache.org/jira/browse/HADOOP-19486
273+
fixtureSupported = BuildParams.runtimeJavaVersion < JavaVersion.VERSION_24
275274
}
276275

277276
boolean legalPath = rootProject.rootDir.toString().contains(" ") == false
@@ -282,7 +281,8 @@ if (legalPath == false) {
282281
// Always ignore HA integration tests in the normal integration test runner, they are included below as
283282
// part of their own HA-specific integration test tasks.
284283
integTest {
285-
onlyIf { BuildParams.inFipsJvm == false }
284+
// See please https://issues.apache.org/jira/browse/HADOOP-19486
285+
onlyIf { BuildParams.inFipsJvm == false && BuildParams.runtimeJavaVersion < JavaVersion.VERSION_24 }
286286
exclude('**/Ha*TestSuiteIT.class')
287287
}
288288

@@ -371,7 +371,6 @@ thirdPartyAudit {
371371
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray',
372372
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1',
373373
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2',
374-
'org.apache.hadoop.thirdparty.com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$3',
375374
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64',
376375
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$1',
377376
'org.apache.hadoop.thirdparty.com.google.common.hash.Striped64$Cell',
@@ -383,6 +382,9 @@ thirdPartyAudit {
383382
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$1',
384383
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$JvmMemoryAccessor',
385384
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$MemoryAccessor',
385+
'org.apache.hadoop.thirdparty.protobuf.MessageSchema',
386+
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android32MemoryAccessor',
387+
'org.apache.hadoop.thirdparty.protobuf.UnsafeUtil$Android64MemoryAccessor',
386388

387389
'org.apache.hadoop.shaded.com.google.common.cache.Striped64',
388390
'org.apache.hadoop.shaded.com.google.common.cache.Striped64$1',

plugins/repository-hdfs/licenses/hadoop-client-api-3.3.6.jar.sha1

Lines changed: 0 additions & 1 deletion
This file was deleted.
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
7e4cfae7f5c85cffdbc21fdf749262fc78d4463a

plugins/repository-hdfs/licenses/hadoop-client-runtime-3.3.6.jar.sha1

Lines changed: 0 additions & 1 deletion
This file was deleted.
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
30ec07ceccb224b2ef17af34fbb593bff3e9e071

plugins/repository-hdfs/licenses/hadoop-hdfs-3.3.6.jar.sha1

Lines changed: 0 additions & 1 deletion
This file was deleted.
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
8545078b39e33416cb70ccef1bb22f2c88fb6b6c

plugins/repository-hdfs/src/main/java/org/opensearch/repositories/hdfs/HdfsRepository.java

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,8 @@
5555
import org.opensearch.indices.recovery.RecoverySettings;
5656
import org.opensearch.repositories.blobstore.BlobStoreRepository;
5757

58+
import javax.security.auth.Subject;
59+
5860
import java.io.IOException;
5961
import java.io.UncheckedIOException;
6062
import java.net.InetAddress;
@@ -212,6 +214,10 @@ private UserGroupInformation login(Configuration hadoopConfiguration, Settings r
212214
logger.debug("Using kerberos principal [{}] and keytab located at [{}]", principal, keytab);
213215
return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytab);
214216
}
217+
final Subject subject = Subject.current();
218+
if (subject == null || subject.getPrincipals() == null) {
219+
return UserGroupInformation.getLoginUser();
220+
}
215221
return UserGroupInformation.getCurrentUser();
216222
} catch (IOException e) {
217223
throw new UncheckedIOException("Could not retrieve the current user information", e);

plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreContainerTests.java

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -119,6 +119,7 @@ private FileContext createContext(URI uri) {
119119
});
120120
}
121121

122+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
122123
public void testReadOnly() throws Exception {
123124
FileContext fileContext = createTestContext();
124125
// Constructor will not create dir if read only

plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsBlobStoreRepositoryTests.java

Lines changed: 56 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,11 +33,13 @@
3333

3434
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
3535

36+
import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix;
3637
import org.opensearch.common.settings.Settings;
3738
import org.opensearch.plugins.Plugin;
3839
import org.opensearch.repositories.blobstore.OpenSearchBlobStoreRepositoryIntegTestCase;
3940
import org.opensearch.test.OpenSearchIntegTestCase;
4041

42+
import java.io.IOException;
4143
import java.util.Collection;
4244
import java.util.Collections;
4345

@@ -66,4 +68,58 @@ protected Settings repositorySettings() {
6668
protected Collection<Class<? extends Plugin>> nodePlugins() {
6769
return Collections.singletonList(HdfsPlugin.class);
6870
}
71+
72+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
73+
@Override
74+
public void testContainerCreationAndDeletion() throws IOException {
75+
super.testContainerCreationAndDeletion();
76+
}
77+
78+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
79+
@Override
80+
public void testDeleteBlobs() throws IOException {
81+
super.testDeleteBlobs();
82+
}
83+
84+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
85+
@Override
86+
public void testIndicesDeletedFromRepository() throws Exception {
87+
super.testIndicesDeletedFromRepository();
88+
}
89+
90+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
91+
@Override
92+
public void testList() throws IOException {
93+
super.testList();
94+
}
95+
96+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
97+
@Override
98+
public void testMultipleSnapshotAndRollback() throws Exception {
99+
super.testMultipleSnapshotAndRollback();
100+
}
101+
102+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
103+
@Override
104+
public void testReadNonExistingPath() throws IOException {
105+
super.testReadNonExistingPath();
106+
}
107+
108+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
109+
@Override
110+
public void testReadRange() throws IOException {
111+
super.testReadRange();
112+
}
113+
114+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
115+
@Override
116+
public void testSnapshotAndRestore() throws Exception {
117+
super.testSnapshotAndRestore();
118+
}
119+
120+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
121+
@Override
122+
public void testWriteRead() throws IOException {
123+
super.testWriteRead();
124+
}
69125
}

plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsRepositoryTests.java

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333

3434
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
3535

36+
import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix;
3637
import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryResponse;
3738
import org.opensearch.common.settings.MockSecureSettings;
3839
import org.opensearch.common.settings.SecureSettings;
@@ -79,4 +80,22 @@ protected void assertCleanupResponse(CleanupRepositoryResponse response, long by
7980
assertThat(response.result().blobs(), equalTo(0L));
8081
}
8182
}
83+
84+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
85+
@Override
86+
public void testCleanup() throws Exception {
87+
super.testCleanup();
88+
}
89+
90+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
91+
@Override
92+
public void testCreateSnapshot() {
93+
super.testCreateSnapshot();
94+
}
95+
96+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
97+
@Override
98+
public void testListChildren() throws Exception {
99+
super.testListChildren();
100+
}
82101
}

plugins/repository-hdfs/src/test/java/org/opensearch/repositories/hdfs/HdfsTests.java

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333

3434
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
3535

36+
import org.apache.lucene.tests.util.LuceneTestCase.AwaitsFix;
3637
import org.opensearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
3738
import org.opensearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse;
3839
import org.opensearch.cluster.ClusterState;
@@ -61,6 +62,7 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
6162
return pluginList(HdfsPlugin.class);
6263
}
6364

65+
@AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/HADOOP-19486")
6466
public void testSimpleWorkflow() {
6567
Client client = client();
6668
Settings.Builder settings = Settings.builder()

0 commit comments

Comments
 (0)