Commit 7164c76

HDDS-12488. S3G should handle the signature calculation with trailers (apache#8020)
Co-authored-by: Doroszlai, Attila <[email protected]>
1 parent c8c6d0e commit 7164c76

File tree

20 files changed: +632, -78 lines


hadoop-ozone/dev-support/checks/license.sh

Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@ grep '(' ${src} \
   -e "(CDDL\>" -e ' CDDL '\
   -e "(EDL\>" -e "Eclipse Distribution ${L}" \
   -e "(EPL\>" -e "Eclipse Public ${L}" \
-  -e "(MIT)" -e "\<MIT ${L}" \
+  -e "(MIT)" -e "(MIT-0)" -e "\<MIT ${L}" \
   -e "Modified BSD\>" \
   -e "New BSD ${L}" \
   -e "Public Domain" \

hadoop-ozone/fault-injection-test/mini-chaos-tests/pom.xml

Lines changed: 5 additions & 0 deletions

@@ -75,6 +75,11 @@
       <artifactId>ozone-tools</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>s3</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

hadoop-ozone/integration-test/pom.xml

Lines changed: 5 additions & 0 deletions

@@ -281,6 +281,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>software.amazon.awssdk</groupId>
+      <artifactId>s3</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneCluster.java

Lines changed: 7 additions & 1 deletion

@@ -39,6 +39,7 @@
 import org.apache.ozone.test.GenericTestUtils;
 import org.apache.ratis.util.ExitUtils;
 import org.apache.ratis.util.function.CheckedFunction;
+import software.amazon.awssdk.services.s3.S3Client;
 
 /**
  * Interface used for MiniOzoneClusters.
@@ -162,10 +163,15 @@ void waitForPipelineTobeReady(HddsProtos.ReplicationFactor factor,
   OzoneClient newClient() throws IOException;
 
   /**
-   * Returns an {@link AmazonS3} to access the {@link MiniOzoneCluster}.
+   * Returns an {@link AmazonS3} to use AWS SDK V1 to access the {@link MiniOzoneCluster}.
    */
   AmazonS3 newS3Client();
 
+  /**
+   * Returns an {@link S3Client} to use AWS SDK V2 to access the {@link MiniOzoneCluster}.
+   */
+  S3Client newS3ClientV2() throws Exception;
+
   /**
    * Returns StorageContainerLocationClient to communicate with
   * {@link StorageContainerManager} associated with the MiniOzoneCluster.
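
The interface change above is the V2 counterpart of the existing newS3Client(): tests can now ask the mini cluster for either SDK generation. Below is a minimal sketch of how a test might use the two factory methods; the wrapper class and bucket names are hypothetical, and only the two factory methods come from this diff.

```java
// Hypothetical wrapper (not part of this commit) showing the two factory methods
// added to MiniOzoneCluster; bucket names are made up for illustration.
import com.amazonaws.services.s3.AmazonS3;
import org.apache.hadoop.ozone.MiniOzoneCluster;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CreateBucketRequest;

public class S3ClientFactorySketch {
  void useBothSdkClients(MiniOzoneCluster cluster) throws Exception {
    AmazonS3 v1 = cluster.newS3Client();            // AWS SDK V1 client
    try (S3Client v2 = cluster.newS3ClientV2()) {   // AWS SDK V2 client, closeable
      v1.createBucket("bucket-for-v1-tests");
      v2.createBucket(CreateBucketRequest.builder().bucket("bucket-for-v2-tests").build());
    }
  }
}
```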

hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java

Lines changed: 53 additions & 11 deletions

@@ -45,6 +45,7 @@
 import com.amazonaws.services.s3.AmazonS3ClientBuilder;
 import java.io.File;
 import java.io.IOException;
+import java.net.URI;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
@@ -106,6 +107,10 @@
 import org.apache.ozone.test.GenericTestUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
+import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
+import software.amazon.awssdk.regions.Region;
+import software.amazon.awssdk.services.s3.S3Client;
 
 /**
  * MiniOzoneCluster creates a complete in-process Ozone cluster suitable for
@@ -307,6 +312,11 @@ public AmazonS3 newS3Client() {
     return createS3Client(true);
   }
 
+  @Override
+  public S3Client newS3ClientV2() throws Exception {
+    return createS3ClientV2(true);
+  }
+
   public AmazonS3 createS3Client(boolean enablePathStyle) {
     final String accessKey = "user";
     final String secretKey = "password";
@@ -317,6 +327,8 @@ public AmazonS3 createS3Client(boolean enablePathStyle) {
     String host;
 
     if (webPolicy.isHttpsEnabled()) {
+      // TODO: Currently HTTPS is disabled in the test, we can add HTTPS
+      // integration in the future
       protocol = HTTPS_SCHEME;
       host = conf.get(OZONE_S3G_HTTPS_ADDRESS_KEY);
     } else {
@@ -334,19 +346,49 @@ public AmazonS3 createS3Client(boolean enablePathStyle) {
     ClientConfiguration clientConfiguration = new ClientConfiguration();
     LOG.info("S3 Endpoint is {}", endpoint);
 
-    AmazonS3 s3Client =
-        AmazonS3ClientBuilder.standard()
-            .withPathStyleAccessEnabled(enablePathStyle)
-            .withEndpointConfiguration(
-                new AwsClientBuilder.EndpointConfiguration(
-                    endpoint, region.getName()
-                )
+    return AmazonS3ClientBuilder.standard()
+        .withPathStyleAccessEnabled(enablePathStyle)
+        .withEndpointConfiguration(
+            new AwsClientBuilder.EndpointConfiguration(
+                endpoint, region.getName()
             )
-            .withClientConfiguration(clientConfiguration)
-            .withCredentials(credentials)
-            .build();
+        )
+        .withClientConfiguration(clientConfiguration)
+        .withCredentials(credentials)
+        .build();
+  }
+
+  public S3Client createS3ClientV2(boolean enablePathStyle) throws Exception {
+    final String accessKey = "user";
+    final String secretKey = "password";
+    final Region region = Region.US_EAST_1;
+
+    final String protocol;
+    final HttpConfig.Policy webPolicy = getHttpPolicy(conf);
+    String host;
+
+    if (webPolicy.isHttpsEnabled()) {
+      // TODO: Currently HTTPS is disabled in the test, we can add HTTPS
+      // integration in the future
+      protocol = HTTPS_SCHEME;
+      host = conf.get(OZONE_S3G_HTTPS_ADDRESS_KEY);
+    } else {
+      protocol = HTTP_SCHEME;
+      host = conf.get(OZONE_S3G_HTTP_ADDRESS_KEY);
+    }
+
+    String endpoint = protocol + "://" + host;
+
+    LOG.info("S3 Endpoint is {}", endpoint);
+
+    AwsBasicCredentials credentials = AwsBasicCredentials.create(accessKey, secretKey);
 
-    return s3Client;
+    return S3Client.builder()
+        .region(region)
+        .endpointOverride(new URI(endpoint))
+        .credentialsProvider(StaticCredentialsProvider.create(credentials))
+        .forcePathStyle(enablePathStyle)
+        .build();
   }
 
   protected OzoneClient createClient() throws IOException {
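
The V2 client added here is what exercises the trailer handling named in the commit title: when a flexible checksum is requested, AWS SDK V2 typically uploads the body as aws-chunked with a trailing checksum declared via the x-amz-trailer header, so the gateway has to fold that trailer into its signature calculation. A sketch of a request that would produce such a trailer is below; the bucket and key are hypothetical, and the client is assumed to come from createS3ClientV2(true).

```java
// Sketch only: a V2 PutObject with a flexible checksum, which the SDK typically
// sends as an aws-chunked body with a trailing x-amz-checksum-crc32 header.
// Bucket/key names are hypothetical; s3 is assumed to be the V2 client above.
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;

public class TrailerUploadSketch {
  static void putWithTrailer(S3Client s3) {
    PutObjectRequest request = PutObjectRequest.builder()
        .bucket("trailer-test-bucket")
        .key("trailer-test-key")
        .checksumAlgorithm(ChecksumAlgorithm.CRC32)
        .build();
    s3.putObject(request, RequestBody.fromString("hello trailer"));
  }
}
```
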
hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/S3SDKTestUtils.java

Lines changed: 79 additions & 0 deletions

@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3.awssdk;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.RandomAccessFile;
+import java.security.MessageDigest;
+import org.apache.commons.lang3.RandomUtils;
+import org.apache.hadoop.utils.InputSubstream;
+
+/**
+ * Utilities for S3 SDK tests.
+ */
+public final class S3SDKTestUtils {
+
+  private S3SDKTestUtils() {
+  }
+
+  /**
+   * Calculate the MD5 digest from an input stream from a specific offset and length.
+   * @param inputStream The input stream where the digest will be read from.
+   *                    Note that the input stream will not be closed, the caller is responsible in closing
+   *                    the input stream.
+   * @param skip The byte offset to start the digest from.
+   * @param length The number of bytes from the starting offset that will be digested.
+   * @return byte array of the MD5 digest of the input stream from a specific offset and length.
+   * @throws Exception exception.
+   */
+  public static byte[] calculateDigest(final InputStream inputStream, int skip, int length) throws Exception {
+    int numRead;
+    byte[] buffer = new byte[1024];
+
+    MessageDigest complete = MessageDigest.getInstance("MD5");
+    InputStream subStream = inputStream;
+    if (skip > -1 && length > -1) {
+      subStream = new InputSubstream(inputStream, skip, length);
+    }
+
+    do {
+      numRead = subStream.read(buffer);
+      if (numRead > 0) {
+        complete.update(buffer, 0, numRead);
+      }
+    } while (numRead != -1);
+
+    return complete.digest();
+  }
+
+  public static void createFile(File newFile, int size) throws IOException {
+    // write random data so that filesystems with compression enabled (e.g. ZFS)
+    // can't compress the file
+    byte[] data = new byte[size];
+    data = RandomUtils.secure().randomBytes(data.length);
+
+    RandomAccessFile file = new RandomAccessFile(newFile, "rws");
+
+    file.write(data);
+
+    file.getFD().sync();
+    file.close();
+  }
+}
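
These helpers were extracted so that both the V1 suite and a V2 suite can share them. A short sketch of the intended usage follows, assuming JUnit 5 assertions and a downloaded object stream obtained elsewhere; nothing below is part of the commit itself.

```java
// Hypothetical usage of the helpers above (not part of this commit).
import static org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils.calculateDigest;
import static org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils.createFile;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;

import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

public class DigestVerificationSketch {

  static File newRandomFile(File dir) throws Exception {
    File file = new File(dir, "random-5mb.bin");
    createFile(file, 5 * 1024 * 1024);  // 5 MB of incompressible random data
    return file;
  }

  static void assertSameContent(File sourceFile, InputStream downloaded) throws Exception {
    byte[] expected;
    try (InputStream in = new FileInputStream(sourceFile)) {
      // skip/length of -1 means "digest the whole stream"
      expected = calculateDigest(in, -1, -1);
    }
    assertArrayEquals(expected, calculateDigest(downloaded, -1, -1));
  }
}
```
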

hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java

Lines changed: 2 additions & 38 deletions

@@ -18,6 +18,8 @@
 package org.apache.hadoop.ozone.s3.awssdk.v1;
 
 import static org.apache.hadoop.ozone.OzoneConsts.MB;
+import static org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils.calculateDigest;
+import static org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils.createFile;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -72,11 +74,9 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.RandomAccessFile;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.security.MessageDigest;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -89,7 +89,6 @@
 import java.util.stream.Collectors;
 import javax.xml.bind.DatatypeConverter;
 import org.apache.commons.lang3.RandomStringUtils;
-import org.apache.commons.lang3.RandomUtils;
 import org.apache.hadoop.hdds.client.OzoneQuota;
 import org.apache.hadoop.hdds.client.ReplicationConfig;
 import org.apache.hadoop.hdds.client.ReplicationFactor;
@@ -102,7 +101,6 @@
 import org.apache.hadoop.ozone.client.OzoneClientFactory;
 import org.apache.hadoop.ozone.client.OzoneVolume;
 import org.apache.hadoop.ozone.client.io.OzoneOutputStream;
-import org.apache.hadoop.utils.InputSubstream;
 import org.apache.ozone.test.OzoneTestBase;
 import org.junit.jupiter.api.MethodOrderer;
 import org.junit.jupiter.api.Test;
@@ -117,7 +115,6 @@
  * - https://github.com/awsdocs/aws-doc-sdk-examples/tree/main/java/example_code/s3/
  * - https://github.com/ceph/s3-tests
  *
- * TODO: Currently we are using AWS SDK V1, need to also add tests for AWS SDK V2.
  */
 @TestMethodOrder(MethodOrderer.MethodName.class)
 public abstract class AbstractS3SDKV1Tests extends OzoneTestBase {
@@ -1037,37 +1034,4 @@ private void abortMultipartUpload(String bucketName, String key, String uploadId
     AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest(bucketName, key, uploadId);
     s3Client.abortMultipartUpload(abortRequest);
   }
-
-  private static byte[] calculateDigest(InputStream inputStream, int skip, int length) throws Exception {
-    int numRead;
-    byte[] buffer = new byte[1024];
-
-    MessageDigest complete = MessageDigest.getInstance("MD5");
-    if (skip > -1 && length > -1) {
-      inputStream = new InputSubstream(inputStream, skip, length);
-    }
-
-    do {
-      numRead = inputStream.read(buffer);
-      if (numRead > 0) {
-        complete.update(buffer, 0, numRead);
-      }
-    } while (numRead != -1);
-
-    return complete.digest();
-  }
-
-  private static void createFile(File newFile, int size) throws IOException {
-    // write random data so that filesystems with compression enabled (e.g. ZFS)
-    // can't compress the file
-    byte[] data = new byte[size];
-    data = RandomUtils.secure().randomBytes(data.length);
-
-    RandomAccessFile file = new RandomAccessFile(newFile, "rws");
-
-    file.write(data);
-
-    file.getFD().sync();
-    file.close();
-  }
 }
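
The removed private helpers are the ones now provided by S3SDKTestUtils; the static imports at the top of the file keep the call sites unchanged. For context, the skip/length parameters of calculateDigest let a test digest a single slice of the source file and compare it against a ranged download, roughly as in the hypothetical sketch below (none of these names are from the commit).

```java
// Illustrative only (not from this commit): digest one slice of the source file
// and compare it with a ranged GET of the same slice via the SDK V1 client.
import static org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils.calculateDigest;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;

import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

public class PartDigestSketch {
  static void verifySlice(AmazonS3 s3, File sourceFile,
      String bucket, String key, long offset, long length) throws Exception {
    byte[] expected;
    try (InputStream in = new FileInputStream(sourceFile)) {
      expected = calculateDigest(in, (int) offset, (int) length);
    }
    GetObjectRequest rangedGet = new GetObjectRequest(bucket, key)
        .withRange(offset, offset + length - 1);  // byte range is inclusive
    try (S3Object object = s3.getObject(rangedGet)) {
      byte[] actual = calculateDigest(object.getObjectContent(), -1, -1);
      assertArrayEquals(expected, actual);
    }
  }
}
```
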

0 commit comments