
Commit f129cf3

Merge branch 'apache:trunk' into HADOOP-19438
2 parents: 28a0a0b + 5ccb0dc

26 files changed: +984 −129 lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferServer.java

Lines changed: 5 additions & 2 deletions
@@ -33,6 +33,7 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
+import java.util.TreeMap;
 
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -392,14 +393,16 @@ private IOStreamPair doSaslHandshake(Peer peer, OutputStream underlyingOut,
     SaslMessageWithHandshake message = readSaslMessageWithHandshakeSecret(in);
     byte[] secret = message.getSecret();
     String bpid = message.getBpid();
+    Map<String, String> dynamicSaslProps = new TreeMap<>(saslProps);
     if (secret != null || bpid != null) {
       // sanity check, if one is null, the other must also not be null
       assert(secret != null && bpid != null);
       String qop = new String(secret, StandardCharsets.UTF_8);
       saslProps.put(Sasl.QOP, qop);
+      dynamicSaslProps.put(Sasl.QOP, qop);
     }
     SaslParticipant sasl = SaslParticipant.createServerSaslParticipant(
-        saslProps, callbackHandler);
+        dynamicSaslProps, callbackHandler);
 
     byte[] remoteResponse = message.getPayload();
     byte[] localResponse = sasl.evaluateChallengeOrResponse(remoteResponse);
@@ -412,7 +415,7 @@ private IOStreamPair doSaslHandshake(Peer peer, OutputStream underlyingOut,
     localResponse = sasl.evaluateChallengeOrResponse(remoteResponse);
 
     // SASL handshake is complete
-    checkSaslComplete(sasl, saslProps);
+    checkSaslComplete(sasl, dynamicSaslProps);
 
     CipherOption cipherOption = null;
     negotiatedQOP = sasl.getNegotiatedQop();
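
The substance of this change: the map passed to SaslParticipant.createServerSaslParticipant and checkSaslComplete is now a per-handshake copy, dynamicSaslProps, so the QOP negotiated for one connection no longer has to be read back from the shared saslProps map that other concurrent handshakes also write to. A minimal sketch of the copy-then-customize pattern, with hypothetical class and method names:

```java
import java.util.Map;
import java.util.TreeMap;
import javax.security.sasl.Sasl;

final class PerHandshakeProps {
  // Shared defaults, visible to every handshake thread.
  private final Map<String, String> saslProps = new TreeMap<>();

  // Copy first, then customize: the map handed to the SASL machinery is
  // owned by this handshake, so a QOP set for one connection cannot be
  // observed or overwritten by another connection negotiating in parallel.
  Map<String, String> propsFor(String negotiatedQop) {
    Map<String, String> dynamicSaslProps = new TreeMap<>(saslProps);
    if (negotiatedQop != null) {
      dynamicSaslProps.put(Sasl.QOP, negotiatedQop);
    }
    return dynamicSaslProps;
  }
}
```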

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirXAttrOp.java

Lines changed: 1 addition & 22 deletions
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import org.apache.hadoop.HadoopIllegalArgumentException;
-import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
@@ -43,11 +42,10 @@
 import java.util.List;
 import java.util.ListIterator;
 
+import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.CRYPTO_XATTR_ENCRYPTION_ZONE;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.SECURITY_XATTR_UNREADABLE_BY_SUPERUSER;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.XATTR_SATISFY_STORAGE_POLICY;
-import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.CRYPTO_XATTR_FILE_ENCRYPTION_INFO;
 import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.XATTR_SNAPSHOT_DELETED;
-import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.CRYPTO_XATTR_ENCRYPTION_ZONE;
 
 public class FSDirXAttrOp {
   private static final XAttr KEYID_XATTR =
@@ -283,25 +281,6 @@ public static INode unprotectedSetXAttrs(
      * If we're adding the encryption zone xattr, then add src to the list
      * of encryption zones.
      */
-
-    if (CRYPTO_XATTR_FILE_ENCRYPTION_INFO.equals(xaName)) {
-      HdfsProtos.PerFileEncryptionInfoProto fileProto = HdfsProtos.
-          PerFileEncryptionInfoProto.parseFrom(xattr.getValue());
-      String keyVersionName = fileProto.getEzKeyVersionName();
-      String zoneKeyName = fsd.ezManager.getKeyName(iip);
-      if (zoneKeyName == null) {
-        throw new IOException("Cannot add raw feInfo XAttr to a file in a " +
-            "non-encryption zone");
-      }
-
-      if (!KeyProviderCryptoExtension.
-          getBaseName(keyVersionName).equals(zoneKeyName)) {
-        throw new IllegalArgumentException(String.format(
-            "KeyVersion '%s' does not belong to the key '%s'",
-            keyVersionName, zoneKeyName));
-      }
-    }
-
     if (CRYPTO_XATTR_ENCRYPTION_ZONE.equals(xaName)) {
       final HdfsProtos.ZoneEncryptionInfoProto ezProto =
           HdfsProtos.ZoneEncryptionInfoProto.parseFrom(xattr.getValue());

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java

Lines changed: 0 additions & 68 deletions
@@ -35,7 +35,6 @@
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
-import java.util.Map;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
@@ -68,7 +67,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileSystemTestHelper;
 import org.apache.hadoop.fs.FileSystemTestWrapper;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
@@ -330,72 +328,6 @@ public Object run() throws Exception {
     });
   }
 
-  /**
-   * Tests encrypted files with same original content placed in two different
-   * EZ are not same in encrypted form.
-   */
-  @Test
-  public void testEncryptionZonesDictCp() throws Exception {
-    final String testkey1 = "testkey1";
-    final String testkey2 = "testkey2";
-    DFSTestUtil.createKey(testkey1, cluster, conf);
-    DFSTestUtil.createKey(testkey2, cluster, conf);
-
-    final int len = 8196;
-    final Path zone1 = new Path("/zone1");
-    final Path zone1File = new Path(zone1, "file");
-    final Path raw1File = new Path("/.reserved/raw/zone1/file");
-
-    final Path zone2 = new Path("/zone2");
-    final Path zone2File = new Path(zone2, "file");
-    final Path raw2File = new Path(zone2, "/.reserved/raw/zone2/file");
-
-    // 1. Create two encrypted zones
-    fs.mkdirs(zone1, new FsPermission(700));
-    dfsAdmin.createEncryptionZone(zone1, testkey1, NO_TRASH);
-
-    fs.mkdirs(zone2, new FsPermission(700));
-    dfsAdmin.createEncryptionZone(zone2, testkey2, NO_TRASH);
-
-    // 2. Create a file in one of the zones
-    DFSTestUtil.createFile(fs, zone1File, len, (short) 1, 0xFEED);
-    // 3. Copy it to the other zone through /.raw/reserved
-    FileUtil.copy(fs, raw1File, fs, raw2File, false, conf);
-    Map<String, byte[]> attrs = fs.getXAttrs(raw1File);
-    if (attrs != null) {
-      for (Map.Entry<String, byte[]> entry : attrs.entrySet()) {
-        String xattrName = entry.getKey();
-
-        try {
-          fs.setXAttr(raw2File, xattrName, entry.getValue());
-          fail("Exception should be thrown while setting: " +
-              xattrName + " on file:" + raw2File);
-        } catch (RemoteException e) {
-          Assert.assertEquals(e.getClassName(),
-              IllegalArgumentException.class.getCanonicalName());
-          Assert.assertTrue(e.getMessage().
-              contains("does not belong to the key"));
-        }
-      }
-    }
-
-    assertEquals("File can be created on the root encryption zone " +
-        "with correct length", len, fs.getFileStatus(zone1File).getLen());
-    assertTrue("/zone1 dir is encrypted",
-        fs.getFileStatus(zone1).isEncrypted());
-    assertTrue("File is encrypted", fs.getFileStatus(zone1File).isEncrypted());
-
-    assertTrue("/zone2 dir is encrypted",
-        fs.getFileStatus(zone2).isEncrypted());
-    assertTrue("File is encrypted", fs.getFileStatus(zone2File).isEncrypted());
-
-    // 4. Now the decrypted contents of the files should be different.
-    DFSTestUtil.verifyFilesNotEqual(fs, zone1File, zone2File, len);
-
-    // 5. Encrypted contents of the files should be same.
-    DFSTestUtil.verifyFilesEqual(fs, raw1File, raw2File, len);
-  }
-
   /**
    * Make sure hdfs crypto -provisionTrash command creates a trash directory
    * with sticky bits.

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java

Lines changed: 9 additions & 0 deletions
@@ -1781,6 +1781,15 @@ private Constants() {
    */
   public static final boolean CHECKSUM_VALIDATION_DEFAULT = false;
 
+  /**
+   * Indicates the algorithm used to create the checksum for the object
+   * to be uploaded to S3. Unset by default. It supports the following values:
+   * 'CRC32', 'CRC32C', 'SHA1', and 'SHA256'
+   * value:{@value}
+   */
+  public static final String CHECKSUM_ALGORITHM =
+      "fs.s3a.create.checksum.algorithm";
+
   /**
    * Are extensions classes, such as {@code fs.s3a.aws.credentials.provider},
    * going to be loaded from the same classloader that loaded
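
For context (not part of the diff): this is an ordinary S3A configuration key, so enabling upload checksums looks like setting any other option. A usage sketch; the algorithm chosen here is arbitrary:

```java
import org.apache.hadoop.conf.Configuration;

public class ChecksumConfigExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Ask S3A to attach a CRC32C checksum to every object it creates.
    // Valid values, per the javadoc above: CRC32, CRC32C, SHA1, SHA256.
    conf.set("fs.s3a.create.checksum.algorithm", "CRC32C");
    System.out.println(conf.get("fs.s3a.create.checksum.algorithm"));
  }
}
```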

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ABlockOutputStream.java

Lines changed: 4 additions & 0 deletions
@@ -1064,6 +1064,10 @@ private void uploadBlockAsync(final S3ADataBlocks.DataBlock block,
         return CompletedPart.builder()
             .eTag(response.eTag())
             .partNumber(currentPartNumber)
+            .checksumCRC32(response.checksumCRC32())
+            .checksumCRC32C(response.checksumCRC32C())
+            .checksumSHA1(response.checksumSHA1())
+            .checksumSHA256(response.checksumSHA256())
             .build();
       } catch (Exception e) {
         final IOException ex = e instanceof IOException
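
Why the per-part checksums are carried over: when an upload uses an additional checksum algorithm, S3 expects each part's checksum value to be echoed back at completion time, so the CompletedPart entries must preserve what each UploadPartResponse returned. A hedged sketch of that completion call using the AWS SDK v2 types already in play above (bucket, key, and uploadId are placeholders):

```java
import java.util.List;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest;
import software.amazon.awssdk.services.s3.model.CompletedMultipartUpload;
import software.amazon.awssdk.services.s3.model.CompletedPart;

final class CompleteUploadSketch {
  // The CompletedPart list must carry the same checksum fields the
  // UploadPartResponse returned, which is what the diff above wires up.
  static void complete(S3Client s3, String bucket, String key,
      String uploadId, List<CompletedPart> parts) {
    s3.completeMultipartUpload(CompleteMultipartUploadRequest.builder()
        .bucket(bucket)
        .key(key)
        .uploadId(uploadId)
        .multipartUpload(CompletedMultipartUpload.builder()
            .parts(parts)
            .build())
        .build());
  }
}
```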

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java

Lines changed: 2 additions & 0 deletions
@@ -117,6 +117,7 @@
 import org.apache.hadoop.fs.s3a.impl.BulkDeleteOperationCallbacksImpl;
 import org.apache.hadoop.fs.s3a.impl.CSES3AFileSystemOperations;
 import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy;
+import org.apache.hadoop.fs.s3a.impl.ChecksumSupport;
 import org.apache.hadoop.fs.s3a.impl.ClientManager;
 import org.apache.hadoop.fs.s3a.impl.ClientManagerImpl;
 import org.apache.hadoop.fs.s3a.impl.ConfigurationHelper;
@@ -1327,6 +1328,7 @@ protected RequestFactory createRequestFactory() {
         .withStorageClass(storageClass)
         .withMultipartUploadEnabled(isMultipartUploadEnabled)
         .withPartUploadTimeout(partUploadTimeout)
+        .withChecksumAlgorithm(ChecksumSupport.getChecksumAlgorithm(getConf()))
         .build();
   }
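
ChecksumSupport is added elsewhere in this commit; its body is not part of this excerpt. A plausible reading of getChecksumAlgorithm, written as a hypothetical standalone helper (the mapping logic is an assumption; only the config key comes from the diff):

```java
import java.util.Locale;
import org.apache.hadoop.conf.Configuration;
import software.amazon.awssdk.services.s3.model.ChecksumAlgorithm;

final class ChecksumSupportSketch {
  // Hypothetical equivalent of ChecksumSupport.getChecksumAlgorithm:
  // resolve fs.s3a.create.checksum.algorithm to the SDK enum, null if unset.
  static ChecksumAlgorithm getChecksumAlgorithm(Configuration conf) {
    String name = conf.getTrimmed("fs.s3a.create.checksum.algorithm", "");
    if (name.isEmpty()) {
      return null; // default: no additional checksum requested
    }
    // fromValue() accepts the wire names: CRC32, CRC32C, SHA1, SHA256.
    return ChecksumAlgorithm.fromValue(name.toUpperCase(Locale.ROOT));
  }
}
```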

hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java

Lines changed: 10 additions & 9 deletions
@@ -71,7 +71,7 @@
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class SinglePendingCommit extends PersistentCommitData<SinglePendingCommit>
-    implements Iterable<String> {
+    implements Iterable<UploadEtag> {
 
   /**
    * Serialization ID: {@value}.
@@ -118,7 +118,7 @@ public class SinglePendingCommit extends PersistentCommitData<SinglePendingCommit>
   private String text = "";
 
   /** Ordered list of etags. */
-  private List<String> etags;
+  private List<UploadEtag> etags;
 
   /**
    * Any custom extra data committer subclasses may choose to add.
@@ -222,7 +222,7 @@ public void bindCommitData(List<CompletedPart> parts) throws ValidationFailure {
     for (CompletedPart part : parts) {
       verify(part.partNumber() == counter,
           "Expected part number %s but got %s", counter, part.partNumber());
-      etags.add(part.eTag());
+      etags.add(UploadEtag.fromCompletedPart(part));
       counter++;
     }
   }
@@ -237,9 +237,10 @@ public void validate() throws ValidationFailure {
     verify(length >= 0, "Invalid length: " + length);
     destinationPath();
     verify(etags != null, "No etag list");
-    validateCollectionClass(etags, String.class);
-    for (String etag : etags) {
-      verify(StringUtils.isNotEmpty(etag), "Empty etag");
+    validateCollectionClass(etags, UploadEtag.class);
+    for (UploadEtag etag : etags) {
+      verify(etag != null && StringUtils.isNotEmpty(etag.getEtag()),
+          "Empty etag");
     }
     if (extraData != null) {
       validateCollectionClass(extraData.keySet(), String.class);
@@ -313,7 +314,7 @@ public int getPartCount() {
    * @return an iterator.
    */
   @Override
-  public Iterator<String> iterator() {
+  public Iterator<UploadEtag> iterator() {
     return etags.iterator();
   }
@@ -442,11 +443,11 @@ public void setText(String text) {
   }
 
   /** @return ordered list of etags. */
-  public List<String> getEtags() {
+  public List<UploadEtag> getEtags() {
     return etags;
   }
 
-  public void setEtags(List<String> etags) {
+  public void setEtags(List<UploadEtag> etags) {
     this.etags = etags;
   }
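
UploadEtag is a new class introduced elsewhere in this commit; its definition is not part of this excerpt. A guessed sketch of the shape the calls above imply (fromCompletedPart and getEtag come from the diff; the field layout is an assumption):

```java
import software.amazon.awssdk.services.s3.model.CompletedPart;

// Hypothetical stand-in for the real UploadEtag: it has to keep the etag
// plus whichever per-part checksum was set, so commit data can rebuild a
// CompletedPart (checksums included) when the upload is completed later.
final class UploadEtagSketch {
  private String etag;
  private String checksumAlgorithm;
  private String checksum;

  static UploadEtagSketch fromCompletedPart(CompletedPart part) {
    UploadEtagSketch e = new UploadEtagSketch();
    e.etag = part.eTag();
    if (part.checksumCRC32() != null) {
      e.checksumAlgorithm = "CRC32";
      e.checksum = part.checksumCRC32();
    } else if (part.checksumCRC32C() != null) {
      e.checksumAlgorithm = "CRC32C";
      e.checksum = part.checksumCRC32C();
    } else if (part.checksumSHA1() != null) {
      e.checksumAlgorithm = "SHA1";
      e.checksum = part.checksumSHA1();
    } else if (part.checksumSHA256() != null) {
      e.checksumAlgorithm = "SHA256";
      e.checksum = part.checksumSHA256();
    }
    return e;
  }

  String getEtag() {
    return etag;
  }

  String getChecksum() {
    return checksum;
  }
}
```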
