Skip to content

Commit 3dac730

Browse files
committed
HDFS-10744. Internally optimize path component resolution. Contributed by Daryn Sharp.
(cherry picked from commit 1ef8d7a)
1 parent 5a779cc commit 3dac730

File tree

15 files changed

+115
-186
lines changed

15 files changed

+115
-186
lines changed

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAclOp.java

Lines changed: 6 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -39,8 +39,7 @@ static HdfsFileStatus modifyAclEntries(
3939
String src = srcArg;
4040
checkAclsConfigFlag(fsd);
4141
FSPermissionChecker pc = fsd.getPermissionChecker();
42-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
43-
src = fsd.resolvePath(pc, src, pathComponents);
42+
src = fsd.resolvePath(pc, src);
4443
INodesInPath iip;
4544
fsd.writeLock();
4645
try {
@@ -65,8 +64,7 @@ static HdfsFileStatus removeAclEntries(
6564
String src = srcArg;
6665
checkAclsConfigFlag(fsd);
6766
FSPermissionChecker pc = fsd.getPermissionChecker();
68-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
69-
src = fsd.resolvePath(pc, src, pathComponents);
67+
src = fsd.resolvePath(pc, src);
7068
INodesInPath iip;
7169
fsd.writeLock();
7270
try {
@@ -90,8 +88,7 @@ static HdfsFileStatus removeDefaultAcl(FSDirectory fsd, final String srcArg)
9088
String src = srcArg;
9189
checkAclsConfigFlag(fsd);
9290
FSPermissionChecker pc = fsd.getPermissionChecker();
93-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
94-
src = fsd.resolvePath(pc, src, pathComponents);
91+
src = fsd.resolvePath(pc, src);
9592
INodesInPath iip;
9693
fsd.writeLock();
9794
try {
@@ -115,8 +112,7 @@ static HdfsFileStatus removeAcl(FSDirectory fsd, final String srcArg)
115112
String src = srcArg;
116113
checkAclsConfigFlag(fsd);
117114
FSPermissionChecker pc = fsd.getPermissionChecker();
118-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
119-
src = fsd.resolvePath(pc, src, pathComponents);
115+
src = fsd.resolvePath(pc, src);
120116
INodesInPath iip;
121117
fsd.writeLock();
122118
try {
@@ -135,9 +131,8 @@ static HdfsFileStatus setAcl(
135131
throws IOException {
136132
String src = srcArg;
137133
checkAclsConfigFlag(fsd);
138-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
139134
FSPermissionChecker pc = fsd.getPermissionChecker();
140-
src = fsd.resolvePath(pc, src, pathComponents);
135+
src = fsd.resolvePath(pc, src);
141136
INodesInPath iip;
142137
fsd.writeLock();
143138
try {
@@ -155,8 +150,7 @@ static AclStatus getAclStatus(
155150
FSDirectory fsd, String src) throws IOException {
156151
checkAclsConfigFlag(fsd);
157152
FSPermissionChecker pc = fsd.getPermissionChecker();
158-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
159-
src = fsd.resolvePath(pc, src, pathComponents);
153+
src = fsd.resolvePath(pc, src);
160154
String srcs = FSDirectory.normalizePath(src);
161155
fsd.readLock();
162156
try {

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAppendOp.java

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -82,14 +82,12 @@ static LastBlockWithStatus appendFile(final FSNamesystem fsn,
8282
final boolean logRetryCache) throws IOException {
8383
assert fsn.hasWriteLock();
8484

85-
final byte[][] pathComponents = FSDirectory
86-
.getPathComponentsForReservedPath(srcArg);
8785
final LocatedBlock lb;
8886
final FSDirectory fsd = fsn.getFSDirectory();
8987
final String src;
9088
fsd.writeLock();
9189
try {
92-
src = fsd.resolvePath(pc, srcArg, pathComponents);
90+
src = fsd.resolvePath(pc, srcArg);
9391
final INodesInPath iip = fsd.getINodesInPath4Write(src);
9492
// Verify that the destination does not exist as a directory already
9593
final INode inode = iip.getLastINode();

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java

Lines changed: 7 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -57,11 +57,10 @@ static HdfsFileStatus setPermission(
5757
throw new InvalidPathException(src);
5858
}
5959
FSPermissionChecker pc = fsd.getPermissionChecker();
60-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
6160
INodesInPath iip;
6261
fsd.writeLock();
6362
try {
64-
src = fsd.resolvePath(pc, src, pathComponents);
63+
src = fsd.resolvePath(pc, src);
6564
iip = fsd.getINodesInPath4Write(src);
6665
fsd.checkOwner(pc, iip);
6766
unprotectedSetPermission(fsd, src, permission);
@@ -79,11 +78,10 @@ static HdfsFileStatus setOwner(
7978
throw new InvalidPathException(src);
8079
}
8180
FSPermissionChecker pc = fsd.getPermissionChecker();
82-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
8381
INodesInPath iip;
8482
fsd.writeLock();
8583
try {
86-
src = fsd.resolvePath(pc, src, pathComponents);
84+
src = fsd.resolvePath(pc, src);
8785
iip = fsd.getINodesInPath4Write(src);
8886
fsd.checkOwner(pc, iip);
8987
if (!pc.isSuperUser()) {
@@ -106,12 +104,11 @@ static HdfsFileStatus setTimes(
106104
FSDirectory fsd, String src, long mtime, long atime)
107105
throws IOException {
108106
FSPermissionChecker pc = fsd.getPermissionChecker();
109-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
110107

111108
INodesInPath iip;
112109
fsd.writeLock();
113110
try {
114-
src = fsd.resolvePath(pc, src, pathComponents);
111+
src = fsd.resolvePath(pc, src);
115112
iip = fsd.getINodesInPath4Write(src);
116113
// Write access is required to set access and modification times
117114
if (fsd.isPermissionEnabled()) {
@@ -139,10 +136,9 @@ static boolean setReplication(
139136
bm.verifyReplication(src, replication, null);
140137
final boolean isFile;
141138
FSPermissionChecker pc = fsd.getPermissionChecker();
142-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
143139
fsd.writeLock();
144140
try {
145-
src = fsd.resolvePath(pc, src, pathComponents);
141+
src = fsd.resolvePath(pc, src);
146142
final INodesInPath iip = fsd.getINodesInPath4Write(src);
147143
if (fsd.isPermissionEnabled()) {
148144
fsd.checkPathAccess(pc, iip, FsAction.WRITE);
@@ -187,11 +183,10 @@ static HdfsFileStatus setStoragePolicy(FSDirectory fsd, BlockManager bm,
187183
DFS_STORAGE_POLICY_ENABLED_KEY));
188184
}
189185
FSPermissionChecker pc = fsd.getPermissionChecker();
190-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
191186
INodesInPath iip;
192187
fsd.writeLock();
193188
try {
194-
src = FSDirectory.resolvePath(src, pathComponents, fsd);
189+
src = FSDirectory.resolvePath(src, fsd);
195190
iip = fsd.getINodesInPath4Write(src);
196191

197192
if (fsd.isPermissionEnabled()) {
@@ -214,11 +209,9 @@ static BlockStoragePolicy[] getStoragePolicies(BlockManager bm)
214209
static BlockStoragePolicy getStoragePolicy(FSDirectory fsd, BlockManager bm,
215210
String path) throws IOException {
216211
FSPermissionChecker pc = fsd.getPermissionChecker();
217-
byte[][] pathComponents = FSDirectory
218-
.getPathComponentsForReservedPath(path);
219212
fsd.readLock();
220213
try {
221-
path = fsd.resolvePath(pc, path, pathComponents);
214+
path = fsd.resolvePath(pc, path);
222215
final INodesInPath iip = fsd.getINodesInPath(path, false);
223216
if (fsd.isPermissionEnabled()) {
224217
fsd.checkPathAccess(pc, iip, FsAction.READ);
@@ -237,10 +230,9 @@ static BlockStoragePolicy getStoragePolicy(FSDirectory fsd, BlockManager bm,
237230
static long getPreferredBlockSize(FSDirectory fsd, String src)
238231
throws IOException {
239232
FSPermissionChecker pc = fsd.getPermissionChecker();
240-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
241233
fsd.readLock();
242234
try {
243-
src = fsd.resolvePath(pc, src, pathComponents);
235+
src = fsd.resolvePath(pc, src);
244236
final INodesInPath iip = fsd.getINodesInPath(src, false);
245237
if (fsd.isPermissionEnabled()) {
246238
fsd.checkTraverse(pc, iip);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirDeleteOp.java

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -97,9 +97,8 @@ static BlocksMapUpdateInfo delete(
9797
throws IOException {
9898
FSDirectory fsd = fsn.getFSDirectory();
9999
FSPermissionChecker pc = fsd.getPermissionChecker();
100-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
101100

102-
src = fsd.resolvePath(pc, src, pathComponents);
101+
src = fsd.resolvePath(pc, src);
103102
final INodesInPath iip = fsd.getINodesInPath4Write(src, false);
104103
if (!recursive && fsd.isNonEmptyDirectory(iip)) {
105104
throw new PathIsNotEmptyDirectoryException(src + " is non empty");
@@ -109,7 +108,7 @@ static BlocksMapUpdateInfo delete(
109108
FsAction.ALL, true);
110109
}
111110
if (recursive && fsd.isNonEmptyDirectory(iip)) {
112-
checkProtectedDescendants(fsd, fsd.normalizePath(src));
111+
checkProtectedDescendants(fsd, src);
113112
}
114113

115114
return deleteInternal(fsn, src, iip, logRetryCache);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirEncryptionZoneOp.java

Lines changed: 2 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -146,8 +146,6 @@ static KeyProvider.Metadata ensureKeyIsInitialized(final FSDirectory fsd,
146146
static HdfsFileStatus createEncryptionZone(final FSDirectory fsd,
147147
final String srcArg, final FSPermissionChecker pc, final String cipher,
148148
final String keyName, final boolean logRetryCache) throws IOException {
149-
final byte[][] pathComponents = FSDirectory
150-
.getPathComponentsForReservedPath(srcArg);
151149
final CipherSuite suite = CipherSuite.convert(cipher);
152150
List<XAttr> xAttrs = Lists.newArrayListWithCapacity(1);
153151
final String src;
@@ -157,7 +155,7 @@ static HdfsFileStatus createEncryptionZone(final FSDirectory fsd,
157155

158156
fsd.writeLock();
159157
try {
160-
src = fsd.resolvePath(pc, srcArg, pathComponents);
158+
src = fsd.resolvePath(pc, srcArg);
161159
final XAttr ezXAttr = fsd.ezManager.createEncryptionZone(src, suite,
162160
version, keyName);
163161
xAttrs.add(ezXAttr);
@@ -180,14 +178,12 @@ static HdfsFileStatus createEncryptionZone(final FSDirectory fsd,
180178
static Map.Entry<EncryptionZone, HdfsFileStatus> getEZForPath(
181179
final FSDirectory fsd, final String srcArg, final FSPermissionChecker pc)
182180
throws IOException {
183-
final byte[][] pathComponents = FSDirectory
184-
.getPathComponentsForReservedPath(srcArg);
185181
final String src;
186182
final INodesInPath iip;
187183
final EncryptionZone ret;
188184
fsd.readLock();
189185
try {
190-
src = fsd.resolvePath(pc, srcArg, pathComponents);
186+
src = fsd.resolvePath(pc, srcArg);
191187
iip = fsd.getINodesInPath(src, true);
192188
if (iip.getLastINode() == null) {
193189
throw new FileNotFoundException("Path not found: " + iip.getPath());

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -50,10 +50,9 @@ static HdfsFileStatus mkdirs(FSNamesystem fsn, String src,
5050
throw new InvalidPathException(src);
5151
}
5252
FSPermissionChecker pc = fsd.getPermissionChecker();
53-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
5453
fsd.writeLock();
5554
try {
56-
src = fsd.resolvePath(pc, src, pathComponents);
55+
src = fsd.resolvePath(pc, src);
5756
INodesInPath iip = fsd.getINodesInPath4Write(src);
5857
if (fsd.isPermissionEnabled()) {
5958
fsd.checkTraverse(pc, iip);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirRenameOp.java

Lines changed: 4 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -64,11 +64,9 @@ static RenameOldResult renameToInt(
6464
}
6565
FSPermissionChecker pc = fsd.getPermissionChecker();
6666

67-
byte[][] srcComponents = FSDirectory.getPathComponentsForReservedPath(src);
68-
byte[][] dstComponents = FSDirectory.getPathComponentsForReservedPath(dst);
6967
HdfsFileStatus resultingStat = null;
70-
src = fsd.resolvePath(pc, src, srcComponents);
71-
dst = fsd.resolvePath(pc, dst, dstComponents);
68+
src = fsd.resolvePath(pc, src);
69+
dst = fsd.resolvePath(pc, dst);
7270
@SuppressWarnings("deprecation")
7371
final boolean status = renameTo(fsd, pc, src, dst, logRetryCache);
7472
if (status) {
@@ -239,11 +237,9 @@ static Map.Entry<BlocksMapUpdateInfo, HdfsFileStatus> renameToInt(
239237
}
240238
final FSPermissionChecker pc = fsd.getPermissionChecker();
241239

242-
byte[][] srcComponents = FSDirectory.getPathComponentsForReservedPath(src);
243-
byte[][] dstComponents = FSDirectory.getPathComponentsForReservedPath(dst);
244240
BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo();
245-
src = fsd.resolvePath(pc, src, srcComponents);
246-
dst = fsd.resolvePath(pc, dst, dstComponents);
241+
src = fsd.resolvePath(pc, src);
242+
dst = fsd.resolvePath(pc, dst);
247243
renameTo(fsd, pc, src, dst, collectedBlocks, logRetryCache, options);
248244
INodesInPath dstIIP = fsd.getINodesInPath(dst, false);
249245
HdfsFileStatus resultingStat = fsd.getAuditFileInfo(dstIIP);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirStatAndListingOp.java

Lines changed: 9 additions & 18 deletions
Original file line number | Diff line number | Diff line change
@@ -50,24 +50,20 @@
5050
class FSDirStatAndListingOp {
5151
static DirectoryListing getListingInt(FSDirectory fsd, final String srcArg,
5252
byte[] startAfter, boolean needLocation) throws IOException {
53-
byte[][] pathComponents = FSDirectory
54-
.getPathComponentsForReservedPath(srcArg);
5553
final String startAfterString = DFSUtil.bytes2String(startAfter);
5654
String src = null;
5755

5856
if (fsd.isPermissionEnabled()) {
5957
FSPermissionChecker pc = fsd.getPermissionChecker();
60-
src = fsd.resolvePath(pc, srcArg, pathComponents);
58+
src = fsd.resolvePath(pc, srcArg);
6159
} else {
62-
src = FSDirectory.resolvePath(srcArg, pathComponents, fsd);
60+
src = FSDirectory.resolvePath(srcArg, fsd);
6361
}
6462

6563
// Get file name when startAfter is an INodePath
6664
if (FSDirectory.isReservedName(startAfterString)) {
67-
byte[][] startAfterComponents = FSDirectory
68-
.getPathComponentsForReservedPath(startAfterString);
6965
try {
70-
String tmp = FSDirectory.resolvePath(src, startAfterComponents, fsd);
66+
String tmp = FSDirectory.resolvePath(startAfterString, fsd);
7167
byte[][] regularPath = INode.getPathComponents(tmp);
7268
startAfter = regularPath[regularPath.length - 1];
7369
} catch (IOException e) {
@@ -108,14 +104,13 @@ static HdfsFileStatus getFileInfo(
108104
if (!DFSUtil.isValidName(src)) {
109105
throw new InvalidPathException("Invalid file name: " + src);
110106
}
111-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
112107
if (fsd.isPermissionEnabled()) {
113108
FSPermissionChecker pc = fsd.getPermissionChecker();
114-
src = fsd.resolvePath(pc, src, pathComponents);
109+
src = fsd.resolvePath(pc, srcArg);
115110
final INodesInPath iip = fsd.getINodesInPath(src, resolveLink);
116111
fsd.checkPermission(pc, iip, false, null, null, null, null, false);
117112
} else {
118-
src = FSDirectory.resolvePath(src, pathComponents, fsd);
113+
src = FSDirectory.resolvePath(srcArg, fsd);
119114
}
120115
return getFileInfo(fsd, src, FSDirectory.isReservedRawName(srcArg),
121116
resolveLink);
@@ -126,8 +121,7 @@ static HdfsFileStatus getFileInfo(
126121
*/
127122
static boolean isFileClosed(FSDirectory fsd, String src) throws IOException {
128123
FSPermissionChecker pc = fsd.getPermissionChecker();
129-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
130-
src = fsd.resolvePath(pc, src, pathComponents);
124+
src = fsd.resolvePath(pc, src);
131125
final INodesInPath iip = fsd.getINodesInPath(src, true);
132126
if (fsd.isPermissionEnabled()) {
133127
fsd.checkTraverse(pc, iip);
@@ -137,9 +131,8 @@ static boolean isFileClosed(FSDirectory fsd, String src) throws IOException {
137131

138132
static ContentSummary getContentSummary(
139133
FSDirectory fsd, String src) throws IOException {
140-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
141134
FSPermissionChecker pc = fsd.getPermissionChecker();
142-
src = fsd.resolvePath(pc, src, pathComponents);
135+
src = fsd.resolvePath(pc, src);
143136
final INodesInPath iip = fsd.getINodesInPath(src, false);
144137
if (fsd.isPermissionEnabled()) {
145138
fsd.checkPermission(pc, iip, false, null, null, null,
@@ -162,11 +155,10 @@ static GetBlockLocationsResult getBlockLocations(
162155
"Negative length is not supported. File: " + src);
163156
CacheManager cm = fsd.getFSNamesystem().getCacheManager();
164157
BlockManager bm = fsd.getBlockManager();
165-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
166158
boolean isReservedName = FSDirectory.isReservedRawName(src);
167159
fsd.readLock();
168160
try {
169-
src = fsd.resolvePath(pc, src, pathComponents);
161+
src = fsd.resolvePath(pc, src);
170162
final INodesInPath iip = fsd.getINodesInPath(src, true);
171163
final INodeFile inode = INodeFile.valueOf(iip.getLastINode(), src);
172164
if (fsd.isPermissionEnabled()) {
@@ -613,12 +605,11 @@ private static ContentSummary getContentSummaryInt(FSDirectory fsd,
613605

614606
static QuotaUsage getQuotaUsage(
615607
FSDirectory fsd, String src) throws IOException {
616-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(src);
617608
FSPermissionChecker pc = fsd.getPermissionChecker();
618609
final INodesInPath iip;
619610
fsd.readLock();
620611
try {
621-
src = fsd.resolvePath(pc, src, pathComponents);
612+
src = fsd.resolvePath(pc, src);
622613
iip = fsd.getINodesInPath(src, false);
623614
if (fsd.isPermissionEnabled()) {
624615
fsd.checkPermission(pc, iip, false, null, null, null,

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSymlinkOp.java

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -53,11 +53,10 @@ static HdfsFileStatus createSymlinkInt(
5353
}
5454

5555
FSPermissionChecker pc = fsn.getPermissionChecker();
56-
byte[][] pathComponents = FSDirectory.getPathComponentsForReservedPath(link);
5756
INodesInPath iip;
5857
fsd.writeLock();
5958
try {
60-
link = fsd.resolvePath(pc, link, pathComponents);
59+
link = fsd.resolvePath(pc, link);
6160
iip = fsd.getINodesInPath4Write(link, false);
6261
if (!createParent) {
6362
fsd.verifyParentDir(iip, link);

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirTruncateOp.java

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -70,15 +70,13 @@ static TruncateResult truncate(final FSNamesystem fsn, final String srcArg,
7070
assert fsn.hasWriteLock();
7171

7272
FSDirectory fsd = fsn.getFSDirectory();
73-
byte[][] pathComponents = FSDirectory
74-
.getPathComponentsForReservedPath(srcArg);
7573
final String src;
7674
final INodesInPath iip;
7775
final boolean onBlockBoundary;
7876
Block truncateBlock = null;
7977
fsd.writeLock();
8078
try {
81-
src = fsd.resolvePath(pc, srcArg, pathComponents);
79+
src = fsd.resolvePath(pc, srcArg);
8280
iip = fsd.getINodesInPath4Write(src, true);
8381
if (fsd.isPermissionEnabled()) {
8482
fsd.checkPathAccess(pc, iip, FsAction.WRITE);

0 commit comments

Comments (0)