
Commit

Merge branch 'trunk' into HADOOP-18516-fixedSAS
Anuj Modi committed Apr 3, 2024
2 parents a65cdd7 + 1357bb1 commit a79cae1
Showing 278 changed files with 81,558 additions and 1,956 deletions.
16 changes: 8 additions & 8 deletions LICENSE-binary
@@ -241,7 +241,7 @@ com.google.guava:guava:20.0
com.google.guava:guava:27.0-jre
com.google.guava:listenablefuture:9999.0-empty-to-avoid-conflict-with-guava
com.microsoft.azure:azure-storage:7.0.0
-com.nimbusds:nimbus-jose-jwt:9.31
+com.nimbusds:nimbus-jose-jwt:9.37.2
com.zaxxer:HikariCP:4.0.3
commons-beanutils:commons-beanutils:1.9.4
commons-cli:commons-cli:1.5.0
@@ -300,7 +300,7 @@ net.minidev:accessors-smart:1.2
org.apache.avro:avro:1.9.2
org.apache.commons:commons-collections4:4.2
org.apache.commons:commons-compress:1.24.0
-org.apache.commons:commons-configuration2:2.8.0
+org.apache.commons:commons-configuration2:2.10.1
org.apache.commons:commons-csv:1.9.0
org.apache.commons:commons-digester:1.8.1
org.apache.commons:commons-lang3:3.12.0
@@ -340,7 +340,7 @@ org.apache.sshd:sshd-core:2.11.0
org.apache.sshd:sshd-sftp:2.11.0
org.apache.solr:solr-solrj:8.11.2
org.apache.yetus:audience-annotations:0.5.0
-org.apache.zookeeper:zookeeper:3.8.3
+org.apache.zookeeper:zookeeper:3.8.4
org.codehaus.jettison:jettison:1.5.4
org.eclipse.jetty:jetty-annotations:9.4.53.v20231009
org.eclipse.jetty:jetty-http:9.4.53.v20231009
@@ -356,7 +356,7 @@ org.eclipse.jetty:jetty-webapp:9.4.53.v20231009
org.eclipse.jetty:jetty-xml:9.4.53.v20231009
org.eclipse.jetty.websocket:javax-websocket-client-impl:9.4.53.v20231009
org.eclipse.jetty.websocket:javax-websocket-server-impl:9.4.53.v20231009
-org.ehcache:ehcache:3.3.1
+org.ehcache:ehcache:3.8.2
org.ini4j:ini4j:0.5.4
org.lz4:lz4-java:1.7.1
org.objenesis:objenesis:2.6
@@ -483,9 +483,9 @@ com.microsoft.azure:azure-cosmosdb-gateway:2.4.5
com.microsoft.azure:azure-data-lake-store-sdk:2.3.3
com.microsoft.azure:azure-keyvault-core:1.0.0
com.microsoft.sqlserver:mssql-jdbc:6.2.1.jre7
-org.bouncycastle:bcpkix-jdk15on:1.70
-org.bouncycastle:bcprov-jdk15on:1.70
-org.bouncycastle:bcutil-jdk15on:1.70
+org.bouncycastle:bcpkix-jdk18on:1.77
+org.bouncycastle:bcprov-jdk18on:1.77
+org.bouncycastle:bcutil-jdk18on:1.77
org.checkerframework:checker-qual:2.5.2
org.codehaus.mojo:animal-sniffer-annotations:1.21
org.jruby.jcodings:jcodings:1.0.13
@@ -499,7 +499,7 @@ org.slf4j:slf4j-reload4j:1.7.36
CDDL 1.1 + GPLv2 with classpath exception
-----------------------------------------

-com.github.pjfanning:jersey-json:1.20
+com.github.pjfanning:jersey-json:1.22.0
com.sun.jersey:jersey-client:1.19.4
com.sun.jersey:jersey-core:1.19.4
com.sun.jersey:jersey-guice:1.19.4
5 changes: 5 additions & 0 deletions dev-support/bin/hadoop.sh
@@ -387,6 +387,11 @@ function personality_modules
fi
;;
unit)
if [[ "$IS_WINDOWS" && "$IS_WINDOWS" == 1 && (-z "$IS_NIGHTLY_BUILD" || "$IS_NIGHTLY_BUILD" == 0) ]]; then
echo "Won't run unit tests for Windows in pre-commit CI"
return
fi

extra="-Dsurefire.rerunFailingTestsCount=2"
if [[ "${BUILDMODE}" = full ]]; then
ordering=mvnsrc
@@ -100,6 +100,7 @@
<exclude>org.bouncycastle:*</exclude>
<!-- Leave snappy that includes native methods which cannot be relocated. -->
<exclude>org.xerial.snappy:*</exclude>
+<exclude>org.ehcache:*</exclude>
</excludes>
</banTransitiveDependencies>
<banDuplicateClasses>
@@ -58,6 +58,12 @@ allowed_expr+="|^org.apache.hadoop.application-classloader.properties$"
allowed_expr+="|^java.policy$"
# * Used by javax.annotation
allowed_expr+="|^jndi.properties$"
+# * Used by ehcache
+allowed_expr+="|^ehcache-107-ext.xsd$"
+allowed_expr+="|^ehcache-multi.xsd$"
+allowed_expr+="|^.gitkeep$"
+allowed_expr+="|^OSGI-INF.*$"
+allowed_expr+="|^javax.*$"

allowed_expr+=")"
declare -i bad_artifacts=0
4 changes: 2 additions & 2 deletions hadoop-client-modules/hadoop-client-integration-tests/pom.xml
@@ -77,12 +77,12 @@
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
-<artifactId>bcprov-jdk15on</artifactId>
+<artifactId>bcprov-jdk18on</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
-<artifactId>bcpkix-jdk15on</artifactId>
+<artifactId>bcpkix-jdk18on</artifactId>
<scope>test</scope>
</dependency>
<dependency>
@@ -86,7 +86,7 @@ Linux kernel 2.6+
- joda-time (version 2.9.9 recommended)
- httpClient (version 4.5.1 or later recommended)
- Jackson: jackson-core, jackson-databind, jackson-annotations (version 2.9.8 or later)
-- bcprov-jdk15on (version 1.59 recommended)
+- bcprov-jdk18on (version 1.77 recommended)


#### Configure Properties
2 changes: 1 addition & 1 deletion hadoop-common-project/hadoop-auth/pom.xml
@@ -108,7 +108,7 @@
<exclusions>
<exclusion>
<groupId>org.bouncycastle</groupId>
-<artifactId>bcprov-jdk15on</artifactId>
+<artifactId>bcprov-jdk18on</artifactId>
</exclusion>
</exclusions>
</dependency>
@@ -21,12 +21,11 @@

import java.io.File;
import java.io.IOException;
-import java.io.UnsupportedEncodingException;
import java.lang.reflect.InvocationTargetException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
-import java.nio.charset.IllegalCharsetNameException;
+import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
@@ -426,12 +425,8 @@ DER get(int... tags) {
}

String getAsString() {
-    try {
-      return new String(bb.array(), bb.arrayOffset() + bb.position(),
-          bb.remaining(), "UTF-8");
-    } catch (UnsupportedEncodingException e) {
-      throw new IllegalCharsetNameException("UTF-8"); // won't happen.
-    }
+    return new String(bb.array(), bb.arrayOffset() + bb.position(),
+        bb.remaining(), StandardCharsets.UTF_8);
}

@Override
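The hunk above swaps the String-named charset overload for the constant-based one, which is why both the UnsupportedEncodingException and IllegalCharsetNameException imports disappear. A minimal standalone sketch of the difference (illustrative data, not code from this commit):

```java
import java.nio.charset.StandardCharsets;

public class CharsetDemo {
  public static void main(String[] args) throws Exception {
    byte[] raw = "kerberos".getBytes(StandardCharsets.UTF_8);

    // Old style: the String-named overload declares the checked
    // UnsupportedEncodingException, forcing a catch that can never fire
    // for a charset the JDK guarantees to be present.
    String viaName = new String(raw, "UTF-8");

    // New style: the Charset overload throws no checked exception,
    // so the dead catch block can be deleted outright.
    String viaConstant = new String(raw, StandardCharsets.UTF_8);

    System.out.println(viaName.equals(viaConstant)); // true
  }
}
```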
4 changes: 3 additions & 1 deletion hadoop-common-project/hadoop-common/pom.xml
@@ -374,7 +374,7 @@
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
-<artifactId>bcprov-jdk15on</artifactId>
+<artifactId>bcprov-jdk18on</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kerby</groupId>
@@ -646,6 +646,7 @@
<exclude>src/main/native/*</exclude>
<exclude>src/main/native/config/*</exclude>
<exclude>src/main/native/m4/*</exclude>
+<exclude>src/main/winutils/winutils.sln</exclude>
<exclude>src/test/empty-file</exclude>
<exclude>src/test/all-tests</exclude>
<exclude>src/main/native/gtest/**/*</exclude>
@@ -655,6 +656,7 @@
<exclude>src/test/resources/test.har/_masterindex</exclude>
<exclude>src/test/resources/test.har/part-0</exclude>
<exclude>src/test/resources/javakeystoreprovider.password</exclude>
+<exclude>src/test/resources/lz4/sequencefile</exclude>
<exclude>dev-support/jdiff-workaround.patch</exclude>
</excludes>
</configuration>
@@ -24,13 +24,13 @@
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.core.HttpHeaders;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.http.HttpServer2;

import org.apache.hadoop.classification.VisibleForTesting;
+import org.apache.hadoop.thirdparty.com.google.common.net.HttpHeaders;

/**
* A servlet to print out the running configuration data.
@@ -3565,7 +3565,7 @@ private void checkForOverride(Properties properties, String name, String attr, S
* @throws IOException raised on errors performing I/O.
*/
public void writeXml(OutputStream out) throws IOException {
writeXml(new OutputStreamWriter(out, "UTF-8"));
writeXml(new OutputStreamWriter(out, StandardCharsets.UTF_8));
}

public void writeXml(Writer out) throws IOException {
@@ -33,6 +33,7 @@
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

+import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
@@ -317,19 +318,22 @@ public void drain(String keyName) {
/**
* Get size of the Queue for keyName. This is only used in unit tests.
* @param keyName the key name
-   * @return int queue size
+   * @return int queue size. Zero means the queue is empty or the key does not exist.
*/
@VisibleForTesting
public int getSize(String keyName) {
readLock(keyName);
try {
// We can't do keyQueues.get(keyName).size() here,
// since that will have the side effect of populating the cache.
Map<String, LinkedBlockingQueue<E>> map =
keyQueues.getAllPresent(Arrays.asList(keyName));
-      if (map.get(keyName) == null) {
+      final LinkedBlockingQueue<E> linkedQueue = map.get(keyName);
+      if (linkedQueue == null) {
         return 0;
+      } else {
+        return linkedQueue.size();
       }
-      return map.get(keyName).size();
} finally {
readUnlock(keyName);
}
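The comment in the hunk is the key detail: a Guava LoadingCache populates an absent entry as a side effect of get(), while getAllPresent() is a pure lookup. A standalone sketch of that distinction, using the shaded Guava package the file itself imports (types and key names are illustrative, not code from this commit):

```java
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.hadoop.thirdparty.com.google.common.cache.CacheBuilder;
import org.apache.hadoop.thirdparty.com.google.common.cache.CacheLoader;
import org.apache.hadoop.thirdparty.com.google.common.cache.LoadingCache;

public class CachePeekDemo {
  public static void main(String[] args) {
    LoadingCache<String, LinkedBlockingQueue<String>> queues = CacheBuilder.newBuilder()
        .build(new CacheLoader<String, LinkedBlockingQueue<String>>() {
          @Override
          public LinkedBlockingQueue<String> load(String key) {
            // get()/getUnchecked() would run this loader and cache the result.
            return new LinkedBlockingQueue<>();
          }
        });

    // getAllPresent() never triggers the loader, so an absent key stays absent.
    Map<String, LinkedBlockingQueue<String>> present =
        queues.getAllPresent(Collections.singleton("myKey"));
    LinkedBlockingQueue<String> q = present.get("myKey");
    System.out.println(q == null ? 0 : q.size()); // 0: nothing was cached
  }
}
```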
@@ -29,6 +29,7 @@
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
+import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.function.IntFunction;
@@ -52,9 +53,9 @@
import org.apache.hadoop.util.Progressable;

import static org.apache.hadoop.fs.Options.OpenFileOptions.FS_OPTION_OPENFILE_STANDARD_OPTIONS;
+import static org.apache.hadoop.fs.VectoredReadUtils.validateAndSortRanges;
import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapabilityArgs;
import static org.apache.hadoop.fs.impl.StoreImplementationUtils.isProbeForSyncable;
-import static org.apache.hadoop.fs.VectoredReadUtils.sortRanges;

/****************************************************************
* Abstract Checksumed FileSystem.
@@ -425,41 +426,31 @@ static ByteBuffer checkBytes(ByteBuffer sumsBytes,
}

/**
-   * Validates range parameters.
-   * In case of CheckSum FS, we already have calculated
-   * fileLength so failing fast here.
-   * @param ranges requested ranges.
-   * @param fileLength length of file.
-   * @throws EOFException end of file exception.
+   * Vectored read.
+   * If the file has no checksums: delegate to the underlying stream.
+   * If the file is checksummed: calculate the checksum ranges as
+   * well as the data ranges, read both, and validate the checksums
+   * as well as returning the data.
+   * @param ranges the byte ranges to read
+   * @param allocate the function to allocate ByteBuffer
+   * @throws IOException
*/
-  private void validateRangeRequest(List<? extends FileRange> ranges,
-      final long fileLength) throws EOFException {
-    for (FileRange range : ranges) {
-      VectoredReadUtils.validateRangeRequest(range);
-      if (range.getOffset() + range.getLength() > fileLength) {
-        final String errMsg = String.format("Requested range [%d, %d) is beyond EOF for path %s",
-            range.getOffset(), range.getLength(), file);
-        LOG.warn(errMsg);
-        throw new EOFException(errMsg);
-      }
-    }
-  }

@Override
public void readVectored(List<? extends FileRange> ranges,
IntFunction<ByteBuffer> allocate) throws IOException {
-    final long length = getFileLength();
-    validateRangeRequest(ranges, length);
-
// If the stream doesn't have checksums, just delegate.
if (sums == null) {
datas.readVectored(ranges, allocate);
return;
}
+    final long length = getFileLength();
+    final List<? extends FileRange> sorted = validateAndSortRanges(ranges,
+        Optional.of(length));
int minSeek = minSeekForVectorReads();
int maxSize = maxReadSizeForVectorReads();
List<CombinedFileRange> dataRanges =
-        VectoredReadUtils.mergeSortedRanges(Arrays.asList(sortRanges(ranges)), bytesPerSum,
+        VectoredReadUtils.mergeSortedRanges(sorted, bytesPerSum,
minSeek, maxReadSizeForVectorReads());
// While merging the ranges above, they are rounded up based on the value of bytesPerSum
// which leads to some ranges crossing the EOF thus they need to be fixed else it will
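For context, the vectored-read API this hunk reworks is driven from the caller's side roughly as below; a hedged sketch assuming an already-initialized FileSystem and Path (offsets and sizes are illustrative, not code from this commit):

```java
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileRange;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class VectoredReadDemo {
  public static void readRanges(FileSystem fs, Path path) throws Exception {
    // Two disjoint ranges; readVectored() validates/sorts them and may merge
    // nearby ones (ChecksumFileSystem additionally rounds them to
    // checksum-chunk boundaries, as the hunk above shows).
    List<FileRange> ranges = new ArrayList<>();
    ranges.add(FileRange.createFileRange(0, 1024));
    ranges.add(FileRange.createFileRange(8192, 1024));

    try (FSDataInputStream in = fs.open(path)) {
      in.readVectored(ranges, ByteBuffer::allocate);
      for (FileRange range : ranges) {
        // Each range carries a future that completes when its data is read.
        ByteBuffer data = range.getData().get();
        System.out.println("read " + data.remaining() + " bytes at " + range.getOffset());
      }
    }
  }
}
```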
@@ -23,13 +23,17 @@
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.StringReader;
+import java.nio.file.FileStore;
+import java.nio.file.Files;

import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;

import org.apache.hadoop.classification.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;

import static java.nio.file.Files.createLink;

@@ -50,6 +54,11 @@ public class HardLink {
private static HardLinkCommandGetter getHardLinkCommand;

public final LinkStats linkStats; //not static

+  static final Logger LOG = LoggerFactory.getLogger(HardLink.class);
+
+  private static final String FILE_ATTRIBUTE_VIEW = "unix";
+  private static final String FILE_ATTRIBUTE = "unix:nlink";
+
//initialize the command "getters" statically, so can use their
//methods without instantiating the HardLink object
Expand Down Expand Up @@ -204,6 +213,21 @@ public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
}
}

+  /**
+   * Determines whether the system supports hardlinks.
+   * @param f - file to examine
+   * @return true if hardlinks are supported, false otherwise
+   */
+  public static boolean supportsHardLink(File f) {
+    try {
+      FileStore store = Files.getFileStore(f.toPath());
+      return store.supportsFileAttributeView(FILE_ATTRIBUTE_VIEW);
+    } catch (IOException e) {
+      LOG.warn("Failed to determine if hardlink is supported", e);
+      return false;
+    }
+  }
+
/**
* Retrieves the number of links to the specified file.
*
@@ -220,6 +244,10 @@ public static int getLinkCount(File fileName) throws IOException {
throw new FileNotFoundException(fileName + " not found.");
}

+    if (supportsHardLink(fileName)) {
+      return (int) Files.getAttribute(fileName.toPath(), FILE_ATTRIBUTE);
+    }
+
// construct and execute shell command
String[] cmd = getHardLinkCommand.linkCount(fileName);
String inpMsg = null;
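The new fast path reads the link count straight from the file system via NIO instead of forking a shell command. A minimal sketch of the calls it relies on (hypothetical path, not code from this commit):

```java
import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class LinkCountDemo {
  public static void main(String[] args) throws Exception {
    Path p = Paths.get("/tmp/example-file");
    FileStore store = Files.getFileStore(p);

    // Only file systems exposing the "unix" attribute view track nlink.
    if (store.supportsFileAttributeView("unix")) {
      // "unix:nlink" is the number of hard links to the underlying inode.
      int nlink = (int) Files.getAttribute(p, "unix:nlink");
      System.out.println(p + " has " + nlink + " link(s)");
    } else {
      System.out.println("unix view unsupported; fall back to shelling out");
    }
  }
}
```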
Expand Up @@ -127,6 +127,7 @@ default int maxReadSizeForVectorReads() {
* @param ranges the byte ranges to read
* @param allocate the function to allocate ByteBuffer
* @throws IOException any IOE.
+ * @throws IllegalArgumentException if any of the ranges are invalid, or they overlap.
*/
default void readVectored(List<? extends FileRange> ranges,
IntFunction<ByteBuffer> allocate) throws IOException {