Merge branch 'apache:master' into HDDS-11775
Tejaskriya authored Jan 15, 2025
2 parents 56bd8af + 85e7521 commit f6c9941
Showing 174 changed files with 3,186 additions and 2,518 deletions.
14 changes: 7 additions & 7 deletions .github/workflows/ci.yml
@@ -30,7 +30,7 @@ env:
FAIL_FAST: ${{ github.event_name == 'pull_request' }}
# Minimum required Java version for running Ozone is defined in pom.xml (javac.version).
TEST_JAVA_VERSION: 21 # JDK version used by CI build and tests; should match the JDK version in apache/ozone-runner image
MAVEN_ARGS: --batch-mode --settings ${{ github.workspace }}/dev-support/ci/maven-settings.xml --show-version
MAVEN_ARGS: --batch-mode --settings ${{ github.workspace }}/dev-support/ci/maven-settings.xml
MAVEN_OPTS: -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3
HADOOP_IMAGE: ghcr.io/apache/hadoop
OZONE_IMAGE: ghcr.io/apache/ozone
@@ -146,7 +146,7 @@ jobs:
- name: Run a full build
run: hadoop-ozone/dev-support/checks/build.sh -Pdist -Psrc -Dmaven.javadoc.skip=true ${{ inputs.ratis_args }}
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Store binaries for tests
uses: actions/upload-artifact@v4
with:
@@ -226,7 +226,7 @@ jobs:
run: hadoop-ozone/dev-support/checks/build.sh -Pdist -DskipRecon -Dmaven.javadoc.failOnWarnings=${{ matrix.java != 8 }} -Djavac.version=${{ matrix.java }} ${{ inputs.ratis_args }}
env:
OZONE_WITH_COVERAGE: false
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
basic:
needs:
- build-info
@@ -274,7 +274,7 @@ jobs:
- name: Execute tests
run: hadoop-ozone/dev-support/checks/${{ matrix.check }}.sh ${{ inputs.ratis_args }}
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ matrix.check }}/summary.txt
if: ${{ failure() }}
@@ -321,7 +321,7 @@ jobs:
- name: Execute tests
run: hadoop-ozone/dev-support/checks/${{ github.job }}.sh ${{ inputs.ratis_args }}
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/${{ github.job }}/summary.txt
if: ${{ failure() }}
@@ -641,7 +641,7 @@ jobs:
hadoop-ozone/dev-support/checks/integration.sh -P${{ matrix.profile }} ${args}
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Summary of failures
run: |
if [[ -s "target/${{ github.job }}/summary.md" ]]; then
@@ -701,7 +701,7 @@ jobs:
env:
SONAR_TOKEN: ${{ secrets.SONARCLOUD_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Archive build results
uses: actions/upload-artifact@v4
with:
2 changes: 1 addition & 1 deletion .github/workflows/intermittent-test-check.yml
@@ -203,7 +203,7 @@ jobs:
fi
continue-on-error: true
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Summary of failures
run: hadoop-ozone/dev-support/checks/_summary.sh target/unit/summary.txt
if: ${{ !cancelled() }}
2 changes: 1 addition & 1 deletion .github/workflows/repeat-acceptance.yml
@@ -110,7 +110,7 @@ jobs:
- name: Run a full build
run: hadoop-ozone/dev-support/checks/build.sh -Pdist -Psrc -Dmaven.javadoc.skip=true
env:
DEVELOCITY_ACCESS_KEY: ${{ secrets.GE_ACCESS_TOKEN }}
DEVELOCITY_ACCESS_KEY: ${{ secrets.DEVELOCITY_ACCESS_KEY }}
- name: Store binaries for tests
uses: actions/upload-artifact@v4
with:
3 changes: 2 additions & 1 deletion .mvn/develocity.xml
@@ -22,8 +22,9 @@
<develocity
xmlns="https://www.gradle.com/develocity-maven" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://www.gradle.com/develocity-maven https://www.gradle.com/schema/develocity-maven.xsd">
<projectId>ozone</projectId>
<server>
<url>https://ge.apache.org</url>
<url>https://develocity.apache.org</url>
<allowUntrusted>false</allowUntrusted>
</server>
<buildScan>
2 changes: 1 addition & 1 deletion .mvn/extensions.xml
@@ -24,7 +24,7 @@
<extension>
<groupId>com.gradle</groupId>
<artifactId>develocity-maven-extension</artifactId>
<version>1.23</version>
<version>1.22.2</version>
</extension>
<extension>
<groupId>com.gradle</groupId>
OzoneClientConfig.java
@@ -485,14 +485,16 @@ public ChecksumCombineMode getChecksumCombineMode() {
try {
return ChecksumCombineMode.valueOf(checksumCombineMode);
} catch (IllegalArgumentException iae) {
LOG.warn("Bad checksum combine mode: {}. Using default {}",
checksumCombineMode,
ChecksumCombineMode.COMPOSITE_CRC.name());
return ChecksumCombineMode.valueOf(
ChecksumCombineMode.COMPOSITE_CRC.name());
LOG.warn("Bad checksum combine mode: {}.",
checksumCombineMode);
return null;
}
}

public void setChecksumCombineMode(String checksumCombineMode) {
this.checksumCombineMode = checksumCombineMode;
}

public void setEcReconstructStripeReadPoolLimit(int poolLimit) {
this.ecReconstructStripeReadPoolLimit = poolLimit;
}
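With this change, getChecksumCombineMode() no longer falls back to COMPOSITE_CRC on an unrecognized value; it logs a warning and returns null, so callers must now choose their own default. A minimal caller-side sketch (the helper below is illustrative, not part of this diff):

```java
// Hypothetical caller: picks the fallback explicitly now that the
// getter can return null for an unrecognized configured value.
static ChecksumCombineMode resolveChecksumCombineMode(OzoneClientConfig conf) {
  ChecksumCombineMode mode = conf.getChecksumCombineMode();
  return mode != null ? mode : ChecksumCombineMode.COMPOSITE_CRC;
}
```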
BlockDataStreamOutput.java
@@ -410,6 +410,10 @@ public void executePutBlock(boolean close,
waitFuturesComplete();
final BlockData blockData = containerBlockData.build();
if (close) {
// HDDS-12007 changed datanodes to ignore the following PutBlock request,
// but clients must keep sending it for backward compatibility:
// if new clients stopped sending it, old datanodes would fail,
// since they still expect a PutBlock.
final ContainerCommandRequestProto putBlockRequest
= ContainerProtocolCalls.getPutBlockRequest(
xceiverClient.getPipeline(), blockData, true, tokenString);
@@ -507,6 +511,22 @@ public void flush() throws IOException {
}
}

@Override
public void hflush() throws IOException {
hsync();
}

@Override
public void hsync() throws IOException {
try {
if (!isClosed()) {
handleFlush(false);
}
} catch (Exception e) {
// Don't silently swallow flush failures; surface them to the caller.
throw new IOException("hsync failed", e);
}
}

public void waitFuturesComplete() throws IOException {
try {
CompletableFuture.allOf(futures.toArray(EMPTY_FUTURE_ARRAY)).get();
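The new hflush()/hsync() overrides let BlockDataStreamOutput satisfy callers that expect Syncable durability semantics. A hedged usage sketch (how the stream is obtained is elided; in practice it comes from the Ozone client write path):

```java
// Sketch only: assumes an already-constructed BlockDataStreamOutput.
void writeDurably(BlockDataStreamOutput out, ByteBuffer data) throws IOException {
  out.write(data);
  out.hsync(); // flushes buffered data unless the stream is already closed
}
```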
ByteBufferStreamOutput.java
@@ -18,6 +18,8 @@

package org.apache.hadoop.hdds.scm.storage;

import org.apache.hadoop.fs.Syncable;

import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -26,7 +28,7 @@
* This interface is similar to {@link java.io.OutputStream}
* except that it supports {@link ByteBuffer} instead of byte[].
*/
public interface ByteBufferStreamOutput extends Closeable {
public interface ByteBufferStreamOutput extends Closeable, Syncable {
/**
* Similar to {@link java.io.OutputStream#write(byte[])},
* except that the parameter of this method is a {@link ByteBuffer}.
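Since ByteBufferStreamOutput now extends Syncable, every implementation must supply hflush() and hsync() alongside write/flush/close. A minimal illustrative implementer, assuming it sits in the same package as the interface and that the interface declares write(ByteBuffer) and flush() as in the Ozone source (the FileChannel backing is an assumption, not from this diff):

```java
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;

// Illustrative only: a FileChannel-backed ByteBufferStreamOutput.
class FileChannelStreamOutput implements ByteBufferStreamOutput {
  private final FileChannel channel;

  FileChannelStreamOutput(FileChannel channel) {
    this.channel = channel;
  }

  @Override
  public void write(ByteBuffer buffer) throws IOException {
    while (buffer.hasRemaining()) {
      channel.write(buffer); // drain the buffer fully
    }
  }

  @Override
  public void flush() throws IOException {
    // no application-level buffering in this sketch
  }

  @Override
  public void hflush() throws IOException {
    hsync();
  }

  @Override
  public void hsync() throws IOException {
    channel.force(false); // persist written data (not metadata) to disk
  }

  @Override
  public void close() throws IOException {
    channel.close();
  }
}
```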
GenericCli.java
@@ -17,6 +17,7 @@
package org.apache.hadoop.hdds.cli;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;

import com.google.common.base.Strings;
@@ -87,9 +88,9 @@ protected void printError(Throwable error) {
//Message could be null in case of an NPE. This is unexpected, so we
//print out the stack trace.
if (verbose || Strings.isNullOrEmpty(error.getMessage())) {
error.printStackTrace(System.err);
error.printStackTrace(cmd.getErr());
} else {
System.err.println(error.getMessage().split("\n")[0]);
cmd.getErr().println(error.getMessage().split("\n")[0]);
}
}

@@ -114,4 +115,12 @@ public CommandLine getCmd() {
public boolean isVerbose() {
return verbose;
}

protected PrintWriter out() {
return cmd.getOut();
}

protected PrintWriter err() {
return cmd.getErr();
}
}
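The new out()/err() helpers route output through picocli's configured writers rather than System.out/System.err, which keeps subcommand output redirectable and testable. A hedged sketch of a subclass using them (the command itself is illustrative; GenericCli implementing Callable&lt;Void&gt;, as in the Ozone CLI framework, is assumed):

```java
import picocli.CommandLine;

// Illustrative subcommand; only GenericCli, out() and err() come from this diff.
@CommandLine.Command(name = "example", description = "Demonstrates out()/err().")
class ExampleCommand extends GenericCli {
  @Override
  public Void call() throws Exception {
    out().println("regular output goes through picocli's out writer");
    err().println("diagnostics go through picocli's err writer");
    return null;
  }
}
```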
OzoneManagerVersion.java
@@ -50,6 +50,9 @@ public enum OzoneManagerVersion implements ComponentVersion {
S3_OBJECT_TAGGING_API(9, "OzoneManager version that supports S3 object tagging APIs, such as " +
"PutObjectTagging, GetObjectTagging, and DeleteObjectTagging"),

S3_PART_AWARE_GET(10, "OzoneManager version that supports S3 get for a specific multipart " +
"upload part number"),

FUTURE_VERSION(-1, "Used internally in the client when the server side is "
+ "newer and an unknown server version has arrived at the client.");

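Clients typically gate new APIs on the version the OzoneManager advertises, so S3_PART_AWARE_GET can be checked before issuing a part-specific GET. A hedged sketch (comparison by enum declaration order is an assumption; note that FUTURE_VERSION is declared last, so an unknown, newer server also passes the check):

```java
// Illustrative capability gate; omVersion is the version advertised by the OM.
static boolean supportsPartAwareGet(OzoneManagerVersion omVersion) {
  // Enum compareTo uses declaration order; FUTURE_VERSION sits last,
  // so an unknown, newer server version also satisfies the check.
  return omVersion.compareTo(OzoneManagerVersion.S3_PART_AWARE_GET) >= 0;
}
```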
ContainerStateMachine.java
@@ -524,21 +524,6 @@ private ContainerCommandResponseProto dispatchCommand(
return response;
}

private CompletableFuture<ContainerCommandResponseProto> link(
ContainerCommandRequestProto requestProto, LogEntryProto entry) {
return CompletableFuture.supplyAsync(() -> {
final DispatcherContext context = DispatcherContext
.newBuilder(DispatcherContext.Op.STREAM_LINK)
.setTerm(entry.getTerm())
.setLogIndex(entry.getIndex())
.setStage(DispatcherContext.WriteChunkStage.COMMIT_DATA)
.setContainer2BCSIDMap(container2BCSIDMap)
.build();

return dispatchCommand(requestProto, context);
}, executor);
}

private CompletableFuture<Message> writeStateMachineData(
ContainerCommandRequestProto requestProto, long entryIndex, long term,
long startTime) {
@@ -689,29 +674,8 @@ public CompletableFuture<?> link(DataStream stream, LogEntryProto entry) {

final KeyValueStreamDataChannel kvStreamDataChannel =
(KeyValueStreamDataChannel) dataChannel;

final ContainerCommandRequestProto request =
kvStreamDataChannel.getPutBlockRequest();

return link(request, entry).whenComplete((response, e) -> {
if (e != null) {
LOG.warn("Failed to link logEntry {} for request {}",
TermIndex.valueOf(entry), request, e);
}
if (response != null) {
final ContainerProtos.Result result = response.getResult();
if (LOG.isDebugEnabled()) {
LOG.debug("{} to link logEntry {} for request {}, response: {}",
result, TermIndex.valueOf(entry), request, response);
}
if (result == ContainerProtos.Result.SUCCESS) {
kvStreamDataChannel.setLinked();
return;
}
}
// failed to link, cleanup
kvStreamDataChannel.cleanUp();
});
kvStreamDataChannel.setLinked();
return CompletableFuture.completedFuture(null);
}

private ExecutorService getChunkExecutor(WriteChunkRequestProto req) {