HDDS-7990. Add acceptance test for HA Proxy with secure Ozone S3 Gateway (#7562)
ptlrs authored Jan 8, 2025
1 parent a1324b6 commit ab29a55
Showing 4 changed files with 109 additions and 3 deletions.
57 changes: 57 additions & 0 deletions hadoop-ozone/dist/src/main/compose/common/s3-haproxy-secure.yaml
@@ -0,0 +1,57 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

x-s3-worker:
  &s3-worker
  image: ${OZONE_RUNNER_IMAGE}:${OZONE_RUNNER_VERSION}
  volumes:
    - ../..:/opt/hadoop
    - ../_keytabs:/etc/security/keytabs
    - ./krb5.conf:/etc/krb5.conf
  env_file:
    - docker-config
  command: ["ozone","s3g", "-Dozone.om.transport.class=${OZONE_S3_OM_TRANSPORT:-org.apache.hadoop.ozone.om.protocolPB.GrpcOmTransportFactory}"]

services:
  s3g:
    image: haproxy:lts-alpine
    hostname: s3g
    dns_search: .
    volumes:
      - ../..:/opt/hadoop
      - ../common/s3-haproxy.cfg:/usr/local/etc/haproxy/haproxy.cfg
    ports:
      - 9878:9878
    command: ["haproxy", "-f", "/usr/local/etc/haproxy/haproxy.cfg"]
  s3g1:
    <<: *s3-worker
    hostname: s3g1
    dns_search: .
    ports:
      - 9879:9878
  s3g2:
    <<: *s3-worker
    hostname: s3g2
    dns_search: .
    ports:
      - 9880:9878
  s3g3:
    <<: *s3-worker
    hostname: s3g3
    dns_search: .
    ports:
      - 9881:9878
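For local experimentation, this overlay is meant to be stacked on top of a cluster's base compose file through COMPOSE_FILE, exactly as the new test script further down does. A minimal, hypothetical sketch, assuming it is run from a secure compose directory such as ozonesecure with its docker-config and keytabs already prepared:

export COMPOSE_FILE=docker-compose.yaml:../common/s3-haproxy-secure.yaml

# The merged definition should list s3g (now the HAProxy container)
# alongside the three gateway workers s3g1, s3g2 and s3g3.
docker compose config --services

# Start the cluster and check the published ports: 9878 is the
# load-balanced front end, 9879-9881 reach the individual gateways.
docker compose up -d
docker compose ps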
@@ -77,6 +77,7 @@ services:
command: ["ozone","s3g"]
recon:
<<: *common-config
hostname: recon
ports:
- 9888:9888
environment:
48 changes: 48 additions & 0 deletions hadoop-ozone/dist/src/main/compose/ozonesecure/test-haproxy-s3g.sh
@@ -0,0 +1,48 @@
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#suite:secure

COMPOSE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
export COMPOSE_DIR

# shellcheck source=/dev/null
source "$COMPOSE_DIR/../testlib.sh"

export SECURITY_ENABLED=true
export COMPOSE_FILE=docker-compose.yaml:../common/s3-haproxy-secure.yaml

: ${OZONE_BUCKET_KEY_NAME:=key1}

start_docker_env

execute_command_in_container kms hadoop key create ${OZONE_BUCKET_KEY_NAME}

execute_robot_test scm kinit.robot

execute_robot_test scm security

## Exclude virtual-host tests. These are tested separately, as they require additional config.
exclude="--exclude virtual-host"
for bucket in encrypted; do
  execute_robot_test scm -v BUCKET:${bucket} -N s3-${bucket} ${exclude} s3
  # Some tests are independent of the bucket type and only need to be run once;
  # keep excluding virtual-host on subsequent iterations as well.
  exclude="--exclude virtual-host --exclude no-bucket-type"
done

execute_robot_test scm spnego
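The #suite:secure marker above ties this script into the existing secure acceptance suite; it can also be executed on its own. A rough sketch of a manual run, assuming a locally built Ozone distribution (the exact target path depends on the version being built):

cd hadoop-ozone/dist/target/ozone-*/compose/ozonesecure

# testlib.sh brings up the docker environment defined by COMPOSE_FILE
# and executes the robot tests above; reports are typically collected
# under a result/ directory next to the compose files.
./test-haproxy-s3g.sh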
6 changes: 3 additions & 3 deletions hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot
@@ -238,7 +238,7 @@ Create key with custom etag metadata and expect it won't conflict with ETag resp
${file_md5_checksum} Execute md5sum /tmp/small_file | awk '{print $1}'
Execute AWSS3CliDebug cp --metadata "ETag=custom-etag-value" /tmp/small_file s3://${BUCKET}/test_file
${result} Execute AWSS3CliDebug cp s3://${BUCKET}/test_file /tmp/test_file_downloaded
- ${match} ${ETag} ${etagCustom} Should Match Regexp ${result} HEAD /${BUCKET}/test_file\ .*?Response headers.*?ETag':\ '"(.*?)"'.*?x-amz-meta-etag':\ '(.*?)' flags=DOTALL
+ ${match} ${ETag} ${etagCustom} Should Match Regexp ${result} HEAD /${BUCKET}/test_file\ .*?Response headers.*?ETag':\ '"(.*?)"'.*?x-amz-meta-etag':\ '(.*?)' flags=DOTALL | IGNORECASE
Should Be Equal As Strings ${ETag} ${file_md5_checksum}
Should BE Equal As Strings ${etagCustom} custom-etag-value
Should Not Be Equal As Strings ${ETag} ${etagCustom}
@@ -262,9 +262,9 @@ Create key twice with different content and expect different ETags
Execute head -c 1MiB </dev/urandom > /tmp/file1
Execute head -c 1MiB </dev/urandom > /tmp/file2
${file1UploadResult} Execute AWSS3CliDebug cp /tmp/file1 s3://${BUCKET}/test_key_to_check_etag_differences
- ${match} ${etag1} Should Match Regexp ${file1UploadResult} PUT /${BUCKET}/test_key_to_check_etag_differences\ .*?Response headers.*?ETag':\ '"(.*?)"' flags=DOTALL
+ ${match} ${etag1} Should Match Regexp ${file1UploadResult} PUT /${BUCKET}/test_key_to_check_etag_differences\ .*?Response headers.*?ETag':\ '"(.*?)"' flags=DOTALL | IGNORECASE
${file2UploadResult} Execute AWSS3CliDebug cp /tmp/file2 s3://${BUCKET}/test_key_to_check_etag_differences
- ${match} ${etag2} Should Match Regexp ${file2UploadResult} PUT /${BUCKET}/test_key_to_check_etag_differences\ .*?Response headers.*?ETag':\ '"(.*?)"' flags=DOTALL
+ ${match} ${etag2} Should Match Regexp ${file2UploadResult} PUT /${BUCKET}/test_key_to_check_etag_differences\ .*?Response headers.*?ETag':\ '"(.*?)"' flags=DOTALL | IGNORECASE
Should Not Be Equal As Strings ${etag1} ${etag2}
# clean up
Execute AWSS3Cli rm s3://${BUCKET}/test_key_to_check_etag_differences
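The only functional change in objectputget.robot is the added IGNORECASE flag: HTTP header names are case-insensitive, and a response that has passed through the HAProxy front end is not guaranteed to carry them with the same capitalization (ETag vs. etag) as one served by a gateway directly, so the regexes over the aws-cli debug output have to tolerate both. An illustrative, hypothetical shell check (bucket, key and ports are placeholders, and a secured cluster would additionally need signed requests):

# Compare the header casing reported via the load balancer (9878)
# and via a single gateway (9879); match case-insensitively either way.
curl -sI http://localhost:9878/bucket1/key1 | grep -i '^etag:'
curl -sI http://localhost:9879/bucket1/key1 | grep -i '^etag:'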
