diff --git a/.github/workflows/run-pr-tests.yaml b/.github/workflows/run-pr-tests.yaml
index c300026246..48ad426aaf 100644
--- a/.github/workflows/run-pr-tests.yaml
+++ b/.github/workflows/run-pr-tests.yaml
@@ -51,6 +51,10 @@ jobs:
     needs: build-noobaa-image
     uses: ./.github/workflows/mint-nc-tests.yaml
 
+  s3a-tests:
+    needs: build-noobaa-image
+    uses: ./.github/workflows/s3a-tests.yaml
+
   build-noobaa-image:
     name: Build Noobaa Image
     runs-on: ubuntu-latest
diff --git a/.github/workflows/s3a-tests.yaml b/.github/workflows/s3a-tests.yaml
new file mode 100644
index 0000000000..7da3c81811
--- /dev/null
+++ b/.github/workflows/s3a-tests.yaml
@@ -0,0 +1,47 @@
+name: Hadoop S3A Tests
+on: [workflow_call]
+
+jobs:
+  s3a-tests:
+    name: Hadoop S3A Tests
+    runs-on: ubuntu-latest
+    timeout-minutes: 120
+    permissions:
+      actions: read # download-artifact
+      contents: read # required for actions/checkout
+    steps:
+      - name: Checkout noobaa-core
+        uses: actions/checkout@v4
+        with:
+          repository: 'noobaa/noobaa-core'
+          path: 'noobaa-core'
+
+      - name: Download artifact
+        uses: actions/download-artifact@v4
+        with:
+          name: noobaa-tester
+          path: /tmp
+
+      - name: Load image
+        run: docker load --input /tmp/noobaa-tester.tar
+
+      - name: Create S3A logs directory
+        run: |
+          set -x
+          cd ./noobaa-core
+          mkdir -p logs/s3a-test-logs
+          chmod 777 logs/s3a-test-logs
+
+      - name: Run Hadoop S3A tests
+        run: |
+          set -x
+          cd ./noobaa-core
+          make test-s3a -o tester
+
+      - name: Upload S3A test logs
+        if: always()
+        uses: actions/upload-artifact@v4
+        with:
+          name: s3a-test-logs
+          path: noobaa-core/logs/s3a-test-logs/
+          retention-days: 7
diff --git a/Makefile b/Makefile
index 9356663acd..85b806068c 100644
--- a/Makefile
+++ b/Makefile
@@ -404,6 +404,17 @@ test-nc-mint: tester
 	@$(call remove_docker_network)
 .PHONY: test-nc-mint
 
+test-s3a: tester
+	@echo "\033[1;34mRunning Hadoop S3A tests with Postgres.\033[0m"
+	@$(call create_docker_network)
+	@$(call run_postgres)
+	@echo "\033[1;34mRunning Hadoop S3A tests\033[0m"
+	$(CONTAINER_ENGINE) run $(CPUSET) --network noobaa-net --name noobaa_$(GIT_COMMIT)_$(NAME_POSTFIX) --env "SUPPRESS_LOGS=$(SUPPRESS_LOGS)" --env "POSTGRES_HOST=coretest-postgres-$(GIT_COMMIT)-$(NAME_POSTFIX)" --env "POSTGRES_USER=noobaa" --env "DB_TYPE=postgres" --env "POSTGRES_DBNAME=coretest" -v $(PWD)/logs:/logs $(TESTER_TAG) "./src/test/external_tests/s3a/run_s3a_on_test_container.sh"
+	@$(call stop_noobaa)
+	@$(call stop_postgres)
+	@$(call remove_docker_network)
+.PHONY: test-s3a
+
 test-nsfs-cephs3: tester
 	@echo "\033[1;34mRunning Ceph S3 tests on NSFS Standalone platform\033[0m"
 	$(CONTAINER_ENGINE) run $(CPUSET) --privileged --user root --name noobaa_$(GIT_COMMIT)_$(NAME_POSTFIX) --env "SUPPRESS_LOGS=$(SUPPRESS_LOGS)" -v $(PWD)/logs:/logs $(TESTER_TAG) "./src/test/external_tests/ceph_s3_tests/run_ceph_nsfs_test_on_test_container.sh"
diff --git a/src/test/external_tests/s3a/configure_s3a.js b/src/test/external_tests/s3a/configure_s3a.js
new file mode 100644
index 0000000000..c380f6ec4e
--- /dev/null
+++ b/src/test/external_tests/s3a/configure_s3a.js
@@ -0,0 +1,37 @@
+/* Copyright (C) 2024 NooBaa */
+"use strict";
+
+const dbg = require('../../../util/debug_module')(__filename);
+dbg.set_process_name('test_s3a');
+
+const { S3A_TEST } = require('./s3a_constants');
+const { is_containerized_deployment, create_system_test_account, create_system_test_bucket } = require('../external_tests_utils.js');
+
+async function main() {
+    try {
+        await s3a_test_setup();
+    } catch (err) {
+        console.error(`S3A Setup Failed: ${err}`);
+        process.exit(1);
+    }
+    process.exit(0);
+}
+
+async function s3a_test_setup() {
+    console.info('S3A TEST CONFIGURATION:', JSON.stringify(S3A_TEST));
+    const is_containerized = is_containerized_deployment();
+    const account_options = is_containerized ? S3A_TEST.s3a_account_params : S3A_TEST.nc_s3a_account_params;
+
+    // Create the test account
+    await create_system_test_account(account_options);
+
+    // Create the hadoop bucket
+    const bucket_options = { name: S3A_TEST.bucket_name };
+    await create_system_test_bucket(account_options, bucket_options);
+
+    console.info('S3A TEST SETUP DONE');
+}
+
+if (require.main === module) {
+    main();
+}
diff --git a/src/test/external_tests/s3a/run_s3a_on_test_container.sh b/src/test/external_tests/s3a/run_s3a_on_test_container.sh
new file mode 100755
index 0000000000..a1a8972baf
--- /dev/null
+++ b/src/test/external_tests/s3a/run_s3a_on_test_container.sh
@@ -0,0 +1,177 @@
+#!/bin/bash
+
+export PS4='\e[36m+ ${FUNCNAME:-main}\e[0m@\e[32m${BASH_SOURCE}:\e[35m${LINENO} \e[0m'
+
+set -e
+
+# ====================================================================================
+# Set the environment variables
+export email='admin@noobaa.io'
+export password=123456789
+
+export PORT=8080
+export SSL_PORT=5443
+export ENDPOINT_PORT=6001
+export ENDPOINT_SSL_PORT=6443
+export NOOBAA_MGMT_SERVICE_HOST=localhost
+export NOOBAA_MGMT_SERVICE_PORT=${SSL_PORT}
+export NOOBAA_MGMT_SERVICE_PROTO=wss
+export S3_SERVICE_HOST=localhost
+
+export CREATE_SYS_NAME=noobaa
+export CREATE_SYS_EMAIL=${email}
+export CREATE_SYS_PASSWD=${password}
+export JWT_SECRET=123456789
+export NOOBAA_ROOT_SECRET='AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA='
+export LOCAL_MD_SERVER=true
+
+#The default max connections for postgres is 100. limit max clients to 10 per pool (per process).
+export CONFIG_JS_POSTGRES_MD_MAX_CLIENTS=10
+export CONFIG_JS_POSTGRES_DEFAULT_MAX_CLIENTS=10
+
+export POSTGRES_HOST=${POSTGRES_HOST:-localhost}
+export MGMT_ADDR=wss://${NOOBAA_MGMT_SERVICE_HOST:-localhost}:${NOOBAA_MGMT_SERVICE_PORT:-5443}
+export BG_ADDR=wss://localhost:5445
+export HOSTED_AGENTS_ADDR=wss://localhost:5446
+export S3A_TEST_LOGS_DIR=/logs/s3a-test-logs
+
+# ====================================================================================
+
+# Create the logs directory
+mkdir -p ${S3A_TEST_LOGS_DIR}
+
+# Deploy standalone NooBaa on the test container
+./src/deploy/NVA_build/standalone_deploy.sh
+
+# ====================================================================================
+
+cd /root/node_modules/noobaa-core/
+
+# Configure the S3A test - create account and bucket
+node ./src/test/external_tests/s3a/configure_s3a.js
+
+# ====================================================================================
+# Install Maven and clone the Hadoop repository
+echo "Installing Maven and dependencies..."
+yum install -y maven git java-11-openjdk-devel || dnf install -y maven git java-11-openjdk-devel
+
+# Set JAVA_HOME
+export JAVA_HOME=/usr/lib/jvm/java-11-openjdk
+export PATH=$JAVA_HOME/bin:$PATH
+
+echo "Cloning Hadoop repository..."
+HADOOP_VERSION="3.4.2"
+HADOOP_DIR="/root/hadoop"
+HADOOP_AWS_DIR="${HADOOP_DIR}/hadoop-tools/hadoop-aws"
+
+if [ ! -d "${HADOOP_DIR}" ]; then
+    cd /root
+    git clone --depth 1 --branch rel/release-${HADOOP_VERSION} https://github.com/apache/hadoop
+fi
+
+# ====================================================================================
+# Create the auth-keys.xml configuration file
+echo "Creating Hadoop S3A test configuration..."
+cd ${HADOOP_AWS_DIR}
+
+mkdir -p src/test/resources
+
+cat > src/test/resources/auth-keys.xml <<EOF
+<configuration>
+  <property>
+    <name>test.fs.s3a.name</name>
+    <value>s3a://hadoop/</value>
+  </property>
+  <property>
+    <name>fs.contract.test.fs.s3a</name>
+    <value>\${test.fs.s3a.name}</value>
+  </property>
+
+  <property>
+    <name>fs.s3a.endpoint</name>
+    <value>http://localhost:${ENDPOINT_PORT}</value>
+  </property>
+  <property>
+    <name>fs.s3a.endpoint.region</name>
+    <value>us-east-1</value>
+  </property>
+
+  <property>
+    <name>fs.s3a.connection.ssl.enabled</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>fs.s3a.path.style.access</name>
+    <value>true</value>
+  </property>
+  <property>
+    <name>test.fs.s3a.encryption.enabled</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>test.fs.s3a.create.storage.class.enabled</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>test.fs.s3a.sts.enabled</name>
+    <value>false</value>
+  </property>
+  <property>
+    <name>test.fs.s3a.create.create.acl.enabled</name>
+    <value>false</value>
+  </property>
+
+  <property>
+    <name>fs.s3.awsAccessKeyId</name>
+    <value>XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX</value>
+  </property>
+  <property>
+    <name>fs.s3.awsSecretAccessKey</name>
+    <value>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</value>
+  </property>
+  <property>
+    <name>fs.s3n.awsAccessKeyId</name>
+    <value>XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX</value>
+  </property>
+  <property>
+    <name>fs.s3n.awsSecretAccessKey</name>
+    <value>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</value>
+  </property>
+  <property>
+    <name>fs.s3a.access.key</name>
+    <description>AWS access key ID. Omit for Role-based authentication.</description>
+    <value>XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX</value>
+  </property>
+  <property>
+    <name>fs.s3a.secret.key</name>
+    <description>AWS secret key. Omit for Role-based authentication.</description>
+    <value>YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY</value>
+  </property>
+
+</configuration>
+EOF
+
+# ====================================================================================
+# Run the Hadoop S3A tests
+echo "Running Hadoop S3A tests..."
+set +e # Don't exit on test failures, we want to collect results
+
+mvn clean verify -Dtest=TestS3A* -Dit.test=ITestS3A* 2>&1 | tee ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+
+# Capture the mvn exit code (first command in the pipeline), not tee's
+TEST_EXIT_CODE=${PIPESTATUS[0]}
+
+# ====================================================================================
+# Generate summary
+echo "Generating test summary..."
+echo "========================================" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+echo "Test Summary:" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+grep "Tests run" ${S3A_TEST_LOGS_DIR}/s3a-tests.log | tail -1 | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+echo "========================================" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+
+echo "Test failures:" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+grep "FAILURE" ${S3A_TEST_LOGS_DIR}/s3a-tests.log || echo "No FAILURE found in logs" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+echo "========================================" | tee -a ${S3A_TEST_LOGS_DIR}/s3a-tests.log
+
+# Exit with the test exit code
+exit ${TEST_EXIT_CODE}
diff --git a/src/test/external_tests/s3a/s3a_constants.js b/src/test/external_tests/s3a/s3a_constants.js
new file mode 100644
index 0000000000..809ae7d4f5
--- /dev/null
+++ b/src/test/external_tests/s3a/s3a_constants.js
@@ -0,0 +1,41 @@
+/* Copyright (C) 2024 NooBaa */
+'use strict';
+
+const SensitiveString = require('../../../util/sensitive_string');
+
+const s3a_account_name = 's3a_account';
+
+const S3A_MOCK_ACCESS_KEY = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
+const S3A_MOCK_SECRET_KEY = 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY';
+
+const S3A_TEST = {
+    s3a_logs_dir_path: '/logs/s3a-test-logs',
+    s3a_account_params: {
+        name: s3a_account_name,
+        email: 's3a_account@noobaa.com',
+        has_login: true,
+        password: 's3a_pass123_example',
+        s3_access: true,
+        allow_bucket_creation: true,
+        access_keys: [{
+            access_key: new SensitiveString(S3A_MOCK_ACCESS_KEY),
+            secret_key: new SensitiveString(S3A_MOCK_SECRET_KEY)
+        }]
+    },
+    nc_s3a_account_params: {
+        name: s3a_account_name,
+        uid: 1000,
+        gid: 1000,
+        new_buckets_path: '/tmp/nsfs_root_s3a/',
+        access_key: S3A_MOCK_ACCESS_KEY,
+        secret_key: S3A_MOCK_SECRET_KEY
+    },
+    bucket_name: 'hadoop',
+    hadoop_version: '3.4.2',
+    hadoop_dir: '/root/hadoop',
+    hadoop_aws_dir: '/root/hadoop/hadoop-tools/hadoop-aws'
+};
+
+exports.S3A_TEST = S3A_TEST;
+exports.S3A_MOCK_ACCESS_KEY = S3A_MOCK_ACCESS_KEY;
+exports.S3A_MOCK_SECRET_KEY = S3A_MOCK_SECRET_KEY;