name: escu-manual-workflow
on:
  workflow_call:
    inputs:
      TA_BUILD:
        description: 'TA build number (e.g. s3://ta-production-artifacts/ta-apps/{ta-name}-$build_number$.spl)'
        required: true
        type: string
      TESTS:
        description: 'Comma-separated list of detections to run (e.g. detection1,detection2,detection3)'
        required: true
        type: string
    secrets:
      AWS_ACCESS_KEY_ID:
        description: AWS access key id
        required: true
      AWS_DEFAULT_REGION:
        description: AWS default region
        required: true
      AWS_SECRET_ACCESS_KEY:
        description: AWS secret access key
        required: true
      OTHER_TA_REQUIRED_CONFIGS:
        description: other required configs
        required: true

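# A caller repository would invoke this reusable workflow roughly as sketched
# below; the workflow file path, build number, and detection names are
# illustrative assumptions, not values defined here:
#
#   jobs:
#     escu-tests:
#       uses: ./.github/workflows/escu-manual-workflow.yml
#       with:
#         TA_BUILD: "1234"
#         TESTS: "detection1,detection2"
#       secrets: inherit
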
jobs:
  meta:
    runs-on: ubuntu-latest
    outputs:
      matrix_supportedSplunk: ${{ steps.matrix.outputs.latestSplunk }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: false
          persist-credentials: false
      - name: matrix
        id: matrix
        uses: splunk/addonfactory-test-matrix-action@v1.8

  test-inventory:
    runs-on: ubuntu-latest
    outputs:
      escu: ${{ steps.testset.outputs.escu }}
    steps:
      - uses: actions/checkout@v3
      - id: testset
        name: testsets
        run: |
          # Mark each top-level directory under tests/ (e.g. escu) as an available test set
          find tests -mindepth 1 -maxdepth 1 -type d | sed 's|^tests/||g' | while read -r TESTSET; do echo "::set-output name=$TESTSET::true"; echo "$TESTSET::true"; done

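  # `::set-output` is deprecated on current GitHub runners; if this workflow is
  # modernized, the equivalent of the step above would write to the
  # GITHUB_OUTPUT file instead, e.g.:
  #
  #   find tests -mindepth 1 -maxdepth 1 -type d | sed 's|^tests/||g' \
  #     | while read -r TESTSET; do echo "$TESTSET=true" >> "$GITHUB_OUTPUT"; done
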
  setup:
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    outputs:
      argo-server: ${{ steps.test-setup.outputs.argo-server }}
      argo-http1: ${{ steps.test-setup.outputs.argo-http1 }}
      argo-secure: ${{ steps.test-setup.outputs.argo-secure }}
      argo-href: ""
      argo-base-href: ${{ steps.test-setup.outputs.argo-base-href }}
      argo-workflow-tmpl-name: ${{ steps.test-setup.outputs.argo-workflow-tmpl-name }}
      argo-namespace: ${{ steps.test-setup.outputs.argo-namespace }}
      addon-name: ${{ steps.test-setup.outputs.addon-name }}
      job-name: ${{ steps.test-setup.outputs.job-name }}
      labels: ${{ steps.test-setup.outputs.labels }}
      addon-upload-path: ${{ steps.test-setup.outputs.addon-upload-path }}
      directory-path: ${{ steps.test-setup.outputs.directory-path }}
      s3-bucket: ${{ steps.test-setup.outputs.s3-bucket }}
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: setup for test
        id: test-setup
        shell: bash
        run: |
          echo "::set-output name=argo-server::argo.wfe.splgdi.com:443"
          echo "::set-output name=argo-http1::true"
          echo "::set-output name=argo-secure::true"
          echo "::set-output name=argo-base-href::''"
          echo "::set-output name=argo-namespace::workflows"
          echo "::set-output name=argo-workflow-tmpl-name::ta-workflow"
          # Derive a short add-on name: uppercase the app id, then strip a
          # leading SPLUNK_TA_ prefix or a trailing _FOR_SPLUNK suffix
          ADDON_NAME=$(crudini --get package/default/app.conf id name | tr '[:lower:]' '[:upper:]')
          if [[ -n $(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}') ]];
          then
              ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F 'SPLUNK_TA_' '{print $2}')
          elif [[ -n $(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}') ]];
          then
              ADDON_NAME=$(echo "${ADDON_NAME}" | awk -F '_FOR_SPLUNK' '{print $1}')
          fi
          echo "::set-output name=addon-name::\"$ADDON_NAME\""
          JOB_NAME=$(echo "$ADDON_NAME" | tail -c 16)-$(echo "${GITHUB_SHA}" | tail -c 8)-TEST-TYPE-${GITHUB_RUN_ID}
          JOB_NAME=${JOB_NAME//[_.]/-}
          echo "::set-output name=job-name::wf-$JOB_NAME"
          LABELS="addon-name=${ADDON_NAME}"
          echo "::set-output name=labels::$LABELS"
          ADDON_BUILD_NAME=$(crudini --get package/default/app.conf id name)
          ADDON_UPLOAD_PATH="s3://ta-production-artifacts/ta-apps/${ADDON_BUILD_NAME}-${{ inputs.TA_BUILD }}.spl"
          echo "::set-output name=addon-upload-path::$ADDON_UPLOAD_PATH"
          echo "::set-output name=directory-path::/tmp"
          echo "::set-output name=s3-bucket::ta-production-artifacts"

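  # Worked trace of the derivation above, using a hypothetical add-on: with
  # `id name = Splunk_TA_example` in app.conf, ADDON_NAME uppercases to
  # SPLUNK_TA_EXAMPLE and the prefix is stripped, leaving EXAMPLE; the job name
  # then becomes wf-EXAMPLE-<sha suffix>-TEST-TYPE-<run id>, with `_` and `.`
  # mapped to `-`, and TEST-TYPE substituted per test type later on.
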
  run-escu-tests:
    if: ${{ needs.test-inventory.outputs.escu == 'true' }}
    needs:
      - test-inventory
      - setup
      - meta
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        splunk:
          - ${{ fromJson(needs.meta.outputs.matrix_supportedSplunk) }}
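        # The interpolated value is assumed to be JSON emitted by the matrix
        # action, carrying at least a `version` field per entry (consumed below
        # as matrix.splunk.version), e.g. {"version": "9.0.4", "build": "..."}.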
    container:
      image: ghcr.io/splunk/workflow-engine-base:2.0.3
    env:
      ARGO_SERVER: ${{ needs.setup.outputs.argo-server }}
      ARGO_HTTP1: ${{ needs.setup.outputs.argo-http1 }}
      ARGO_SECURE: ${{ needs.setup.outputs.argo-secure }}
      ARGO_BASE_HREF: ${{ needs.setup.outputs.argo-href }}
      ARGO_NAMESPACE: ${{ needs.setup.outputs.argo-namespace }}
      TEST_TYPE: "escu"
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_DEFAULT_REGION }}
      - name: Read secrets from AWS Secrets Manager into environment variables
        id: get-argo-token
        run: |
          ARGO_TOKEN=$(aws secretsmanager get-secret-value --secret-id ta-github-workflow-automation-token | jq -r '.SecretString')
          # Mask the token so it is redacted if the step output ever appears in logs
          echo "::add-mask::$ARGO_TOKEN"
          echo "::set-output name=argo-token::$ARGO_TOKEN"
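      # The secret is assumed to hold the raw Argo token directly in
      # SecretString (not wrapped in a JSON object), hence the plain
      # `jq -r '.SecretString'` above.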
      - name: create job name
        id: create-job-name
        shell: bash
        run: |
          # Build a unique, lowercase, DNS-safe job name: append a random
          # 4-letter suffix and substitute the actual test type for TEST-TYPE
          RANDOM_STRING=$(head -3 /dev/urandom | tr -cd '[:lower:]' | cut -c -4)
          JOB_NAME=${{ needs.setup.outputs.job-name }}-${RANDOM_STRING}
          JOB_NAME=${JOB_NAME//TEST-TYPE/${{ env.TEST_TYPE }}}
          JOB_NAME=${JOB_NAME//[_.]/-}
          JOB_NAME=$(echo "$JOB_NAME" | tr '[:upper:]' '[:lower:]')
          echo "::set-output name=job-name::$JOB_NAME"
      - name: run-tests
        id: run-tests
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        uses: splunk/wfe-test-runner-action@master
        with:
          splunk: ${{ matrix.splunk.version }}
          test-type: ${{ env.TEST_TYPE }}
          test-args: "-tf ${{ inputs.TESTS }}"
          job-name: ${{ steps.create-job-name.outputs.job-name }}
          labels: ${{ needs.setup.outputs.labels }}
          workflow-tmpl-name: ${{ needs.setup.outputs.argo-workflow-tmpl-name }}
          workflow-template-ns: ${{ needs.setup.outputs.argo-namespace }}
          delay-destroy: "No"
          addon-url: ${{ needs.setup.outputs.addon-upload-path }}
          addon-name: ${{ needs.setup.outputs.addon-name }}
          vendor-version: ${{ matrix.vendor-version.image }}
          sc4s-version: "No"
      - name: Check if pod was deleted
        id: is-pod-deleted
        if: always()
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        run: |
          set -o xtrace
          if argo watch ${{ steps.run-tests.outputs.workflow-name }} -n workflows | grep "pod deleted"; then
            echo "::set-output name=retry-workflow::true"
          fi
      - name: Retrying workflow
        id: retry-wf
        shell: bash
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: always()
        run: |
          set -o xtrace
          set +e
          if [[ "${{ steps.is-pod-deleted.outputs.retry-workflow }}" == "true" ]]
          then
            WORKFLOW_NAME=$(argo resubmit -v -o json -n workflows "${{ steps.run-tests.outputs.workflow-name }}" | jq -r .metadata.name)
            echo "::set-output name=workflow-name::$WORKFLOW_NAME"
            argo logs --follow "${WORKFLOW_NAME}" -n workflows || echo "... there was an error fetching logs, the workflow is still in progress. please wait for the workflow to complete ..."
          else
            echo "No retry required"
            argo wait "${{ steps.run-tests.outputs.workflow-name }}" -n workflows
            argo watch "${{ steps.run-tests.outputs.workflow-name }}" -n workflows | grep "test-addon"
          fi
      - name: check if workflow completed
        env:
          ARGO_TOKEN: ${{ steps.get-argo-token.outputs.argo-token }}
        if: always()
        shell: bash
        run: |
          set +e
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          echo "Status of workflow:" "$ARGO_STATUS"
          while [ "$ARGO_STATUS" == "Running" ] || [ "$ARGO_STATUS" == "Pending" ]
          do
            echo "... argo Workflow ${WORKFLOW_NAME} is running, waiting for it to complete."
            argo wait "${WORKFLOW_NAME}" -n workflows || true
            ARGO_STATUS=$(argo get "${WORKFLOW_NAME}" -n workflows -o json | jq -r '.status.phase')
          done
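      # The loop above presumably guards against `argo wait` returning early
      # (e.g. on a dropped connection): the phase is re-read after each wait
      # and the step only finishes once the workflow leaves Running/Pending.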
      - name: pull artifacts from s3 bucket
        if: always()
        run: |
          echo "pulling artifacts"
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/artifacts-${{ steps.create-job-name.outputs.job-name }}/${{ steps.create-job-name.outputs.job-name }}.tgz ${{ needs.setup.outputs.directory-path }}/
          tar -xf ${{ needs.setup.outputs.directory-path }}/${{ steps.create-job-name.outputs.job-name }}.tgz -C ${{ needs.setup.outputs.directory-path }}
      - name: pull logs from s3 bucket
        if: always()
        run: |
          # shellcheck disable=SC2157
          if [ -z "${{ steps.retry-wf.outputs.workflow-name }}" ]; then
            WORKFLOW_NAME=${{ steps.run-tests.outputs.workflow-name }}
          else
            WORKFLOW_NAME="${{ steps.retry-wf.outputs.workflow-name }}"
          fi
          echo "pulling logs"
          mkdir -p ${{ needs.setup.outputs.directory-path }}/argo-logs
          aws s3 cp s3://${{ needs.setup.outputs.s3-bucket }}/${WORKFLOW_NAME}/ ${{ needs.setup.outputs.directory-path }}/argo-logs/ --recursive
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests artifacts
          path: |
            ${{ needs.setup.outputs.directory-path }}/test-results
      - uses: actions/upload-artifact@v3
        if: always()
        with:
          name: archive splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} tests logs
          path: |
            ${{ needs.setup.outputs.directory-path }}/argo-logs
      - name: Test Report
        uses: dorny/test-reporter@v1
        if: always()
        with:
          name: splunk ${{ matrix.splunk.version }}${{ secrets.OTHER_TA_REQUIRED_CONFIGS }} ${{ env.TEST_TYPE }} test report
          path: "${{ needs.setup.outputs.directory-path }}/test-results/*.xml"
          reporter: java-junit