Skip to content

Commit

Permalink
fix: Collect the right artifacts in the Stage job
Browse files Browse the repository at this point in the history
  • Loading branch information
jhutar committed Jun 13, 2024
1 parent 6cc0323 commit 2fa72a4
Show file tree
Hide file tree
Showing 4 changed files with 92 additions and 99 deletions.
6 changes: 4 additions & 2 deletions .github/workflows/loadtest.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,15 +39,17 @@ jobs:
env:
CONCURRENCY: ${{ github.event.inputs.concurrency }}
JOURNEY_REPEATS: ${{ github.event.inputs.journey_repeats }}
run: ./run-stage.sh
run: |
./run-stage.sh
- name: Collect results
working-directory: ./tests/load-tests
env:
CONCURRENCY: ${{ github.event.inputs.concurrency }}
STAGE_MEMBER_CLUSTER: ${{ secrets.STAGE_MEMBER_CLUSTER }}
OCP_PROMETHEUS_TOKEN: ${{ secrets.OCP_PROMETHEUS_TOKEN }}
run: |
./ci-scripts/stage/collect-results.sh .
./ci-scripts/stage/collect-results.sh . $CONCURRENCY
- name: Archive artifacts
uses: actions/upload-artifact@v3
Expand Down
67 changes: 0 additions & 67 deletions tests/load-tests/ci-scripts/collect-results-stage.sh

This file was deleted.

116 changes: 86 additions & 30 deletions tests/load-tests/ci-scripts/stage/collect-results.sh
Original file line number Diff line number Diff line change
Expand Up @@ -4,56 +4,112 @@ set -o nounset
set -o errexit
set -o pipefail

# construct $PROMETHEUS_HOST by extracting BASE_URL from $STAGE_MEMBER_CLUSTER
BASE_URL=$(echo $STAGE_MEMBER_CLUSTER | grep -oP 'https://api\.\K[^:]+')
PROMETHEUS_HOST="thanos-querier-openshift-monitoring.apps.$BASE_URL"

source "$( dirname $0 )/../utils.sh"

# Login to the stage member cluster with the OCP_PROMETHEUS_TOKEN credentials
TOKEN=${OCP_PROMETHEUS_TOKEN}
oc login --token="$TOKEN" --server="$STAGE_MEMBER_CLUSTER"
echo "[$(date --utc -Ins)] Collecting load test results"

# Setup directories
ARTIFACT_DIR=${ARTIFACT_DIR:-.artifacts}
CONCURRENCY="${2:-1}"
mkdir -p ${ARTIFACT_DIR}
pushd "${1:-./tests/load-tests}"

echo "Collecting load test results"
cp -vf *.log "${ARTIFACT_DIR}"
cp -vf load-tests.json "${ARTIFACT_DIR}"
# Construct $PROMETHEUS_HOST by extracting BASE_URL from $STAGE_MEMBER_CLUSTER
BASE_URL=$(echo $STAGE_MEMBER_CLUSTER | grep -oP 'https://api\.\K[^:]+')
PROMETHEUS_HOST="thanos-querier-openshift-monitoring.apps.$BASE_URL"
TOKEN=${OCP_PROMETHEUS_TOKEN}

echo "[$(date --utc -Ins)] Collecting artifacts"
find . -maxdepth 1 -type f -name '*.log' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find . -maxdepth 1 -type f -name '*.csv' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find . -maxdepth 1 -type f -name 'load-test-options.json' -exec cp -vf {} "${ARTIFACT_DIR}" \;
find . -maxdepth 1 -type d -name 'collected-data' -exec cp -r {} "${ARTIFACT_DIR}" \;

echo "[$(date --utc -Ins)] Create summary JSON with timings"
./evaluate.py "${ARTIFACT_DIR}/load-test-timings.csv" "${ARTIFACT_DIR}/load-test-timings.json"

echo "[$(date --utc -Ins)] Counting PRs and TRs"
ci-scripts/utility_scripts/count-multiarch-taskruns.py --data-dir "${ARTIFACT_DIR}" >"${ARTIFACT_DIR}/count-multiarch-taskruns.log"

monitoring_collection_log=$ARTIFACT_DIR/monitoring-collection.log
monitoring_collection_data=$ARTIFACT_DIR/load-tests.json
echo "[$(date --utc -Ins)] Graphing PRs and TRs"
ci-scripts/utility_scripts/show-pipelineruns.py --data-dir "${ARTIFACT_DIR}" >"${ARTIFACT_DIR}/show-pipelineruns.log" || true
mv "${ARTIFACT_DIR}/output.svg" "${ARTIFACT_DIR}/show-pipelines.svg" || true

## Monitoring data
echo "Setting up tool to collect monitoring data..."
echo "[$(date --utc -Ins)] Setting up OPL"
{
python3 -m venv venv
set +u
source venv/bin/activate
set -u
python3 -m pip install -U pip
python3 -m pip install -e "git+https://github.com/redhat-performance/opl.git#egg=opl-rhcloud-perf-team-core&subdirectory=core"
} &>"${ARTIFACT_DIR}/monitoring-setup.log"

echo "Collecting monitoring data..."
mstart=$(date --utc --date "$(status_data.py --status-data-file "$monitoring_collection_data" --get timestamp)" --iso-8601=seconds)
mend=$(date --utc --date "$(status_data.py --status-data-file "$monitoring_collection_data" --get endTimestamp)" --iso-8601=seconds)
mhost=$PROMETHEUS_HOST
echo "[$(date --utc -Ins)] Creating main status data file"
STATUS_DATA_FILE="${ARTIFACT_DIR}/load-test.json"
status_data.py \
--status-data-file "${STATUS_DATA_FILE}" \
--set "name=Konflux loadtest" "started=$( cat started )" "ended=$( cat ended )" \
--set-subtree-json "parameters.options=${ARTIFACT_DIR}/load-test-options.json" "results.measurements=${ARTIFACT_DIR}/load-test-timings.json"

echo "[$(date --utc -Ins)] Adding monitoring data"
mstarted="$( date -d "$( cat started )" --utc -Iseconds )"
mended="$( date -d "$( cat ended )" --utc -Iseconds )"
mhost="https://$PROMETHEUS_HOST"
mrawdir="${ARTIFACT_DIR}/monitoring-raw-data-dir/"
mkdir -p "$mrawdir"
status_data.py \
--status-data-file "$monitoring_collection_data" \
--additional ./ci-scripts/stage/cluster_read_config.yaml \
--monitoring-start "$mstart" \
--monitoring-end "$mend" \
--prometheus-host "https://$mhost" \
--status-data-file "${STATUS_DATA_FILE}" \
--additional ci-scripts/stage/cluster_read_config.yaml \
--monitoring-start "$mstarted" \
--monitoring-end "$mended" \
--prometheus-host "$mhost" \
--prometheus-port 443 \
--prometheus-token "$TOKEN" \
-d &>$monitoring_collection_log

if [ $? -ne 0 ]; then
echo "Error: status_data.py failed with exit code $?"
fi
--monitoring-raw-data-dir "$mrawdir" \
&>"${ARTIFACT_DIR}/monitoring-collection.log"

set +u
deactivate
set -u

echo "[$(date --utc -Ins)] Collecting additional info"
if ! [ -r users.json ]; then
echo "ERROR: Missing file with user creds"
else
login_log_stub="${ARTIFACT_DIR}/collected-oc_login"
application_stub="${ARTIFACT_DIR}/collected-applications.appstudio.redhat.com"
component_stub="${ARTIFACT_DIR}/collected-components.appstudio.redhat.com"

for uid in $( seq 1 $CONCURRENCY ); do
username="test-rhtap-$uid"
offline_token=$( cat users.json | jq --raw-output '.[] | select(.username == "'$username'").token' )
api_server=$( cat users.json | jq --raw-output '.[] | select(.username == "'$username'").apiurl' )
sso_server=$( cat users.json | jq --raw-output '.[] | select(.username == "'$username'").ssourl' )
access_token=$( curl \
--silent \
--header "Accept: application/json" \
--header "Content-Type: application/x-www-form-urlencoded" \
--data-urlencode "grant_type=refresh_token" \
--data-urlencode "client_id=cloud-services" \
--data-urlencode "refresh_token=${offline_token}" \
"${sso_server}" \
| jq --raw-output ".access_token" )
login_log="${login_log_stub}-${username}.log"
echo "Logging in as $username..."
if ! oc login --token="$access_token" --server="$api_server" &>$login_log; then
echo "ERROR: Login as $username failed:"
cat "$login_log"
continue
fi
tenant="${username}-tenant"

# Application info
echo "Collecting Application timestamps..."
collect_application "-n ${tenant}" "$application_stub-$tenant"

# Component info
echo "Collecting Component timestamps..."
collect_component "-n ${tenant}" "$component_stub-$tenant"
done
fi

popd
2 changes: 2 additions & 0 deletions tests/load-tests/run-stage.sh
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
date -Ins --utc >started
go run loadtest.go \
--applications-count "${APPLICATIONS_COUNT:-1}" \
--build-pipeline-selector-bundle "${BUILD_PIPELINE_SELECTOR_BUNDLE:-}" \
Expand All @@ -21,3 +22,4 @@ go run loadtest.go \
--waitintegrationtestspipelines="${WAIT_INTEGRATION_TESTS:-true}" \
--waitpipelines="${WAIT_PIPELINES:-true}" \
--stage
date -Ins --utc >ended

0 comments on commit 2fa72a4

Please sign in to comment.