coap-gateway: Allow acceptance of more than 16 options in CoAP messages #208

Workflow file for this run

# Measures memory usage of the coap-gateway under varying numbers of devices, resources, and resource sizes
name: Measure memory

# Controls when the workflow runs: on every push to the main branch or to any tag, and manually via workflow_dispatch
on:
  push:
    branches:
      - main
    tags:
      - "*"
  workflow_dispatch:
    inputs:
      cql:
        description: "Run with CQL database (Scylla)"
        required: false
        type: boolean
        default: false
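# Allow only one run per ref at a time; in-progress runs are cancelled by a newer run on the same ref, except on main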
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.ref_name != 'main' }}
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains two jobs: coapGateway runs the measurements and generateSummary aggregates the results
  generateSummary:
    needs: coapGateway
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        with:
          path: outputs
      - name: Update summary
        run: |
          echo "### CoAP Gateway memory usage" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "The following table shows the memory usage of the coap-gateway when connected to varying numbers of devices, resources, and resource sizes. The devices were connected one by one, and the following actions were executed for each device:" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- Sign up" >> $GITHUB_STEP_SUMMARY
          echo "- Sign in" >> $GITHUB_STEP_SUMMARY
          echo "- Publish resources" >> $GITHUB_STEP_SUMMARY
          echo "- Synchronize the device twin" >> $GITHUB_STEP_SUMMARY
          echo "- Ping until the test ends" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "| Num devices | Num resources | Resource data size (KB) | Current memory usage (MB) | Duration (seconds) |" >> $GITHUB_STEP_SUMMARY
          echo "| ----------- | ------------- | ----------------------- | ------------------------- | ------------------ |" >> $GITHUB_STEP_SUMMARY
          jq -s '.|=sort_by(.NumDevices,.NumResources,.LogLevel,.LogDumpBody,.ResourceDataSize)' outputs/*/*.json > output.json
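          # Each row is emitted as a base64-encoded JSON object so it survives word splitting in the for loop;
          # the _jq helper decodes the current row and extracts a single field.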
          for row in $(cat output.json | jq -r '.[] | @base64'); do
            _jq() {
              echo ${row} | base64 --decode | jq -r ${1}
            }
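            # Duration is reported in nanoseconds and ResourceDataSize in bytes; convert them to seconds and KB for the table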
            duration=$(echo "scale=2 ; $(_jq '.Duration') / 1000000000" | bc )
            resourceDataSize=$(echo "scale=2 ; $(_jq '.ResourceDataSize') / 1024" | bc )
            echo "| $(_jq '.NumDevices') | $(_jq '.NumResources') | $resourceDataSize | $(_jq '.CurrentMemRSS') | $duration |" >> $GITHUB_STEP_SUMMARY
          done
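  # Runs the memory measurement test for a single combination of devices, resources, and payload size from the matrix below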
  coapGateway:
    # The type of runner that the job will run on
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      # The maximum number of concurrently running jobs for a single organization is 20, so let's keep at least 4 available to all other repositories
      max-parallel: 16
      matrix:
        include:
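          # Each matrix entry defines one load profile: the number of devices, resources per device, payload size per resource, and a per-job timeout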
          #1
          - name: devices/1/resources/1/size/16KB
            numDevices: 1
            numResources: 1
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1/resources/1/size/1KB
            numDevices: 1
            numResources: 1
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1/resources/125/size/16KB
            numDevices: 1
            numResources: 125
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1/resources/125/size/1KB
            numDevices: 1
            numResources: 125
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1/resources/250/size/16KB
            numDevices: 1
            numResources: 250
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1/resources/250/size/1KB
            numDevices: 1
            numResources: 250
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1/resources/500/size/16KB
            numDevices: 1
            numResources: 500
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1/resources/500/size/1KB
            numDevices: 1
            numResources: 500
            resourceDataSize: 1024
            logLevel: info
            logDumpBody: true
            timeout: 120m
          #1000
          - name: devices/1000/resources/1/size/16KB
            numDevices: 1000
            numResources: 1
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1000/resources/1/size/1KB
            numDevices: 1000
            numResources: 1
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1000/resources/125/size/16KB
            numDevices: 1000
            numResources: 125
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1000/resources/125/size/1KB
            numDevices: 1000
            numResources: 125
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1000/resources/250/size/16KB
            numDevices: 1000
            numResources: 250
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1000/resources/250/size/1KB
            numDevices: 1000
            numResources: 250
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/1000/resources/500/size/16KB
            numDevices: 1000
            numResources: 500
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/1000/resources/500/size/1KB
            numDevices: 1000
            numResources: 500
            resourceDataSize: 1024
            timeout: 120m
          #2500
          - name: devices/2500/resources/1/size/16KB
            numDevices: 2500
            numResources: 1
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/2500/resources/1/size/1KB
            numDevices: 2500
            numResources: 1
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/2500/resources/125/size/16KB
            numDevices: 2500
            numResources: 125
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/2500/resources/125/size/1KB
            numDevices: 2500
            numResources: 125
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/2500/resources/250/size/16KB
            numDevices: 2500
            numResources: 250
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/2500/resources/250/size/1KB
            numDevices: 2500
            numResources: 250
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/2500/resources/500/size/16KB
            numDevices: 2500
            numResources: 500
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/2500/resources/500/size/1KB
            numDevices: 2500
            numResources: 500
            resourceDataSize: 1024
            timeout: 120m
          #5000
          - name: devices/5000/resources/1/size/16KB
            numDevices: 5000
            numResources: 1
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/5000/resources/1/size/1KB
            numDevices: 5000
            numResources: 1
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/5000/resources/125/size/16KB
            numDevices: 5000
            numResources: 125
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/5000/resources/125/size/1KB
            numDevices: 5000
            numResources: 125
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/5000/resources/250/size/16KB
            numDevices: 5000
            numResources: 250
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/5000/resources/250/size/1KB
            numDevices: 5000
            numResources: 250
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/5000/resources/500/size/16KB
            numDevices: 5000
            numResources: 500
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/5000/resources/500/size/1KB
            numDevices: 5000
            numResources: 500
            resourceDataSize: 1024
            timeout: 120m
          #10000
          - name: devices/10000/resources/1/size/16KB
            numDevices: 10000
            numResources: 1
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/10000/resources/1/size/1KB
            numDevices: 10000
            numResources: 1
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/10000/resources/125/size/16KB
            numDevices: 10000
            numResources: 125
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/10000/resources/125/size/1KB
            numDevices: 10000
            numResources: 125
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/10000/resources/250/size/16KB
            numDevices: 10000
            numResources: 250
            resourceDataSize: 16384
            timeout: 120m
          - name: devices/10000/resources/250/size/1KB
            numDevices: 10000
            numResources: 250
            resourceDataSize: 1024
            timeout: 120m
          - name: devices/10000/resources/500/size/4KB
            numDevices: 10000
            numResources: 500
            resourceDataSize: 4096
            timeout: 300m
          - name: devices/10000/resources/500/size/1KB
            numDevices: 10000
            numResources: 500
            resourceDataSize: 1024
            timeout: 120m
    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      - name: CPU Info
        run: |
          cat /proc/cpuinfo
          echo "Number of cores: $(nproc)"
          echo "Number of threads: $(nproc --all)"
      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
      - name: Resolve database
        id: db
        run: |
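          # Push and tag builds always use the CQL database (Scylla); manual workflow_dispatch runs use MongoDB unless the cql input is enabled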
if [ "${{ github.event_name != 'workflow_dispatch' || inputs.cql }}" == "true" ]; then
echo "name=cqldb" >> $GITHUB_OUTPUT
else
echo "name=mongodb" >> $GITHUB_OUTPUT
fi
      - name: Run a test
        continue-on-error: true
        run: |
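          # tee keeps the full test output in the job log while the process substitution extracts the TestMemoryWithDevices.result JSON line into out.json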
          make test/mem TEST_DATABASE=${{ steps.db.outputs.name }} TEST_MEMORY_COAP_GATEWAY_RESOURCE_DATA_SIZE=${{ matrix.resourceDataSize }} TEST_TIMEOUT=${{ matrix.timeout }} TEST_MEMORY_COAP_GATEWAY_NUM_DEVICES=${{ matrix.numDevices }} TEST_MEMORY_COAP_GATEWAY_NUM_RESOURCES=${{ matrix.numResources }} | tee >(grep "TestMemoryWithDevices.result:" | sed -e "s/.*TestMemoryWithDevices.result://g" | jq -r -c > out.json)
      - name: Dump file
        if: success()
        run: |
          jq -rc "." out.json
      - name: Upload file
        if: success()
        uses: actions/upload-artifact@v4
        with:
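          # hashFiles gives each matrix job a content-derived artifact name; 'none' is used if out.json is missing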
          name: ${{ hashFiles('out.json') || 'none' }}
          path: out.json
          retention-days: 1
          if-no-files-found: warn