Skip to content

Commit

Permalink
tests: on_target: add ppk test
Browse files Browse the repository at this point in the history
Add ppk test and update badge accordingly.

Signed-off-by: Giacomo Dematteis <[email protected]>
  • Loading branch information
DematteisGiacomo committed Nov 15, 2024
1 parent 4220a68 commit 4f22a07
Show file tree
Hide file tree
Showing 6 changed files with 553 additions and 17 deletions.
104 changes: 88 additions & 16 deletions .github/workflows/on_target.yml
Original file line number Diff line number Diff line change
Expand Up @@ -59,14 +59,24 @@ on:
type: boolean
required: true
default: false
run_wifi_location_tests:
type: boolean
required: true
default: false
run_ppk_tests:
type: boolean
required: true
default: false

jobs:
target_test:
name: Target Test
runs-on: self-hosted
permissions:
contents: write
environment: production
container:
image: ghcr.io/hello-nrfcloud/firmware:docker-v1.0.0
image: ghcr.io/hello-nrfcloud/firmware:docker-v1.0.2
options: --privileged
volumes:
- /dev:/dev:rw
Expand Down Expand Up @@ -183,6 +193,7 @@ jobs:
LOG_FILENAME: oob_conn_bridge_test_log

- name: Run Wi-Fi location tests
if: ${{ inputs.run_wifi_location_tests }}
working-directory: thingy91x-oob/tests/on_target
run: |
mkdir -p results
Expand All @@ -192,20 +203,81 @@ jobs:
env:
SEGGER: ${{ secrets.SEGGER_DUT_1 }}

- name: Results
if: always()
uses: pmeier/[email protected]
with:
path: thingy91x-oob/tests/on_target/results/*.xml
summary: true
fail-on-empty: true
title: OOB FW Test Results
- name: Run PPK tests
if: ${{ inputs.run_ppk_tests }}
id: ppk_test
working-directory: thingy91x-oob/tests/on_target
run: |
mkdir -p results
pytest -s -v -m dut_ppk \
--junit-xml=results/test-results-ppk.xml \
tests
env:
SEGGER: ${{ secrets.SEGGER_DUT_PPK }}

# - name: Mock Power Badge JSON Creation
# working-directory: thingy91x-oob/tests/on_target
# run: |
# # Write the JSON content to power_badge.json in the correct location
# echo '{ "label": "psm_current uA", "message": "15", "schemaVersion": 1, "color": "yellow" }' > power_badge.json
# shell: bash

- name: Push log files to artifacts
- name: Commit and Push Badge File to gh-pages Branch
if: always()
uses: actions/upload-artifact@v4
id: artifact-upload-test-logs
with:
name: test-logs
path: |
thingy91x-oob/tests/on_target/outcomes/logs/*.txt
working-directory: thingy91x-oob
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Configure Git to trust the current working directory
git config --global --add safe.directory /__w/firmware/firmware
# Set Git user name and email for commits in the GitHub Actions environment
git config --global user.email "[email protected]"
git config --global user.name "GitHub Actions"
# Check if the badge file exists at the specified location
if [ -f tests/on_target/power_badge.json ]; then
echo "Badge file found, proceeding to commit."
else
echo "Badge file not found. Exiting..."
exit 1
fi
# Check if the badge file exists at the specified location
if [ -f tests/on_target/power_measurements_plot.html ]; then
echo "Html file found, proceeding to commit."
else
echo "Html file not found. Exiting..."
exit 1
fi
# Ensure the gh-pages branch exists and switch to it
git fetch origin gh-pages || git checkout -b gh-pages
git checkout gh-pages
# Stage, commit, and push changes to the branch
ls tests/on_target/
ls docs/
cp tests/on_target/power_badge.json docs/power_badge.json
cp tests/on_target/power_measurements_plot.html docs/power_measurements_plot.html
git add docs/power_badge.json docs/power_measurements_plot.html
git commit -m "Update power badge and html to docs folder"
git push origin gh-pages
# - name: Results
# if: always()
# uses: pmeier/[email protected]
# with:
# path: thingy91x-oob/tests/on_target/results/*.xml
# summary: true
# fail-on-empty: true
# title: OOB FW Test Results

# - name: Push log files to artifacts
# if: always()
# uses: actions/upload-artifact@v4
# id: artifact-upload-test-logs
# with:
# name: test-logs
# path: |
# thingy91x-oob/tests/on_target/outcomes/logs/*.txt
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@

#### Nightly:
[![Target_tests](https://github.com/hello-nrfcloud/firmware/actions/workflows/test.yml/badge.svg?event=schedule)](https://github.com/hello-nrfcloud/firmware/actions/workflows/test.yml?query=branch%3Amain+event%3Aschedule)
[![Power Consumption Badge](https://img.shields.io/endpoint?url=https://hello-nrfcloud.github.io/firmware/power_badge.json)](https://hello-nrfcloud.github.io/firmware/power_measurements_plot.html)


This project is based on the
[NCS Example Application](https://github.com/nrfconnect/ncs-example-application).
Expand Down
3 changes: 3 additions & 0 deletions tests/on_target/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,3 +5,6 @@ pyusb
imgtool
requests
memfault-cli
ppk2-api
pandas
plotly
2 changes: 1 addition & 1 deletion tests/on_target/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def t91x_board():
def hex_file():
# Search for the firmware hex file in the artifacts folder
artifacts_dir = "artifacts"
hex_pattern = r"hello\.nrfcloud\.com-[a-f0-9]+-thingy91x-nrf91\.hex"
hex_pattern = r"hello\.nrfcloud\.com-[a-z.0-9]+-thingy91x-nrf91\.hex"

for file in os.listdir(artifacts_dir):
if re.match(hex_pattern, file):
Expand Down
204 changes: 204 additions & 0 deletions tests/on_target/tests/test_power.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,204 @@
##########################################################################################
# Copyright (c) 2024 Nordic Semiconductor
# SPDX-License-Identifier: LicenseRef-Nordic-5-Clause
##########################################################################################

import pytest
import time
import os
import json
import types
from tests.conftest import get_uarts
from ppk2_api.ppk2_api import PPK2_API
from utils.uart import Uart
from utils.flash_tools import flash_device, reset_device, recover_device
import sys
sys.path.append(os.getcwd())
from utils.logger import get_logger

logger = get_logger()

# Max time to wait for expected strings on the DUT log UART (30 min).
UART_TIMEOUT = 60 * 30
# Max time allowed for the DUT to reach PSM-level current draw (5 min).
POWER_TIMEOUT = 60 * 5
# PSM current target in microamperes; the test passes once the rolling
# average current drops below this value.
MAX_CURRENT_PSM_UA = 10

# J-Link serial number of the DUT, injected by CI (secrets.SEGGER_DUT_PPK).
SEGGER = os.getenv('SEGGER')
# UART identifier used to pick the DUT serial port; defaults to the SEGGER id.
UART_ID = os.getenv('UART_ID', SEGGER)

def get_uarts():
    """Return the sorted /dev/serial/by-id device paths matching UART_ID.

    Returns:
        list[str]: matching device paths; empty when the directory is
        missing/unreadable or nothing matches.

    Fix: previously returned ``False`` on error but a list otherwise;
    an empty list is equally falsy for callers (``if not all_uarts``)
    while keeping the return type consistent.

    NOTE(review): this shadows the ``get_uarts`` imported from
    tests.conftest at module top — presumably intentional, verify.
    """
    base_path = "/dev/serial/by-id"
    try:
        serial_paths = [os.path.join(base_path, entry) for entry in os.listdir(base_path)]
    except (FileNotFoundError, PermissionError) as e:
        logger.error(e)
        return []

    uarts = []
    for path in sorted(serial_paths):
        # Log every candidate alongside the wanted id to ease CI debugging.
        logger.warning(path)
        logger.warning(UART_ID)
        if UART_ID in path:
            uarts.append(path)
    return uarts

def save_badge_data(average):
    """Write a shields.io endpoint-badge JSON file for the measured current.

    Colour encodes the result: green for <= 10 uA, yellow for <= 50 uA,
    red above. Fails the test run if the measurement is negative
    (physically impossible). Writes 'power_badge.json' in the CWD.
    """
    logger.info(f"Minimum average current measured: {average}uA")
    if average < 0:
        pytest.fail(f"current cant be negative, current average: {average}")

    # Map the measurement onto a badge colour via threshold scan.
    for limit, level in ((10, "green"), (50, "yellow")):
        if average <= limit:
            color = level
            break
    else:
        color = "red"

    badge_data = {
        "label": "psm_current uA",
        "message": f"{average}",
        "schemaVersion": 1,
        "color": f"{color}",
    }

    # Persist for the gh-pages publishing step of the workflow.
    with open('power_badge.json', 'w') as json_file:
        json.dump(badge_data, json_file)

    logger.info(f"Minimum average current saved to 'power_badge.json'")


@pytest.fixture(scope="module")
def ppk2():
    '''
    Set up the PPK2 power-measurement tool and the DUT log UART.

    Yields a SimpleNamespace with:
        ppk2_test -- configured PPK2_API (3.3 V source, ampere-meter mode,
                     DUT power enabled)
        uart      -- Uart reader attached to the DUT log port

    Fails the module if no single PPK2 is attached, if the PPK2 modifiers
    cannot be read, or if no UART enumerates after retries.
    '''
    ppk2s_connected = PPK2_API.list_devices()
    ppk2s_connected.sort()
    # A PPK2 exposes two serial ports; after sorting, the first is the
    # command port -- TODO confirm this holds on all hosts.
    if len(ppk2s_connected) == 2:
        ppk2_port = ppk2s_connected[0]
        print(f"Found PPK2 at {ppk2_port}")
    elif len(ppk2s_connected) == 0:
        pytest.fail("No ppk found")
    else:
        # fix: removed pointless f-string prefix (no placeholders)
        pytest.fail("Multiple PPks found")

    ppk2_test = PPK2_API(ppk2_port, timeout=1, write_timeout=1, exclusive=True)

    # get_modifiers can fail right after enumeration; retry up to 15 times.
    for _ in range(15):
        try:
            ppk2_test.get_modifiers()
            break
        except Exception as e:
            logger.error(f"Failed to get modifiers: {e}")
            time.sleep(5)
    else:
        # fix: message previously said "10 attempts" but the loop runs 15
        pytest.fail("Failed to get ppk modifiers after 15 attempts")

    ppk2_test.set_source_voltage(3300)

    ppk2_test.use_ampere_meter()  # set ampere meter mode

    ppk2_test.toggle_DUT_power("ON")  # enable DUT power

    # Give the DUT time to boot and enumerate its UARTs.
    time.sleep(10)

    # Wait for the DUT log UART, power-cycling the DUT between attempts.
    for i in range(10):
        try:
            all_uarts = get_uarts()
            if not all_uarts:
                # fix: raise instead of only logging, so the retry path
                # below power-cycles deliberately (previously fell through
                # to an accidental exception on all_uarts[0] caught by the
                # same handler).
                raise RuntimeError("No UARTs found")
            log_uart_string = all_uarts[0]
            break
        except Exception:
            ppk2_test.toggle_DUT_power("OFF")  # disable DUT power
            time.sleep(2)
            ppk2_test.toggle_DUT_power("ON")  # enable DUT power
            time.sleep(5)
            continue
    else:
        pytest.fail("NO uart after 10 attempts")

    uart = Uart(log_uart_string, timeout=UART_TIMEOUT)

    yield types.SimpleNamespace(ppk2_test=ppk2_test, uart=uart,)

    # Teardown: release the serial port and stop PPK2 sampling.
    uart.stop()
    # recover_device()
    ppk2_test.stop_measuring()

@pytest.mark.dut_ppk
def test_power(ppk2, hex_file):
    """Measure DUT current with the PPK2 and require PSM-level draw.

    Passes when the rolling 3-second average current stays below
    MAX_CURRENT_PSM_UA within POWER_TIMEOUT; otherwise fails. Either way,
    the minimum rolling average observed is written to power_badge.json.

    fix: restored the @pytest.mark.dut_ppk marker -- it was commented out,
    so the CI invocation `pytest -m dut_ppk` selected no tests at all.
    """
    # flash_device(os.path.abspath(hex_file))
    reset_device()
    time.sleep(5)
    ppk2.uart.xfactoryreset()
    # Expected boot log lines; the wait on them is currently disabled below
    # (kept to document intent -- TODO re-enable once stable).
    patterns_boot = [
        "Network connectivity established",
        "Connected to Cloud",
        "trigger: frequent_poll_entry: frequent_poll_entry",
        "trigger: trigger_work_fn: Sending data sample trigger",
        "environmental_module: sample: temp:",
        "transport: state_connected_ready_run: Payload",
        "Location search done"
    ]

    ppk2.ppk2_test.start_measuring()

    # Boot
    ppk2.uart.flush()
    reset_device()
    # ppk2.uart.wait_for_str(patterns_boot, timeout=120)

    # ppk2.uart.wait_for_str("Disabling UARTs", timeout=120)

    start = time.time()
    min_average = float('inf')
    sampling_interval = 0.01
    # Number of per-read averages that make up a 3-second rolling window.
    window = int(3 / sampling_interval)
    averages = []
    average_of_averages = 0
    last_log_time = start
    while time.time() < start + POWER_TIMEOUT:
        try:
            read_data = ppk2.ppk2_test.get_data()
            if read_data != b'':
                samples, _ = ppk2.ppk2_test.get_samples(read_data)
                average = sum(samples)/len(samples)

                # Store the average for rolling calculation
                averages.append(average)
                # Keep only the averages from the last 3 seconds
                if len(averages) > window:
                    averages.pop(0)

                # Calculate the average of averages
                average_of_averages = sum(averages) / len(averages) if averages else 0

                # Log and store every 5 seconds
                current_time = time.time()
                if current_time - last_log_time >= 5:
                    logger.info(f"Average current over last 3 secs: {average_of_averages} uA")
                    last_log_time = current_time

                if average_of_averages < min_average:
                    min_average = average_of_averages

        except Exception as e:
            # NOTE(review): skips rather than fails on measurement errors --
            # deliberate best-effort, kept as-is.
            logger.error(f"Catching exception: {e}")
            pytest.skip("Something went wrong, unable to perform power measurements")

        # fix: only declare success once a FULL 3-second window is below the
        # target. Previously average_of_averages started at 0, so the loop
        # could break on the very first iteration before any sample was read
        # (leaving min_average at inf for the badge).
        if len(averages) >= window and average_of_averages < MAX_CURRENT_PSM_UA:
            # Log and store the last sample
            logger.info(f"Average current over last 3 secs: {average_of_averages} uA")
            logger.info("psm target reached for more than 3 secs")
            break
        time.sleep(sampling_interval)  # lower time between sampling -> less samples read in one sampling period
    else:
        save_badge_data(min_average)
        pytest.fail(f"PSM target not reached after {POWER_TIMEOUT} seconds, only reached {min_average} uA")
    save_badge_data(min_average)
Loading

0 comments on commit 4f22a07

Please sign in to comment.