Add MUM support in the testing framework

This commit is contained in:
Hosam-Eldin Mostafa 2026-04-28 23:37:53 +02:00
parent 58aa7350e6
commit b8f52bea39
84 changed files with 8860 additions and 4851 deletions

364
.gitignore vendored
View File

@ -1,182 +1,182 @@
# ---> Python # ---> Python
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files
__pycache__/ __pycache__/
*.py[cod] *.py[cod]
*$py.class *$py.class
# C extensions # C extensions
*.so *.so
# Distribution / packaging # Distribution / packaging
.Python .Python
build/ build/
develop-eggs/ develop-eggs/
dist/ dist/
downloads/ downloads/
eggs/ eggs/
.eggs/ .eggs/
lib/ lib/
lib64/ lib64/
parts/ parts/
sdist/ sdist/
var/ var/
wheels/ wheels/
share/python-wheels/ share/python-wheels/
*.egg-info/ *.egg-info/
.installed.cfg .installed.cfg
*.egg *.egg
MANIFEST MANIFEST
# PyInstaller # PyInstaller
# Usually these files are written by a python script from a template # Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it. # before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest *.manifest
*.spec *.spec
# Installer logs # Installer logs
pip-log.txt pip-log.txt
pip-delete-this-directory.txt pip-delete-this-directory.txt
# Unit test / coverage reports # Unit test / coverage reports
htmlcov/ htmlcov/
.tox/ .tox/
.nox/ .nox/
.coverage .coverage
.coverage.* .coverage.*
.cache .cache
nosetests.xml nosetests.xml
coverage.xml coverage.xml
*.cover *.cover
*.py,cover *.py,cover
.hypothesis/ .hypothesis/
.pytest_cache/ .pytest_cache/
cover/ cover/
# Translations # Translations
*.mo *.mo
*.pot *.pot
# Django stuff: # Django stuff:
*.log *.log
local_settings.py local_settings.py
db.sqlite3 db.sqlite3
db.sqlite3-journal db.sqlite3-journal
# Flask stuff: # Flask stuff:
instance/ instance/
.webassets-cache .webassets-cache
# Scrapy stuff: # Scrapy stuff:
.scrapy .scrapy
# Sphinx documentation # Sphinx documentation
docs/_build/ docs/_build/
# PyBuilder # PyBuilder
.pybuilder/ .pybuilder/
target/ target/
# Jupyter Notebook # Jupyter Notebook
.ipynb_checkpoints .ipynb_checkpoints
# IPython # IPython
profile_default/ profile_default/
ipython_config.py ipython_config.py
# pyenv # pyenv
# For a library or package, you might want to ignore these files since the code is # For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in: # intended to run in multiple environments; otherwise, check them in:
# .python-version # .python-version
# pipenv # pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies # However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not # having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies. # install all needed dependencies.
#Pipfile.lock #Pipfile.lock
# UV # UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more # This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries. # commonly ignored for libraries.
#uv.lock #uv.lock
# poetry # poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more # This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries. # commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock #poetry.lock
# pdm # pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock #pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control. # in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml .pdm.toml
.pdm-python .pdm-python
.pdm-build/ .pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/ __pypackages__/
# Celery stuff # Celery stuff
celerybeat-schedule celerybeat-schedule
celerybeat.pid celerybeat.pid
# SageMath parsed files # SageMath parsed files
*.sage.py *.sage.py
# Environments # Environments
.env .env
.venv .venv
env/ env/
venv/ venv/
ENV/ ENV/
env.bak/ env.bak/
venv.bak/ venv.bak/
# Spyder project settings # Spyder project settings
.spyderproject .spyderproject
.spyproject .spyproject
# Rope project settings # Rope project settings
.ropeproject .ropeproject
# mkdocs documentation # mkdocs documentation
/site /site
# mypy # mypy
.mypy_cache/ .mypy_cache/
.dmypy.json .dmypy.json
dmypy.json dmypy.json
# Pyre type checker # Pyre type checker
.pyre/ .pyre/
# pytype static type analyzer # pytype static type analyzer
.pytype/ .pytype/
# Cython debug symbols # Cython debug symbols
cython_debug/ cython_debug/
# PyCharm # PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear # and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder. # option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/ #.idea/
# --- Project specific --- # --- Project specific ---
# Test run artifacts # Test run artifacts
reports/ reports/
!reports/.gitkeep !reports/.gitkeep
# Vendor binaries (keep headers/docs and keep .dll from the SDK for now) # Vendor binaries (keep headers/docs and keep .dll from the SDK for now)
vendor/**/*.lib vendor/**/*.lib
vendor/**/*.pdb vendor/**/*.pdb
# Optional firmware blobs (uncomment if you don't want to track) # Optional firmware blobs (uncomment if you don't want to track)
# firmware/ # firmware/

611
README.md
View File

@ -1,285 +1,326 @@
# ECU Tests Framework # ECU Tests Framework
Python-based ECU testing framework built on pytest, with a pluggable LIN communication layer (Mock and BabyLin), configuration via YAML, and enhanced HTML/XML reporting with rich test metadata. Python-based ECU testing framework built on pytest, with a pluggable LIN communication layer (Mock, MUM, and legacy BabyLIN), configuration via YAML, and enhanced HTML/XML reporting with rich test metadata.
## Highlights ## Highlights
- Mock LIN adapter for fast, hardware-free development - **MUM (Melexis Universal Master) adapter** — current default for hardware tests; networked LIN master with built-in power control
- Real BabyLIN adapter using the SDK's official Python wrapper (BabyLIN_library.py) - Mock LIN adapter for fast, hardware-free development
- Hex flashing scaffold you can wire to UDS - BabyLIN adapter (legacy) using the vendor SDK's Python wrapper
- Rich pytest fixtures and example tests - Hex flashing scaffold you can wire to UDS
- Self-contained HTML report with Title, Requirements, Steps, and Expected Results extracted from test docstrings - Rich pytest fixtures and example tests
- JUnit XML report for CI/CD - Self-contained HTML report with Title, Requirements, Steps, and Expected Results extracted from test docstrings
- JUnit XML report for CI/CD
## Quick links
## Quick links
- Using the framework (common runs, markers, CI, Pi): `docs/12_using_the_framework.md`
- Plugin overview (reporting, hooks, artifacts): `docs/11_conftest_plugin_overview.md` - Using the framework (common runs, markers, CI, Pi): `docs/12_using_the_framework.md`
- Power supply (Owon) usage and troubleshooting: `docs/14_power_supply.md` - Plugin overview (reporting, hooks, artifacts): `docs/11_conftest_plugin_overview.md`
- Report properties cheatsheet (standard keys): `docs/15_report_properties_cheatsheet.md` - Power supply (Owon) usage and troubleshooting: `docs/14_power_supply.md`
- Report properties cheatsheet (standard keys): `docs/15_report_properties_cheatsheet.md`
## TL;DR quick start (copy/paste) - MUM source scripts (vendor reference): [vendor/automated_lin_test/README.md](vendor/automated_lin_test/README.md)
Mock (no hardware): ## TL;DR quick start (copy/paste)
```powershell Mock (no hardware):
python -m venv .venv; .\.venv\Scripts\Activate.ps1; pip install -r requirements.txt; pytest -m "not hardware" -v
``` ```powershell
python -m venv .venv; .\.venv\Scripts\Activate.ps1; pip install -r requirements.txt; pytest -m "not hardware" -v
Hardware (BabyLIN SDK): ```
```powershell Hardware via MUM (current default):
# Place BabyLIN_library.py and native libs under .\vendor per vendor/README.md first
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"; pytest -m "hardware and babylin" -v ```powershell
``` # 1. Install Melexis 'pylin' and 'pymumclient' (see vendor/automated_lin_test/install_packages.sh)
# 2. Make sure the MUM is reachable (default IP 192.168.7.2)
## Quick start (Windows PowerShell) $env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"; pytest -m "hardware and mum" -v
```
1) Create a virtual environment and install dependencies
Hardware via BabyLIN (legacy):
```powershell
python -m venv .venv ```powershell
.\.venv\Scripts\Activate.ps1 # Place BabyLIN_library.py and native libs under .\vendor per vendor/README.md first
pip install -r requirements.txt $env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"; pytest -m "hardware and babylin" -v
``` ```
2) Run the mock test suite (default interface) ## Quick start (Windows PowerShell)
```powershell 1) Create a virtual environment and install dependencies
python.exe -m pytest -m "not hardware" -v
``` ```powershell
python -m venv .venv
3) View the reports .\.venv\Scripts\Activate.ps1
pip install -r requirements.txt
- HTML: `reports/report.html` ```
- JUnit XML: `reports/junit.xml`
2) Run the mock test suite (default interface)
Tip: You can change output via `--html` and `--junitxml` CLI options.
```powershell
## Reporting: Metadata in HTML python.exe -m pytest -m "not hardware" -v
```
We extract these fields from each tests docstring and render them in the HTML report:
3) View the reports
- Title
- Description - HTML: `reports/report.html`
- Requirements (e.g., REQ-001) - JUnit XML: `reports/junit.xml`
- Test Steps
- Expected Result Tip: You can change output via `--html` and `--junitxml` CLI options.
Markers like `smoke`, `hardware`, and `req_00x` are also displayed. ## Reporting: Metadata in HTML
Example docstring format used by the plugin: We extract these fields from each tests docstring and render them in the HTML report:
```python - Title
""" - Description
Title: Mock LIN Interface - Send/Receive Echo Test - Requirements (e.g., REQ-001)
- Test Steps
Description: Validates basic send/receive functionality using the mock LIN interface with echo behavior. - Expected Result
Requirements: REQ-001, REQ-003 Markers like `smoke`, `hardware`, and `req_00x` are also displayed.
Test Steps: Example docstring format used by the plugin:
1. Connect to mock interface
2. Send frame ID 0x01 with data [0x55] ```python
3. Receive the echo within 100ms """
4. Assert ID and data integrity Title: Mock LIN Interface - Send/Receive Echo Test
Expected Result: Description: Validates basic send/receive functionality using the mock LIN interface with echo behavior.
- Echoed frame matches sent frame
""" Requirements: REQ-001, REQ-003
```
Test Steps:
## Configuration 1. Connect to mock interface
2. Send frame ID 0x01 with data [0x55]
Default config is `config/test_config.yaml`. Override via the `ECU_TESTS_CONFIG` environment variable. 3. Receive the echo within 100ms
4. Assert ID and data integrity
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\test_config.yaml) Expected Result:
``` - Echoed frame matches sent frame
"""
BabyLIN configuration template: `config/babylin.example.yaml` ```
```yaml ## Configuration
interface:
type: babylin # or "mock" Default config is `config/test_config.yaml`. Override via the `ECU_TESTS_CONFIG` environment variable.
channel: 0 # Channel index used by the SDK wrapper
bitrate: 19200 # Usually determined by SDF ```powershell
sdf_path: ./vendor/Example.sdf $env:ECU_TESTS_CONFIG = (Resolve-Path .\config\test_config.yaml)
schedule_nr: 0 # Start this schedule on connect ```
```
### MUM configuration (default for hardware)
Switch to hardware profile and run only hardware tests:
Template: `config/mum.example.yaml`
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\babylin.example.yaml) ```yaml
python.exe -m pytest -m hardware -v interface:
``` type: mum
host: 192.168.7.2 # MUM IP (USB-RNDIS default)
## Project structure lin_device: lin0 # MUM LIN device name
power_device: power_out0 # MUM power-control device (built-in PSU)
``` bitrate: 19200 # LIN baudrate
ecu_tests/ boot_settle_seconds: 0.5 # Wait after power-up before sending the first frame
├── ecu_framework/ frame_lengths:
│ ├── config.py # YAML config loader 0x0A: 8 # ALM_Req_A
│ ├── power/ 0x11: 4 # ALM_Status
│ │ └── owon_psu.py # Owon PSU serial SCPI controller (library) ```
│ ├── lin/
│ │ ├── base.py # LinInterface + LinFrame The MUM has its own power output, so `power_supply.enabled: false` is the
│ │ ├── mock.py # Mock LIN adapter typical setting when using MUM. The Owon PSU support remains for over/under-
│ │ └── babylin.py # BabyLIN SDK-wrapper adapter (uses BabyLIN_library.py) voltage scenarios but is independent of the LIN interface.
│ └── flashing/
│ └── hex_flasher.py # Hex flashing scaffold ### BabyLIN configuration (legacy)
├── tests/
│ ├── conftest.py # Shared fixtures Template: `config/babylin.example.yaml`
│ ├── test_smoke_mock.py # Mock interface smoke and boundary tests
│ ├── test_babylin_hardware_smoke.py # Hardware smoke tests ```yaml
│ ├── test_babylin_hardware_schedule_smoke.py # Hardware schedule flow interface:
│ ├── test_babylin_wrapper_mock.py # SDK adapter with mock wrapper type: babylin # or "mock", or "mum"
│ ├── plugin/ channel: 0 # Channel index used by the SDK wrapper
│ │ └── test_conftest_plugin_artifacts.py # Plugin self-test (reports artifacts) bitrate: 19200 # Usually determined by SDF
│ ├── unit/ sdf_path: ./vendor/Example.sdf
│ │ ├── test_config_loader.py # Config loader unit tests schedule_nr: 0 # Start this schedule on connect (-1 to skip)
│ │ ├── test_linframe.py # LIN frame dataclass/logic ```
│ │ ├── test_hex_flasher.py # Hex flasher scaffolding
│ │ └── test_babylin_adapter_mocked.py # BabyLIN adapter with mocks ### LIN adapter capabilities
│ └── hardware/
│ └── test_owon_psu.py # Owon PSU hardware test (uses central config) | Adapter | Power control | Diagnostic frames (Classic checksum) | Passive listen |
├── config/ | --- | --- | --- | --- |
│ ├── test_config.yaml # Default config | `mock` | n/a | n/a | yes (queue-based) |
│ ├── babylin.example.yaml # BabyLIN hardware template | `mum` | yes (`power_out0`) | yes (`MumLinInterface.send_raw()``ld_put_raw`) | no — `receive(id)` triggers a slave read |
│ ├── owon_psu.example.yaml # Owon PSU example (copy to owon_psu.yaml) | `babylin` | external (Owon PSU) | via SDF / `BLC_sendCommand` | yes (frame queue) |
│ └── owon_psu.yaml # Optional machine-specific PSU config
├── vendor/ # Place SDK wrapper and platform libs here Switch to hardware profile and run only hardware tests:
│ ├── Owon/
│ │ └── owon_psu_quick_demo.py # Quick PSU demo using the library & YAML ```powershell
│ ├── BabyLIN_library.py # Official SDK Python wrapper $env:ECU_TESTS_CONFIG = (Resolve-Path .\config\babylin.example.yaml)
│ └── BabyLIN library/ # Platform-specific binaries from SDK (DLL/.so) python.exe -m pytest -m hardware -v
├── reports/ # Generated reports ```
│ ├── report.html
│ └── junit.xml ## Project structure
├── conftest_plugin.py # HTML metadata extraction & rendering
├── pytest.ini # Markers and default addopts ```
├── requirements.txt ecu_tests/
└── README.md ├── ecu_framework/
``` │ ├── config.py # YAML config loader
│ ├── power/
## Usage recipes │ │ └── owon_psu.py # Owon PSU serial SCPI controller (library)
│ ├── lin/
- Run everything (mock and any non-hardware tests): │ │ ├── base.py # LinInterface + LinFrame
│ │ ├── mock.py # Mock LIN adapter
```powershell │ │ └── babylin.py # BabyLIN SDK-wrapper adapter (uses BabyLIN_library.py)
python.exe -m pytest -v │ └── flashing/
``` │ └── hex_flasher.py # Hex flashing scaffold
├── tests/
- Run by marker: │ ├── conftest.py # Shared fixtures
│ ├── test_smoke_mock.py # Mock interface smoke and boundary tests
```powershell │ ├── test_babylin_hardware_smoke.py # Hardware smoke tests
python.exe -m pytest -m "smoke" -v │ ├── test_babylin_hardware_schedule_smoke.py # Hardware schedule flow
python.exe -m pytest -m "req_001" -v │ ├── test_babylin_wrapper_mock.py # SDK adapter with mock wrapper
``` │ ├── plugin/
│ │ └── test_conftest_plugin_artifacts.py # Plugin self-test (reports artifacts)
- Run in parallel: │ ├── unit/
│ │ ├── test_config_loader.py # Config loader unit tests
```powershell │ │ ├── test_linframe.py # LIN frame dataclass/logic
python.exe -m pytest -n auto -v │ │ ├── test_hex_flasher.py # Hex flasher scaffolding
``` │ │ └── test_babylin_adapter_mocked.py # BabyLIN adapter with mocks
│ └── hardware/
- Run the plugin self-test (verifies reporting artifacts under `reports/`): │ └── test_owon_psu.py # Owon PSU hardware test (uses central config)
├── config/
```powershell │ ├── test_config.yaml # Default config
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q │ ├── babylin.example.yaml # BabyLIN hardware template
``` │ ├── owon_psu.example.yaml # Owon PSU example (copy to owon_psu.yaml)
│ └── owon_psu.yaml # Optional machine-specific PSU config
- Generate separate HTML/JUnit reports for unit vs non-unit tests: ├── vendor/ # Place SDK wrapper and platform libs here
│ ├── Owon/
```powershell │ │ └── owon_psu_quick_demo.py # Quick PSU demo using the library & YAML
./scripts/run_two_reports.ps1 │ ├── BabyLIN_library.py # Official SDK Python wrapper
``` │ └── BabyLIN library/ # Platform-specific binaries from SDK (DLL/.so)
├── reports/ # Generated reports
## BabyLIN adapter notes │ ├── report.html
│ └── junit.xml
The `ecu_framework/lin/babylin.py` implementation uses the official `BabyLIN_library.py` wrapper from the SDK. Put `BabyLIN_library.py` under `vendor/` (or on `PYTHONPATH`) along with the SDK's platform-specific libraries. Configure `sdf_path` and `schedule_nr` to load an SDF and start a schedule during connect. The adapter sends frames via `BLC_mon_set_xmit` and receives via `BLC_getNextFrameTimeout`. ├── conftest_plugin.py # HTML metadata extraction & rendering
├── pytest.ini # Markers and default addopts
## Docs and references ├── requirements.txt
└── README.md
- Guide: `TESTING_FRAMEWORK_GUIDE.md` (deep dive with examples and step-by-step flows) ```
- Reports: `reports/report.html` and `reports/junit.xml` (generated on each run)
- CI summary: `reports/summary.md` (machine-friendly run summary) ## Usage recipes
- Requirements coverage: `reports/requirements_coverage.json` (requirement → tests mapping)
- Tip: Open the HTML report on Windows with: `start .\reports\report.html` - Run everything (mock and any non-hardware tests):
- Configs: `config/test_config.yaml`, `config/babylin.example.yaml` (copy and modify for your environment)
- BabyLIN SDK placement and notes: `vendor/README.md` ```powershell
- Docs index: `docs/README.md` (run sequence, config resolution, reporting, call flows) python.exe -m pytest -v
- Raspberry Pi deployment: `docs/09_raspberry_pi_deployment.md` ```
- Build custom Pi image: `docs/10_build_custom_image.md`
- Pi scripts: `scripts/pi_install.sh`, `scripts/ecu-tests.service`, `scripts/ecu-tests.timer`, `scripts/run_tests.sh` - Run by marker:
## Troubleshooting ```powershell
python.exe -m pytest -m "smoke" -v
- HTML report missing columns: ensure `pytest.ini` includes `-p conftest_plugin` in `addopts`. python.exe -m pytest -m "req_001" -v
- ImportError for BabyLIN_library: verify `vendor/BabyLIN_library.py` placement and that required native libraries (DLL/.so) from the SDK are available on PATH/LD_LIBRARY_PATH. ```
- Permission errors in PowerShell: run the venv's full Python path or adjust ExecutionPolicy for scripts.
- Import errors: activate the venv and reinstall `requirements.txt`. - Run in parallel:
## Owon Power Supply (SCPI) — library, config, tests, and quick demo ```powershell
python.exe -m pytest -n auto -v
We provide a reusable pyserial-based library, a hardware test integrated with the central config, ```
and a minimal quick demo script.
- Run the plugin self-test (verifies reporting artifacts under `reports/`):
- Library: `ecu_framework/power/owon_psu.py` (class `OwonPSU`, `SerialParams`, `scan_ports`)
- Central config: `config/test_config.yaml` (`power_supply` section) ```powershell
- Optionally merge `config/owon_psu.yaml` or set `OWON_PSU_CONFIG` to a YAML path python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
- Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` is true) ```
- quick demo: `vendor/Owon/owon_psu_quick_demo.py` (reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml`)
- Generate separate HTML/JUnit reports for unit vs non-unit tests:
Quick setup (Windows PowerShell):
```powershell
```powershell ./scripts/run_two_reports.ps1
# Ensure dependencies ```
pip install -r .\requirements.txt
## BabyLIN adapter notes
# Option A: configure centrally in test_config.yaml
# Edit config\test_config.yaml and set: The `ecu_framework/lin/babylin.py` implementation uses the official `BabyLIN_library.py` wrapper from the SDK. Put `BabyLIN_library.py` under `vendor/` (or on `PYTHONPATH`) along with the SDK's platform-specific libraries. Configure `sdf_path` and `schedule_nr` to load an SDF and start a schedule during connect. The adapter sends frames via `BLC_mon_set_xmit` and receives via `BLC_getNextFrameTimeout`.
# power_supply.enabled: true
# power_supply.port: COM4 ## Docs and references
# Option B: use a separate machine-specific YAML - Guide: `TESTING_FRAMEWORK_GUIDE.md` (deep dive with examples and step-by-step flows)
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml - Reports: `reports/report.html` and `reports/junit.xml` (generated on each run)
# edit COM port and options in .\config\owon_psu.yaml - CI summary: `reports/summary.md` (machine-friendly run summary)
- Requirements coverage: `reports/requirements_coverage.json` (requirement → tests mapping)
# Run the hardware PSU test (skips if disabled or missing port) - Tip: Open the HTML report on Windows with: `start .\reports\report.html`
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q - Configs: `config/test_config.yaml`, `config/babylin.example.yaml` (copy and modify for your environment)
- BabyLIN SDK placement and notes: `vendor/README.md`
# Run the quick demo script - Docs index: `docs/README.md` (run sequence, config resolution, reporting, call flows)
python .\vendor\Owon\owon_psu_quick_demo.py - Raspberry Pi deployment: `docs/09_raspberry_pi_deployment.md`
``` - Build custom Pi image: `docs/10_build_custom_image.md`
- Pi scripts: `scripts/pi_install.sh`, `scripts/ecu-tests.service`, `scripts/ecu-tests.timer`, `scripts/run_tests.sh`
YAML keys supported by `power_supply`:
## Troubleshooting
```yaml
power_supply: - HTML report missing columns: ensure `pytest.ini` includes `-p conftest_plugin` in `addopts`.
enabled: true - ImportError for BabyLIN_library: verify `vendor/BabyLIN_library.py` placement and that required native libraries (DLL/.so) from the SDK are available on PATH/LD_LIBRARY_PATH.
port: COM4 # or /dev/ttyUSB0 - Permission errors in PowerShell: run the venv's full Python path or adjust ExecutionPolicy for scripts.
baudrate: 115200 - Import errors: activate the venv and reinstall `requirements.txt`.
timeout: 1.0
eol: "\n" # or "\r\n" ## Owon Power Supply (SCPI) — library, config, tests, and quick demo
parity: N # N|E|O
stopbits: 1 # 1|2 We provide a reusable pyserial-based library, a hardware test integrated with the central config,
xonxoff: false and a minimal quick demo script.
rtscts: false
dsrdtr: false - Library: `ecu_framework/power/owon_psu.py` (class `OwonPSU`, `SerialParams`, `scan_ports`)
idn_substr: OWON - Central config: `config/test_config.yaml` (`power_supply` section)
do_set: false - Optionally merge `config/owon_psu.yaml` or set `OWON_PSU_CONFIG` to a YAML path
set_voltage: 5.0 - Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` is true)
set_current: 0.1 - quick demo: `vendor/Owon/owon_psu_quick_demo.py` (reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml`)
```
Quick setup (Windows PowerShell):
Troubleshooting:
- If `*IDN?` is empty, confirm port, parity/stopbits, and `eol` (try `\r\n`). ```powershell
- On Windows, if COM>9, use `\\.\COM10` style in some tools; here plain `COM10` usually works. # Ensure dependencies
- Ensure only one program opens the COM port at a time. pip install -r .\requirements.txt
## Next steps # Option A: configure centrally in test_config.yaml
# Edit config\test_config.yaml and set:
- Replace `HexFlasher` with a production flashing routine (UDS) # power_supply.enabled: true
- Expand tests for end-to-end ECU workflows and requirement coverage # power_supply.port: COM4
# Option B: use a separate machine-specific YAML
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port and options in .\config\owon_psu.yaml
# Run the hardware PSU test (skips if disabled or missing port)
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
# Run the quick demo script
python .\vendor\Owon\owon_psu_quick_demo.py
```
YAML keys supported by `power_supply`:
```yaml
power_supply:
enabled: true
port: COM4 # or /dev/ttyUSB0
baudrate: 115200
timeout: 1.0
eol: "\n" # or "\r\n"
parity: N # N|E|O
stopbits: 1 # 1|2
xonxoff: false
rtscts: false
dsrdtr: false
idn_substr: OWON
do_set: false
set_voltage: 5.0
set_current: 0.1
```
Troubleshooting:
- If `*IDN?` is empty, confirm port, parity/stopbits, and `eol` (try `\r\n`).
- On Windows, if COM>9, use `\\.\COM10` style in some tools; here plain `COM10` usually works.
- Ensure only one program opens the COM port at a time.
## Next steps
- Replace `HexFlasher` with a production flashing routine (UDS)
- Expand tests for end-to-end ECU workflows and requirement coverage

View File

@ -1,359 +1,359 @@
# ECU Testing Framework - Complete Guide # ECU Testing Framework - Complete Guide
## Overview ## Overview
This comprehensive ECU Testing Framework provides a robust solution for testing Electronic Control Units (ECUs) using pytest with BabyLIN LIN bus communication. The framework includes detailed test documentation, enhanced reporting, mock interfaces for development, and real hardware integration capabilities. This comprehensive ECU Testing Framework provides a robust solution for testing Electronic Control Units (ECUs) using pytest with BabyLIN LIN bus communication. The framework includes detailed test documentation, enhanced reporting, mock interfaces for development, and real hardware integration capabilities.
## Framework Features ## Framework Features
### ✅ **Complete Implementation Status** ### ✅ **Complete Implementation Status**
- **✅ pytest-based testing framework** with custom plugins - **✅ pytest-based testing framework** with custom plugins
- **✅ BabyLIN LIN communication integration** via the official SDK Python wrapper (`BabyLIN_library.py`) - **✅ BabyLIN LIN communication integration** via the official SDK Python wrapper (`BabyLIN_library.py`)
- **✅ Mock interface for hardware-independent development** - **✅ Mock interface for hardware-independent development**
- **✅ Enhanced HTML/XML reporting with test metadata** - **✅ Enhanced HTML/XML reporting with test metadata**
- **✅ Detailed test documentation extraction** - **✅ Detailed test documentation extraction**
- **✅ Configuration management with YAML** - **✅ Configuration management with YAML**
- **✅ Hex file flashing capabilities (scaffold)** - **✅ Hex file flashing capabilities (scaffold)**
- **✅ Custom pytest markers for requirement traceability** - **✅ Custom pytest markers for requirement traceability**
## Enhanced Reporting System ## Enhanced Reporting System
### Test Metadata Integration ### Test Metadata Integration
The framework automatically extracts detailed test information from docstrings and integrates it into reports: The framework automatically extracts detailed test information from docstrings and integrates it into reports:
**HTML Report Features:** **HTML Report Features:**
- **Title Column**: Clear test descriptions extracted from docstrings - **Title Column**: Clear test descriptions extracted from docstrings
- **Requirements Column**: Requirement traceability (REQ-001, REQ-002, etc.) - **Requirements Column**: Requirement traceability (REQ-001, REQ-002, etc.)
- **Enhanced Test Details**: Description, test steps, and expected results - **Enhanced Test Details**: Description, test steps, and expected results
- **Marker Integration**: Custom pytest markers for categorization - **Marker Integration**: Custom pytest markers for categorization
**Example Test Documentation Format:** **Example Test Documentation Format:**
```python ```python
@pytest.mark.smoke @pytest.mark.smoke
@pytest.mark.req_001 @pytest.mark.req_001
def test_mock_send_receive_echo(self, mock_interface): def test_mock_send_receive_echo(self, mock_interface):
""" """
Title: Mock LIN Interface - Send/Receive Echo Test Title: Mock LIN Interface - Send/Receive Echo Test
Description: Validates basic send/receive functionality using the mock Description: Validates basic send/receive functionality using the mock
LIN interface with echo behavior for development testing. LIN interface with echo behavior for development testing.
Requirements: REQ-001, REQ-003 Requirements: REQ-001, REQ-003
Test Steps: Test Steps:
1. Connect to mock LIN interface 1. Connect to mock LIN interface
2. Send a test frame with ID 0x01 and data [0x55] 2. Send a test frame with ID 0x01 and data [0x55]
3. Receive the echoed frame within 100ms timeout 3. Receive the echoed frame within 100ms timeout
4. Verify frame ID and data integrity 4. Verify frame ID and data integrity
Expected Result: Expected Result:
- Frame should be echoed back successfully - Frame should be echoed back successfully
- Received data should match sent data exactly - Received data should match sent data exactly
- Operation should complete within timeout period - Operation should complete within timeout period
""" """
``` ```
### Report Generation ### Report Generation
**HTML Report (`reports/report.html`):** **HTML Report (`reports/report.html`):**
- Interactive table with sortable columns - Interactive table with sortable columns
- Test titles and requirements clearly visible - Test titles and requirements clearly visible
- Execution duration and status tracking - Execution duration and status tracking
- Enhanced metadata from docstrings - Enhanced metadata from docstrings
**XML Report (`reports/junit.xml`):** **XML Report (`reports/junit.xml`):**
- Standard JUnit XML format for CI/CD integration - Standard JUnit XML format for CI/CD integration
- Test execution data and timing information - Test execution data and timing information
- Compatible with most CI systems (Jenkins, GitLab CI, etc.) - Compatible with most CI systems (Jenkins, GitLab CI, etc.)
## Project Structure ## Project Structure
``` ```
ecu_tests/ ecu_tests/
├── ecu_framework/ # Core framework package ├── ecu_framework/ # Core framework package
│ ├── config.py # YAML configuration management │ ├── config.py # YAML configuration management
│ ├── lin/ # LIN communication interfaces │ ├── lin/ # LIN communication interfaces
│ │ ├── base.py # Abstract LinInterface definition │ │ ├── base.py # Abstract LinInterface definition
│ │ ├── mock.py # Mock interface for development │ │ ├── mock.py # Mock interface for development
│ │ └── babylin.py # Real BabyLin hardware interface │ │ └── babylin.py # Real BabyLin hardware interface
│ └── flashing/ # Hex file flashing capabilities │ └── flashing/ # Hex file flashing capabilities
│ └── hex_flasher.py # ECU flash programming │ └── hex_flasher.py # ECU flash programming
├── tests/ # Test suite ├── tests/ # Test suite
│ ├── conftest.py # pytest fixtures and configuration │ ├── conftest.py # pytest fixtures and configuration
│ ├── test_smoke_mock.py # Mock interface validation tests │ ├── test_smoke_mock.py # Mock interface validation tests
│ ├── test_babylin_hardware_smoke.py # Hardware smoke tests │ ├── test_babylin_hardware_smoke.py # Hardware smoke tests
│ └── test_hardware_placeholder.py # Future hardware tests │ └── test_hardware_placeholder.py # Future hardware tests
├── config/ # Configuration files ├── config/ # Configuration files
│ ├── test_config.yaml # Main test configuration │ ├── test_config.yaml # Main test configuration
│ └── babylin.example.yaml # BabyLin configuration template │ └── babylin.example.yaml # BabyLin configuration template
├── vendor/ # BabyLIN SDK placement ├── vendor/ # BabyLIN SDK placement
│ ├── BabyLIN_library.py # Official SDK Python wrapper │ ├── BabyLIN_library.py # Official SDK Python wrapper
│ └── platform libs # OS-specific native libs (DLL/.so/.dylib) │ └── platform libs # OS-specific native libs (DLL/.so/.dylib)
├── reports/ # Generated test reports ├── reports/ # Generated test reports
│ ├── report.html # Enhanced HTML report │ ├── report.html # Enhanced HTML report
│ └── junit.xml # JUnit XML report │ └── junit.xml # JUnit XML report
├── conftest_plugin.py # Custom pytest plugin for enhanced reporting ├── conftest_plugin.py # Custom pytest plugin for enhanced reporting
├── pytest.ini # pytest configuration with custom markers ├── pytest.ini # pytest configuration with custom markers
├── requirements.txt # Python dependencies ├── requirements.txt # Python dependencies
└── README.md # Project documentation └── README.md # Project documentation
``` ```
## Running Tests ## Running Tests
### Basic Test Execution ### Basic Test Execution
```powershell ```powershell
# Run all tests with verbose output # Run all tests with verbose output
python -m pytest -v python -m pytest -v
# Run specific test suite # Run specific test suite
python -m pytest tests\test_smoke_mock.py -v python -m pytest tests\test_smoke_mock.py -v
# Run tests with specific markers # Run tests with specific markers
python -m pytest -m "smoke" -v python -m pytest -m "smoke" -v
python -m pytest -m "req_001" -v python -m pytest -m "req_001" -v
# Run hardware tests (requires BabyLIN hardware); join with adapter marker # Run hardware tests (requires BabyLIN hardware); join with adapter marker
python -m pytest -m "hardware and babylin" -v python -m pytest -m "hardware and babylin" -v
``` ```
### Unit Tests (fast, no hardware) ### Unit Tests (fast, no hardware)
Run only unit tests using the dedicated marker or by path: Run only unit tests using the dedicated marker or by path:
```powershell ```powershell
# By marker # By marker
python -m pytest -m unit -q python -m pytest -m unit -q
# By path # By path
python -m pytest tests\unit -q python -m pytest tests\unit -q
# Plugin self-tests (verifies reporting artifacts) # Plugin self-tests (verifies reporting artifacts)
python -m pytest tests\plugin -q python -m pytest tests\plugin -q
``` ```
Reports still go to `reports/` (HTML and JUnit per defaults). Open the HTML on Windows with: Reports still go to `reports/` (HTML and JUnit per defaults). Open the HTML on Windows with:
```powershell ```powershell
start .\reports\report.html start .\reports\report.html
``` ```
Coverage: enabled by default via pytest.ini. To disable locally: Coverage: enabled by default via pytest.ini. To disable locally:
```powershell ```powershell
python -m pytest -q -o addopts="" python -m pytest -q -o addopts=""
``` ```
Optional HTML coverage: Optional HTML coverage:
```powershell ```powershell
python -m pytest --cov=ecu_framework --cov-report=html -q python -m pytest --cov=ecu_framework --cov-report=html -q
start .\htmlcov\index.html start .\htmlcov\index.html
``` ```
See also: `docs/13_unit_testing_guide.md` for more details and examples. See also: `docs/13_unit_testing_guide.md` for more details and examples.
### Report Generation ### Report Generation
Tests automatically generate enhanced reports: Tests automatically generate enhanced reports:
- **HTML Report**: `reports/report.html` - Interactive report with metadata - **HTML Report**: `reports/report.html` - Interactive report with metadata
- **XML Report**: `reports/junit.xml` - CI/CD compatible format - **XML Report**: `reports/junit.xml` - CI/CD compatible format
## Configuration ## Configuration
### Test Configuration (`config/test_config.yaml`) ### Test Configuration (`config/test_config.yaml`)
```yaml ```yaml
interface: interface:
type: mock # or babylin for hardware type: mock # or babylin for hardware
timeout: 1.0 timeout: 1.0
flash: flash:
hex_file_path: firmware/ecu_firmware.hex hex_file_path: firmware/ecu_firmware.hex
flash_timeout: 30.0 flash_timeout: 30.0
ecu: ecu:
name: Test ECU name: Test ECU
lin_id_range: [0x01, 0x3F] lin_id_range: [0x01, 0x3F]
``` ```
### BabyLIN Configuration (`config/babylin.example.yaml`) ### BabyLIN Configuration (`config/babylin.example.yaml`)
```yaml ```yaml
interface: interface:
type: babylin type: babylin
channel: 0 # channel index used by the SDK wrapper channel: 0 # channel index used by the SDK wrapper
bitrate: 19200 # typically set by SDF bitrate: 19200 # typically set by SDF
sdf_path: ./vendor/Example.sdf sdf_path: ./vendor/Example.sdf
schedule_nr: 0 # schedule to start on connect schedule_nr: 0 # schedule to start on connect
``` ```
## Test Categories ## Test Categories
### 1. Mock Interface Tests (`test_smoke_mock.py`) ### 1. Mock Interface Tests (`test_smoke_mock.py`)
**Purpose**: Hardware-independent development and validation **Purpose**: Hardware-independent development and validation
- ✅ Send/receive echo functionality - ✅ Send/receive echo functionality
- ✅ Master request/response testing - ✅ Master request/response testing
- ✅ Timeout behavior validation - ✅ Timeout behavior validation
- ✅ Frame validation boundary testing - ✅ Frame validation boundary testing
- ✅ Parameterized boundary tests for comprehensive coverage - ✅ Parameterized boundary tests for comprehensive coverage
**Status**: **7 tests passing** - Complete implementation **Status**: **7 tests passing** - Complete implementation
### 2. Hardware Smoke Tests (`test_babylin_hardware_smoke.py`) ### 2. Hardware Smoke Tests (`test_babylin_hardware_smoke.py`)
**Purpose**: Basic BabyLIN hardware connectivity validation **Purpose**: Basic BabyLIN hardware connectivity validation
- ✅ SDK wrapper import and device open - ✅ SDK wrapper import and device open
- ✅ Interface connection establishment - ✅ Interface connection establishment
- ✅ Basic send/receive operations - ✅ Basic send/receive operations
- ✅ Error handling and cleanup - ✅ Error handling and cleanup
**Status**: Ready for hardware testing **Status**: Ready for hardware testing
### 3. Hardware Integration Tests (`test_hardware_placeholder.py`) ### 3. Hardware Integration Tests (`test_hardware_placeholder.py`)
**Purpose**: Full ECU testing workflow with real hardware **Purpose**: Full ECU testing workflow with real hardware
- ECU flashing with hex files - ECU flashing with hex files
- Communication protocol validation - Communication protocol validation
- Diagnostic command testing - Diagnostic command testing
- Performance and stress testing - Performance and stress testing
**Status**: Framework ready, awaiting ECU specifications **Status**: Framework ready, awaiting ECU specifications
## Custom Pytest Markers ## Custom Pytest Markers
The framework includes custom markers for test categorization and requirement traceability: The framework includes custom markers for test categorization and requirement traceability:
```python ```python
# In pytest.ini # In pytest.ini
markers = markers =
smoke: Basic functionality tests smoke: Basic functionality tests
integration: Integration tests requiring hardware integration: Integration tests requiring hardware
hardware: Tests requiring physical BabyLin hardware hardware: Tests requiring physical BabyLin hardware
babylin: Tests targeting the BabyLIN SDK adapter babylin: Tests targeting the BabyLIN SDK adapter
unit: Fast unit tests (no hardware) unit: Fast unit tests (no hardware)
boundary: Boundary condition and edge case tests boundary: Boundary condition and edge case tests
req_001: Tests validating requirement REQ-001 (LIN Interface Basic Operations) req_001: Tests validating requirement REQ-001 (LIN Interface Basic Operations)
req_002: Tests validating requirement REQ-002 (Master Request/Response) req_002: Tests validating requirement REQ-002 (Master Request/Response)
req_003: Tests validating requirement REQ-003 (Frame Validation) req_003: Tests validating requirement REQ-003 (Frame Validation)
req_004: Tests validating requirement REQ-004 (Timeout Handling) req_004: Tests validating requirement REQ-004 (Timeout Handling)
``` ```
## BabyLIN Integration Details ## BabyLIN Integration Details
### SDK Python wrapper ### SDK Python wrapper
The framework uses the official SDK Python wrapper `BabyLIN_library.py` (placed under `vendor/`) and calls its BLC_* APIs. The framework uses the official SDK Python wrapper `BabyLIN_library.py` (placed under `vendor/`) and calls its BLC_* APIs.
Key calls in the adapter (`ecu_framework/lin/babylin.py`): Key calls in the adapter (`ecu_framework/lin/babylin.py`):
- `BLC_getBabyLinPorts`, `BLC_openPort` — discovery and open - `BLC_getBabyLinPorts`, `BLC_openPort` — discovery and open
- `BLC_loadSDF`, `BLC_getChannelHandle`, `BLC_sendCommand('start schedule N;')` — SDF + scheduling - `BLC_loadSDF`, `BLC_getChannelHandle`, `BLC_sendCommand('start schedule N;')` — SDF + scheduling
- `BLC_mon_set_xmit` — transmit - `BLC_mon_set_xmit` — transmit
- `BLC_getNextFrameTimeout` — receive - `BLC_getNextFrameTimeout` — receive
- `BLC_sendRawMasterRequest` — master request (length then bytes) - `BLC_sendRawMasterRequest` — master request (length then bytes)
## Development Workflow ## Development Workflow
### 1. Development Phase ### 1. Development Phase
```powershell ```powershell
# Use mock interface for development # Use mock interface for development
python -m pytest tests\test_smoke_mock.py -v python -m pytest tests\test_smoke_mock.py -v
``` ```
### 2. Hardware Integration Phase ### 2. Hardware Integration Phase
```powershell ```powershell
# Test with real BabyLIN hardware # Test with real BabyLIN hardware
python -m pytest -m "hardware and babylin" -v python -m pytest -m "hardware and babylin" -v
``` ```
### 3. Full System Testing ### 3. Full System Testing
```powershell ```powershell
# Complete test suite including ECU flashing # Complete test suite including ECU flashing
python -m pytest -v python -m pytest -v
``` ```
## Enhanced Reporting Output Example ## Enhanced Reporting Output Example
The enhanced HTML report includes: The enhanced HTML report includes:
| Result | Test | Title | Requirements | Duration | Links | | Result | Test | Title | Requirements | Duration | Links |
|--------|------|-------|--------------|----------|--------| |--------|------|-------|--------------|----------|--------|
| ✅ Passed | test_mock_send_receive_echo | Mock LIN Interface - Send/Receive Echo Test | REQ-001, REQ-003 | 1 ms | | | ✅ Passed | test_mock_send_receive_echo | Mock LIN Interface - Send/Receive Echo Test | REQ-001, REQ-003 | 1 ms | |
| ✅ Passed | test_mock_request_synthesized_response | Mock LIN Interface - Master Request Response Test | REQ-002 | 0 ms | | | ✅ Passed | test_mock_request_synthesized_response | Mock LIN Interface - Master Request Response Test | REQ-002 | 0 ms | |
| ✅ Passed | test_mock_receive_timeout_behavior | Mock LIN Interface - Receive Timeout Test | REQ-004 | 106 ms | | | ✅ Passed | test_mock_receive_timeout_behavior | Mock LIN Interface - Receive Timeout Test | REQ-004 | 106 ms | |
## Framework Validation Results ## Framework Validation Results
**Current Status**: ✅ **All core features implemented and tested** **Current Status**: ✅ **All core features implemented and tested**
**Mock Interface Tests**: 7/7 passing (0.14s execution time) **Mock Interface Tests**: 7/7 passing (0.14s execution time)
- Send/receive operations: ✅ Working - Send/receive operations: ✅ Working
- Timeout handling: ✅ Working - Timeout handling: ✅ Working
- Frame validation: ✅ Working - Frame validation: ✅ Working
- Boundary testing: ✅ Working - Boundary testing: ✅ Working
**Enhanced Reporting**: ✅ **Fully functional** **Enhanced Reporting**: ✅ **Fully functional**
- HTML report with metadata: ✅ Working - HTML report with metadata: ✅ Working
- XML report generation: ✅ Working - XML report generation: ✅ Working
- Custom pytest plugin: ✅ Working - Custom pytest plugin: ✅ Working
- Docstring metadata extraction: ✅ Working - Docstring metadata extraction: ✅ Working
**Configuration System**: ✅ **Complete** **Configuration System**: ✅ **Complete**
- YAML configuration loading: ✅ Working - YAML configuration loading: ✅ Working
- Environment variable override: ✅ Working - Environment variable override: ✅ Working
- BabyLIN SDF/schedule configuration: ✅ Working - BabyLIN SDF/schedule configuration: ✅ Working
- Power supply (PSU) configuration: ✅ Working (see `config/test_config.yaml` → `power_supply`) - Power supply (PSU) configuration: ✅ Working (see `config/test_config.yaml` → `power_supply`)
## Owon Power Supply (PSU) Integration ## Owon Power Supply (PSU) Integration
The framework includes a serial SCPI controller for Owon PSUs and a hardware test wired to the central config. The framework includes a serial SCPI controller for Owon PSUs and a hardware test wired to the central config.
- Library: `ecu_framework/power/owon_psu.py` (pyserial) - Library: `ecu_framework/power/owon_psu.py` (pyserial)
- Config: `config/test_config.yaml` (`power_supply` section) - Config: `config/test_config.yaml` (`power_supply` section)
- Optionally merge machine-specific settings from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG` - Optionally merge machine-specific settings from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG`
- Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` and `port` present) - Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` and `port` present)
- Quick demo: `vendor/Owon/owon_psu_quick_demo.py` - Quick demo: `vendor/Owon/owon_psu_quick_demo.py`
Quick run: Quick run:
```powershell ```powershell
pip install -r .\requirements.txt pip install -r .\requirements.txt
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port in .\config\owon_psu.yaml # edit COM port in .\config\owon_psu.yaml
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
python .\vendor\Owon\owon_psu_quick_demo.py python .\vendor\Owon\owon_psu_quick_demo.py
``` ```
Common config keys: Common config keys:
```yaml ```yaml
power_supply: power_supply:
enabled: true enabled: true
port: COM4 port: COM4
baudrate: 115200 baudrate: 115200
timeout: 1.0 timeout: 1.0
eol: "\n" eol: "\n"
parity: N parity: N
stopbits: 1 stopbits: 1
idn_substr: OWON idn_substr: OWON
``` ```
## Next Steps ## Next Steps
1. **Hardware Testing**: Connect BabyLIN hardware and validate hardware smoke tests 1. **Hardware Testing**: Connect BabyLIN hardware and validate hardware smoke tests
2. **ECU Integration**: Define ECU-specific communication protocols and diagnostic commands 2. **ECU Integration**: Define ECU-specific communication protocols and diagnostic commands
3. **Hex Flashing**: Implement complete hex file flashing workflow 3. **Hex Flashing**: Implement complete hex file flashing workflow
4. **CI/CD Integration**: Set up automated testing pipeline with generated reports 4. **CI/CD Integration**: Set up automated testing pipeline with generated reports
## Dependencies ## Dependencies
``` ```
pytest>=8.4.2 pytest>=8.4.2
pytest-html>=4.1.1 pytest-html>=4.1.1
pytest-xdist>=3.8.0 pytest-xdist>=3.8.0
pyyaml>=6.0.2 pyyaml>=6.0.2
``` ```
This framework provides a complete, production-ready testing solution for ECU development with BabyLIN communication, featuring enhanced documentation, traceability, and reporting capabilities. This framework provides a complete, production-ready testing solution for ECU development with BabyLIN communication, featuring enhanced documentation, traceability, and reporting capabilities.

View File

@ -1,11 +1,11 @@
# Example configuration for BabyLIN hardware runs (SDK Python wrapper) # Example configuration for BabyLIN hardware runs (SDK Python wrapper)
interface: interface:
type: babylin type: babylin
channel: 0 # Channel index (0-based) as used by the SDK channel: 0 # Channel index (0-based) as used by the SDK
bitrate: 19200 # Usually defined by the SDF, kept for reference bitrate: 19200 # Usually defined by the SDF, kept for reference
node_name: ECU_TEST_NODE node_name: ECU_TEST_NODE
sdf_path: .\vendor\Example.sdf # Path to your SDF file sdf_path: .\vendor\Example.sdf # Path to your SDF file
schedule_nr: 0 # Schedule number to start on connect schedule_nr: 0 # Schedule number to start on connect
flash: flash:
enabled: true enabled: true
hex_path: C:\\Path\\To\\firmware.hex # TODO: update hex_path: C:\\Path\\To\\firmware.hex # TODO: update

View File

@ -1,50 +1,50 @@
# Examples: Mock-only and BabyLIN hardware configurations # Examples: Mock-only and BabyLIN hardware configurations
# #
# How to use (Windows PowerShell): # How to use (Windows PowerShell):
# # Point the framework to a specific config file # # Point the framework to a specific config file
# $env:ECU_TESTS_CONFIG = ".\config\examples.yaml" # $env:ECU_TESTS_CONFIG = ".\config\examples.yaml"
# # Run only mock tests # # Run only mock tests
# pytest -m "not hardware" -v # pytest -m "not hardware" -v
# # Switch to the BabyLIN profile by moving it under the 'active' key or by # # Switch to the BabyLIN profile by moving it under the 'active' key or by
# # exporting a different file path containing only the desired profile. # # exporting a different file path containing only the desired profile.
# #
# This file shows both profiles in one place; typically you'll copy the relevant # This file shows both profiles in one place; typically you'll copy the relevant
# section into its own YAML file (e.g., config/mock.yaml, config/babylin.yaml). # section into its own YAML file (e.g., config/mock.yaml, config/babylin.yaml).
# --- MOCK PROFILE ----------------------------------------------------------- # --- MOCK PROFILE -----------------------------------------------------------
mock_profile: mock_profile:
interface: interface:
type: mock type: mock
channel: 1 channel: 1
bitrate: 19200 bitrate: 19200
flash: flash:
enabled: false enabled: false
hex_path: hex_path:
# --- BABYLIN PROFILE -------------------------------------------------------- # --- BABYLIN PROFILE --------------------------------------------------------
# Requires: vendor/BabyLIN_library.py and platform libraries placed per vendor/README.md # Requires: vendor/BabyLIN_library.py and platform libraries placed per vendor/README.md
babylin_profile: babylin_profile:
interface: interface:
type: babylin type: babylin
channel: 0 # SDK channel index (0-based) channel: 0 # SDK channel index (0-based)
bitrate: 19200 # Informational; SDF usually defines effective timing bitrate: 19200 # Informational; SDF usually defines effective timing
node_name: ECU_TEST_NODE # Optional label node_name: ECU_TEST_NODE # Optional label
sdf_path: .\vendor\Example.sdf # Update to your real SDF path sdf_path: .\vendor\Example.sdf # Update to your real SDF path
schedule_nr: 0 # Start this schedule on connect schedule_nr: 0 # Start this schedule on connect
flash: flash:
enabled: true enabled: true
hex_path: C:\\Path\\To\\firmware.hex # Update as needed hex_path: C:\\Path\\To\\firmware.hex # Update as needed
# --- ACTIVE SELECTION ------------------------------------------------------- # --- ACTIVE SELECTION -------------------------------------------------------
# To use one of the profiles above, copy it under the 'active' key below or # To use one of the profiles above, copy it under the 'active' key below or
# include only that profile in a separate file. The loader expects the top-level # include only that profile in a separate file. The loader expects the top-level
# keys 'interface' and 'flash' by default. For convenience, we expose a shape # keys 'interface' and 'flash' by default. For convenience, we expose a shape
# that mirrors that directly. Here is a self-contained active selection: # that mirrors that directly. Here is a self-contained active selection:
active: active:
interface: interface:
type: mock type: mock
channel: 1 channel: 1
bitrate: 19200 bitrate: 19200
flash: flash:
enabled: false enabled: false
hex_path: hex_path:

29
config/mum.example.yaml Normal file
View File

@ -0,0 +1,29 @@
# MUM (Melexis Universal Master) interface example.
# Copy to test_config.yaml or point ECU_TESTS_CONFIG at this file.
#
# Prerequisites:
# - MUM is reachable over IP (default 192.168.7.2 over USB-RNDIS).
# - Melexis Python packages 'pylin' and 'pymumclient' are importable.
# See vendor/automated_lin_test/install_packages.sh.
interface:
type: mum
host: 192.168.7.2 # MUM IP address
lin_device: lin0 # MUM LIN device name
power_device: power_out0 # MUM power-control device
bitrate: 19200 # LIN baudrate
boot_settle_seconds: 0.5 # Delay after power-up before first frame
# Optional: per-frame-id data lengths. Defaults cover the 4SEVEN library
# (ALM_Status=4, ALM_Req_A=8, etc.) — only override if your ECU differs.
frame_lengths:
0x0A: 8 # ALM_Req_A
0x11: 4 # ALM_Status
flash:
enabled: false
hex_path:
# The Owon PSU is unused on the MUM flow (MUM provides power on power_out0).
# Leave disabled unless you also want to drive the Owon for a separate test.
power_supply:
enabled: false

View File

@ -1,18 +1,18 @@
# Example configuration for Owon PSU hardware test # Example configuration for Owon PSU hardware test
# Copy to config/owon_psu.yaml and adjust values for your setup # Copy to config/owon_psu.yaml and adjust values for your setup
port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux
baudrate: 115200 # default 115200 baudrate: 115200 # default 115200
timeout: 1.0 # seconds timeout: 1.0 # seconds
# eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required # eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required
# parity: N # N|E|O (default N) # parity: N # N|E|O (default N)
# stopbits: 1 # 1 or 2 (default 1) # stopbits: 1 # 1 or 2 (default 1)
# xonxoff: false # xonxoff: false
# rtscts: false # rtscts: false
# dsrdtr: false # dsrdtr: false
# Optional assertions/behavior # Optional assertions/behavior
# idn_substr: OWON # require this substring in *IDN? # idn_substr: OWON # require this substring in *IDN?
# do_set: true # briefly set V/I and toggle output # do_set: true # briefly set V/I and toggle output
# set_voltage: 1.0 # volts when do_set is true # set_voltage: 1.0 # volts when do_set is true
# set_current: 0.1 # amps when do_set is true # set_current: 0.1 # amps when do_set is true

View File

@ -1,18 +1,18 @@
# Example configuration for Owon PSU hardware test # Example configuration for Owon PSU hardware test
# Copy to config/owon_psu.yaml and adjust values for your setup # Copy to config/owon_psu.yaml and adjust values for your setup
port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux
baudrate: 115200 # default 115200 baudrate: 115200 # default 115200
timeout: 1.0 # seconds timeout: 1.0 # seconds
eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required
parity: N # N|E|O (default N) parity: N # N|E|O (default N)
stopbits: 1 # 1 or 2 (default 1) stopbits: 1 # 1 or 2 (default 1)
xonxoff: false xonxoff: false
rtscts: false rtscts: false
dsrdtr: false dsrdtr: false
# Optional assertions/behavior # Optional assertions/behavior
idn_substr: OWON # require this substring in *IDN? idn_substr: OWON # require this substring in *IDN?
do_set: true # briefly set V/I and toggle output do_set: true # briefly set V/I and toggle output
set_voltage: 10.0 # volts when do_set is true set_voltage: 13.0 # volts when do_set is true
set_current: 0.1 # amps when do_set is true set_current: 1.0 # amps when do_set is true (raise above ECU draw to stay in CV mode)

View File

@ -1,18 +1,34 @@
interface: interface:
type: mock # MUM (Melexis Universal Master) is the current default. Switch type to
channel: 1 # 'babylin' for the legacy SDK flow, or 'mock' for hardware-free runs.
bitrate: 19200 type: mum
host: 192.168.7.2 # MUM IP (USB-RNDIS default)
lin_device: lin0 # MUM LIN device name
power_device: power_out0 # MUM power-control device (built-in PSU)
bitrate: 19200 # LIN baudrate
boot_settle_seconds: 0.5 # Wait after power-up before sending the first frame
frame_lengths:
0x0A: 8 # ALM_Req_A (master-published, RGB control)
0x11: 4 # ALM_Status (slave-published)
# --- BabyLIN (legacy) settings, used only when type: babylin ---
channel: 0
node_name: ECU_TEST_NODE
sdf_path: .\vendor\4SEVEN_color_lib_test.sdf
schedule_nr: -1 # -1 = don't auto-start a schedule
flash: flash:
enabled: false enabled: false
hex_path: hex_path:
# Optional: central power supply config used by hardware tests/demos # Owon PSU is independent of the LIN interface. The MUM provides its own
# You can also place machine-specific values in config/owon_psu.yaml or set OWON_PSU_CONFIG # power on power_out0, so leave the PSU disabled unless you specifically
# need to drive an external supply for over/under-voltage scenarios.
power_supply: power_supply:
enabled: true enabled: false
# port: COM4 # port: COM4
baudrate: 115200 baudrate: 115200
timeout: 1.0 timeout: 2.0
eol: "\n" eol: "\n"
parity: N parity: N
stopbits: 1 stopbits: 1
@ -21,5 +37,5 @@ power_supply:
dsrdtr: false dsrdtr: false
# idn_substr: OWON # idn_substr: OWON
do_set: false do_set: false
set_voltage: 1.0 set_voltage: 13.0
set_current: 0.1 set_current: 1.0

View File

@ -1,27 +1,27 @@
""" """
Pytest configuration for this repository. Pytest configuration for this repository.
Purpose: Purpose:
- Optionally register the local plugin in `conftest_plugin.py` if present. - Optionally register the local plugin in `conftest_plugin.py` if present.
- Avoid hard failures on environments where that file isn't available. - Avoid hard failures on environments where that file isn't available.
""" """
from __future__ import annotations from __future__ import annotations
import importlib import importlib
import sys import sys
from typing import Any from typing import Any
def pytest_configure(config: Any) -> None:
    """Load and register the optional ``conftest_plugin`` module.

    The plugin contributes the enhanced-reporting hooks. If it cannot be
    imported or registered (e.g. the file is absent on this machine), a
    note goes to stderr and the run continues without the extra features.
    """
    try:
        reporting_plugin = importlib.import_module("conftest_plugin")
    except Exception as e:
        # Soft warning only; tests can still run without the extra report features.
        sys.stderr.write(f"[pytest] conftest_plugin not loaded: {e}\n")
    else:
        # Register the plugin module so its hooks are active.
        try:
            config.pluginmanager.register(reporting_plugin, name="conftest_plugin")
        except Exception as reg_err:
            sys.stderr.write(f"[pytest] failed to register conftest_plugin: {reg_err}\n")

View File

@ -1,261 +1,261 @@
""" """
Custom pytest plugin to enhance test reports with detailed metadata. Custom pytest plugin to enhance test reports with detailed metadata.
Why we need this plugin: Why we need this plugin:
- Surface business-facing info (Title, Description, Requirements, Steps, Expected Result) in the HTML report for quick review. - Surface business-facing info (Title, Description, Requirements, Steps, Expected Result) in the HTML report for quick review.
- Map tests to requirement IDs and produce a requirements coverage JSON artifact for traceability. - Map tests to requirement IDs and produce a requirements coverage JSON artifact for traceability.
- Emit a compact CI summary (summary.md) for dashboards and PR comments. - Emit a compact CI summary (summary.md) for dashboards and PR comments.
How it works (high level): How it works (high level):
- During collection, we track all test nodeids for later "unmapped" reporting. - During collection, we track all test nodeids for later "unmapped" reporting.
- During test execution, we parse the test function's docstring and markers to extract metadata and requirement IDs; we attach these as user_properties on the report. - During test execution, we parse the test function's docstring and markers to extract metadata and requirement IDs; we attach these as user_properties on the report.
- We add custom columns (Title, Requirements) to the HTML table. - We add custom columns (Title, Requirements) to the HTML table.
- At the end of the run, we write two artifacts into reports/: requirements_coverage.json and summary.md. - At the end of the run, we write two artifacts into reports/: requirements_coverage.json and summary.md.
""" """
import os import os
import re import re
import json import json
import datetime as _dt import datetime as _dt
import pytest import pytest
# -----------------------------
# Session-scoped state for reports
# -----------------------------
# These module-level containers accumulate data across the whole pytest
# session; they are filled by the collection/makereport hooks and consumed
# by pytest_terminal_summary when writing the coverage artifacts.
# Track all collected tests (nodeids) so we can later highlight tests that had no requirement mapping.
_ALL_COLLECTED_TESTS: set[str] = set()
# Map requirement ID (e.g., REQ-001) -> set of nodeids that cover it.
_REQ_TO_TESTS: dict[str, set[str]] = {}
# Nodeids that did map to at least one requirement (complement of "unmapped_tests" in the JSON).
_MAPPED_TESTS: set[str] = set()
def _normalize_req_id(token: str) -> str | None: def _normalize_req_id(token: str) -> str | None:
"""Normalize requirement token to REQ-XXX form. """Normalize requirement token to REQ-XXX form.
Accepts markers like 'req_001' or strings like 'REQ-001'. Accepts markers like 'req_001' or strings like 'REQ-001'.
Returns None if not a recognizable requirement. This provides a single Returns None if not a recognizable requirement. This provides a single
canonical format for coverage mapping and reporting. canonical format for coverage mapping and reporting.
""" """
token = token.strip() token = token.strip()
m1 = re.fullmatch(r"req_(\d{1,3})", token, re.IGNORECASE) m1 = re.fullmatch(r"req_(\d{1,3})", token, re.IGNORECASE)
if m1: if m1:
return f"REQ-{int(m1.group(1)):03d}" return f"REQ-{int(m1.group(1)):03d}"
m2 = re.fullmatch(r"REQ[-_ ]?(\d{1,3})", token, re.IGNORECASE) m2 = re.fullmatch(r"REQ[-_ ]?(\d{1,3})", token, re.IGNORECASE)
if m2: if m2:
return f"REQ-{int(m2.group(1)):03d}" return f"REQ-{int(m2.group(1)):03d}"
return None return None
def _extract_req_ids_from_docstring(docstring: str) -> list[str]:
    """Return normalized REQ-XXX tokens found on the docstring's 'Requirements:' line.

    Tokens may be separated by commas and/or whitespace; duplicates are
    removed while the first-seen order is preserved.
    """
    match = re.search(r"Requirements:\s*(.+)", docstring)
    if not match:
        return []
    # Split the raw line on commas/whitespace and normalize each token.
    tokens = re.split(r"[\s,]+", match.group(1))
    ordered: dict[str, None] = {}
    for tok in tokens:
        rid = _normalize_req_id(tok)
        if rid:
            ordered.setdefault(rid, None)  # dict keys dedupe, keep order
    return list(ordered)
def pytest_configure(config):
    """Create the reports/ directory up front so downstream hooks can write artifacts safely."""
    reports_dir = "reports"
    os.makedirs(reports_dir, exist_ok=True)
def pytest_collection_modifyitems(session, config, items):
    """Record every collected nodeid so unmapped tests can be flagged in the coverage JSON."""
    _ALL_COLLECTED_TESTS.update(item.nodeid for item in items)
# (Legacy makereport implementation removed in favor of the hookwrapper below.) # (Legacy makereport implementation removed in favor of the hookwrapper below.)
def pytest_html_results_table_header(cells):
    """Add Title and Requirements columns to the HTML report table header.

    Why: make the most important context visible at a glance in the HTML
    report table without opening each test's details section.
    """
    extra_columns = (
        (2, '<th class="sortable" data-column-type="text">Title</th>'),
        (3, '<th class="sortable" data-column-type="text">Requirements</th>'),
    )
    for position, markup in extra_columns:
        cells.insert(position, markup)
def pytest_html_results_table_row(report, cells):
    """Fill the custom Title/Requirements cells for one HTML report row.

    Reads the user_properties attached during makereport; missing keys
    render as empty cells. With duplicate keys the last value wins, matching
    the original loop's overwrite behavior.
    """
    props = dict(getattr(report, "user_properties", []))
    title = props.get("title", "")
    requirements = props.get("requirements", "")
    cells.insert(2, f'<td class="col-title">{title}</td>')
    cells.insert(3, f'<td class="col-requirements">{requirements}</td>')
def _parse_docstring_metadata(docstring: str) -> dict[str, str]:
    """Extract Title, Description, Requirements, Test Steps and Expected Result.

    Each field is optional; only the ones present in the docstring appear in
    the returned dict. Multi-line fields are flattened to single lines so
    they render cleanly in report cells.
    """
    metadata: dict[str, str] = {}
    # Title: single line after the label.
    title_match = re.search(r"Title:\s*(.+)", docstring)
    if title_match:
        metadata["title"] = title_match.group(1).strip()
    # Description: runs until the next known section label.
    desc_match = re.search(
        r"Description:\s*(.+?)(?=\n\s*(?:Requirements|Test Steps|Expected Result))",
        docstring,
        re.DOTALL,
    )
    if desc_match:
        metadata["description"] = " ".join(desc_match.group(1).strip().split())
    # Requirements: raw text of the line (normalized IDs are handled separately).
    req_match = re.search(r"Requirements:\s*(.+)", docstring)
    if req_match:
        metadata["requirements"] = req_match.group(1).strip()
    # Test steps: numbered list flattened into a " | "-separated string.
    steps_match = re.search(r"Test Steps:\s*(.+?)(?=\n\s*Expected Result)", docstring, re.DOTALL)
    if steps_match:
        steps_clean = re.sub(r"\n\s*\d+\.\s*", " | ", steps_match.group(1).strip())
        metadata["test_steps"] = steps_clean.strip(" |")
    # Expected result: bullet list flattened, leading "- " markers removed.
    result_match = re.search(r"Expected Result:\s*(.+?)(?=\n\s*\"\"\"|\Z)", docstring, re.DOTALL)
    if result_match:
        expected = " ".join(result_match.group(1).strip().split())
        metadata["expected_result"] = expected.replace("- ", "")
    return metadata


@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Active hook: attach metadata to reports and build requirement coverage.

    Why hook at makereport:
    - We want to attach metadata to the test report object so it shows up in
      the HTML and JUnit outputs via user_properties.
    - We also build the requirements mapping here because we have both markers
      and docstrings available on the test item.
    """
    outcome = yield
    report = outcome.get_result()
    if call.when != "call" or not hasattr(item, "function"):
        return
    # Fix: default to "" so the coverage-mapping step below is always safe.
    # Previously `docstring` was only bound inside the `if __doc__:` branch,
    # so a test without a docstring raised NameError during coverage mapping.
    docstring = (item.function.__doc__ or "").strip()
    metadata = _parse_docstring_metadata(docstring) if docstring else {}
    # Attach metadata as user properties (the HTML plugin reads these).
    if metadata:
        if not hasattr(report, "user_properties"):
            report.user_properties = []
        for key, value in metadata.items():
            report.user_properties.append((key, value))
    # Build requirement coverage mapping from both markers and the docstring.
    nodeid = item.nodeid
    req_ids: set[str] = set()
    # From markers: allow @pytest.mark.req_001 style to count toward coverage.
    for mark in item.iter_markers():
        rid = _normalize_req_id(mark.name)
        if rid:
            req_ids.add(rid)
    # From the docstring's 'Requirements:' line.
    req_ids.update(_extract_req_ids_from_docstring(docstring))
    # Update the session-scoped maps consumed by pytest_terminal_summary.
    if req_ids:
        _MAPPED_TESTS.add(nodeid)
        for rid in req_ids:
            _REQ_TO_TESTS.setdefault(rid, set()).add(nodeid)
def pytest_terminal_summary(terminalreporter, exitstatus):
    """Write CI-friendly summary and requirements coverage JSON.

    Why we write these artifacts:
    - requirements_coverage.json: machine-readable traceability matrix for CI dashboards.
    - summary.md: quick textual summary that can be surfaced in PR checks or CI job logs.
    """
    stats = terminalreporter.stats

    def _count(key):
        # Outcome categories that never occurred simply count as zero.
        return len(stats.get(key, []))

    outcome_keys = ("passed", "failed", "skipped", "error", "xfailed", "xpassed", "rerun")
    results = {key: _count(key) for key in outcome_keys}
    results["total"] = sum(len(reports) for reports in stats.values())
    results["collected"] = getattr(terminalreporter, "_numcollected", None)

    # JSON payload: run stats, requirement -> tests matrix, unmapped tests,
    # and quick links to the sibling artifacts.
    coverage = {
        "generated_at": _dt.datetime.now().astimezone().isoformat(),
        "results": results,
        "requirements": {rid: sorted(list(nodes)) for rid, nodes in sorted(_REQ_TO_TESTS.items())},
        "unmapped_tests": sorted(list(_ALL_COLLECTED_TESTS - _MAPPED_TESTS)),
        "files": {
            "html": "reports/report.html",
            "junit": "reports/junit.xml",
            "summary_md": "reports/summary.md",
        },
    }

    # Machine-readable coverage artifact; failures are reported, not fatal.
    json_path = os.path.join("reports", "requirements_coverage.json")
    try:
        with open(json_path, "w", encoding="utf-8") as fh:
            json.dump(coverage, fh, indent=2)
    except Exception as e:
        terminalreporter.write_line(f"[conftest_plugin] Failed to write {json_path}: {e}")

    # Human-readable Markdown summary for CI dashboards / PR comments.
    md_path = os.path.join("reports", "summary.md")
    try:
        lines = ["# Test Run Summary", "", f"Generated: {coverage['generated_at']}", ""]
        lines.extend(
            [
                f"- Collected: {results.get('collected')}",
                f"- Passed: {results['passed']}",
                f"- Failed: {results['failed']}",
                f"- Skipped: {results['skipped']}",
                f"- Errors: {results['error']}",
                f"- XFailed: {results['xfailed']}",
                f"- XPassed: {results['xpassed']}",
                f"- Rerun: {results['rerun']}",
            ]
        )
        lines.extend(
            [
                "",
                "## Artifacts",
                "- HTML Report: ./report.html",
                "- JUnit XML: ./junit.xml",
                "- Requirements Coverage (JSON): ./requirements_coverage.json",
            ]
        )
        with open(md_path, "w", encoding="utf-8") as fh:
            fh.write("\n".join(lines) + "\n")
    except Exception as e:
        terminalreporter.write_line(f"[conftest_plugin] Failed to write {md_path}: {e}")

View File

@ -1,124 +1,129 @@
# Run Sequence: What Happens When You Start Tests # Run Sequence: What Happens When You Start Tests
This document walks through the exact order of operations when you run the framework with pytest, what gets called, and where configuration/data is fetched from. This document walks through the exact order of operations when you run the framework with pytest, what gets called, and where configuration/data is fetched from.
## High-level flow ## High-level flow
1. You run pytest from PowerShell 1. You run pytest from PowerShell
2. pytest reads `pytest.ini` and loads configured plugins (including our custom `conftest_plugin`) 2. pytest reads `pytest.ini` and loads configured plugins (including our custom `conftest_plugin`)
3. Test discovery collects tests under `tests/` 3. Test discovery collects tests under `tests/`
4. Session fixtures run: 4. Session fixtures run:
- `config()` loads YAML configuration - `config()` loads YAML configuration
- `lin()` selects and connects the LIN interface (Mock or BabyLin) - `lin()` selects and connects the LIN interface (Mock, MUM, or legacy BabyLIN)
- `flash_ecu()` optionally flashes the ECU (if enabled) - `flash_ecu()` optionally flashes the ECU (if enabled)
5. Tests execute using fixtures and call interface methods 5. Tests execute using fixtures and call interface methods
6. Our plugin extracts test metadata (Title, Requirements, Steps) from docstrings 6. Our plugin extracts test metadata (Title, Requirements, Steps) from docstrings
7. Reports are written to `reports/report.html` and `reports/junit.xml` 7. Reports are written to `reports/report.html` and `reports/junit.xml`
## Detailed call sequence ## Detailed call sequence
```mermaid ```mermaid
sequenceDiagram sequenceDiagram
autonumber autonumber
participant U as User (PowerShell) participant U as User (PowerShell)
participant P as pytest participant P as pytest
participant PI as pytest.ini participant PI as pytest.ini
participant PL as conftest_plugin.py participant PL as conftest_plugin.py
participant T as Test Discovery (tests/*) participant T as Test Discovery (tests/*)
participant F as Fixtures (conftest.py) participant F as Fixtures (conftest.py)
participant C as Config Loader (ecu_framework/config.py) participant C as Config Loader (ecu_framework/config.py)
participant PS as Power Supply (optional) participant PS as Power Supply (optional)
participant L as LIN Adapter (mock/BabyLIN SDK) participant L as LIN Adapter (mock/MUM/BabyLIN)
participant X as HexFlasher (optional) participant X as HexFlasher (optional)
participant R as Reports (HTML/JUnit) participant R as Reports (HTML/JUnit)
U->>P: python -m pytest [args] U->>P: python -m pytest [args]
P->>PI: Read addopts, markers, plugins P->>PI: Read addopts, markers, plugins
P->>PL: Load custom plugin hooks P->>PL: Load custom plugin hooks
P->>T: Collect tests P->>T: Collect tests
P->>F: Init session fixtures P->>F: Init session fixtures
F->>C: load_config(workspace_root) F->>C: load_config(workspace_root)
C-->>F: EcuTestConfig (merged dataclasses) C-->>F: EcuTestConfig (merged dataclasses)
F->>L: Create interface (mock or BabyLIN SDK) F->>L: Create interface (mock, MUM, or BabyLIN SDK)
L-->>F: Instance ready L-->>F: Instance ready
F->>L: connect() F->>L: connect()
alt flash.enabled and hex_path provided alt flash.enabled and hex_path provided
F->>X: HexFlasher(lin).flash_hex(hex_path) F->>X: HexFlasher(lin).flash_hex(hex_path)
X-->>F: Flash result (ok/fail) X-->>F: Flash result (ok/fail)
end end
opt power_supply.enabled and port provided opt power_supply.enabled and port provided
Note over PS: owon_psu_quick_demo may open PSU via ecu_framework.power.owon_psu Note over PS: owon_psu_quick_demo may open PSU via ecu_framework.power.owon_psu
end end
loop for each test loop for each test
P->>PL: runtest_makereport(item, call) P->>PL: runtest_makereport(item, call)
Note over PL: Parse docstring and attach metadata Note over PL: Parse docstring and attach metadata
P->>L: send()/receive()/request() P->>L: send()/receive()/request()
L-->>P: Frames or None (timeout) L-->>P: Frames or None (timeout)
end end
P->>R: Write HTML (with metadata columns) P->>R: Write HTML (with metadata columns)
P->>R: Write JUnit XML P->>R: Write JUnit XML
``` ```
```text ```text
PowerShell → python -m pytest PowerShell → python -m pytest
pytest loads pytest.ini pytest loads pytest.ini
- addopts: --junitxml, --html, --self-contained-html, -p conftest_plugin - addopts: --junitxml, --html, --self-contained-html, -p conftest_plugin
- markers registered - markers registered
pytest collects tests in tests/ pytest collects tests in tests/
Session fixture: config() Session fixture: config()
→ calls ecu_framework.config.load_config(workspace_root) → calls ecu_framework.config.load_config(workspace_root)
→ determines config file path by precedence → determines config file path by precedence
→ merges YAML + overrides into dataclasses (EcuTestConfig) → merges YAML + overrides into dataclasses (EcuTestConfig)
→ optionally merges config/owon_psu.yaml (or OWON_PSU_CONFIG) into power_supply → optionally merges config/owon_psu.yaml (or OWON_PSU_CONFIG) into power_supply
Session fixture: lin(config) Session fixture: lin(config)
→ chooses interface by config.interface.type → chooses interface by config.interface.type
- mock → ecu_framework.lin.mock.MockBabyLinInterface(...) - mock → ecu_framework.lin.mock.MockBabyLinInterface(...)
- babylin → ecu_framework.lin.babylin.BabyLinInterface(...) - mum → ecu_framework.lin.mum.MumLinInterface(host, lin_device, power_device, ...)
→ lin.connect() - babylin → ecu_framework.lin.babylin.BabyLinInterface(...) [legacy]
→ lin.connect()
Optional session fixture: flash_ecu(config, lin) - MUM connect() also powers up the ECU via power_out0 and waits boot_settle_seconds
→ if config.flash.enabled and hex_path set
→ ecu_framework.flashing.HexFlasher(lin).flash_hex(hex_path) Optional session fixture: flash_ecu(config, lin)
→ if config.flash.enabled and hex_path set
Test functions execute → ecu_framework.flashing.HexFlasher(lin).flash_hex(hex_path)
→ use the lin fixture to send/receive/request
Test functions execute
Reporting plugin (conftest_plugin.py) → use the lin fixture to send/receive/request
→ pytest_runtest_makereport parses test docstring
→ attaches user_properties: title, requirements, steps, expected_result Reporting plugin (conftest_plugin.py)
→ pytest-html hooks add Title and Requirements columns → pytest_runtest_makereport parses test docstring
→ attaches user_properties: title, requirements, steps, expected_result
Reports written → pytest-html hooks add Title and Requirements columns
→ reports/report.html (HTML with metadata columns)
→ reports/junit.xml (JUnit XML for CI) Reports written
``` → reports/report.html (HTML with metadata columns)
→ reports/junit.xml (JUnit XML for CI)
## Where information is fetched from ```
- pytest configuration: `pytest.ini` ## Where information is fetched from
- YAML config (default): `config/test_config.yaml`
- YAML override via env var: `ECU_TESTS_CONFIG` - pytest configuration: `pytest.ini`
- BabyLIN SDK wrapper and SDF path: `interface.sdf_path` and `interface.schedule_nr` in YAML - YAML config (default): `config/test_config.yaml`
- Test metadata: parsed from each tests docstring - YAML override via env var: `ECU_TESTS_CONFIG`
- Markers: declared in `pytest.ini`, attached in tests via `@pytest.mark.*` - BabyLIN SDK wrapper and SDF path: `interface.sdf_path` and `interface.schedule_nr` in YAML
- Test metadata: parsed from each tests docstring
## Key components involved - Markers: declared in `pytest.ini`, attached in tests via `@pytest.mark.*`
- `tests/conftest.py`: defines `config`, `lin`, and `flash_ecu` fixtures ## Key components involved
- `ecu_framework/config.py`: loads and merges configuration into dataclasses
- `ecu_framework/lin/base.py`: abstract LIN interface contract and frame shape - `tests/conftest.py`: defines `config`, `lin`, and `flash_ecu` fixtures
- `ecu_framework/lin/mock.py`: mock behavior for send/receive/request - `ecu_framework/config.py`: loads and merges configuration into dataclasses
- `ecu_framework/lin/babylin.py`: BabyLIN SDK wrapper adapter (real hardware via BabyLIN_library.py) - `ecu_framework/lin/base.py`: abstract LIN interface contract and frame shape
- `ecu_framework/flashing/hex_flasher.py`: placeholder flashing logic - `ecu_framework/lin/mock.py`: mock behavior for send/receive/request
- `conftest_plugin.py`: report customization and metadata extraction - `ecu_framework/lin/mum.py`: MUM adapter (Melexis Universal Master via pylin + pymumclient)
- `ecu_framework/lin/babylin.py`: BabyLIN SDK wrapper adapter (legacy real hardware via BabyLIN_library.py)
## Edge cases and behavior - `ecu_framework/flashing/hex_flasher.py`: placeholder flashing logic
- `conftest_plugin.py`: report customization and metadata extraction
- If `interface.type` is `babylin` but the SDK wrapper or libraries cannot be loaded, hardware tests are skipped
- If `flash.enabled` is true but `hex_path` is missing, flashing fixture skips ## Edge cases and behavior
- Timeouts are honored in `receive()` and `request()` implementations
- Invalid frame IDs (outside 0x000x3F) or data > 8 bytes will raise in `LinFrame` - If `interface.type` is `babylin` but the SDK wrapper or libraries cannot be loaded, hardware tests are skipped
- If `interface.type` is `mum` but `pylin` / `pymumclient` aren't importable, or `interface.host` is unset, hardware tests are skipped with a clear message
- If `flash.enabled` is true but `hex_path` is missing, flashing fixture skips
- Timeouts are honored in `receive()` and `request()` implementations
- Invalid frame IDs (outside 0x00–0x3F) or data > 8 bytes will raise in `LinFrame`
- MUM `receive()` is master-driven: it requires a frame ID; `receive(id=None)` raises NotImplementedError. Diagnostic frames needing LIN 1.x Classic checksum should use `MumLinInterface.send_raw()`.

View File

@ -1,124 +1,147 @@
# Configuration Resolution: What is read and when # Configuration Resolution: What is read and when
This document explains how configuration is loaded, merged, and provided to tests and interfaces. This document explains how configuration is loaded, merged, and provided to tests and interfaces.
## Sources and precedence ## Sources and precedence
From highest to lowest precedence: From highest to lowest precedence:
1. In-code overrides (if `load_config(..., overrides=...)` is used) 1. In-code overrides (if `load_config(..., overrides=...)` is used)
2. Environment variable `ECU_TESTS_CONFIG` (absolute/relative path to YAML) 2. Environment variable `ECU_TESTS_CONFIG` (absolute/relative path to YAML)
3. `config/test_config.yaml` (if present under the workspace root) 3. `config/test_config.yaml` (if present under the workspace root)
4. Built-in defaults 4. Built-in defaults
## Data model (dataclasses) ## Data model (dataclasses)
- `EcuTestConfig` - `EcuTestConfig`
- `interface: InterfaceConfig` - `interface: InterfaceConfig`
- `type`: `mock` or `babylin` - `type`: `mock`, `mum`, or `babylin`
- `channel`: LIN channel index (0-based in SDK wrapper) - `channel`: LIN channel index (0-based in SDK wrapper) — BabyLIN-specific
- `bitrate`: LIN bitrate (e.g., 19200); usually defined by SDF - `bitrate`: LIN baudrate (e.g., 19200). The MUM uses this directly; BabyLIN typically takes it from the SDF
- `sdf_path`: Path to SDF file (hardware; required for typical operation) - `sdf_path`: Path to SDF file (BabyLIN; required for typical operation)
- `schedule_nr`: Schedule number to start on connect (hardware) - `schedule_nr`: Schedule number to start on connect (BabyLIN). `-1` = skip
- `node_name`: Optional node identifier (informational) - `node_name`: Optional node identifier (informational)
- `dll_path`, `func_names`: Legacy fields from the old ctypes adapter; not used with the SDK wrapper - `dll_path`, `func_names`: Legacy fields from the old ctypes adapter; not used with the SDK wrapper
- `flash: FlashConfig` - `host`: MUM IP address (MUM-only). Required when `type: mum`
- `enabled`: whether to flash before tests - `lin_device`: MUM LIN device name (MUM-only, default `lin0`)
- `hex_path`: path to HEX file - `power_device`: MUM power-control device (MUM-only, default `power_out0`)
- `power_supply: PowerSupplyConfig` - `boot_settle_seconds`: Delay after MUM power-up before sending the first frame (default 0.5)
- `enabled`: whether PSU features/tests are active - `frame_lengths`: Optional `{frame_id: data_length}` map for the MUM adapter to drive slave-published reads. Hex keys like `0x0A` are supported in YAML
- `port`: Serial device (e.g., `COM4`, `/dev/ttyUSB0`) - `flash: FlashConfig`
- `baudrate`, `timeout`, `eol`: line settings (e.g., `"\n"` or `"\r\n"`) - `enabled`: whether to flash before tests
- `parity`: `N|E|O` - `hex_path`: path to HEX file
- `stopbits`: `1` or `2` - `power_supply: PowerSupplyConfig`
- `xonxoff`, `rtscts`, `dsrdtr`: flow control flags - `enabled`: whether PSU features/tests are active
- `idn_substr`: optional substring to assert in `*IDN?` - `port`: Serial device (e.g., `COM4`, `/dev/ttyUSB0`)
- `do_set`, `set_voltage`, `set_current`: optional demo/test actions - `baudrate`, `timeout`, `eol`: line settings (e.g., `"\n"` or `"\r\n"`)
- `parity`: `N|E|O`
## YAML examples - `stopbits`: `1` or `2`
- `xonxoff`, `rtscts`, `dsrdtr`: flow control flags
Minimal mock configuration (default): - `idn_substr`: optional substring to assert in `*IDN?`
- `do_set`, `set_voltage`, `set_current`: optional demo/test actions
```yaml
interface: ## YAML examples
type: mock
channel: 1 Minimal mock configuration (default):
bitrate: 19200
flash: ```yaml
enabled: false interface:
``` type: mock
channel: 1
Hardware (BabyLIN SDK wrapper) configuration: bitrate: 19200
flash:
```yaml enabled: false
interface: ```
type: babylin
channel: 0 # 0-based channel index Hardware via MUM (current default) — see also `config/mum.example.yaml`:
bitrate: 19200 # optional; typically driven by SDF
node_name: "ECU_TEST_NODE" ```yaml
sdf_path: "./vendor/Example.sdf" interface:
schedule_nr: 0 type: mum
flash: host: 192.168.7.2 # MUM IP address (USB-RNDIS default)
enabled: true lin_device: lin0 # MUM LIN device name
hex_path: "firmware/ecu_firmware.hex" power_device: power_out0 # MUM power-control device
bitrate: 19200 # LIN baudrate
Power supply configuration (either inline or merged from a dedicated YAML): boot_settle_seconds: 0.5 # Delay after power-up before first frame
frame_lengths:
```yaml 0x0A: 8 # ALM_Req_A
power_supply: 0x11: 4 # ALM_Status
enabled: true flash:
port: COM4 # or /dev/ttyUSB0 on Linux enabled: false
baudrate: 115200 ```
timeout: 1.0
eol: "\n" # or "\r\n" if your device requires CRLF Hardware (BabyLIN SDK wrapper) configuration:
parity: N # N|E|O
stopbits: 1 # 1|2 ```yaml
xonxoff: false interface:
rtscts: false type: babylin
dsrdtr: false channel: 0 # 0-based channel index
idn_substr: OWON bitrate: 19200 # optional; typically driven by SDF
do_set: false node_name: "ECU_TEST_NODE"
set_voltage: 5.0 sdf_path: "./vendor/Example.sdf"
set_current: 0.1 schedule_nr: 0
``` flash:
``` enabled: true
hex_path: "firmware/ecu_firmware.hex"
## Load flow
Power supply configuration (either inline or merged from a dedicated YAML):
```text
tests/conftest.py: config() fixture ```yaml
→ load_config(workspace_root) power_supply:
→ check env var ECU_TESTS_CONFIG enabled: true
→ else check config/test_config.yaml port: COM4 # or /dev/ttyUSB0 on Linux
→ else use defaults baudrate: 115200
→ convert dicts to EcuTestConfig dataclasses timeout: 1.0
→ provide to other fixtures/tests eol: "\n" # or "\r\n" if your device requires CRLF
parity: N # N|E|O
Additionally, if present, a dedicated PSU YAML is merged into `power_supply`: stopbits: 1 # 1|2
xonxoff: false
- Environment variable `OWON_PSU_CONFIG` (path to YAML), else rtscts: false
- `config/owon_psu.yaml` under the workspace root dsrdtr: false
idn_substr: OWON
This lets you keep machine-specific serial settings separate while still having do_set: false
central defaults in `config/test_config.yaml`. set_voltage: 5.0
``` set_current: 0.1
```
## How tests and adapters consume config ```
- `lin` fixture picks `mock` or `babylin` based on `interface.type` ## Load flow
- Mock adapter uses `bitrate` and `channel` to simulate timing/behavior
- BabyLIN adapter (SDK wrapper) uses `sdf_path`, `schedule_nr`, `channel` to open the device, load the SDF, and start a schedule. `bitrate` is informational unless explicitly applied via commands/SDF. ```text
- `flash_ecu` uses `flash.enabled` and `flash.hex_path` tests/conftest.py: config() fixture
- PSU-related tests or utilities read `config.power_supply` for serial parameters → load_config(workspace_root)
and optional actions (IDN assertions, on/off toggle, set/measure). The reference → check env var ECU_TESTS_CONFIG
implementation is `ecu_framework/power/owon_psu.py`, with a hardware test in → else check config/test_config.yaml
`tests/hardware/test_owon_psu.py` and a quick demo script in `vendor/Owon/owon_psu_quick_demo.py`. → else use defaults
→ convert dicts to EcuTestConfig dataclasses
## Tips → provide to other fixtures/tests
- Keep multiple YAMLs and switch via `ECU_TESTS_CONFIG` Additionally, if present, a dedicated PSU YAML is merged into `power_supply`:
- Check path validity for `sdf_path` and `hex_path` before running hardware tests
- Ensure `vendor/BabyLIN_library.py` and the platform-specific libraries from the SDK are available on `PYTHONPATH` - Environment variable `OWON_PSU_CONFIG` (path to YAML), else
- Use environment-specific YAML files for labs vs. CI - `config/owon_psu.yaml` under the workspace root
- For PSU, prefer `OWON_PSU_CONFIG` or `config/owon_psu.yaml` to avoid committing
local COM port settings. Central defaults can live in `config/test_config.yaml`. This lets you keep machine-specific serial settings separate while still having
central defaults in `config/test_config.yaml`.
```
## How tests and adapters consume config
- `lin` fixture picks `mock`, `mum`, or `babylin` based on `interface.type`
- Mock adapter uses `bitrate` and `channel` to simulate timing/behavior
- MUM adapter uses `host`, `lin_device`, `power_device`, `bitrate`, `boot_settle_seconds`, and `frame_lengths` to open the MUM, set up the LIN bus, and power up the ECU on connect
- BabyLIN adapter (SDK wrapper) uses `sdf_path`, `schedule_nr`, `channel` to open the device, load the SDF, and start a schedule. `bitrate` is informational unless explicitly applied via commands/SDF
- `flash_ecu` uses `flash.enabled` and `flash.hex_path`
- PSU-related tests or utilities read `config.power_supply` for serial parameters
and optional actions (IDN assertions, on/off toggle, set/measure). The reference
implementation is `ecu_framework/power/owon_psu.py`, with a hardware test in
`tests/hardware/test_owon_psu.py` and a quick demo script in `vendor/Owon/owon_psu_quick_demo.py`.
## Tips
- Keep multiple YAMLs and switch via `ECU_TESTS_CONFIG`
- Check path validity for `sdf_path` and `hex_path` before running hardware tests
- Ensure `vendor/BabyLIN_library.py` and the platform-specific libraries from the SDK are available on `PYTHONPATH`
- Use environment-specific YAML files for labs vs. CI
- For PSU, prefer `OWON_PSU_CONFIG` or `config/owon_psu.yaml` to avoid committing
local COM port settings. Central defaults can live in `config/test_config.yaml`.

View File

@ -1,109 +1,109 @@
# Reporting and Metadata: How your docs show up in reports # Reporting and Metadata: How your docs show up in reports
This document describes how test documentation is extracted and rendered into the HTML report, and what appears in JUnit XML. This document describes how test documentation is extracted and rendered into the HTML report, and what appears in JUnit XML.
## What the plugin does ## What the plugin does
File: `conftest_plugin.py` File: `conftest_plugin.py`
- Hooks into `pytest_runtest_makereport` to parse the test's docstring - Hooks into `pytest_runtest_makereport` to parse the test's docstring
- Extracts the following fields: - Extracts the following fields:
- Title - Title
- Description - Description
- Requirements - Requirements
- Test Steps - Test Steps
- Expected Result - Expected Result
- Attaches them as `user_properties` on the test report - Attaches them as `user_properties` on the test report
- Customizes the HTML results table to include Title and Requirements columns - Customizes the HTML results table to include Title and Requirements columns
## Docstring format to use ## Docstring format to use
```python ```python
""" """
Title: Short, human-readable test name Title: Short, human-readable test name
Description: What is this test proving and why does it matter. Description: What is this test proving and why does it matter.
Requirements: REQ-001, REQ-00X Requirements: REQ-001, REQ-00X
Test Steps: Test Steps:
1. Describe the first step 1. Describe the first step
2. Next step 2. Next step
3. etc. 3. etc.
Expected Result: Expected Result:
- Primary outcome - Primary outcome
- Any additional acceptance criteria - Any additional acceptance criteria
""" """
``` ```
## What appears in reports ## What appears in reports
- HTML (`reports/report.html`): - HTML (`reports/report.html`):
- Title and Requirements appear as columns in the table - Title and Requirements appear as columns in the table
- Other fields are available in the report payload and can be surfaced with minor tweaks - Other fields are available in the report payload and can be surfaced with minor tweaks
- JUnit XML (`reports/junit.xml`): - JUnit XML (`reports/junit.xml`):
- Standard test results and timing - Standard test results and timing
- Note: By default, the XML is compact and does not include custom properties; if you need properties in XML, we can extend the plugin to emit a custom JUnit format or produce an additional JSON artifact for traceability. - Note: By default, the XML is compact and does not include custom properties; if you need properties in XML, we can extend the plugin to emit a custom JUnit format or produce an additional JSON artifact for traceability.
Open the HTML report on Windows PowerShell: Open the HTML report on Windows PowerShell:
```powershell ```powershell
start .\reports\report.html start .\reports\report.html
``` ```
Related artifacts written by the plugin: Related artifacts written by the plugin:
- `reports/requirements_coverage.json` — requirement → test nodeids map and unmapped tests - `reports/requirements_coverage.json` — requirement → test nodeids map and unmapped tests
- `reports/summary.md` — compact pass/fail/error/skip totals, environment info - `reports/summary.md` — compact pass/fail/error/skip totals, environment info
To generate separate HTML/JUnit reports for unit vs non-unit test sets, use the helper script: To generate separate HTML/JUnit reports for unit vs non-unit test sets, use the helper script:
```powershell ```powershell
./scripts/run_two_reports.ps1 ./scripts/run_two_reports.ps1
``` ```
## Parameterized tests and metadata ## Parameterized tests and metadata
When using `@pytest.mark.parametrize`, each parameter set is treated as a distinct test case with its own nodeid, e.g.: When using `@pytest.mark.parametrize`, each parameter set is treated as a distinct test case with its own nodeid, e.g.:
``` ```
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper0-True] tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper0-True]
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper1-False] tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper1-False]
``` ```
Metadata handling: Metadata handling:
- The docstring on the test function is parsed once per case; the same Title/Requirements are attached to each parameterized instance. - The docstring on the test function is parsed once per case; the same Title/Requirements are attached to each parameterized instance.
- Requirement mapping (coverage JSON) records each parameterized nodeid under the normalized requirement keys, enabling fine-grained coverage. - Requirement mapping (coverage JSON) records each parameterized nodeid under the normalized requirement keys, enabling fine-grained coverage.
- In the HTML table, you will see a row per parameterized instance with identical Title/Requirements but differing nodeids (and potentially differing outcomes if parameters influence behavior). - In the HTML table, you will see a row per parameterized instance with identical Title/Requirements but differing nodeids (and potentially differing outcomes if parameters influence behavior).
## Markers ## Markers
Declared in `pytest.ini` and used via `@pytest.mark.<name>` in tests. They also appear in the HTML payload for each test (as user properties) and can be added as a column with a small change if desired. Declared in `pytest.ini` and used via `@pytest.mark.<name>` in tests. They also appear in the HTML payload for each test (as user properties) and can be added as a column with a small change if desired.
## Extensibility ## Extensibility
- Add more columns to HTML by updating `pytest_html_results_table_header/row` - Add more columns to HTML by updating `pytest_html_results_table_header/row`
- Persist full metadata (steps, expected) to a JSON file after the run for audit trails - Persist full metadata (steps, expected) to a JSON file after the run for audit trails
- Populate requirement coverage map by scanning markers and aggregating results - Populate requirement coverage map by scanning markers and aggregating results
## Runtime properties (record_property) and the `rp` helper fixture ## Runtime properties (record_property) and the `rp` helper fixture
Beyond static docstrings, you can attach dynamic key/value properties during a test. Beyond static docstrings, you can attach dynamic key/value properties during a test.
- Built-in: `record_property("key", value)` in any test - Built-in: `record_property("key", value)` in any test
- Convenience: use the shared `rp` fixture which wraps `record_property` and also prints a short line to captured output for quick scanning. - Convenience: use the shared `rp` fixture which wraps `record_property` and also prints a short line to captured output for quick scanning.
Example usage: Example usage:
```python ```python
def test_example(rp): def test_example(rp):
rp("device", "mock") rp("device", "mock")
rp("tx_id", "0x12") rp("tx_id", "0x12")
rp("rx_present", True) rp("rx_present", True)
``` ```
Where they show up: Where they show up:
- HTML report: expand a test row to see a Properties table listing all recorded key/value pairs - HTML report: expand a test row to see a Properties table listing all recorded key/value pairs
- Captured output: look for lines like `[prop] key=value` emitted by the `rp` helper - Captured output: look for lines like `[prop] key=value` emitted by the `rp` helper
Suggested standardized keys across suites live in `docs/15_report_properties_cheatsheet.md`. Suggested standardized keys across suites live in `docs/15_report_properties_cheatsheet.md`.

View File

@ -1,58 +1,89 @@
# LIN Interface Call Flow # LIN Interface Call Flow
This document explains how LIN operations flow through the abstraction for both Mock and BabyLin adapters. This document explains how LIN operations flow through the abstraction for the Mock, MUM, and legacy BabyLIN adapters.
## Contract (base) ## Contract (base)
File: `ecu_framework/lin/base.py` File: `ecu_framework/lin/base.py`
- `connect()` / `disconnect()` - `connect()` / `disconnect()`
- `send(frame: LinFrame)` - `send(frame: LinFrame)`
- `receive(id: int | None = None, timeout: float = 1.0) -> LinFrame | None` - `receive(id: int | None = None, timeout: float = 1.0) -> LinFrame | None`
- `request(id: int, length: int, timeout: float = 1.0) -> LinFrame | None` - `request(id: int, length: int, timeout: float = 1.0) -> LinFrame | None`
- `flush()` - `flush()`
`LinFrame` validates: `LinFrame` validates:
- ID is 0x00–0x3F (6-bit LIN ID) - ID is 0x00–0x3F (6-bit LIN ID)
- Data length ≤ 8 bytes - Data length ≤ 8 bytes
## Mock adapter flow ## Mock adapter flow
File: `ecu_framework/lin/mock.py` File: `ecu_framework/lin/mock.py`
- `connect()`: initialize buffers and state - `connect()`: initialize buffers and state
- `send(frame)`: enqueues the frame and (for echo behavior) schedules it for RX - `send(frame)`: enqueues the frame and (for echo behavior) schedules it for RX
- `receive(timeout)`: waits up to timeout for a frame in RX buffer - `receive(timeout)`: waits up to timeout for a frame in RX buffer
- `request(id, length, timeout)`: synthesizes a deterministic response of the given length for predictability - `request(id, length, timeout)`: synthesizes a deterministic response of the given length for predictability
- `disconnect()`: clears state - `disconnect()`: clears state
Use cases: Use cases:
- Fast local dev, deterministic responses, no hardware - Fast local dev, deterministic responses, no hardware
- Timeout and boundary behavior validation - Timeout and boundary behavior validation
## BabyLIN adapter flow (SDK wrapper) ## MUM adapter flow (Melexis Universal Master)
File: `ecu_framework/lin/babylin.py` File: `ecu_framework/lin/mum.py`
- `connect()`: import SDK `BabyLIN_library.py`, discover ports, open first, optionally `BLC_loadSDF`, get channel handle, and `BLC_sendCommand("start schedule N;")` The MUM is a networked LIN master (default IP `192.168.7.2`) with built-in
- `send(frame)`: calls `BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)` power control on `power_out0`. It is **master-driven**: there is no passive
- `receive(timeout)`: calls `BLC_getNextFrameTimeout(channelHandle, timeout_ms)` and converts returned `BLC_FRAME` to `LinFrame` listen — to read a slave-published frame, the master triggers a header on
- `request(id, length, timeout)`: prefers `BLC_sendRawMasterRequest(channel, id, length)`; falls back to `(channel, id, bytes)`; if unavailable, sends a header and waits on `receive()` that frame ID. Diagnostic frames (BSM-SNPD, service ID 0xB5) require LIN 1.x
- `disconnect()`: calls `BLC_closeAll()` **Classic** checksum and are sent through the transport layer's
- Error handling: uses `BLC_getDetailedErrorString` (if available) `ld_put_raw`, not the regular `send_message`.
Configuration: - `connect()`: lazy-imports `pymumclient` + `pylin`; opens MUM
- `interface.sdf_path` locates the SDF to load (`MelexisUniversalMaster.open_all(host)`), gets the LIN device
- `interface.schedule_nr` sets the schedule to start upon connect (`linmaster`) and power device (`power_control`), runs `linmaster.setup()`,
- `interface.channel` selects the channel index builds `LinBusManager` + `LinDevice22`, sets `lin_dev.baudrate`, fetches
the transport layer (`get_device("bus/transport_layer")`), and finally
## Edge considerations `power_control.power_up()` followed by a `boot_settle_seconds` sleep
- `send(frame)`: `lin_dev.send_message(master_to_slave=True, frame_id, data_length, data)`
- Ensure the correct architecture (x86/x64) of the DLL matches Python - `receive(id, timeout)`: `lin_dev.send_message(master_to_slave=False, frame_id=id, data_length=frame_lengths.get(id, default_data_length))`
- Channel/bitrate must match your network configuration — pylin returns the response bytes (or raises on timeout, which we treat as `None`).
- Some SDKs require initialization/scheduling steps before transmit/receive `id=None` raises `NotImplementedError` because the MUM cannot listen passively.
- Time synchronization and timestamp units vary per SDK — convert as needed - `disconnect()`: best-effort `power_control.power_down()` followed by `linmaster.teardown()`
- MUM-only extras: `send_raw(bytes)` (Classic checksum via `ld_put_raw`),
Note on master requests: `power_up()`, `power_down()`, `power_cycle(wait)`
- Our mock wrapper returns a deterministic byte pattern when called with the `length` signature.
- When only the bytes signature is available, zeros of the requested length are used in tests. Configuration:
- `interface.host` is required; `interface.lin_device` and `interface.power_device` default to MUM conventions
- `interface.bitrate` is the actual LIN baudrate the MUM drives
- `interface.frame_lengths` lets you map slave frame IDs to their fixed data lengths so `receive(id)` can fetch the correct number of bytes; built-in defaults cover ALM_Status (4) and ALM_Req_A (8)
## BabyLIN adapter flow (SDK wrapper)
File: `ecu_framework/lin/babylin.py`
- `connect()`: import SDK `BabyLIN_library.py`, discover ports, open first, optionally `BLC_loadSDF`, get channel handle, and `BLC_sendCommand("start schedule N;")`
- `send(frame)`: calls `BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)`
- `receive(timeout)`: calls `BLC_getNextFrameTimeout(channelHandle, timeout_ms)` and converts returned `BLC_FRAME` to `LinFrame`
- `request(id, length, timeout)`: prefers `BLC_sendRawMasterRequest(channel, id, length)`; falls back to `(channel, id, bytes)`; if unavailable, sends a header and waits on `receive()`
- `disconnect()`: calls `BLC_closeAll()`
- Error handling: uses `BLC_getDetailedErrorString` (if available)
Configuration:
- `interface.sdf_path` locates the SDF to load
- `interface.schedule_nr` sets the schedule to start upon connect
- `interface.channel` selects the channel index
## Edge considerations
- Ensure the correct architecture (x86/x64) of the DLL matches Python
- Channel/bitrate must match your network configuration
- Some SDKs require initialization/scheduling steps before transmit/receive
- Time synchronization and timestamp units vary per SDK — convert as needed
Note on master requests:
- Our mock wrapper returns a deterministic byte pattern when called with the `length` signature.
- When only the bytes signature is available, zeros of the requested length are used in tests.

View File

@ -1,77 +1,82 @@
# Architecture Overview # Architecture Overview
This document provides a high-level view of the framework's components and how they interact, plus a Mermaid diagram for quick orientation. This document provides a high-level view of the framework's components and how they interact, plus a Mermaid diagram for quick orientation.
## Components ## Components
- Tests (pytest) — test modules and functions under `tests/` - Tests (pytest) — test modules and functions under `tests/`
- Fixtures — defined in `tests/conftest.py` (config, lin, flash_ecu) - Fixtures — defined in `tests/conftest.py` (config, lin, flash_ecu)
- Config Loader — `ecu_framework/config.py` (YAML → dataclasses) - Config Loader — `ecu_framework/config.py` (YAML → dataclasses)
- LIN Abstraction — `ecu_framework/lin/base.py` (`LinInterface`, `LinFrame`) - LIN Abstraction — `ecu_framework/lin/base.py` (`LinInterface`, `LinFrame`)
- Mock LIN Adapter — `ecu_framework/lin/mock.py` - Mock LIN Adapter — `ecu_framework/lin/mock.py`
- BabyLIN Adapter — `ecu_framework/lin/babylin.py` (SDK wrapper → BabyLIN_library.py) - MUM LIN Adapter — `ecu_framework/lin/mum.py` (Melexis Universal Master via `pylin` + `pymumclient`)
- Flasher — `ecu_framework/flashing/hex_flasher.py` - BabyLIN Adapter — `ecu_framework/lin/babylin.py` (SDK wrapper → BabyLIN_library.py; legacy)
- Power Supply (PSU) control — `ecu_framework/power/owon_psu.py` (serial SCPI) - Flasher — `ecu_framework/flashing/hex_flasher.py`
- PSU quick demo script — `vendor/Owon/owon_psu_quick_demo.py` - Power Supply (PSU) control — `ecu_framework/power/owon_psu.py` (serial SCPI)
- Reporting Plugin — `conftest_plugin.py` (docstring → report metadata) - PSU quick demo script — `vendor/Owon/owon_psu_quick_demo.py`
- Reports — `reports/report.html`, `reports/junit.xml` - Reporting Plugin — `conftest_plugin.py` (docstring → report metadata)
- Reports — `reports/report.html`, `reports/junit.xml`
## Mermaid architecture diagram
## Mermaid architecture diagram
```mermaid
flowchart TB ```mermaid
subgraph Tests & Pytest flowchart TB
T[tests/*] subgraph Tests & Pytest
CF[tests/conftest.py] T[tests/*]
PL[conftest_plugin.py] CF[tests/conftest.py]
end PL[conftest_plugin.py]
end
subgraph Framework
CFG[ecu_framework/config.py] subgraph Framework
BASE[ecu_framework/lin/base.py] CFG[ecu_framework/config.py]
MOCK[ecu_framework/lin/mock.py] BASE[ecu_framework/lin/base.py]
BABY[ecu_framework/lin/babylin.py] MOCK[ecu_framework/lin/mock.py]
FLASH[ecu_framework/flashing/hex_flasher.py] MUM[ecu_framework/lin/mum.py]
POWER[ecu_framework/power/owon_psu.py] BABY[ecu_framework/lin/babylin.py]
end FLASH[ecu_framework/flashing/hex_flasher.py]
POWER[ecu_framework/power/owon_psu.py]
subgraph Artifacts end
REP[reports/report.html<br/>reports/junit.xml]
YAML[config/*.yaml<br/>babylin.example.yaml<br/>test_config.yaml] subgraph Artifacts
PSU_YAML[config/owon_psu.yaml<br/>OWON_PSU_CONFIG] REP[reports/report.html<br/>reports/junit.xml]
SDK[vendor/BabyLIN_library.py<br/>platform-specific libs] YAML[config/*.yaml<br/>test_config.yaml<br/>mum.example.yaml<br/>babylin.example.yaml]
OWON[vendor/Owon/owon_psu_quick_demo.py] PSU_YAML[config/owon_psu.yaml<br/>OWON_PSU_CONFIG]
end MELEXIS[Melexis pylin + pymumclient<br/>MUM @ 192.168.7.2]
SDK[vendor/BabyLIN_library.py<br/>platform-specific libs]
T --> CF OWON[vendor/Owon/owon_psu_quick_demo.py]
CF --> CFG end
CF --> BASE
CF --> MOCK T --> CF
CF --> BABY CF --> CFG
CF --> FLASH CF --> BASE
T --> POWER CF --> MOCK
PL --> REP CF --> MUM
CF --> BABY
CFG --> YAML CF --> FLASH
CFG --> PSU_YAML T --> POWER
BABY --> SDK PL --> REP
T --> OWON
T --> REP CFG --> YAML
``` CFG --> PSU_YAML
MUM --> MELEXIS
## Data and control flow summary BABY --> SDK
T --> OWON
- Tests use fixtures to obtain config and a connected LIN adapter T --> REP
- Config loader reads YAML (or env override), returns typed dataclasses ```
- LIN calls are routed through the interface abstraction to the selected adapter
- Flasher (optional) uses the same interface to program the ECU ## Data and control flow summary
- Power supply control (optional) uses `ecu_framework/power/owon_psu.py` and reads
`config.power_supply` (merged with `config/owon_psu.yaml` or `OWON_PSU_CONFIG` when present); - Tests use fixtures to obtain config and a connected LIN adapter
the quick demo script under `vendor/Owon/` provides a quick manual flow - Config loader reads YAML (or env override), returns typed dataclasses
- Reporting plugin parses docstrings and enriches the HTML report - LIN calls are routed through the interface abstraction to the selected adapter
- Flasher (optional) uses the same interface to program the ECU
## Extending the architecture - Power supply control (optional) uses `ecu_framework/power/owon_psu.py` and reads
`config.power_supply` (merged with `config/owon_psu.yaml` or `OWON_PSU_CONFIG` when present);
- Add new bus adapters by implementing `LinInterface` the quick demo script under `vendor/Owon/` provides a quick manual flow
- Add new report sinks (e.g., JSON or a DB) by extending the plugin - Reporting plugin parses docstrings and enriches the HTML report
- Add new fixtures for diagnostics or measurement tools (Scopes, power supplies, etc.)
## Extending the architecture
- Add new bus adapters by implementing `LinInterface`
- Add new report sinks (e.g., JSON or a DB) by extending the plugin
- Add new fixtures for diagnostics or measurement tools (Scopes, power supplies, etc.)

View File

@ -1,60 +1,60 @@
# Requirement Traceability # Requirement Traceability
This document shows how requirements map to tests via pytest markers and docstrings, plus how to visualize coverage. This document shows how requirements map to tests via pytest markers and docstrings, plus how to visualize coverage.
## Conventions ## Conventions
- Requirement IDs: `REQ-xxx` - Requirement IDs: `REQ-xxx`
- Use markers in tests: `@pytest.mark.req_001`, `@pytest.mark.req_002`, etc. - Use markers in tests: `@pytest.mark.req_001`, `@pytest.mark.req_002`, etc.
- Include readable requirement list in the test docstring under `Requirements:` - Include readable requirement list in the test docstring under `Requirements:`
## Example ## Example
```python ```python
@pytest.mark.req_001 @pytest.mark.req_001
@pytest.mark.req_003 @pytest.mark.req_003
""" """
Title: Mock LIN Interface - Send/Receive Echo Test Title: Mock LIN Interface - Send/Receive Echo Test
Requirements: REQ-001, REQ-003 Requirements: REQ-001, REQ-003
""" """
``` ```
## Mermaid: Requirement → Tests map ## Mermaid: Requirement → Tests map
Note: This is illustrative; maintain it as your suite grows. Note: This is illustrative; maintain it as your suite grows.
```mermaid ```mermaid
flowchart LR flowchart LR
R1[REQ-001: LIN Basic Ops] R1[REQ-001: LIN Basic Ops]
R2[REQ-002: Master Request/Response] R2[REQ-002: Master Request/Response]
R3[REQ-003: Frame Validation] R3[REQ-003: Frame Validation]
R4[REQ-004: Timeout Handling] R4[REQ-004: Timeout Handling]
T1[test_mock_send_receive_echo] T1[test_mock_send_receive_echo]
T2[test_mock_request_synthesized_response] T2[test_mock_request_synthesized_response]
T3[test_mock_receive_timeout_behavior] T3[test_mock_receive_timeout_behavior]
T4[test_mock_frame_validation_boundaries] T4[test_mock_frame_validation_boundaries]
R1 --> T1 R1 --> T1
R3 --> T1 R3 --> T1
R2 --> T2 R2 --> T2
R4 --> T3 R4 --> T3
R1 --> T4 R1 --> T4
R3 --> T4 R3 --> T4
``` ```
## Generating a live coverage artifact (optional) ## Generating a live coverage artifact (optional)
You can extend `conftest_plugin.py` to emit a JSON file with requirement-to-test mapping at the end of a run by scanning markers and docstrings. This can fuel dashboards or CI gates. You can extend `conftest_plugin.py` to emit a JSON file with requirement-to-test mapping at the end of a run by scanning markers and docstrings. This can fuel dashboards or CI gates.
Suggested JSON shape: Suggested JSON shape:
```json ```json
{ {
"requirements": { "requirements": {
"REQ-001": ["tests/test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo", "..."] "REQ-001": ["tests/test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo", "..."]
}, },
"uncovered": ["REQ-010", "REQ-012"] "uncovered": ["REQ-010", "REQ-012"]
} }
``` ```

View File

@ -1,57 +1,57 @@
# Flashing Sequence (ECU Programming) # Flashing Sequence (ECU Programming)
This document outlines the expected flashing workflow using the `HexFlasher` scaffold over the LIN interface and where you can plug in your production flasher (UDS). This document outlines the expected flashing workflow using the `HexFlasher` scaffold over the LIN interface and where you can plug in your production flasher (UDS).
## Overview ## Overview
- Flashing is controlled by configuration (`flash.enabled`, `flash.hex_path`) - Flashing is controlled by configuration (`flash.enabled`, `flash.hex_path`)
- The `flash_ecu` session fixture invokes the flasher before tests - The `flash_ecu` session fixture invokes the flasher before tests
- The flasher uses the same `LinInterface` as tests - The flasher uses the same `LinInterface` as tests
## Mermaid sequence ## Mermaid sequence
```mermaid ```mermaid
sequenceDiagram sequenceDiagram
autonumber autonumber
participant P as pytest participant P as pytest
participant F as flash_ecu fixture participant F as flash_ecu fixture
participant H as HexFlasher participant H as HexFlasher
participant L as LinInterface (mock/babylin) participant L as LinInterface (mock/mum/babylin)
participant E as ECU participant E as ECU
P->>F: Evaluate flashing precondition P->>F: Evaluate flashing precondition
alt flash.enabled == true and hex_path provided alt flash.enabled == true and hex_path provided
F->>H: HexFlasher(lin).flash_hex(hex_path) F->>H: HexFlasher(lin).flash_hex(hex_path)
H->>L: connect (ensure session ready) H->>L: connect (ensure session ready)
H->>E: Enter programming session (UDS) H->>E: Enter programming session (UDS)
H->>E: Erase memory (as required) H->>E: Erase memory (as required)
loop For each block in HEX loop For each block in HEX
H->>L: Transfer block via LIN frames H->>L: Transfer block via LIN frames
L-->>H: Acks / flow control L-->>H: Acks / flow control
end end
H->>E: Verify checksum / signature H->>E: Verify checksum / signature
H->>E: Exit programming, reset if needed H->>E: Exit programming, reset if needed
H-->>F: Return success/failure H-->>F: Return success/failure
else else
F-->>P: Skip flashing F-->>P: Skip flashing
end end
``` ```
## Implementation notes ## Implementation notes
- `ecu_framework/flashing/hex_flasher.py` is a stub — replace with your protocol implementation (UDS) - `ecu_framework/flashing/hex_flasher.py` is a stub — replace with your protocol implementation (UDS)
- Validate timing requirements and chunk sizes per ECU - Validate timing requirements and chunk sizes per ECU
- Consider power-cycle/reset hooks via programmable power supply. - Consider power-cycle/reset hooks via programmable power supply.
## Error handling ## Error handling
- On failure, the fixture calls `pytest.fail("ECU flashing failed")` - On failure, the fixture calls `pytest.fail("ECU flashing failed")`
- Make flashing idempotent when possible (can retry or detect current version) - Make flashing idempotent when possible (can retry or detect current version)
## Configuration example ## Configuration example
```yaml ```yaml
flash: flash:
enabled: true enabled: true
hex_path: "firmware/ecu_firmware.hex" hex_path: "firmware/ecu_firmware.hex"
``` ```

View File

@ -1,103 +1,103 @@
# BabyLIN Adapter Internals (SDK Python wrapper) # BabyLIN Adapter Internals (SDK Python wrapper)
This document describes how the real hardware adapter binds to the BabyLIN SDK via the official Python wrapper `BabyLIN_library.py` and how frames move across the boundary. This document describes how the real hardware adapter binds to the BabyLIN SDK via the official Python wrapper `BabyLIN_library.py` and how frames move across the boundary.
## Overview ## Overview
- Location: `ecu_framework/lin/babylin.py` - Location: `ecu_framework/lin/babylin.py`
- Uses the SDK's `BabyLIN_library.py` (place under `vendor/` or on `PYTHONPATH`) - Uses the SDK's `BabyLIN_library.py` (place under `vendor/` or on `PYTHONPATH`)
- Discovers and opens a BabyLIN device using `BLC_getBabyLinPorts` and `BLC_openPort` - Discovers and opens a BabyLIN device using `BLC_getBabyLinPorts` and `BLC_openPort`
- Optionally loads an SDF via `BLC_loadSDF(handle, sdf_path, 1)` and starts a schedule with `BLC_sendCommand("start schedule N;")` - Optionally loads an SDF via `BLC_loadSDF(handle, sdf_path, 1)` and starts a schedule with `BLC_sendCommand("start schedule N;")`
- Converts between Python `LinFrame` and the wrapper's `BLC_FRAME` structure for receive - Converts between Python `LinFrame` and the wrapper's `BLC_FRAME` structure for receive
## Mermaid: SDK connect sequence ## Mermaid: SDK connect sequence
```mermaid ```mermaid
sequenceDiagram sequenceDiagram
autonumber autonumber
participant T as Tests/Fixture participant T as Tests/Fixture
participant A as BabyLinInterface (SDK) participant A as BabyLinInterface (SDK)
participant BL as BabyLIN_library (BLC_*) participant BL as BabyLIN_library (BLC_*)
T->>A: connect() T->>A: connect()
A->>BL: BLC_getBabyLinPorts(100) A->>BL: BLC_getBabyLinPorts(100)
BL-->>A: [port0, ...] BL-->>A: [port0, ...]
A->>BL: BLC_openPort(port0) A->>BL: BLC_openPort(port0)
A->>BL: BLC_loadSDF(handle, sdf_path, 1) A->>BL: BLC_loadSDF(handle, sdf_path, 1)
A->>BL: BLC_getChannelHandle(handle, channelIndex) A->>BL: BLC_getChannelHandle(handle, channelIndex)
A->>BL: start schedule N A->>BL: start schedule N
A-->>T: connected A-->>T: connected
``` ```
## Mermaid: Binding and call flow ## Mermaid: Binding and call flow
```mermaid ```mermaid
sequenceDiagram sequenceDiagram
autonumber autonumber
participant T as Test participant T as Test
participant L as LinInterface (BabyLin) participant L as LinInterface (BabyLin)
participant D as BabyLIN_library (BLC_*) participant D as BabyLIN_library (BLC_*)
T->>L: connect() T->>L: connect()
L->>D: BLC_getBabyLinPorts() L->>D: BLC_getBabyLinPorts()
L->>D: BLC_openPort(port) L->>D: BLC_openPort(port)
D-->>L: handle/ok D-->>L: handle/ok
T->>L: send(frame) T->>L: send(frame)
L->>D: BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0) L->>D: BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)
D-->>L: code (0=ok) D-->>L: code (0=ok)
T->>L: receive(timeout) T->>L: receive(timeout)
L->>D: BLC_getNextFrameTimeout(channelHandle, timeout_ms) L->>D: BLC_getNextFrameTimeout(channelHandle, timeout_ms)
D-->>L: code, frame D-->>L: code, frame
L->>L: convert BLC_FRAME to LinFrame L->>L: convert BLC_FRAME to LinFrame
L-->>T: LinFrame or None L-->>T: LinFrame or None
T->>L: disconnect() T->>L: disconnect()
L->>D: BLC_closeAll() L->>D: BLC_closeAll()
``` ```
## Master request behavior ## Master request behavior
When performing a master request, the adapter tries the SDK method in this order: When performing a master request, the adapter tries the SDK method in this order:
1. `BLC_sendRawMasterRequest(channel, id, length)` — preferred 1. `BLC_sendRawMasterRequest(channel, id, length)` — preferred
2. `BLC_sendRawMasterRequest(channel, id, dataBytes)` — fallback 2. `BLC_sendRawMasterRequest(channel, id, dataBytes)` — fallback
3. Send a header with zeros and wait on `receive()` — last resort 3. Send a header with zeros and wait on `receive()` — last resort
Mock behavior notes: Mock behavior notes:
- The provided mock (`vendor/mock_babylin_wrapper.py`) synthesizes a deterministic response for the `length` signature (e.g., data[i] = (id + i) & 0xFF). - The provided mock (`vendor/mock_babylin_wrapper.py`) synthesizes a deterministic response for the `length` signature (e.g., data[i] = (id + i) & 0xFF).
- For the bytes-only signature, the adapter sends zero-filled bytes of the requested length and validates by length. - For the bytes-only signature, the adapter sends zero-filled bytes of the requested length and validates by length.
## Wrapper usage highlights ## Wrapper usage highlights
```python ```python
from BabyLIN_library import create_BabyLIN from BabyLIN_library import create_BabyLIN
bl = create_BabyLIN() bl = create_BabyLIN()
ports = bl.BLC_getBabyLinPorts(100) ports = bl.BLC_getBabyLinPorts(100)
h = bl.BLC_openPort(ports[0]) h = bl.BLC_openPort(ports[0])
bl.BLC_loadSDF(h, "Example.sdf", 1) bl.BLC_loadSDF(h, "Example.sdf", 1)
ch = bl.BLC_getChannelHandle(h, 0) ch = bl.BLC_getChannelHandle(h, 0)
bl.BLC_sendCommand(ch, "start schedule 0;") bl.BLC_sendCommand(ch, "start schedule 0;")
# Transmit and receive # Transmit and receive
bl.BLC_mon_set_xmit(ch, 0x10, bytes([1,2,3,4]), 0) bl.BLC_mon_set_xmit(ch, 0x10, bytes([1,2,3,4]), 0)
frm = bl.BLC_getNextFrameTimeout(ch, 100) frm = bl.BLC_getNextFrameTimeout(ch, 100)
print(frm.frameId, list(frm.frameData)[:frm.lenOfData]) print(frm.frameId, list(frm.frameData)[:frm.lenOfData])
bl.BLC_closeAll() bl.BLC_closeAll()
``` ```
## Notes and pitfalls ## Notes and pitfalls
- Architecture: Ensure Python (x86/x64) matches the platform library bundled with the SDK - Architecture: Ensure Python (x86/x64) matches the platform library bundled with the SDK
- Timeouts: SDKs typically want milliseconds; convert Python seconds accordingly - Timeouts: SDKs typically want milliseconds; convert Python seconds accordingly
- Error handling: On non-zero return codes, use `BLC_getDetailedErrorString` (if available) for human-readable messages - Error handling: On non-zero return codes, use `BLC_getDetailedErrorString` (if available) for human-readable messages
- Threading: If you use background receive threads, protect buffers with locks - Threading: If you use background receive threads, protect buffers with locks
- Performance: Avoid excessive allocations in tight loops; reuse frame structs when possible - Performance: Avoid excessive allocations in tight loops; reuse frame structs when possible
## Extending ## Extending
- Add bitrate/channel setup functions as exposed by the SDK - Add bitrate/channel setup functions as exposed by the SDK
- Implement schedule tables or diagnostics passthrough if provided by the SDK - Implement schedule tables or diagnostics passthrough if provided by the SDK
- Wrap more SDK errors into typed Python exceptions for clarity - Wrap more SDK errors into typed Python exceptions for clarity

View File

@@ -1,144 +1,171 @@
# Raspberry Pi Deployment Guide # Raspberry Pi Deployment Guide
This guide explains how to run the ECU testing framework on a Raspberry Pi (Debian/Raspberry Pi OS). It covers environment setup, optional BabyLin hardware integration, running tests headless, and installing as a systemd service. This guide explains how to run the ECU testing framework on a Raspberry Pi (Debian/Raspberry Pi OS). It covers environment setup, hardware integration via MUM (recommended) or BabyLin (legacy), running tests headless, and installing as a systemd service.
> Note: If you plan to use BabyLin hardware on a Pi, verify vendor driver support for ARM Linux. If BabyLin provides only Windows DLLs, use the Mock interface on Pi or deploy a different hardware interface that supports Linux/ARM. > Note: The MUM (Melexis Universal Master) is **networked**, so the Pi only
> needs IP reachability to the MUM (default `192.168.7.2`) — there are no
## 1) Choose your interface > Pi-side native libs to worry about. BabyLin needs ARM Linux native
> libraries; if those aren't available, use Mock or MUM on the Pi instead.
- Mock (recommended for headless/dev on Pi): `interface.type: mock`
- BabyLIN (only if ARM/Linux support is available): `interface.type: babylin` and ensure the SDK's `BabyLIN_library.py` and corresponding Linux/ARM shared libraries are available under `vendor/` or on PYTHONPATH/LD_LIBRARY_PATH. ## 1) Choose your interface
## 2) Install prerequisites - **MUM (recommended for hardware on Pi)**: `interface.type: mum`. Requires Melexis `pylin` + `pymumclient` (see `vendor/automated_lin_test/install_packages.sh`) and IP reachability to the MUM device.
- Mock (recommended for headless/dev on Pi): `interface.type: mock`
```bash - BabyLIN (only if ARM/Linux support is available): `interface.type: babylin` and ensure the SDK's `BabyLIN_library.py` and corresponding Linux/ARM shared libraries are available under `vendor/` or on PYTHONPATH/LD_LIBRARY_PATH.
sudo apt update
sudo apt install -y python3 python3-venv python3-pip git ## 2) Install prerequisites
```
```bash
Optional (for BabyLin or USB tools): sudo apt update
```bash sudo apt install -y python3 python3-venv python3-pip git
sudo apt install -y libusb-1.0-0 udev ```
```
Optional (for BabyLin or USB tools):
## 3) Clone and set up ```bash
sudo apt install -y libusb-1.0-0 udev
```bash ```
# clone your repo
git clone <your-repo-url> ~/ecu_tests ## 3) Clone and set up
cd ~/ecu_tests
```bash
# create venv # clone your repo
python3 -m venv .venv git clone <your-repo-url> ~/ecu_tests
source .venv/bin/activate cd ~/ecu_tests
# install deps # create venv
pip install -r requirements.txt python3 -m venv .venv
``` source .venv/bin/activate
## 4) Configure # install deps
pip install -r requirements.txt
Create or edit `config/test_config.yaml`: ```
```yaml ## 4) Configure
interface:
type: mock # or babylin (if supported on ARM/Linux) Create or edit `config/test_config.yaml`:
channel: 1
bitrate: 19200 ```yaml
flash: interface:
enabled: false type: mock # or babylin (if supported on ARM/Linux)
``` channel: 1
bitrate: 19200
Optionally point to another config file via env var: flash:
```bash enabled: false
export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml ```
```
Optionally point to another config file via env var:
If using BabyLIN on Linux/ARM with the SDK wrapper, set: ```bash
```yaml export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
interface: ```
type: babylin
channel: 0 If using the MUM on the Pi, set:
sdf_path: "/home/pi/ecu_tests/vendor/Example.sdf"
schedule_nr: 0 ```yaml
``` interface:
type: mum
## 5) Run tests on Pi host: 192.168.7.2 # adjust to your MUM IP
lin_device: lin0
```bash power_device: power_out0
source .venv/bin/activate bitrate: 19200
python -m pytest -m "not hardware" -v boot_settle_seconds: 0.5
``` frame_lengths:
0x0A: 8
Artifacts are in `reports/` (HTML, JUnit, JSON, summary MD). 0x11: 4
```
## 6) Run as a systemd service (headless)
Confirm reachability before running tests:
This section lets the Pi run the test suite on boot or on demand.
```bash
### Create a runner script ping -c 2 192.168.7.2
```
Create `scripts/run_tests.sh`:
```bash If using BabyLIN on Linux/ARM with the SDK wrapper, set:
#!/usr/bin/env bash
set -euo pipefail ```yaml
cd "$(dirname "$0")/.." interface:
source .venv/bin/activate type: babylin
# optionally set custom config channel: 0
# export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml sdf_path: "/home/pi/ecu_tests/vendor/Example.sdf"
python -m pytest -v schedule_nr: 0
``` ```
Make it executable:
```bash ## 5) Run tests on Pi
chmod +x scripts/run_tests.sh
``` ```bash
source .venv/bin/activate
### Create a systemd unit python -m pytest -m "not hardware" -v
```
Create `scripts/ecu-tests.service`:
```ini Artifacts are in `reports/` (HTML, JUnit, JSON, summary MD).
[Unit]
Description=ECU Tests Runner ## 6) Run as a systemd service (headless)
After=network-online.target
Wants=network-online.target This section lets the Pi run the test suite on boot or on demand.
[Service] ### Create a runner script
Type=oneshot
WorkingDirectory=/home/pi/ecu_tests Create `scripts/run_tests.sh`:
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh ```bash
User=pi #!/usr/bin/env bash
Group=pi set -euo pipefail
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml cd "$(dirname "$0")/.."
# Capture output to a log file source .venv/bin/activate
StandardOutput=append:/home/pi/ecu_tests/reports/service.log # optionally set custom config
StandardError=append:/home/pi/ecu_tests/reports/service.err # export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
python -m pytest -v
[Install] ```
WantedBy=multi-user.target Make it executable:
``` ```bash
chmod +x scripts/run_tests.sh
Install and run: ```
```bash
sudo mkdir -p /home/pi/ecu_tests/reports ### Create a systemd unit
sudo cp scripts/ecu-tests.service /etc/systemd/system/ecu-tests.service
sudo systemctl daemon-reload Create `scripts/ecu-tests.service`:
sudo systemctl enable ecu-tests.service ```ini
# Start manually [Unit]
sudo systemctl start ecu-tests.service Description=ECU Tests Runner
# Check status After=network-online.target
systemctl status ecu-tests.service Wants=network-online.target
```
[Service]
## 7) USB and permissions (if using hardware) Type=oneshot
WorkingDirectory=/home/pi/ecu_tests
- Create udev rules for your device (if required by vendor) ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
- Add user to dialout or plugdev groups if serial/USB access is needed User=pi
- Confirm your hardware library is found by Python and the dynamic linker: Group=pi
- Ensure `vendor/BabyLIN_library.py` is importable (add `vendor/` to PYTHONPATH if needed) Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
- Ensure `.so` files are discoverable (e.g., place in `/usr/local/lib` and run `sudo ldconfig`, or set `LD_LIBRARY_PATH`) # Capture output to a log file
StandardOutput=append:/home/pi/ecu_tests/reports/service.log
## 8) Tips StandardError=append:/home/pi/ecu_tests/reports/service.err
- Use the mock interface on Pi for quick smoke tests and documentation/report generation [Install]
- For full HIL, ensure vendor SDK supports Linux/ARM and provide a shared object (`.so`) and headers WantedBy=multi-user.target
- If only Windows is supported, run the hardware suite on a Windows host and use the Pi for lightweight tasks (archiving, reporting, quick checks) ```
Install and run:
```bash
sudo mkdir -p /home/pi/ecu_tests/reports
sudo cp scripts/ecu-tests.service /etc/systemd/system/ecu-tests.service
sudo systemctl daemon-reload
sudo systemctl enable ecu-tests.service
# Start manually
sudo systemctl start ecu-tests.service
# Check status
systemctl status ecu-tests.service
```
## 7) USB and permissions (if using hardware)
- Create udev rules for your device (if required by vendor)
- Add user to dialout or plugdev groups if serial/USB access is needed
- Confirm your hardware library is found by Python and the dynamic linker:
- Ensure `vendor/BabyLIN_library.py` is importable (add `vendor/` to PYTHONPATH if needed)
- Ensure `.so` files are discoverable (e.g., place in `/usr/local/lib` and run `sudo ldconfig`, or set `LD_LIBRARY_PATH`)
## 8) Tips
- Use the mock interface on Pi for quick smoke tests and documentation/report generation
- For full HIL on Pi, the **MUM is the easiest path** — it's IP-reachable so the Pi doesn't need vendor-specific native libraries, just the Melexis Python packages (`pylin`, `pymumclient`)
- For BabyLIN HIL, ensure vendor SDK supports Linux/ARM and provide a shared object (`.so`) and headers
- If only Windows is supported by your hardware path, run the hardware suite on a Windows host and use the Pi for lightweight tasks (archiving, reporting, quick checks)

View File

@@ -1,80 +1,86 @@
# Build a Custom Raspberry Pi Image with ECU Tests # Build a Custom Raspberry Pi Image with ECU Tests
This guide walks you through building your own Raspberry Pi OS image that already contains this framework, dependencies, config, and services. It uses the official pi-gen tool (used by Raspberry Pi OS) or the simpler pi-gen-lite alternatives. This guide walks you through building your own Raspberry Pi OS image that already contains this framework, dependencies, config, and services. It uses the official pi-gen tool (used by Raspberry Pi OS) or the simpler pi-gen-lite alternatives.
> Important: BabyLin support on ARM/Linux depends on vendor SDKs. If no `.so` is provided for ARM, either use the Mock interface on the Pi, or keep hardware tests on Windows. > Important: For full HIL on the Pi, the **MUM (Melexis Universal Master)** is
> the recommended hardware path — it's IP-reachable so the Pi only needs the
## Approach A: Using pi-gen (official) > Melexis Python packages (`pylin`, `pymumclient`), no native libraries. Bake
> those into the image's site-packages from the Melexis IDE bundle. BabyLin
1. Prepare a build host (Debian/Ubuntu) > support on ARM/Linux depends on vendor SDKs; if no `.so` is provided for
```bash > ARM, either use the Mock or MUM interface on the Pi, or keep BabyLIN
sudo apt update && sudo apt install -y git coreutils quilt parted qemu-user-static debootstrap zerofree \ > hardware tests on Windows.
pxz zip dosfstools libcap2-bin grep rsync xz-utils file bc curl jq
``` ## Approach A: Using pi-gen (official)
2. Clone pi-gen
```bash 1. Prepare a build host (Debian/Ubuntu)
git clone https://github.com/RPi-Distro/pi-gen.git ```bash
cd pi-gen sudo apt update && sudo apt install -y git coreutils quilt parted qemu-user-static debootstrap zerofree \
``` pxz zip dosfstools libcap2-bin grep rsync xz-utils file bc curl jq
3. Create a custom stage for ECU Tests (e.g., `stage2/02-ecu-tests/`): ```
- `00-packages` (optional OS deps like python3, libusb-1.0-0) 2. Clone pi-gen
- `01-run.sh` to clone your repo, create venv, install deps, and set up systemd units ```bash
git clone https://github.com/RPi-Distro/pi-gen.git
Example `01-run.sh` contents: cd pi-gen
```bash ```
#!/bin/bash -e 3. Create a custom stage for ECU Tests (e.g., `stage2/02-ecu-tests/`):
REPO_DIR=/home/pi/ecu_tests - `00-packages` (optional OS deps like python3, libusb-1.0-0)
sudo -u pi git clone <your-repo-url> "$REPO_DIR" - `01-run.sh` to clone your repo, create venv, install deps, and set up systemd units
cd "$REPO_DIR"
sudo -u pi python3 -m venv .venv Example `01-run.sh` contents:
sudo -u pi bash -lc "source .venv/bin/activate && pip install --upgrade pip && pip install -r requirements.txt" ```bash
sudo mkdir -p "$REPO_DIR/reports" #!/bin/bash -e
sudo chown -R pi:pi "$REPO_DIR/reports" REPO_DIR=/home/pi/ecu_tests
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.service" /etc/systemd/system/ecu-tests.service sudo -u pi git clone <your-repo-url> "$REPO_DIR"
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.timer" /etc/systemd/system/ecu-tests.timer cd "$REPO_DIR"
sudo systemctl enable ecu-tests.service sudo -u pi python3 -m venv .venv
sudo systemctl enable ecu-tests.timer || true sudo -u pi bash -lc "source .venv/bin/activate && pip install --upgrade pip && pip install -r requirements.txt"
# Optional udev rules sudo mkdir -p "$REPO_DIR/reports"
if [ -f "$REPO_DIR/scripts/99-babylin.rules" ]; then sudo chown -R pi:pi "$REPO_DIR/reports"
sudo install -Dm644 "$REPO_DIR/scripts/99-babylin.rules" /etc/udev/rules.d/99-babylin.rules sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.service" /etc/systemd/system/ecu-tests.service
fi sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.timer" /etc/systemd/system/ecu-tests.timer
``` sudo systemctl enable ecu-tests.service
4. Configure build options (`config` file in pi-gen root): sudo systemctl enable ecu-tests.timer || true
```bash # Optional udev rules
IMG_NAME=ecu-tests-os if [ -f "$REPO_DIR/scripts/99-babylin.rules" ]; then
ENABLE_SSH=1 sudo install -Dm644 "$REPO_DIR/scripts/99-babylin.rules" /etc/udev/rules.d/99-babylin.rules
STAGE_LIST="stage0 stage1 stage2" # include your custom stage2 additions fi
``` ```
5. Build 4. Configure build options (`config` file in pi-gen root):
```bash ```bash
sudo ./build.sh IMG_NAME=ecu-tests-os
``` ENABLE_SSH=1
6. Flash the resulting `.img` to SD card with `Raspberry Pi Imager` or `dd`. STAGE_LIST="stage0 stage1 stage2" # include your custom stage2 additions
```
## Approach B: Preseed on first boot (lighter) 5. Build
```bash
- Ship a minimal Raspberry Pi OS image and a cloud-init/user-data or first-boot script that pulls your repo and runs `scripts/pi_install.sh`. sudo ./build.sh
- Pros: Faster iteration; you control repo URL at install time. ```
- Cons: Requires internet on first boot. 6. Flash the resulting `.img` to SD card with `Raspberry Pi Imager` or `dd`.
## CI Integration (optional) ## Approach B: Preseed on first boot (lighter)
- You can automate image builds with GitHub Actions or GitLab CI using a Docker runner that executes pi-gen. - Ship a minimal Raspberry Pi OS image and a cloud-init/user-data or first-boot script that pulls your repo and runs `scripts/pi_install.sh`.
- Upload the `.img` as a release asset or pipeline artifact. - Pros: Faster iteration; you control repo URL at install time.
- Optionally, bake environment-specific `config/test_config.yaml` or keep it external and set `ECU_TESTS_CONFIG` in the systemd unit. - Cons: Requires internet on first boot.
## Hardware Notes ## CI Integration (optional)
- If using BabyLin, ensure: `.so` for ARM, udev rules, and any kernel modules. - You can automate image builds with GitHub Actions or GitLab CI using a Docker runner that executes pi-gen.
- Validate the SDK wrapper and libraries are present under `/opt/ecu_tests/vendor/` (or your chosen path). Ensure `.so` files are on the linker path (run `sudo ldconfig`) and `BabyLIN_library.py` is importable. - Upload the `.img` as a release asset or pipeline artifact.
- Optionally, bake environment-specific `config/test_config.yaml` or keep it external and set `ECU_TESTS_CONFIG` in the systemd unit.
## Boot-time Behavior
## Hardware Notes
- The `ecu-tests.timer` can schedule daily or hourly test runs; edit `OnUnitActiveSec` as needed.
- Logs are written to `reports/service.log` and `reports/service.err` on the Pi. - If using BabyLin, ensure: `.so` for ARM, udev rules, and any kernel modules.
- Validate the SDK wrapper and libraries are present under `/opt/ecu_tests/vendor/` (or your chosen path). Ensure `.so` files are on the linker path (run `sudo ldconfig`) and `BabyLIN_library.py` is importable.
## Security
## Boot-time Behavior
- Consider read-only root filesystem for robustness.
- Use a dedicated user with limited privileges for test execution. - The `ecu-tests.timer` can schedule daily or hourly test runs; edit `OnUnitActiveSec` as needed.
- Keep secrets (if any) injected via environment and not committed. - Logs are written to `reports/service.log` and `reports/service.err` on the Pi.
## Security
- Consider read-only root filesystem for robustness.
- Use a dedicated user with limited privileges for test execution.
- Keep secrets (if any) injected via environment and not committed.

View File

@@ -1,91 +1,91 @@
# Pytest Plugin: Reporting & Traceability Overview # Pytest Plugin: Reporting & Traceability Overview
This guide explains the custom pytest plugin in `conftest_plugin.py` that enriches reports with business-facing metadata and builds requirements traceability artifacts. This guide explains the custom pytest plugin in `conftest_plugin.py` that enriches reports with business-facing metadata and builds requirements traceability artifacts.
## What it does ## What it does
- Extracts metadata (Title, Description, Requirements, Test Steps, Expected Result) from test docstrings and markers. - Extracts metadata (Title, Description, Requirements, Test Steps, Expected Result) from test docstrings and markers.
- Attaches this metadata as `user_properties` on each test report. - Attaches this metadata as `user_properties` on each test report.
- Adds custom columns (Title, Requirements) to the HTML report. - Adds custom columns (Title, Requirements) to the HTML report.
- Produces two artifacts under `reports/` at the end of the run: - Produces two artifacts under `reports/` at the end of the run:
- `requirements_coverage.json`: a traceability matrix mapping requirement IDs to test nodeids, plus unmapped tests. - `requirements_coverage.json`: a traceability matrix mapping requirement IDs to test nodeids, plus unmapped tests.
- `summary.md`: a compact summary of results suitable for CI dashboards or PR comments. - `summary.md`: a compact summary of results suitable for CI dashboards or PR comments.
## Inputs and sources ## Inputs and sources
- Test docstrings prefixed lines: - Test docstrings prefixed lines:
- `Title:` one-line title - `Title:` one-line title
- `Description:` free-form text until the next section - `Description:` free-form text until the next section
- `Requirements:` comma- or space-separated tokens such as `REQ-001`, `req_002` - `Requirements:` comma- or space-separated tokens such as `REQ-001`, `req_002`
- `Test Steps:` numbered list (1., 2., 3., ...) - `Test Steps:` numbered list (1., 2., 3., ...)
- `Expected Result:` free-form text - `Expected Result:` free-form text
- Pytest markers on tests: `@pytest.mark.req_001` etc. are normalized to `REQ-001`. - Pytest markers on tests: `@pytest.mark.req_001` etc. are normalized to `REQ-001`.
## Normalization logic ## Normalization logic
Requirement IDs are normalized to the canonical form `REQ-XYZ` using: Requirement IDs are normalized to the canonical form `REQ-XYZ` using:
- `req_001` → `REQ-001` - `req_001` → `REQ-001`
- `REQ-1` / `REQ-001` / `REQ_001` → `REQ-001` - `REQ-1` / `REQ-001` / `REQ_001` → `REQ-001`
This ensures consistent keys in the coverage JSON and HTML. This ensures consistent keys in the coverage JSON and HTML.
## Hook call sequence ## Hook call sequence
Below is the high-level call sequence of relevant plugin hooks during a typical run: Below is the high-level call sequence of relevant plugin hooks during a typical run:
```mermaid ```mermaid
sequenceDiagram sequenceDiagram
autonumber autonumber
participant Pytest participant Pytest
participant Plugin as conftest_plugin participant Plugin as conftest_plugin
participant FS as File System participant FS as File System
Pytest->>Plugin: pytest_configure(config) Pytest->>Plugin: pytest_configure(config)
Note right of Plugin: Ensure ./reports exists Note right of Plugin: Ensure ./reports exists
Pytest->>Plugin: pytest_collection_modifyitems(session, config, items) Pytest->>Plugin: pytest_collection_modifyitems(session, config, items)
Note right of Plugin: Track all collected nodeids for unmapped detection Note right of Plugin: Track all collected nodeids for unmapped detection
loop For each test phase loop For each test phase
Pytest->>Plugin: pytest_runtest_makereport(item, call) Pytest->>Plugin: pytest_runtest_makereport(item, call)
Note right of Plugin: hookwrapper Note right of Plugin: hookwrapper
Plugin-->>Pytest: yield to get report Plugin-->>Pytest: yield to get report
Plugin->>Plugin: parse docstring & markers Plugin->>Plugin: parse docstring & markers
Plugin->>Plugin: attach user_properties (Title, Requirements, ...) Plugin->>Plugin: attach user_properties (Title, Requirements, ...)
Plugin->>Plugin: update _REQ_TO_TESTS, _MAPPED_TESTS Plugin->>Plugin: update _REQ_TO_TESTS, _MAPPED_TESTS
end end
Pytest->>Plugin: pytest_terminal_summary(terminalreporter, exitstatus) Pytest->>Plugin: pytest_terminal_summary(terminalreporter, exitstatus)
Plugin->>Plugin: compile stats, coverage map, unmapped tests Plugin->>Plugin: compile stats, coverage map, unmapped tests
Plugin->>FS: write reports/requirements_coverage.json Plugin->>FS: write reports/requirements_coverage.json
Plugin->>FS: write reports/summary.md Plugin->>FS: write reports/summary.md
``` ```
## HTML report integration ## HTML report integration
- `pytest_html_results_table_header`: inserts Title and Requirements columns. - `pytest_html_results_table_header`: inserts Title and Requirements columns.
- `pytest_html_results_table_row`: fills in values from `report.user_properties`. - `pytest_html_results_table_row`: fills in values from `report.user_properties`.
The HTML plugin reads `user_properties` to render the extra metadata per test row. The HTML plugin reads `user_properties` to render the extra metadata per test row.
## Artifacts ## Artifacts
- `reports/requirements_coverage.json` - `reports/requirements_coverage.json`
- `generated_at`: ISO timestamp - `generated_at`: ISO timestamp
- `results`: counts of passed/failed/skipped/etc. - `results`: counts of passed/failed/skipped/etc.
- `requirements`: map of `REQ-XXX` to an array of test nodeids - `requirements`: map of `REQ-XXX` to an array of test nodeids
- `unmapped_tests`: tests with no requirement mapping - `unmapped_tests`: tests with no requirement mapping
- `files`: relative locations of key artifacts - `files`: relative locations of key artifacts
- `reports/summary.md` - `reports/summary.md`
- Human-readable summary with counts and quick artifact links - Human-readable summary with counts and quick artifact links
## Error handling ## Error handling
Artifact writes are wrapped in try/except to avoid failing the test run if the filesystem is read-only or unavailable. Any write failure is logged to the terminal. Artifact writes are wrapped in try/except to avoid failing the test run if the filesystem is read-only or unavailable. Any write failure is logged to the terminal.
## Extensibility ideas ## Extensibility ideas
- Add more normalized marker families (e.g., `capability_*`, `risk_*`). - Add more normalized marker families (e.g., `capability_*`, `risk_*`).
- Emit CSV or Excel in addition to JSON/Markdown. - Emit CSV or Excel in addition to JSON/Markdown.
- Include per-test durations and flakiness stats in the summary. - Include per-test durations and flakiness stats in the summary.
- Support a `--requirement` CLI filter that selects tests by normalized req IDs. - Support a `--requirement` CLI filter that selects tests by normalized req IDs.

View File

@@ -1,188 +1,220 @@
# Using the ECU Test Framework # Using the ECU Test Framework
This guide shows common ways to run the test framework: from fast local mock runs to full hardware loops, CI, and Raspberry Pi deployments. Commands use Windows PowerShell (as your default shell). This guide shows common ways to run the test framework: from fast local mock runs to full hardware loops, CI, and Raspberry Pi deployments. Commands use Windows PowerShell (as your default shell).
## Prerequisites ## Prerequisites
- Python 3.x and a virtual environment - Python 3.x and a virtual environment
- Dependencies installed (see `requirements.txt`) - Dependencies installed (see `requirements.txt`)
- Optional: BabyLIN SDK files placed under `vendor/` as described in `vendor/README.md` when running hardware tests - For MUM hardware: Melexis `pylin` and `pymumclient` Python packages on `PYTHONPATH` (see `vendor/automated_lin_test/install_packages.sh`) plus a reachable MUM (default IP `192.168.7.2`)
- For BabyLIN (legacy) hardware: SDK files placed under `vendor/` as described in `vendor/README.md`
## Configuring tests
## Configuring tests
- Configuration is loaded from YAML files and can be selected via the environment variable `ECU_TESTS_CONFIG`.
- See `docs/02_configuration_resolution.md` for details and examples. - Configuration is loaded from YAML files and can be selected via the environment variable `ECU_TESTS_CONFIG`.
- See `docs/02_configuration_resolution.md` for details and examples.
Example PowerShell:
Example PowerShell:
```powershell
# Use a mock-only config for fast local runs ```powershell
$env:ECU_TESTS_CONFIG = ".\config\mock.yml" # Use a mock-only config for fast local runs
$env:ECU_TESTS_CONFIG = ".\config\mock.yml"
# Use a hardware config with BabyLIN SDK wrapper
$env:ECU_TESTS_CONFIG = ".\config\hardware_babylin.yml" # Use a hardware config with the MUM (current default)
``` $env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"
Quick try with provided examples: # Use a hardware config with the BabyLIN SDK wrapper (legacy)
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"
```powershell ```
# Point to the combined examples file
$env:ECU_TESTS_CONFIG = ".\config\examples.yaml" Quick try with provided examples:
# The 'active' section defaults to the mock profile; run non-hardware tests
pytest -m "not hardware" -v ```powershell
# Edit 'active' to the babylin profile (or point to babylin.example.yaml) and run hardware tests # Point to the combined examples file
``` $env:ECU_TESTS_CONFIG = ".\config\examples.yaml"
``` # The 'active' section defaults to the mock profile; run non-hardware tests
pytest -m "not hardware" -v
## Running locally (mock interface) # Edit 'active' to the mum or babylin profile (or point to mum.example.yaml /
# babylin.example.yaml) and run hardware tests
Use the mock interface to develop tests quickly without hardware: ```
```powershell ## Running locally (mock interface)
# Run all mock tests with HTML and JUnit outputs (see pytest.ini defaults)
pytest Use the mock interface to develop tests quickly without hardware:
# Run only smoke tests (mock) and show progress ```powershell
pytest -m smoke -q # Run all mock tests with HTML and JUnit outputs (see pytest.ini defaults)
pytest
# Filter by test file or node id
pytest tests\test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo -q # Run only smoke tests (mock) and show progress
``` pytest -m smoke -q
What you get: # Filter by test file or node id
- Fast execution, deterministic results pytest tests\test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo -q
- Reports in `reports/` (HTML, JUnit, coverage JSON, CI summary) ```
Open the HTML report on Windows: What you get:
- Fast execution, deterministic results
```powershell - Reports in `reports/` (HTML, JUnit, coverage JSON, CI summary)
start .\reports\report.html
``` Open the HTML report on Windows:
## Running on hardware (BabyLIN SDK wrapper) ```powershell
start .\reports\report.html
1) Place SDK files per `vendor/README.md`. ```
2) Select a config that defines `interface.type: babylin`, `sdf_path`, and `schedule_nr`.
3) Markers allow restricting to hardware tests. ## Running on hardware (MUM — current default)
```powershell 1) Install Melexis `pylin` and `pymumclient` (see `vendor/automated_lin_test/install_packages.sh` — on Windows, point `pip` at a wheel or extend `PYTHONPATH` to the Melexis IDE site-packages).
# Example environment selection 2) Make sure the MUM is reachable: `ping 192.168.7.2`.
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml" 3) Select a config that defines `interface.type: mum` plus `host`/`lin_device`/`power_device`.
# Run only hardware tests ```powershell
pytest -m "hardware and babylin" $env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"
# Run the schedule smoke only # Run only the MUM-marked hardware tests
pytest tests\test_babylin_hardware_schedule_smoke.py -q pytest -m "hardware and mum" -v
```
# Run a single MUM test by file
Tips: pytest tests\hardware\test_e2e_mum_led_activate.py -q
- If multiple devices are attached, update your config to select the desired port (future enhancement) or keep only one connected. ```
- On timeout, tests often accept None to avoid flakiness; increase timeouts if your bus is slow.
- Master request behavior: the adapter prefers `BLC_sendRawMasterRequest(channel, id, length)`; it falls back to the bytes variant or a header+receive strategy as needed. The mock covers both forms. Tips:
- The MUM owns ECU power on `power_out0`; it powers up automatically in `connect()` and powers down on `disconnect()`. The Owon PSU is independent and can be left disabled (`power_supply.enabled: false`).
## Selecting tests with markers - The MUM is master-driven: `lin.receive(id)` requires a frame ID. The default `frame_lengths` covers ALM_Status (4 B) and ALM_Req_A (8 B); add others in YAML when you need slave-published frames at non-standard lengths.
- For BSM-SNPD diagnostic frames (service ID 0xB5), use `lin.send_raw(bytes)` — it routes through the transport layer's `ld_put_raw`, which uses LIN 1.x **Classic** checksum. `send()` uses Enhanced and the firmware will reject these frames.
Markers in use:
- `smoke`: quick confidence tests ## Running on hardware (BabyLIN SDK wrapper — legacy)
- `hardware`: needs real device
- `babylin`: targets the BabyLIN SDK adapter 1) Place SDK files per `vendor/README.md`.
- `req_XXX`: requirement mapping (e.g., `@pytest.mark.req_001`) 2) Select a config that defines `interface.type: babylin`, `sdf_path`, and `schedule_nr`.
3) Markers allow restricting to hardware tests.
Examples:
```powershell
```powershell $env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"
# Only smoke tests (mock + hardware smoke)
pytest -m smoke # Run only hardware tests
pytest -m "hardware and babylin"
# Run the schedule smoke only
pytest tests\test_babylin_hardware_schedule_smoke.py -q
```

Tips:

- If multiple devices are attached, update your config to select the desired port (future enhancement) or keep only one connected.
- On timeout, tests often accept None to avoid flakiness; increase timeouts if your bus is slow.
- Master request behavior: the adapter prefers `BLC_sendRawMasterRequest(channel, id, length)`; it falls back to the bytes variant or a header+receive strategy as needed. The mock covers both forms.
- `interface.schedule_nr: -1` defers schedule start to the test code (useful when the test wants to pick a specific schedule by name via `lin.start_schedule("CCO")`).

## Selecting tests with markers

Markers in use:
- `smoke`: quick confidence tests
- `hardware`: needs real device (any LIN master)
- `mum`: targets the Melexis Universal Master adapter (current default)
- `babylin`: targets the legacy BabyLIN SDK adapter
- `unit`: pure unit tests (no hardware, no external I/O)
- `req_XXX`: requirement mapping (e.g., `@pytest.mark.req_001`)

Examples:

```powershell
# Only smoke tests (mock + hardware smoke)
pytest -m smoke

# Requirements-based selection (docstrings and markers are normalized)
pytest -k REQ-001
```

## Enriched reporting

- HTML report includes custom columns (Title, Requirements)
- JUnit XML written for CI
- `reports/requirements_coverage.json` maps requirement IDs to tests and lists unmapped tests
- `reports/summary.md` aggregates key counts (pass/fail/etc.)

See `docs/03_reporting_and_metadata.md` and `docs/11_conftest_plugin_overview.md`.

To verify the reporting pipeline end-to-end, run the plugin self-test:

```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```

To generate two separate HTML/JUnit reports (unit vs non-unit):

```powershell
./scripts/run_two_reports.ps1
```

## Writing well-documented tests

Use a docstring template so the plugin can extract metadata:

```python
"""
Title: <short title>

Description:
<what the test validates and why>

Requirements: REQ-001, REQ-002

Test Steps:
1. <step one>
2. <step two>

Expected Result:
<succinct expected outcome>
"""
```

Tip: For runtime properties in reports, prefer the shared `rp` fixture (wrapper around `record_property`) and use standardized keys from `docs/15_report_properties_cheatsheet.md`.

## Continuous Integration (CI)

- Run `pytest` with your preferred markers in your pipeline.
- Publish artifacts from `reports/` (HTML, JUnit, coverage JSON, summary.md).
- Optionally parse `requirements_coverage.json` to power dashboards and gates.

Example PowerShell (local CI mimic):

```powershell
# Run smoke tests and collect reports
pytest -m smoke --maxfail=1 -q
```

## Raspberry Pi / Headless usage

- Follow `docs/09_raspberry_pi_deployment.md` to set up a venv and systemd service
- For a golden image approach, see `docs/10_build_custom_image.md`

Running tests headless via systemd typically involves:
- A service that sets `ECU_TESTS_CONFIG` to a hardware YAML
- Running `pytest -m "hardware and mum"` (or `"hardware and babylin"`) on boot or via timer

## Troubleshooting quick hits

- ImportError for `pylin` / `pymumclient`: install Melexis packages (`vendor/automated_lin_test/install_packages.sh`); the MUM adapter raises a clear error pointing at this script.
- "interface.host is required when interface.type == 'mum'": set `interface.host` in YAML.
- MUM unreachable: `ping 192.168.7.2`; check the USB-RNDIS link.
- ImportError for `BabyLIN_library`: verify placement under `vendor/` and native library presence.
- No BabyLIN devices found: check USB connection, drivers, and permissions.
- Timeouts on receive: increase `timeout` or verify schedule activity and SDF correctness.
- Missing reports: ensure `pytest.ini` includes the HTML/JUnit plugins and the custom plugin is loaded.

## Power supply (Owon) hardware test

Enable `power_supply` in your config and set the serial port, then run the dedicated test or the quick demo script.

```powershell
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port in .\config\owon_psu.yaml or set values in config\test_config.yaml
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
python .\vendor\Owon\owon_psu_quick_demo.py
```

See also: `docs/14_power_supply.md` for details and troubleshooting.

View File

@ -1,125 +1,140 @@
# Unit Testing Guide

This guide explains how the project's unit tests are organized, how to run them (with and without markers), how coverage is generated, and tips for writing effective tests.

## Why unit tests?

- Fast feedback without hardware
- Validate contracts (config loader, frames, adapters, flashing scaffold)
- Keep behavior stable as the framework evolves

## Test layout

- `tests/unit/` — pure unit tests (no hardware, no external I/O)
  - `test_config_loader.py` — config precedence and defaults
  - `test_linframe.py` — `LinFrame` validation
  - `test_babylin_adapter_mocked.py` — BabyLIN adapter error paths with a mocked SDK wrapper
  - `test_mum_adapter_mocked.py` — MUM adapter (`MumLinInterface`) plumbing exercised through fake `pylin` / `pymumclient` modules
  - `test_hex_flasher.py` — flashing scaffold against a stub LIN interface
- `tests/plugin/` — plugin self-tests using `pytester`
  - `test_conftest_plugin_artifacts.py` — verifies JSON coverage and summary artifacts
- `tests/` — existing smoke/mock/hardware tests

## Markers and selection

A `unit` marker is provided for easy selection:

- By marker (recommended):

```powershell
pytest -m unit -q
```

- By path:

```powershell
pytest tests\unit -q
```

- Exclude hardware:

```powershell
pytest -m "not hardware" -v
```

## Coverage

Coverage is enabled by default via `pytest.ini` addopts:

- `--cov=ecu_framework --cov-report=term-missing`

You'll see a summary with missing lines directly in the terminal. To disable coverage locally, override addopts on the command line:

```powershell
pytest -q -o addopts=""
```

(Optional) To produce an HTML coverage report, you can add `--cov-report=html` and open `htmlcov/index.html`.

## Writing unit tests

- Prefer small, focused tests
- For BabyLIN adapter logic, inject `wrapper_module` with the mock:

```python
from ecu_framework.lin.babylin import BabyLinInterface
from vendor import mock_babylin_wrapper as mock_bl

lin = BabyLinInterface(wrapper_module=mock_bl)
lin.connect()
# exercise send/receive/request
```

- For MUM adapter logic, inject `mum_module` and `pylin_module` with fakes
  (see `tests/unit/test_mum_adapter_mocked.py` for a full example):

```python
from ecu_framework.lin.mum import MumLinInterface

# fake_mum exposes MelexisUniversalMaster() returning an object with
# open_all(host) and get_device(name)
# fake_pylin exposes LinBusManager(linmaster) and LinDevice22(lin_bus)
lin = MumLinInterface(host="10.0.0.1", mum_module=fake_mum, pylin_module=fake_pylin)
lin.connect()
# exercise send / receive / send_raw / power_*
```

- To simulate specific SDK signatures, use a thin shim (see `_MockBytesOnly` in `tests/test_babylin_wrapper_mock.py`).
- Include a docstring with Title/Description/Requirements/Steps/Expected Result so the reporting plugin can extract metadata (this also helps the HTML report).
- When testing the plugin itself, use the `pytester` fixture to generate a temporary test run and validate artifacts exist and contain expected entries.

## Typical commands (Windows PowerShell)

- Run unit tests with coverage:

```powershell
pytest -m unit -q
```

- Run only plugin self-tests:

```powershell
pytest tests\plugin -q
```

- Run the specific plugin artifact test (verifies HTML/JUnit, summary, and coverage JSON under `reports/`):

```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```

- Run all non-hardware tests with verbose output:

```powershell
pytest -m "not hardware" -v
```

- Open the HTML report:

```powershell
start .\reports\report.html
```

- Generate two separate reports (unit vs non-unit):

```powershell
./scripts/run_two_reports.ps1
```

## CI suggestions

- Run `-m unit` and `tests/plugin` on every PR
- Optionally run mock integration/smoke on PR
- Run hardware test matrix on a nightly or on-demand basis (`-m "hardware and mum"` or `-m "hardware and babylin"`)
- Publish artifacts from `reports/`: HTML/JUnit/coverage JSON/summary MD

## Troubleshooting

- Coverage not showing: ensure `pytest-cov` is installed (see `requirements.txt`) and `pytest.ini` addopts include `--cov`.
- Import errors: activate the venv and reinstall requirements.
- Plugin artifacts missing under `pytester`: verify tests write to `reports/` (our plugin creates the folder automatically in `pytest_configure`).

View File

@ -1,103 +1,110 @@
# Power Supply (Owon) — control, configuration, tests, and quick demo

This guide covers using the Owon bench power supply via SCPI over serial with the framework.

> **MUM users**: the Melexis Universal Master has its own power output on
> `power_out0` and the MUM adapter calls `power_up()` / `power_down()` in
> `connect()` / `disconnect()` automatically. The Owon PSU is **not required**
> for the standard MUM flow — leave `power_supply.enabled: false`. The Owon
> remains useful for over/under-voltage scenarios, separate-rail tests, or
> when running with the legacy BabyLIN adapter (which has no built-in power).

- Library: `ecu_framework/power/owon_psu.py`
- Hardware test: `tests/hardware/test_owon_psu.py`
- quick demo script: `vendor/Owon/owon_psu_quick_demo.py`
- Configuration: `config/test_config.yaml` (`power_supply`), optionally merged from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG`

## Install dependencies

```powershell
pip install -r .\requirements.txt
```

## Configure

You can keep PSU settings centrally or in a machine-specific YAML.

- Central: `config/test_config.yaml` → `power_supply` section
- Separate: `config/owon_psu.yaml` (or `OWON_PSU_CONFIG` env var)

Supported keys:

```yaml
power_supply:
  enabled: true
  port: COM4        # e.g., COM4 (Windows) or /dev/ttyUSB0 (Linux)
  baudrate: 115200
  timeout: 1.0
  eol: "\n"         # or "\r\n" if required
  parity: N         # N|E|O
  stopbits: 1       # 1|2
  xonxoff: false
  rtscts: false
  dsrdtr: false
  idn_substr: OWON
  do_set: false
  set_voltage: 5.0
  set_current: 0.1
```

The central config loader automatically merges `config/owon_psu.yaml` (or the path in `OWON_PSU_CONFIG`) into `power_supply`.

## Run the hardware test

Skips unless `power_supply.enabled` is true and `port` is set.

```powershell
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
```

What it does:
- Opens serial with your configured line params
- Queries `*IDN?` (checks `idn_substr` if provided)
- If `do_set` is true, sets voltage/current, enables output briefly, then disables

## Use the library programmatically

```python
from ecu_framework.power import OwonPSU, SerialParams

params = SerialParams(baudrate=115200, timeout=1.0)
with OwonPSU("COM4", params, eol="\n") as psu:
    print(psu.idn())
    psu.set_voltage(1, 5.0)
    psu.set_current(1, 0.1)
    psu.set_output(True)
    # ... measure, etc.
    psu.set_output(False)
```

Notes:
- Commands use newline-terminated writes; reads use `readline()`
- SCPI forms: `SOUR:VOLT`, `SOUR:CURR`, `MEAS:VOLT?`, `MEAS:CURR?`, `output 0/1`, `output?`

## quick demo script

The quick demo reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml` and performs a small sequence.

```powershell
python .\vendor\Owon\owon_psu_quick_demo.py
```

It also scans ports with `*IDN?` using `scan_ports()`.

## Troubleshooting

- Empty `*IDN?` or timeouts:
  - Verify COM port and exclusivity (no other program holding it)
  - Try `eol: "\r\n"`
  - Adjust `parity` and `stopbits` per your device manual
- Windows COM > 9:
  - Most Python code accepts `COM10` directly; if needed in other tools, use `\\.\\COM10`
- Flow control:
  - Keep `xonxoff`, `rtscts`, `dsrdtr` false unless required

## Related files

- `ecu_framework/power/owon_psu.py` — PSU controller (pyserial)
- `tests/hardware/test_owon_psu.py` — Hardware test using central config
- `vendor/Owon/owon_psu_quick_demo.py` — Quick demo runner
- `config/owon_psu.example.yaml` — Example machine-specific YAML

View File

@ -1,53 +1,53 @@
# Report properties cheatsheet (record_property / rp)

Use these standardized keys when calling `record_property("key", value)` or the `rp("key", value)` helper.
This keeps reports consistent and easy to scan across suites.

## General

- test_phase: setup | call | teardown (if you want to distinguish)
- environment: local | ci | lab
- config_source: defaults | file | env | env+overrides (already used in unit tests)

## LIN (common)

- lin_type: mock | babylin
- tx_id: hex string or int (e.g., "0x12")
- tx_data: list of ints (bytes)
- rx_present: bool
- rx_id: hex string or int
- rx_data: list of ints
- timeout_s: float seconds

## BabyLIN specifics

- sdf_path: string
- schedule_nr: int
- receive_result: frame | timeout
- wrapper: mock_bl | _MockBytesOnly | real (for future)

## Mock-specific

- expected_data: list of ints

## Power supply (PSU)

- psu_idn: string from `*IDN?`
- output_status_before: bool
- output_status_after: bool
- set_voltage: float (V)
- set_current: float (A)
- measured_voltage: float (V)
- measured_current: float (A)
- psu_port: e.g., COM4 or /dev/ttyUSB0 (if helpful)

## Flashing

- hex_path: string
- sent_count: int (frames sent by stub/mock)
- flash_result: ok | fail (for future real flashing)

## Configuration highlights

- interface_type: mock | babylin
- interface_channel: int
- flash_enabled: bool

## Tips

- Prefer simple, lowercase snake_case keys
- Use lists for byte arrays so they render clearly in JSON and HTML
- Log both expected and actual when asserting patterns (e.g., deterministic responses)
- Keep units in the key name when helpful (voltage/current include V/A in the name)

167
docs/16_mum_internals.md Normal file
View File

@ -0,0 +1,167 @@
# MUM Adapter Internals (Melexis Universal Master)
This document describes how the `MumLinInterface` adapter wraps the Melexis
`pymumclient` and `pylin` packages, how frames flow across the LIN bus, and
which MUM-specific behaviors callers need to understand.
## Overview
- Location: `ecu_framework/lin/mum.py`
- Vendor reference scripts: `vendor/automated_lin_test/` (`test_led_control.py`, `test_auto_addressing.py`, `power_cycle.py`)
- Default MUM endpoint: `192.168.7.2` over USB-RNDIS
- LIN device name on MUM: `lin0`
- Power-control device on MUM: `power_out0`
- Required Python packages: `pylin`, `pymumclient` (Melexis-supplied; not on PyPI). See `vendor/automated_lin_test/install_packages.sh`.
## What the MUM gives you that BabyLIN doesn't
- **Built-in power control** on `power_out0` — the adapter calls `power_up()` in `connect()` and `power_down()` in `disconnect()`. No external Owon PSU needed for the standard flow.
- **Network access**: the MUM is IP-reachable, so the host machine (Windows, Linux, Pi) does not need vendor native libraries — only the two Python packages.
- **Direct transport-layer access** for sending raw frames with LIN 1.x **Classic** checksum (required for BSM-SNPD diagnostic frames).
## What it doesn't give you
- **No passive listen.** The MUM is master-driven. To "receive" a slave-published frame, the master sends a header on that frame ID and the slave must respond. `MumLinInterface.receive(id=None)` raises `NotImplementedError` for that reason.
- **No SDF / schedule manager.** The adapter does not run a schedule; tests publish frames explicitly (or pull slave frames explicitly) on each call.
## Mermaid: connect / receive / send
```mermaid
sequenceDiagram
autonumber
participant T as Test/Fixture
participant A as MumLinInterface
participant MM as pymumclient (MelexisUniversalMaster)
participant PL as pylin (LinDevice22 / TransportLayer)
participant E as ECU
T->>A: connect()
A->>MM: MelexisUniversalMaster()
A->>MM: open_all(host)
A->>MM: get_device('power_out0')
A->>MM: get_device('lin0')
A->>MM: linmaster.setup()
A->>PL: LinBusManager(linmaster)
A->>PL: LinDevice22(lin_bus); set baudrate
A->>PL: get_device('bus/transport_layer')
A->>MM: power_control.power_up()
Note over A: sleep(boot_settle_seconds)
A-->>T: connected
T->>A: receive(id=0x11)
A->>PL: send_message(master_to_slave=False, frame_id=0x11, data_length=4)
PL->>E: header for 0x11
E-->>PL: response bytes
PL-->>A: bytes
A-->>T: LinFrame(id=0x11, data=...)
T->>A: send(LinFrame(0x0A, payload))
A->>PL: send_message(master_to_slave=True, frame_id=0x0A, data_length=8, data=payload)
PL->>E: header + payload (Enhanced checksum)
T->>A: send_raw(b"\x7F\x06\xB5...")
A->>PL: transport_layer.ld_put_raw(data, baudrate)
Note over PL,E: LIN 1.x Classic checksum (required for BSM-SNPD)
T->>A: disconnect()
A->>MM: power_control.power_down()
A->>MM: linmaster.teardown()
```
## Public API
`MumLinInterface(host, lin_device='lin0', power_device='power_out0', baudrate=19200, frame_lengths=None, default_data_length=8, boot_settle_seconds=0.5)`
LinInterface contract (matches Mock and BabyLIN adapters):
- `connect()` — opens MUM, sets up LIN, **and powers up the ECU**
- `disconnect()` — powers down and tears down (best-effort)
- `send(frame: LinFrame)` — publishes a master-to-slave frame using Enhanced checksum
- `receive(id: int, timeout: float = 1.0) -> LinFrame | None` — triggers a slave read for `id`. The `timeout` argument is informational; the underlying `pylin` call is synchronous. Any pylin exception is treated as "no data" and returns `None`. Passing `id=None` raises `NotImplementedError`.
MUM-only extras:
- `send_raw(bytes)` — sends a raw LIN frame using **Classic** checksum via the transport layer's `ld_put_raw`. Use this for BSM-SNPD diagnostic frames; the firmware will reject them if Enhanced is used.
- `power_up()` / `power_down()` — direct control over `power_out0`
- `power_cycle(wait=2.0)` — convenience: `power_down()`, sleep, `power_up()`, then `boot_settle_seconds` sleep
## Frame-length resolution
Because the MUM is master-driven, every receive needs to know how many bytes
to ask for. The adapter resolves this from `frame_lengths`:
1. Built-in defaults for the 4SEVEN library (ALM_Status=4, ALM_Req_A=8, ConfigFrame=3, PWM_Frame=8, VF_Frame=8, Tj_Frame=8, PWM_wo_Comp=8, NVM_Debug=8).
2. Anything in the constructor's `frame_lengths` argument **overrides** the defaults.
3. If a frame ID isn't in the map, `default_data_length` (default 8) is used.
In YAML, hex keys work:
```yaml
interface:
type: mum
frame_lengths:
0x0A: 8
0x11: 4
```
The config loader coerces hex strings (`"0x0A"`) and integers alike.
## Diagnostic frames (BSM-SNPD)
The vendor's `test_auto_addressing.py` flow runs LIN 2.1 BSM-SNPD via raw
frames on `0x3C` (MasterReq). The framework supports the same flow:
```python
# inside a test that already has the MUM 'lin' fixture
data = bytearray([
0x7F, # NAD broadcast
0x06, # PCI: 6 data bytes
0xB5, # SID: BSM-SNPD
0xFF, # Supplier ID LSB
0x7F, # Supplier ID MSB
0x01, # subfunction (INIT)
0x02, # param 1
0xFF, # param 2
])
lin.send_raw(bytes(data))
```
`send_raw()` calls `transport_layer.ld_put_raw(data=..., baudrate=...)`
which uses LIN 1.x Classic checksum. Using `lin.send()` for these frames
would compute Enhanced checksum and the firmware would discard the frame.
## Error surfaces
- **`pymumclient is not installed`** / **`pylin is not installed`** — raised on `connect()` if the Melexis packages aren't importable. The error message points at `vendor/automated_lin_test/install_packages.sh`.
- **`MUM not connected`** — calling `send` / `receive` / `send_raw` before `connect()` (or after `disconnect()`).
- **`MUM transport layer not available`** — raised by `send_raw` when the LIN device didn't expose `bus/transport_layer`. Practically always available on MUM firmware that supports diagnostic frames.
- **pylin exceptions during `receive`** — converted to `None` (treated as a timeout / no-data). Use this to drive timeout-tolerant tests without try/except in the test body.
## Unit testing without hardware
The adapter accepts `mum_module=` and `pylin_module=` constructor arguments
that bypass the real package imports. Tests in
`tests/unit/test_mum_adapter_mocked.py` use simple in-memory fakes to drive
the connect / send / receive / send_raw / power-cycle paths end to end. See
that file for a complete shim implementation.
```python
from ecu_framework.lin.mum import MumLinInterface
iface = MumLinInterface(
host="10.0.0.1",
boot_settle_seconds=0.0,
mum_module=fake_mum,
pylin_module=fake_pylin,
)
iface.connect()
# ... assertions ...
iface.disconnect()
```
## Notes and pitfalls
- **Boot settling**: After `power_up()` the adapter sleeps `boot_settle_seconds` (default 0.5 s) so the ECU has time to come up before the first frame. Increase if your ECU boots slowly.
- **Owon PSU coexistence**: the MUM provides power on `power_out0` independently of `ecu_framework/power/`. Leave `power_supply.enabled: false` for the standard MUM flow; enable it only for over/under-voltage scenarios that need a separate, programmable rail.
- **Networking**: USB-RNDIS bring-up can take a few seconds after plugging in the MUM. If `connect()` fails with a connection-refused, `ping 192.168.7.2` first.
- **Multiple MUMs**: only one MUM is supported per `MumLinInterface` instance. Different `host` addresses can run different fixture sessions side-by-side.

View File

@ -1,71 +1,71 @@
# Developer Commit Guide # Developer Commit Guide
This guide explains exactly what to commit to source control for this repository, and what to keep out. It also includes a suggested commit message and safe commands to stage changes. This guide explains exactly what to commit to source control for this repository, and what to keep out. It also includes a suggested commit message and safe commands to stage changes.
## Commit these files ## Commit these files
### Core framework (source) ### Core framework (source)
- `ecu_framework/config.py` - `ecu_framework/config.py`
- `ecu_framework/lin/base.py` - `ecu_framework/lin/base.py`
- `ecu_framework/lin/mock.py` - `ecu_framework/lin/mock.py`
- `ecu_framework/lin/babylin.py` - `ecu_framework/lin/babylin.py`
- `ecu_framework/flashing/hex_flasher.py` - `ecu_framework/flashing/hex_flasher.py`
### Pytest plugin and config ### Pytest plugin and config
- `conftest_plugin.py` - `conftest_plugin.py`
Generates HTML columns, requirements coverage JSON, and CI summary Generates HTML columns, requirements coverage JSON, and CI summary
- `pytest.ini` - `pytest.ini`
- `requirements.txt` - `requirements.txt`
### Tests and fixtures ### Tests and fixtures
- `tests/conftest.py` - `tests/conftest.py`
- `tests/test_smoke_mock.py` - `tests/test_smoke_mock.py`
- `tests/test_babylin_hardware_smoke.py` (if present) - `tests/test_babylin_hardware_smoke.py` (if present)
- `tests/test_hardware_placeholder.py` (if present) - `tests/test_hardware_placeholder.py` (if present)
### Documentation ### Documentation
- `README.md` - `README.md`
- `TESTING_FRAMEWORK_GUIDE.md` - `TESTING_FRAMEWORK_GUIDE.md`
- `docs/README.md` - `docs/README.md`
- `docs/01_run_sequence.md` - `docs/01_run_sequence.md`
- `docs/02_configuration_resolution.md` - `docs/02_configuration_resolution.md`
- `docs/03_reporting_and_metadata.md` - `docs/03_reporting_and_metadata.md`
- `docs/04_lin_interface_call_flow.md` - `docs/04_lin_interface_call_flow.md`
- `docs/05_architecture_overview.md` - `docs/05_architecture_overview.md`
- `docs/06_requirement_traceability.md` - `docs/06_requirement_traceability.md`
- `docs/07_flash_sequence.md` - `docs/07_flash_sequence.md`
- `docs/08_babylin_internals.md` - `docs/08_babylin_internals.md`
### Vendor guidance (no binaries) ### Vendor guidance (no binaries)
- `vendor/README.md` - `vendor/README.md`
- Any headers in `vendor/` (if added per SDK) - Any headers in `vendor/` (if added per SDK)
### Housekeeping ### Housekeeping
- `.gitignore` - `.gitignore`
Ignores reports and vendor binaries Ignores reports and vendor binaries
- `reports/.gitkeep` - `reports/.gitkeep`
Retains folder structure without committing artifacts Retains folder structure without committing artifacts
## Do NOT commit (ignored or should be excluded) ## Do NOT commit (ignored or should be excluded)
- Virtual environments: `.venv/`, `venv/`, etc. - Virtual environments: `.venv/`, `venv/`, etc.
- Generated test artifacts: - Generated test artifacts:
`reports/report.html`, `reports/junit.xml`, `reports/summary.md`, `reports/requirements_coverage.json` `reports/report.html`, `reports/junit.xml`, `reports/summary.md`, `reports/requirements_coverage.json`
<!-- - Vendor binaries: anything under `vendor/**` with `.dll`, `.lib`, `.pdb` keep them for now --> <!-- - Vendor binaries: anything under `vendor/**` with `.dll`, `.lib`, `.pdb` keep them for now -->
- Python caches: `__pycache__/`, `.pytest_cache/` - Python caches: `__pycache__/`, `.pytest_cache/`
- Local env files: `.env` - Local env files: `.env`
## Safe commit commands (PowerShell) ## Safe commit commands (PowerShell)
```powershell ```powershell
# Stage everything except what .gitignore already excludes # Stage everything except what .gitignore already excludes
git add -A git add -A
# Commit with a helpful message # Commit with a helpful message
git commit -m "ECU framework: docs, reporting plugin (HTML metadata + requirements JSON + CI summary), .gitignore updates" git commit -m "ECU framework: docs, reporting plugin (HTML metadata + requirements JSON + CI summary), .gitignore updates"
``` ```
## Notes ## Notes
<!-- - Do not commit BabyLin DLLs or proprietary binaries. Keep only the placement/readme and headers. Keep them for now --> <!-- - Do not commit BabyLin DLLs or proprietary binaries. Keep only the placement/readme and headers. Keep them for now -->
- The plugin writes CI-friendly artifacts into `reports/`; they're ignored by default but published in CI. - The plugin writes CI-friendly artifacts into `reports/`; they're ignored by default but published in CI.

View File

@ -1,26 +1,29 @@
# Documentation Index # Documentation Index
A guided tour of the ECU testing framework. Start here: A guided tour of the ECU testing framework. Start here:
1. `01_run_sequence.md` — End-to-end run sequence and call flow 1. `01_run_sequence.md` — End-to-end run sequence and call flow
2. `02_configuration_resolution.md` — How configuration is loaded and merged 2. `02_configuration_resolution.md` — How configuration is loaded and merged
3. `03_reporting_and_metadata.md` — How test documentation becomes report metadata 3. `03_reporting_and_metadata.md` — How test documentation becomes report metadata
4. `11_conftest_plugin_overview.md` — Custom pytest plugin: hooks, call sequence, and artifacts 4. `11_conftest_plugin_overview.md` — Custom pytest plugin: hooks, call sequence, and artifacts
5. `04_lin_interface_call_flow.md` — LIN abstraction and adapter behavior (Mock vs BabyLIN SDK wrapper) 5. `04_lin_interface_call_flow.md` — LIN abstraction and adapter behavior (Mock, MUM, legacy BabyLIN)
6. `05_architecture_overview.md` — High-level architecture and components 6. `05_architecture_overview.md` — High-level architecture and components
7. `06_requirement_traceability.md` — Requirement markers and coverage visuals 7. `06_requirement_traceability.md` — Requirement markers and coverage visuals
8. `07_flash_sequence.md` — ECU flashing workflow and sequence diagram 8. `07_flash_sequence.md` — ECU flashing workflow and sequence diagram
9. `08_babylin_internals.md` — BabyLIN SDK wrapper internals and call flow 9. `08_babylin_internals.md` — BabyLIN SDK wrapper internals and call flow (legacy)
9. `DEVELOPER_COMMIT_GUIDE.md` — What to commit vs ignore, commands 10. `16_mum_internals.md` — MUM (Melexis Universal Master) adapter internals and call flow
10. `09_raspberry_pi_deployment.md` — Run on Raspberry Pi (venv, service, hardware notes) 11. `DEVELOPER_COMMIT_GUIDE.md` — What to commit vs ignore, commands
11. `10_build_custom_image.md` — Build a custom Raspberry Pi OS image with the framework baked in 12. `09_raspberry_pi_deployment.md` — Run on Raspberry Pi (venv, service, hardware notes)
12. `12_using_the_framework.md` — Practical usage: local, hardware, CI, and Pi 13. `10_build_custom_image.md` — Build a custom Raspberry Pi OS image with the framework baked in
13. `13_unit_testing_guide.md` — Unit tests layout, markers, coverage, and tips 14. `12_using_the_framework.md` — Practical usage: local, hardware (MUM/BabyLIN), CI, and Pi
14. `14_power_supply.md` — Owon PSU control, configuration, tests, and quick demo script 15. `13_unit_testing_guide.md` — Unit tests layout, markers, coverage, and tips
15. `15_report_properties_cheatsheet.md` — Standardized keys for record_property/rp across suites 16. `14_power_supply.md` — Owon PSU control, configuration, tests, and quick demo script
17. `15_report_properties_cheatsheet.md` — Standardized keys for record_property/rp across suites
Related references:
- Root project guide: `../README.md` Related references:
- Full framework guide: `../TESTING_FRAMEWORK_GUIDE.md`
- BabyLIN placement and integration: `../vendor/README.md` - Root project guide: `../README.md`
- PSU quick demo and scripts: `../vendor/Owon/` - Full framework guide: `../TESTING_FRAMEWORK_GUIDE.md`
- BabyLIN placement and integration: `../vendor/README.md`
- MUM source scripts and protocol details: `../vendor/automated_lin_test/README.md`
- PSU quick demo and scripts: `../vendor/Owon/`

View File

@ -1,15 +1,15 @@
""" """
ECU Tests framework package. ECU Tests framework package.
Provides: Provides:
- config: YAML configuration loader and types - config: YAML configuration loader and types
- lin: LIN interface abstraction and adapters (mock and BabyLIN) - lin: LIN interface abstraction and adapters (mock and BabyLIN)
Package version is exposed as __version__. Package version is exposed as __version__.
""" """
__all__ = [ __all__ = [
"config", "config",
"lin", "lin",
] ]
__version__ = "0.1.0" __version__ = "0.1.0"

View File

@ -1,236 +1,266 @@
from __future__ import annotations # Postponed annotations for forward references and speed from __future__ import annotations # Postponed annotations for forward references and speed
import os # For environment variables and filesystem checks import os # For environment variables and filesystem checks
import pathlib # Path handling across platforms import pathlib # Path handling across platforms
from dataclasses import dataclass, field # Lightweight typed containers from dataclasses import dataclass, field # Lightweight typed containers
from typing import Any, Dict, Optional # Type hints for clarity from typing import Any, Dict, Optional # Type hints for clarity
import yaml # Safe YAML parsing for configuration files import yaml # Safe YAML parsing for configuration files
@dataclass @dataclass
class FlashConfig: class FlashConfig:
"""Flashing-related configuration. """Flashing-related configuration.
enabled: Whether to trigger flashing at session start. enabled: Whether to trigger flashing at session start.
hex_path: Path to the firmware HEX file (if any). hex_path: Path to the firmware HEX file (if any).
""" """
enabled: bool = False # Off by default enabled: bool = False # Off by default
hex_path: Optional[str] = None # No default file path hex_path: Optional[str] = None # No default file path
@dataclass @dataclass
class InterfaceConfig: class InterfaceConfig:
"""LIN interface configuration. """LIN interface configuration.
type: Adapter type name: "mock" for the simulated adapter, "babylin" for real hardware via SDK. type: Adapter type "mock" (simulated), "babylin" (legacy BabyLIN SDK), or "mum"
channel: Channel index to use (0-based in most SDKs); default chosen by project convention. (Melexis Universal Master).
bitrate: Informational; typically SDF/schedule defines effective bitrate for BabyLIN. channel: Channel index to use (0-based in most SDKs); BabyLIN-specific.
dll_path: Legacy/optional pointer to vendor DLLs when using ctypes (not used by SDK wrapper). bitrate: Effective LIN bitrate; the MUM uses this directly, the BabyLIN SDF may override.
node_name: Optional friendly name for display/logging. dll_path: Legacy/optional pointer to vendor DLLs when using ctypes (not used by SDK wrapper).
func_names: Legacy mapping for ctypes function names; ignored by SDK wrapper. node_name: Optional friendly name for display/logging.
sdf_path: Path to the SDF to load on connect (BabyLIN only). func_names: Legacy mapping for ctypes function names; ignored by SDK wrapper.
schedule_nr: Schedule index to start after connect (BabyLIN only). sdf_path: Path to the SDF to load on connect (BabyLIN only).
""" schedule_nr: Schedule index to start after connect (BabyLIN only). -1 = skip.
host: MUM IP address (MUM only).
type: str = "mock" # "mock" or "babylin" lin_device: MUM LIN device name (MUM only, default 'lin0').
channel: int = 1 # Default channel index (project-specific default) power_device: MUM power-control device name (MUM only, default 'power_out0').
bitrate: int = 19200 # Typical LIN bitrate; SDF may override boot_settle_seconds: Delay after MUM power-up before sending the first frame.
dll_path: Optional[str] = None # Legacy ctypes option; not used with SDK wrapper frame_lengths: Optional map of frame_id (int) -> data length (int) used by the
node_name: Optional[str] = None # Optional label for node/adapter MUM adapter when receiving slave-published frames.
func_names: Dict[str, str] = field(default_factory=dict) # Legacy ctypes mapping; safe to leave empty """
# SDK wrapper options
sdf_path: Optional[str] = None # Path to SDF file to load (BabyLIN) type: str = "mock" # "mock", "babylin", or "mum"
schedule_nr: int = 0 # Schedule number to start after connect (BabyLIN) channel: int = 1
bitrate: int = 19200
dll_path: Optional[str] = None
@dataclass node_name: Optional[str] = None
class EcuTestConfig: func_names: Dict[str, str] = field(default_factory=dict)
"""Top-level, fully-typed configuration for the framework. # BabyLIN-specific
sdf_path: Optional[str] = None
interface: Settings for LIN communication (mock or BabyLIN). schedule_nr: int = 0
flash: Optional flashing behavior configuration. # MUM-specific
""" host: Optional[str] = None
lin_device: str = "lin0"
interface: InterfaceConfig = field(default_factory=InterfaceConfig) power_device: str = "power_out0"
flash: FlashConfig = field(default_factory=FlashConfig) boot_settle_seconds: float = 0.5
# Serial power supply (e.g., Owon) configuration frame_lengths: Dict[int, int] = field(default_factory=dict)
# Test code can rely on these values to interact with PSU if enabled
power_supply: "PowerSupplyConfig" = field(default_factory=lambda: PowerSupplyConfig())
@dataclass
class EcuTestConfig:
@dataclass """Top-level, fully-typed configuration for the framework.
class PowerSupplyConfig:
"""Serial power supply configuration (e.g., Owon PSU). interface: Settings for LIN communication (mock or BabyLIN).
flash: Optional flashing behavior configuration.
enabled: Whether PSU tests/features should be active. """
port: Serial device (e.g., COM4 on Windows, /dev/ttyUSB0 on Linux).
baudrate/timeout/eol: Basic line settings; eol often "\n" or "\r\n". interface: InterfaceConfig = field(default_factory=InterfaceConfig)
parity: One of "N", "E", "O". flash: FlashConfig = field(default_factory=FlashConfig)
stopbits: 1 or 2. # Serial power supply (e.g., Owon) configuration
xonxoff/rtscts/dsrdtr: Flow control flags. # Test code can rely on these values to interact with PSU if enabled
idn_substr: Optional substring to assert in *IDN? responses. power_supply: "PowerSupplyConfig" = field(default_factory=lambda: PowerSupplyConfig())
do_set/set_voltage/set_current: Optional demo/test actions.
"""
@dataclass
enabled: bool = False class PowerSupplyConfig:
port: Optional[str] = None """Serial power supply configuration (e.g., Owon PSU).
baudrate: int = 115200
timeout: float = 1.0 enabled: Whether PSU tests/features should be active.
eol: str = "\n" port: Serial device (e.g., COM4 on Windows, /dev/ttyUSB0 on Linux).
parity: str = "N" baudrate/timeout/eol: Basic line settings; eol often "\n" or "\r\n".
stopbits: float = 1.0 parity: One of "N", "E", "O".
xonxoff: bool = False stopbits: 1 or 2.
rtscts: bool = False xonxoff/rtscts/dsrdtr: Flow control flags.
dsrdtr: bool = False idn_substr: Optional substring to assert in *IDN? responses.
idn_substr: Optional[str] = None do_set/set_voltage/set_current: Optional demo/test actions.
do_set: bool = False """
set_voltage: float = 1.0
set_current: float = 0.1 enabled: bool = False
port: Optional[str] = None
baudrate: int = 115200
DEFAULT_CONFIG_RELATIVE = pathlib.Path("config") / "test_config.yaml" # Default config path relative to repo root timeout: float = 1.0
ENV_CONFIG_PATH = "ECU_TESTS_CONFIG" # Env var to override config file location eol: str = "\n"
parity: str = "N"
stopbits: float = 1.0
def _deep_update(base: Dict[str, Any], updates: Dict[str, Any]) -> Dict[str, Any]: xonxoff: bool = False
"""Recursively merge dict 'updates' into dict 'base'. rtscts: bool = False
dsrdtr: bool = False
- Nested dicts are merged by key idn_substr: Optional[str] = None
- Scalars/collections at any level are replaced entirely do_set: bool = False
- Mutation occurs in-place on 'base' and the same object is returned set_voltage: float = 1.0
""" set_current: float = 0.1
for k, v in updates.items(): # Iterate all update keys
if isinstance(v, dict) and isinstance(base.get(k), dict): # Both sides dict → recurse
base[k] = _deep_update(base[k], v) DEFAULT_CONFIG_RELATIVE = pathlib.Path("config") / "test_config.yaml" # Default config path relative to repo root
else: # Otherwise replace ENV_CONFIG_PATH = "ECU_TESTS_CONFIG" # Env var to override config file location
base[k] = v
return base # Return the mutated base for chaining
def _deep_update(base: Dict[str, Any], updates: Dict[str, Any]) -> Dict[str, Any]:
"""Recursively merge dict 'updates' into dict 'base'.
def _to_dataclass(cfg: Dict[str, Any]) -> EcuTestConfig:
"""Convert a merged plain dict config into strongly-typed dataclasses. - Nested dicts are merged by key
- Scalars/collections at any level are replaced entirely
Defensive casting is used to ensure correct types even if YAML contains strings. - Mutation occurs in-place on 'base' and the same object is returned
""" """
iface = cfg.get("interface", {}) # Sub-config for interface for k, v in updates.items(): # Iterate all update keys
flash = cfg.get("flash", {}) # Sub-config for flashing if isinstance(v, dict) and isinstance(base.get(k), dict): # Both sides dict → recurse
psu = cfg.get("power_supply", {}) # Sub-config for power supply base[k] = _deep_update(base[k], v)
return EcuTestConfig( else: # Otherwise replace
interface=InterfaceConfig( base[k] = v
type=str(iface.get("type", "mock")).lower(), # Normalize to lowercase return base # Return the mutated base for chaining
channel=int(iface.get("channel", 1)), # Coerce to int
bitrate=int(iface.get("bitrate", 19200)), # Coerce to int
dll_path=iface.get("dll_path"), # Optional legacy field def _to_dataclass(cfg: Dict[str, Any]) -> EcuTestConfig:
node_name=iface.get("node_name"), # Optional friendly name """Convert a merged plain dict config into strongly-typed dataclasses.
func_names=dict(iface.get("func_names", {}) or {}), # Ensure a dict
sdf_path=iface.get("sdf_path"), # Optional SDF path Defensive casting is used to ensure correct types even if YAML contains strings.
schedule_nr=int(iface.get("schedule_nr", 0)), # Coerce to int """
), iface = cfg.get("interface", {}) # Sub-config for interface
flash=FlashConfig( flash = cfg.get("flash", {}) # Sub-config for flashing
enabled=bool(flash.get("enabled", False)), # Coerce to bool psu = cfg.get("power_supply", {}) # Sub-config for power supply
hex_path=flash.get("hex_path"), # Optional hex path # Coerce frame_lengths keys to int (YAML may parse numeric keys as int already,
), # but accept hex strings like "0x0A: 8" too).
power_supply=PowerSupplyConfig( raw_fl = iface.get("frame_lengths", {}) or {}
enabled=bool(psu.get("enabled", False)), frame_lengths: Dict[int, int] = {}
port=psu.get("port"), if isinstance(raw_fl, dict):
baudrate=int(psu.get("baudrate", 115200)), for k, v in raw_fl.items():
timeout=float(psu.get("timeout", 1.0)), try:
eol=str(psu.get("eol", "\n")), key = int(k, 0) if isinstance(k, str) else int(k)
parity=str(psu.get("parity", "N")), frame_lengths[key] = int(v)
stopbits=float(psu.get("stopbits", 1.0)), except (TypeError, ValueError):
xonxoff=bool(psu.get("xonxoff", False)), continue
rtscts=bool(psu.get("rtscts", False)),
dsrdtr=bool(psu.get("dsrdtr", False)), return EcuTestConfig(
idn_substr=psu.get("idn_substr"), interface=InterfaceConfig(
do_set=bool(psu.get("do_set", False)), type=str(iface.get("type", "mock")).lower(),
set_voltage=float(psu.get("set_voltage", 1.0)), channel=int(iface.get("channel", 1)),
set_current=float(psu.get("set_current", 0.1)), bitrate=int(iface.get("bitrate", 19200)),
), dll_path=iface.get("dll_path"),
) node_name=iface.get("node_name"),
func_names=dict(iface.get("func_names", {}) or {}),
sdf_path=iface.get("sdf_path"),
def load_config(workspace_root: Optional[str] = None, overrides: Optional[Dict[str, Any]] = None) -> EcuTestConfig: schedule_nr=int(iface.get("schedule_nr", 0)),
"""Load configuration from YAML file, environment, overrides, or defaults. host=iface.get("host"),
lin_device=str(iface.get("lin_device", "lin0")),
Precedence (highest to lowest): power_device=str(iface.get("power_device", "power_out0")),
1. in-memory 'overrides' dict boot_settle_seconds=float(iface.get("boot_settle_seconds", 0.5)),
2. YAML file specified by env var ECU_TESTS_CONFIG frame_lengths=frame_lengths,
3. YAML at ./config/test_config.yaml (relative to workspace_root) ),
4. built-in defaults in this function flash=FlashConfig(
""" enabled=bool(flash.get("enabled", False)), # Coerce to bool
# Start with built-in defaults; minimal, safe baseline hex_path=flash.get("hex_path"), # Optional hex path
base: Dict[str, Any] = { ),
"interface": { power_supply=PowerSupplyConfig(
"type": "mock", # mock by default for developer friendliness enabled=bool(psu.get("enabled", False)),
"channel": 1, port=psu.get("port"),
"bitrate": 19200, baudrate=int(psu.get("baudrate", 115200)),
}, timeout=float(psu.get("timeout", 1.0)),
"flash": { eol=str(psu.get("eol", "\n")),
"enabled": False, parity=str(psu.get("parity", "N")),
"hex_path": None, stopbits=float(psu.get("stopbits", 1.0)),
}, xonxoff=bool(psu.get("xonxoff", False)),
"power_supply": { rtscts=bool(psu.get("rtscts", False)),
"enabled": False, dsrdtr=bool(psu.get("dsrdtr", False)),
"port": None, idn_substr=psu.get("idn_substr"),
"baudrate": 115200, do_set=bool(psu.get("do_set", False)),
"timeout": 1.0, set_voltage=float(psu.get("set_voltage", 1.0)),
"eol": "\n", set_current=float(psu.get("set_current", 0.1)),
"parity": "N", ),
"stopbits": 1.0, )
"xonxoff": False,
"rtscts": False,
"dsrdtr": False, def load_config(workspace_root: Optional[str] = None, overrides: Optional[Dict[str, Any]] = None) -> EcuTestConfig:
"idn_substr": None, """Load configuration from YAML file, environment, overrides, or defaults.
"do_set": False,
"set_voltage": 1.0, Precedence (highest to lowest):
"set_current": 0.1, 1. in-memory 'overrides' dict
}, 2. YAML file specified by env var ECU_TESTS_CONFIG
} 3. YAML at ./config/test_config.yaml (relative to workspace_root)
4. built-in defaults in this function
cfg_path: Optional[pathlib.Path] = None # Resolved configuration file path """
# Start with built-in defaults; minimal, safe baseline
# 2) Environment variable can point to any YAML file base: Dict[str, Any] = {
env_path = os.getenv(ENV_CONFIG_PATH) "interface": {
if env_path: "type": "mock", # mock by default for developer friendliness
candidate = pathlib.Path(env_path) "channel": 1,
if candidate.is_file(): # Only accept existing files "bitrate": 19200,
cfg_path = candidate },
"flash": {
# 3) Fallback to default path under the provided workspace root "enabled": False,
if cfg_path is None and workspace_root: "hex_path": None,
candidate = pathlib.Path(workspace_root) / DEFAULT_CONFIG_RELATIVE },
if candidate.is_file(): "power_supply": {
cfg_path = candidate "enabled": False,
"port": None,
# Load YAML file if we have one "baudrate": 115200,
if cfg_path and cfg_path.is_file(): "timeout": 1.0,
with open(cfg_path, "r", encoding="utf-8") as f: "eol": "\n",
file_cfg = yaml.safe_load(f) or {} # Parse YAML safely; empty → {} "parity": "N",
if isinstance(file_cfg, dict): # Only merge dicts "stopbits": 1.0,
_deep_update(base, file_cfg) "xonxoff": False,
"rtscts": False,
# Optionally merge a dedicated PSU YAML if present (or env var path) "dsrdtr": False,
# This allows users to keep sensitive or machine-specific serial settings separate "idn_substr": None,
psu_env = os.getenv("OWON_PSU_CONFIG") "do_set": False,
psu_default = None "set_voltage": 1.0,
if workspace_root: "set_current": 0.1,
candidate = pathlib.Path(workspace_root) / "config" / "owon_psu.yaml" },
if candidate.is_file(): }
psu_default = candidate
psu_path: Optional[pathlib.Path] = pathlib.Path(psu_env) if psu_env else psu_default cfg_path: Optional[pathlib.Path] = None # Resolved configuration file path
if psu_path and psu_path.is_file():
with open(psu_path, "r", encoding="utf-8") as f: # 2) Environment variable can point to any YAML file
psu_cfg = yaml.safe_load(f) or {} env_path = os.getenv(ENV_CONFIG_PATH)
if isinstance(psu_cfg, dict): if env_path:
base.setdefault("power_supply", {}) candidate = pathlib.Path(env_path)
# Merge PSU YAML into power_supply section if candidate.is_file(): # Only accept existing files
base["power_supply"] = _deep_update(base["power_supply"], psu_cfg) cfg_path = candidate
# 1) In-memory overrides always win # 3) Fallback to default path under the provided workspace root
if overrides: if cfg_path is None and workspace_root:
_deep_update(base, overrides) candidate = pathlib.Path(workspace_root) / DEFAULT_CONFIG_RELATIVE
if candidate.is_file():
# Convert to typed dataclasses for ergonomic downstream usage cfg_path = candidate
return _to_dataclass(base)
# Load YAML file if we have one
if cfg_path and cfg_path.is_file():
with open(cfg_path, "r", encoding="utf-8") as f:
file_cfg = yaml.safe_load(f) or {} # Parse YAML safely; empty → {}
if isinstance(file_cfg, dict): # Only merge dicts
_deep_update(base, file_cfg)
# Optionally merge a dedicated PSU YAML if present (or env var path)
# This allows users to keep sensitive or machine-specific serial settings separate
psu_env = os.getenv("OWON_PSU_CONFIG")
psu_default = None
if workspace_root:
candidate = pathlib.Path(workspace_root) / "config" / "owon_psu.yaml"
if candidate.is_file():
psu_default = candidate
psu_path: Optional[pathlib.Path] = pathlib.Path(psu_env) if psu_env else psu_default
if psu_path and psu_path.is_file():
with open(psu_path, "r", encoding="utf-8") as f:
psu_cfg = yaml.safe_load(f) or {}
if isinstance(psu_cfg, dict):
base.setdefault("power_supply", {})
# Merge PSU YAML into power_supply section
base["power_supply"] = _deep_update(base["power_supply"], psu_cfg)
# 1) In-memory overrides always win
if overrides:
_deep_update(base, overrides)
# Convert to typed dataclasses for ergonomic downstream usage
return _to_dataclass(base)

View File

@ -1,9 +1,9 @@
""" """
Flashing package. Flashing package.
Exports: Exports:
- HexFlasher: scaffold class to wire up UDS-based ECU programming over LIN. - HexFlasher: scaffold class to wire up UDS-based ECU programming over LIN.
""" """
from .hex_flasher import HexFlasher from .hex_flasher import HexFlasher
__all__ = ["HexFlasher"] __all__ = ["HexFlasher"]

View File

@ -1,25 +1,25 @@
from __future__ import annotations from __future__ import annotations
import pathlib import pathlib
from typing import Optional from typing import Optional
from ..lin.base import LinInterface from ..lin.base import LinInterface
class HexFlasher: class HexFlasher:
"""Stubbed ECU flasher over LIN. """Stubbed ECU flasher over LIN.
Replace with your actual UDS flashing sequence. For now, just validates the file exists Replace with your actual UDS flashing sequence. For now, just validates the file exists
and pretends to flash successfully. and pretends to flash successfully.
""" """
def __init__(self, lin: LinInterface) -> None: def __init__(self, lin: LinInterface) -> None:
self.lin = lin self.lin = lin
def flash_hex(self, hex_path: str, *, erase: bool = True, verify: bool = True, timeout_s: float = 120.0) -> bool: def flash_hex(self, hex_path: str, *, erase: bool = True, verify: bool = True, timeout_s: float = 120.0) -> bool:
path = pathlib.Path(hex_path) path = pathlib.Path(hex_path)
if not path.is_file(): if not path.is_file():
raise FileNotFoundError(f"HEX file not found: {hex_path}") raise FileNotFoundError(f"HEX file not found: {hex_path}")
# TODO: Implement real flashing over LIN (UDS). This is a placeholder. # TODO: Implement real flashing over LIN (UDS). This is a placeholder.
# You might send specific frames or use a higher-level protocol library. # You might send specific frames or use a higher-level protocol library.
return True return True

View File

@ -1,17 +1,20 @@
""" """
LIN interface package. LIN interface package.
Exports: Exports:
- LinInterface, LinFrame: core abstraction and frame type - LinInterface, LinFrame: core abstraction and frame type
- MockBabyLinInterface: mock implementation for fast, hardware-free tests - MockBabyLinInterface: mock implementation for fast, hardware-free tests
Real hardware adapter (BabyLIN) is available in babylin.py. Real hardware adapters live in their own modules and are imported by the
""" fixture only when selected by config:
from .base import LinInterface, LinFrame - babylin.BabyLinInterface (legacy; needs the BabyLIN SDK + native libs)
from .mock import MockBabyLinInterface - mum.MumLinInterface (current; needs Melexis pylin + pymumclient)
"""
__all__ = [ from .base import LinInterface, LinFrame
"LinInterface", from .mock import MockBabyLinInterface
"LinFrame",
"MockBabyLinInterface", __all__ = [
] "LinInterface",
"LinFrame",
"MockBabyLinInterface",
]

View File

@ -1,220 +1,393 @@
from __future__ import annotations # Enable postponed evaluation of annotations (PEP 563/649 style) from __future__ import annotations # Enable postponed evaluation of annotations (PEP 563/649 style)
from typing import Optional # For optional type hints from typing import Optional # For optional type hints
from .base import LinInterface, LinFrame # Base abstraction and frame dataclass used by all LIN adapters from .base import LinInterface, LinFrame # Base abstraction and frame dataclass used by all LIN adapters
class BabyLinInterface(LinInterface): class BabyLinInterface(LinInterface):
"""LIN adapter that uses the vendor's BabyLIN Python SDK wrapper. """LIN adapter that uses the vendor's BabyLIN Python SDK wrapper.
- Avoids manual ctypes; relies on BabyLIN_library.py BLC_* functions. - Avoids manual ctypes; relies on BabyLIN_library.py BLC_* functions.
- Keeps the same LinInterface contract for send/receive/request/flush. - Keeps the same LinInterface contract for send/receive/request/flush.
""" """
def __init__( def __init__(
self, self,
dll_path: Optional[str] = None, # Not used by SDK wrapper (auto-selects platform libs) dll_path: Optional[str] = None, # Not used by SDK wrapper (auto-selects platform libs)
bitrate: int = 19200, # Informational; typically defined by SDF/schedule bitrate: int = 19200, # Informational; typically defined by SDF/schedule
channel: int = 0, # Channel index used with BLC_getChannelHandle (0-based) channel: int = 0, # Channel index used with BLC_getChannelHandle (0-based)
node_name: Optional[str] = None, # Optional friendly name (not used by SDK calls) node_name: Optional[str] = None, # Optional friendly name (not used by SDK calls)
func_names: Optional[dict] = None, # Legacy (ctypes) compatibility; unused here func_names: Optional[dict] = None, # Legacy (ctypes) compatibility; unused here
sdf_path: Optional[str] = None, # Optional SDF file to load after open sdf_path: Optional[str] = None, # Optional SDF file to load after open
schedule_nr: int = 0, # Schedule number to start after connect schedule_nr: int = 0, # Schedule number to start after connect
wrapper_module: Optional[object] = None, # Inject a wrapper (e.g., mock) for tests wrapper_module: Optional[object] = None, # Inject a wrapper (e.g., mock) for tests
) -> None: ) -> None:
self.bitrate = bitrate # Store configured (informational) bitrate self.bitrate = bitrate # Store configured (informational) bitrate
self.channel_index = channel # Desired channel index self.channel_index = channel # Desired channel index
self.node_name = node_name or "ECU_TEST_NODE" # Default node name if not provided self.node_name = node_name or "ECU_TEST_NODE" # Default node name if not provided
self.sdf_path = sdf_path # SDF to load (if provided) self.sdf_path = sdf_path # SDF to load (if provided)
self.schedule_nr = schedule_nr # Schedule to start on connect self.schedule_nr = schedule_nr # Schedule to start on connect
# Choose the BabyLIN wrapper module to use: # Choose the BabyLIN wrapper module to use:
# - If wrapper_module provided (unit tests with mock), use it # - If wrapper_module provided (unit tests with mock), use it
# - Else dynamically import the real SDK wrapper (BabyLIN_library.py) # - Else dynamically import the real SDK wrapper (BabyLIN_library.py)
if wrapper_module is not None: if wrapper_module is not None:
_bl = wrapper_module _bl = wrapper_module
else: else:
import importlib, sys, os # Local import to avoid global dependency during unit tests import importlib, sys, os # Local import to avoid global dependency during unit tests
_bl = None # Placeholder for resolved module _bl = None # Placeholder for resolved module
import_errors = [] # Accumulate import errors for diagnostics import_errors = [] # Accumulate import errors for diagnostics
for modname in ("BabyLIN_library", "vendor.BabyLIN_library"): for modname in ("BabyLIN_library", "vendor.BabyLIN_library"):
try: try:
_bl = importlib.import_module(modname) _bl = importlib.import_module(modname)
break break
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
import_errors.append((modname, str(e))) import_errors.append((modname, str(e)))
if _bl is None: if _bl is None:
# Try adding the common 'vendor' folder to sys.path then retry import # Try adding the common 'vendor' folder to sys.path then retry import
repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..")) repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
vendor_dir = os.path.join(repo_root, "vendor") vendor_dir = os.path.join(repo_root, "vendor")
if os.path.isdir(vendor_dir) and vendor_dir not in sys.path: if os.path.isdir(vendor_dir) and vendor_dir not in sys.path:
sys.path.insert(0, vendor_dir) sys.path.insert(0, vendor_dir)
try: try:
_bl = importlib.import_module("BabyLIN_library") _bl = importlib.import_module("BabyLIN_library")
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
import_errors.append(("BabyLIN_library", str(e))) import_errors.append(("BabyLIN_library", str(e)))
if _bl is None: if _bl is None:
# Raise a helpful error with all attempted import paths # Raise a helpful error with all attempted import paths
details = "; ".join([f"{m}: {err}" for m, err in import_errors]) or "not found" details = "; ".join([f"{m}: {err}" for m, err in import_errors]) or "not found"
raise RuntimeError( raise RuntimeError(
"Failed to import BabyLIN_library. Ensure the SDK's BabyLIN_library.py is present in the project (e.g., vendor/BabyLIN_library.py). Details: " "Failed to import BabyLIN_library. Ensure the SDK's BabyLIN_library.py is present in the project (e.g., vendor/BabyLIN_library.py). Details: "
+ details + details
) )
# Create the BabyLIN SDK instance (module exposes create_BabyLIN()) # Create the BabyLIN SDK instance (module exposes create_BabyLIN())
self._BabyLIN = _bl.create_BabyLIN() self._BabyLIN = _bl.create_BabyLIN()
# Small helper to call BLC_* functions by name (keeps call sites concise) # Small helper to call BLC_* functions by name (keeps call sites concise)
self._bl_call = lambda name, *args, **kwargs: getattr(self._BabyLIN, name)(*args, **kwargs) self._bl_call = lambda name, *args, **kwargs: getattr(self._BabyLIN, name)(*args, **kwargs)
self._handle = None # Device handle returned by BLC_openPort self._handle = None # Device handle returned by BLC_openPort
self._channel_handle = None # Per-channel handle returned by BLC_getChannelHandle self._channel_handle = None # Per-channel handle returned by BLC_getChannelHandle
self._connected = False # Internal connection state flag self._connected = False # Internal connection state flag
def _err(self, rc: int) -> None: def _detail_for(self, rc) -> str:
"""Raise a RuntimeError with a readable SDK error message for rc != BL_OK.""" """Look up a human-readable SDK error message; never raises.
if rc == self._BabyLIN.BL_OK:
return Tries (in order):
# Prefer a human-friendly error string if the SDK provides it 1. BLC_getLastError(channel_handle) device-side last error (best detail)
try: 2. BLC_getErrorString(rc) simple rc lookup
get_str = getattr(self._BabyLIN, 'BLC_getDetailedErrorString', None) 3. BLC_getDetailedErrorString(rc, 0) detailed lookup (rc + report_param)
msg = get_str(rc) if get_str else f"rc={rc}" Returns the first non-empty message, or "".
if not isinstance(msg, str): """
msg = str(msg) parts = []
except Exception:
msg = f"rc={rc}" # 1. Device-side last error — usually the most informative.
raise RuntimeError(f"BabyLIN error: {msg}") # BLC_getLastError takes the device connection handle; fall back to the
# channel handle if the device handle isn't set yet.
def connect(self) -> None: for h in (self._handle, self._channel_handle):
"""Open device, optionally load SDF, select channel, and start schedule.""" if h is None:
# Discover BabyLIN devices (returns a list of port identifiers) continue
ports = self._bl_call('BLC_getBabyLinPorts', 100) try:
if not ports: fn = getattr(self._BabyLIN, 'BLC_getLastError', None)
raise RuntimeError("No BabyLIN devices found") if fn is not None:
# Open the first available device port (you could extend to select by config) s = fn(h)
self._handle = self._bl_call('BLC_openPort', ports[0]) if isinstance(s, bytes):
if not self._handle: s = s.decode('utf-8', errors='ignore')
raise RuntimeError("Failed to open BabyLIN port") if s:
parts.append(str(s))
# Load SDF onto the device, if configured (3rd arg '1' often means 'download') break
if self.sdf_path: except Exception:
rc = self._bl_call('BLC_loadSDF', self._handle, self.sdf_path, 1) continue
if rc != self._BabyLIN.BL_OK:
self._err(rc) if rc is None:
return " | ".join(parts)
# Get channel count and pick the configured channel index (default 0)
ch_count = self._bl_call('BLC_getChannelCount', self._handle) # 2. Simple error string by rc
if ch_count <= 0: try:
raise RuntimeError("No channels reported by device") fn = getattr(self._BabyLIN, 'BLC_getErrorString', None)
ch_idx = int(self.channel_index) if fn is not None:
if ch_idx < 0 or ch_idx >= ch_count: s = fn(int(rc))
ch_idx = 0 if isinstance(s, bytes):
# Resolve a channel handle used for all subsequent Tx/Rx commands s = s.decode('utf-8', errors='ignore')
self._channel_handle = self._bl_call('BLC_getChannelHandle', self._handle, ch_idx) if s:
parts.append(str(s))
# Start a schedule if configured (common requirement for regular polling/masters) except Exception:
if self.schedule_nr is not None: pass
cmd = f"start schedule {int(self.schedule_nr)};"
rc = self._bl_call('BLC_sendCommand', self._channel_handle, cmd) # 3. Detailed string (rc + report_parameter)
if rc != self._BabyLIN.BL_OK: try:
self._err(rc) fn = getattr(self._BabyLIN, 'BLC_getDetailedErrorString', None)
if fn is not None:
self._connected = True # Mark interface as connected s = fn(int(rc), 0)
if isinstance(s, bytes):
def disconnect(self) -> None: s = s.decode('utf-8', errors='ignore')
"""Close device handles and reset internal state (best-effort).""" if s:
try: parts.append(str(s))
self._bl_call('BLC_closeAll') # Close all device connections via SDK except Exception:
except Exception: pass
pass # Ignore SDK exceptions during shutdown
self._connected = False return " | ".join(parts)
self._handle = None
self._channel_handle = None def _err(self, rc: int, context: str = "") -> None:
"""Raise a RuntimeError with a readable SDK error message for rc != BL_OK."""
def send(self, frame: LinFrame) -> None: if rc == self._BabyLIN.BL_OK:
"""Transmit a LIN frame using BLC_mon_set_xmit.""" return
if not self._connected or not self._channel_handle: msg = self._detail_for(rc) or f"rc={rc}"
raise RuntimeError("BabyLIN not connected") prefix = f"BabyLIN error{(' (' + context + ')') if context else ''}"
# slotTime=0 means use default timing configured by schedule/SDF raise RuntimeError(f"{prefix}: {msg} (rc={rc})")
rc = self._bl_call('BLC_mon_set_xmit', self._channel_handle, int(frame.id), bytes(frame.data), 0)
if rc != self._BabyLIN.BL_OK: def _exec_command(self, cmd: str) -> None:
self._err(rc) """Run a BLC_sendCommand on the channel handle, surfacing detailed errors.
def receive(self, id: Optional[int] = None, timeout: float = 1.0): The SDK's wrapper raises BabyLINException for any non-zero rc. We catch
"""Receive a LIN frame with optional ID filter and timeout (seconds).""" that and re-raise a RuntimeError that includes BLC_getDetailedErrorString,
if not self._connected or not self._channel_handle: so callers see e.g. "schedule index out of range" instead of opaque "303".
raise RuntimeError("BabyLIN not connected") """
ms = max(0, int(timeout * 1000)) # SDK expects milliseconds if self._channel_handle is None:
try: raise RuntimeError("BabyLIN not connected")
frame = self._bl_call('BLC_getNextFrameTimeout', self._channel_handle, ms) try:
except Exception: rc = self._bl_call('BLC_sendCommand', self._channel_handle, cmd)
# Many wrappers raise on timeout; unify as 'no data' except Exception as e:
return None rc = getattr(e, 'errorCode', None)
if not frame: if rc is None:
return None # Try common alternate attributes used by SDK exception types
# Convert SDK frame to our LinFrame (mask to classic 6-bit LIN ID range) for attr in ('rc', 'returncode', 'code'):
fid = int(frame.frameId & 0x3F) rc = getattr(e, attr, None)
data = bytes(list(frame.frameData)[: int(frame.lenOfData)]) if rc is not None:
lin_frame = LinFrame(id=fid, data=data) break
if id is None or fid == id: detail = self._detail_for(rc) if rc is not None else ""
return lin_frame rc_part = f"rc={rc}" if rc is not None else "rc=?"
# If a different ID was received and caller requested a filter, return None extra = f"{detail}" if detail else ""
return None raise RuntimeError(
f"BabyLIN command failed: {cmd!r} ({rc_part}){extra}"
def flush(self) -> None: ) from e
"""Flush RX buffers if the SDK exposes such a function (optional).""" if rc != self._BabyLIN.BL_OK:
if not self._connected or not self._channel_handle: self._err(rc, context=f"command {cmd!r}")
return
try: def connect(self) -> None:
# Some SDKs may not expose flush; no-op if missing """Open device, optionally load SDF, select channel, and start schedule."""
flush = getattr(self._BabyLIN, 'BLC_flush', None) # Discover BabyLIN devices (returns a list of port identifiers)
if flush: ports = self._bl_call('BLC_getBabyLinPorts', 100)
flush(self._channel_handle) if not ports:
except Exception: raise RuntimeError("No BabyLIN devices found")
pass # Open the first available device port (you could extend to select by config)
self._handle = self._bl_call('BLC_openPort', ports[0])
def request(self, id: int, length: int, timeout: float = 1.0): if not self._handle:
"""Perform a LIN master request and wait for response. raise RuntimeError("Failed to open BabyLIN port")
Strategy: # Load SDF onto the device, if configured (3rd arg '1' often means 'download')
- Prefer SDK method `BLC_sendRawMasterRequest` if present (bytes or length variants). if self.sdf_path:
- Fallback: transmit a header with zeroed payload; then wait for response. rc = self._bl_call('BLC_loadSDF', self._handle, self.sdf_path, 1)
- Always attempt to receive a frame with matching ID within 'timeout'. if rc != self._BabyLIN.BL_OK:
""" self._err(rc)
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected") # Get channel count and resolve the channel handle.
# A BabyLIN device may expose multiple channel types (LIN/CAN/...).
sent = False # Track whether a request command was successfully issued # When the SDK supports BLC_getChannelInfo, we filter by info.type==0
# Attempt to use raw master request if provided by SDK # to find LIN channels (mirrors vendor/BLCInterfaceExample.py).
# Preference: try (channel, frameId, length) first because our mock wrapper # Without it (older SDKs, mock wrappers), we fall back to honoring
# synthesizes a deterministic payload for this form (see vendor/mock_babylin_wrapper.py), # the configured index and validating the handle.
# then fall back to (channel, frameId, dataBytes) if the SDK only supports that. ch_count = self._bl_call('BLC_getChannelCount', self._handle)
raw_req = getattr(self._BabyLIN, 'BLC_sendRawMasterRequest', None) if ch_count <= 0:
if raw_req: raise RuntimeError("No channels reported by device")
# Prefer the (channel, frameId, length) variant first if supported
try: configured_idx = int(self.channel_index)
rc = raw_req(self._channel_handle, int(id), int(length)) get_info = getattr(self._BabyLIN, 'BLC_getChannelInfo', None)
if rc == self._BabyLIN.BL_OK:
sent = True if get_info is not None:
else: lin_channels = [] # [(idx, handle, info)] for type==0 channels
self._err(rc) seen = [] # diagnostics if no LIN channel is found
except TypeError: for idx in range(int(ch_count)):
# Fallback to (channel, frameId, dataBytes) h = self._bl_call('BLC_getChannelHandle', self._handle, idx)
try: if not h:
payload = bytes([0] * max(0, min(8, int(length)))) seen.append((idx, None, None))
rc = raw_req(self._channel_handle, int(id), payload) continue
if rc == self._BabyLIN.BL_OK: try:
sent = True info = get_info(h)
else: except Exception:
self._err(rc) info = None
except Exception: seen.append((idx, h, info))
sent = False if info is not None and getattr(info, 'type', None) == 0:
except Exception: lin_channels.append((idx, h, info))
sent = False
if not lin_channels:
if not sent: details = ", ".join(
# Fallback: issue a transmit; many stacks will respond on the bus f"idx={i} handle={'ok' if h else 'None'} "
self.send(LinFrame(id=id, data=bytes([0] * max(0, min(8, int(length)))))) f"type={getattr(info, 'type', '?') if info is not None else '?'} "
f"name={getattr(info, 'name', b'').decode('utf-8', errors='ignore') if info is not None else ''}"
# Wait for the response frame with matching ID (or None on timeout) for i, h, info in seen
return self.receive(id=id, timeout=timeout) )
raise RuntimeError(
f"No LIN channel (type==0) found on device. Channels seen: [{details}]"
)
# Prefer the configured index if it is a LIN channel; otherwise the first LIN channel.
chosen = next((t for t in lin_channels if t[0] == configured_idx), lin_channels[0])
ch_idx, self._channel_handle, _ = chosen
else:
ch_idx = configured_idx if 0 <= configured_idx < int(ch_count) else 0
self._channel_handle = self._bl_call('BLC_getChannelHandle', self._handle, ch_idx)
if not self._channel_handle:
raise RuntimeError(f"BLC_getChannelHandle returned invalid handle for channel {ch_idx}")
# Mark connected before any sendCommand so send_command()/_exec_command()
# accept the call. Auto-start a schedule only if a non-negative index is set;
# use -1 (or None) in config to defer starting to the test/caller.
self._connected = True
if self.schedule_nr is not None and int(self.schedule_nr) >= 0:
self._exec_command(f"start schedule {int(self.schedule_nr)};")
def send_command(self, cmd: str) -> None:
"""Send a raw BabyLIN SDK command via BLC_sendCommand on the channel handle.
Useful for actions that don't fit the abstract LinInterface, e.g.:
send_command("stop;")
send_command("setsig 0 255;")
Note: BabyLIN firmware accepts 'start schedule <index>;' but not the
schedule name. Use start_schedule() for name-or-index lookup.
"""
if not self._connected:
raise RuntimeError("BabyLIN not connected")
self._exec_command(cmd)
def schedule_nr_for_name(self, name: str) -> int:
"""Return the schedule index matching `name` from the loaded SDF.
Tries BLC_SDF_getScheduleNr first; falls back to enumerating with
BLC_SDF_getNumSchedules + BLC_SDF_getScheduleName for older SDKs.
Raises RuntimeError if the schedule isn't found.
"""
if self._channel_handle is None:
raise RuntimeError("BabyLIN not connected")
get_nr = getattr(self._BabyLIN, 'BLC_SDF_getScheduleNr', None)
if get_nr is not None:
try:
return int(get_nr(self._channel_handle, name))
except Exception:
pass # fall through to enumeration
get_count = getattr(self._BabyLIN, 'BLC_SDF_getNumSchedules', None)
get_name = getattr(self._BabyLIN, 'BLC_SDF_getScheduleName', None)
if get_count is None or get_name is None:
raise RuntimeError(
f"SDK does not expose schedule lookup; cannot resolve schedule {name!r}"
)
count = int(get_count(self._channel_handle))
names = []
for i in range(count):
try:
n = get_name(self._channel_handle, i)
except Exception:
n = ""
names.append(n)
if n == name:
return i
raise RuntimeError(
f"Schedule {name!r} not found in SDF. Available: {names}"
)
def start_schedule(self, name_or_nr) -> int:
"""Start a schedule by name (str) or index (int). Returns the index used."""
nr = name_or_nr if isinstance(name_or_nr, int) else self.schedule_nr_for_name(str(name_or_nr))
self.send_command(f"start schedule {int(nr)};")
return int(nr)
def disconnect(self) -> None:
"""Close device handles and reset internal state (best-effort)."""
try:
self._bl_call('BLC_closeAll') # Close all device connections via SDK
except Exception:
pass # Ignore SDK exceptions during shutdown
self._connected = False
self._handle = None
self._channel_handle = None
def send(self, frame: LinFrame) -> None:
"""Transmit a LIN frame using BLC_mon_set_xmit."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
# slotTime=0 means use default timing configured by schedule/SDF
rc = self._bl_call('BLC_mon_set_xmit', self._channel_handle, int(frame.id), bytes(frame.data), 0)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
def receive(self, id: Optional[int] = None, timeout: float = 1.0):
"""Receive a LIN frame with optional ID filter and timeout (seconds)."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
ms = max(0, int(timeout * 1000)) # SDK expects milliseconds
try:
frame = self._bl_call('BLC_getNextFrameTimeout', self._channel_handle, ms)
except Exception:
# Many wrappers raise on timeout; unify as 'no data'
return None
if not frame:
return None
# Convert SDK frame to our LinFrame (mask to classic 6-bit LIN ID range)
fid = int(frame.frameId & 0x3F)
data = bytes(list(frame.frameData)[: int(frame.lenOfData)])
lin_frame = LinFrame(id=fid, data=data)
if id is None or fid == id:
return lin_frame
# If a different ID was received and caller requested a filter, return None
return None
def flush(self) -> None:
"""Flush RX buffers if the SDK exposes such a function (optional)."""
if not self._connected or not self._channel_handle:
return
try:
# Some SDKs may not expose flush; no-op if missing
flush = getattr(self._BabyLIN, 'BLC_flush', None)
if flush:
flush(self._channel_handle)
except Exception:
pass
def request(self, id: int, length: int, timeout: float = 1.0):
"""Perform a LIN master request and wait for response.
Strategy:
- Prefer SDK method `BLC_sendRawMasterRequest` if present (bytes or length variants).
- Fallback: transmit a header with zeroed payload; then wait for response.
- Always attempt to receive a frame with matching ID within 'timeout'.
"""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
sent = False # Track whether a request command was successfully issued
# Attempt to use raw master request if provided by SDK
# Preference: try (channel, frameId, length) first because our mock wrapper
# synthesizes a deterministic payload for this form (see vendor/mock_babylin_wrapper.py),
# then fall back to (channel, frameId, dataBytes) if the SDK only supports that.
raw_req = getattr(self._BabyLIN, 'BLC_sendRawMasterRequest', None)
if raw_req:
# Prefer the (channel, frameId, length) variant first if supported
try:
rc = raw_req(self._channel_handle, int(id), int(length))
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except TypeError:
# Fallback to (channel, frameId, dataBytes)
try:
payload = bytes([0] * max(0, min(8, int(length))))
rc = raw_req(self._channel_handle, int(id), payload)
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except Exception:
sent = False
except Exception:
sent = False
if not sent:
# Fallback: issue a transmit; many stacks will respond on the bus
self.send(LinFrame(id=id, data=bytes([0] * max(0, min(8, int(length))))))
# Wait for the response frame with matching ID (or None on timeout)
return self.receive(id=id, timeout=timeout)

View File

@ -1,60 +1,60 @@
from __future__ import annotations from __future__ import annotations
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from dataclasses import dataclass from dataclasses import dataclass
from typing import Optional from typing import Optional
@dataclass @dataclass
class LinFrame: class LinFrame:
"""Represents a LIN frame. """Represents a LIN frame.
id: Frame identifier (0x00 - 0x3F typical for classic LIN IDs) id: Frame identifier (0x00 - 0x3F typical for classic LIN IDs)
data: Up to 8 bytes payload. data: Up to 8 bytes payload.
""" """
id: int id: int
data: bytes data: bytes
def __post_init__(self) -> None: def __post_init__(self) -> None:
if not (0 <= self.id <= 0x3F): if not (0 <= self.id <= 0x3F):
raise ValueError(f"LIN ID out of range: {self.id}") raise ValueError(f"LIN ID out of range: {self.id}")
if not isinstance(self.data, (bytes, bytearray)): if not isinstance(self.data, (bytes, bytearray)):
# allow list of ints # allow list of ints
try: try:
self.data = bytes(self.data) # type: ignore[arg-type] self.data = bytes(self.data) # type: ignore[arg-type]
except Exception as e: # pragma: no cover - defensive except Exception as e: # pragma: no cover - defensive
raise TypeError("data must be bytes-like") from e raise TypeError("data must be bytes-like") from e
if len(self.data) > 8: if len(self.data) > 8:
raise ValueError("LIN data length must be <= 8") raise ValueError("LIN data length must be <= 8")
class LinInterface(ABC): class LinInterface(ABC):
"""Abstract interface for LIN communication.""" """Abstract interface for LIN communication."""
@abstractmethod @abstractmethod
def connect(self) -> None: def connect(self) -> None:
"""Open the interface connection.""" """Open the interface connection."""
@abstractmethod @abstractmethod
def disconnect(self) -> None: def disconnect(self) -> None:
"""Close the interface connection.""" """Close the interface connection."""
@abstractmethod @abstractmethod
def send(self, frame: LinFrame) -> None: def send(self, frame: LinFrame) -> None:
"""Send a LIN frame.""" """Send a LIN frame."""
@abstractmethod @abstractmethod
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]: def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
"""Receive a LIN frame, optionally filtered by ID. Returns None on timeout.""" """Receive a LIN frame, optionally filtered by ID. Returns None on timeout."""
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]: def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
"""Default request implementation: send header then wait a frame. """Default request implementation: send header then wait a frame.
Override in concrete implementation if different behavior is needed. Override in concrete implementation if different behavior is needed.
""" """
# By default, just wait for any frame with this ID # By default, just wait for any frame with this ID
return self.receive(id=id, timeout=timeout) return self.receive(id=id, timeout=timeout)
def flush(self) -> None: def flush(self) -> None:
"""Optional: flush RX buffers.""" """Optional: flush RX buffers."""
pass pass

View File

@ -1,73 +1,73 @@
from __future__ import annotations from __future__ import annotations
import queue import queue
import threading import threading
import time import time
from typing import Optional from typing import Optional
from .base import LinInterface, LinFrame from .base import LinInterface, LinFrame
class MockBabyLinInterface(LinInterface): class MockBabyLinInterface(LinInterface):
"""A mock LIN interface that echoes frames and synthesizes responses. """A mock LIN interface that echoes frames and synthesizes responses.
Useful for local development without hardware. Thread-safe. Useful for local development without hardware. Thread-safe.
""" """
def __init__(self, bitrate: int = 19200, channel: int = 1) -> None: def __init__(self, bitrate: int = 19200, channel: int = 1) -> None:
self.bitrate = bitrate self.bitrate = bitrate
self.channel = channel self.channel = channel
self._rx: "queue.Queue[LinFrame]" = queue.Queue() self._rx: "queue.Queue[LinFrame]" = queue.Queue()
self._lock = threading.RLock() self._lock = threading.RLock()
self._connected = False self._connected = False
def connect(self) -> None: def connect(self) -> None:
with self._lock: with self._lock:
self._connected = True self._connected = True
def disconnect(self) -> None: def disconnect(self) -> None:
with self._lock: with self._lock:
self._connected = False self._connected = False
# drain queue # drain queue
try: try:
while True: while True:
self._rx.get_nowait() self._rx.get_nowait()
except queue.Empty: except queue.Empty:
pass pass
def send(self, frame: LinFrame) -> None: def send(self, frame: LinFrame) -> None:
if not self._connected: if not self._connected:
raise RuntimeError("Mock interface not connected") raise RuntimeError("Mock interface not connected")
# echo back the frame as a received event # echo back the frame as a received event
self._rx.put(frame) self._rx.put(frame)
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]: def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected: if not self._connected:
raise RuntimeError("Mock interface not connected") raise RuntimeError("Mock interface not connected")
deadline = time.time() + max(0.0, timeout) deadline = time.time() + max(0.0, timeout)
while time.time() < deadline: while time.time() < deadline:
try: try:
frm = self._rx.get(timeout=max(0.0, deadline - time.time())) frm = self._rx.get(timeout=max(0.0, deadline - time.time()))
if id is None or frm.id == id: if id is None or frm.id == id:
return frm return frm
# not matching, requeue tail-safe # not matching, requeue tail-safe
self._rx.put(frm) self._rx.put(frm)
except queue.Empty: except queue.Empty:
break break
return None return None
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]: def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected: if not self._connected:
raise RuntimeError("Mock interface not connected") raise RuntimeError("Mock interface not connected")
# synthesize a deterministic response payload of requested length # synthesize a deterministic response payload of requested length
payload = bytes((id + i) & 0xFF for i in range(max(0, min(8, length)))) payload = bytes((id + i) & 0xFF for i in range(max(0, min(8, length))))
frm = LinFrame(id=id, data=payload) frm = LinFrame(id=id, data=payload)
self._rx.put(frm) self._rx.put(frm)
return self.receive(id=id, timeout=timeout) return self.receive(id=id, timeout=timeout)
def flush(self) -> None: def flush(self) -> None:
while not self._rx.empty(): while not self._rx.empty():
try: try:
self._rx.get_nowait() self._rx.get_nowait()
except queue.Empty: # pragma: no cover - race guard except queue.Empty: # pragma: no cover - race guard
break break

220
ecu_framework/lin/mum.py Normal file
View File

@ -0,0 +1,220 @@
"""LIN adapter that uses the Melexis Universal Master (MUM) over the network.
Wraps the vendor's `pylin` + `pymumclient` packages so test code can talk to
the MUM through the same `LinInterface` abstraction used by the BabyLIN and
mock adapters. The MUM is a BeagleBone-based LIN master reachable over IP
(default 192.168.7.2) with built-in power control on `power_out0`.
The MUM is master-driven: a slave frame is fetched by issuing a request via
`send_message(master_to_slave=False, frame_id, data_length)`, so `receive()`
requires a frame ID. Per-frame `data_length` is taken from the constructor's
`frame_lengths` map; ALM_Status (0x11, 4 bytes) and ALM_Req_A (0x0A, 8 bytes)
have built-in defaults so the common cases work out of the box.
Diagnostic frames (BSM-SNPD) need the LIN 1.x **Classic** checksum, which
`send_message` does not produce. Use `send_raw()` (which calls the transport
layer's `ld_put_raw`) for those frames.
"""
from __future__ import annotations
import time
from typing import Dict, Optional
from .base import LinInterface, LinFrame
# Per-frame data lengths known out of the box (4SEVEN_color_lib_test ECU).
# Callers can extend or override these via the `frame_lengths` ctor argument.
_DEFAULT_FRAME_LENGTHS: Dict[int, int] = {
    0x06: 3,  # ConfigFrame  (master-published)
    0x0A: 8,  # ALM_Req_A    (master-published, RGB control)
    0x11: 4,  # ALM_Status   (slave-published)
    0x12: 8,  # PWM_Frame    (slave-published)
    0x13: 8,  # VF_Frame     (slave-published)
    0x14: 8,  # Tj_Frame     (slave-published)
    0x15: 8,  # PWM_wo_Comp  (slave-published)
    0x16: 8,  # NVM_Debug    (slave-published)
}
class MumLinInterface(LinInterface):
"""LIN adapter for the Melexis Universal Master."""
def __init__(
self,
host: str = "192.168.7.2",
lin_device: str = "lin0",
power_device: str = "power_out0",
baudrate: int = 19200,
frame_lengths: Optional[Dict[int, int]] = None,
default_data_length: int = 8,
boot_settle_seconds: float = 0.5,
# Test seam: inject pre-built modules to bypass real hardware.
mum_module: object = None,
pylin_module: object = None,
) -> None:
self.host = host
self.lin_device = lin_device
self.power_device = power_device
self.baudrate = int(baudrate)
self.boot_settle_seconds = float(boot_settle_seconds)
self.default_data_length = int(default_data_length)
self.frame_lengths = dict(_DEFAULT_FRAME_LENGTHS)
if frame_lengths:
self.frame_lengths.update({int(k): int(v) for k, v in frame_lengths.items()})
self._mum_module = mum_module
self._pylin_module = pylin_module
self._mum = None
self._linmaster = None
self._power_control = None
self._lin_dev = None
self._transport_layer = None
self._connected = False
# -----------------------------
# Lifecycle
# -----------------------------
def _resolve_modules(self):
"""Lazy-import MUM stack so the framework still loads without it."""
if self._mum_module is None:
try:
import pymumclient # type: ignore
except Exception as e:
raise RuntimeError(
"pymumclient is not installed. The MUM adapter requires Melexis "
"packages 'pymumclient' and 'pylin'. See "
"vendor/automated_lin_test/install_packages.sh."
) from e
self._mum_module = pymumclient
if self._pylin_module is None:
try:
import pylin # type: ignore
except Exception as e:
raise RuntimeError(
"pylin is not installed. The MUM adapter requires Melexis "
"packages 'pymumclient' and 'pylin'. See "
"vendor/automated_lin_test/install_packages.sh."
) from e
self._pylin_module = pylin
return self._mum_module, self._pylin_module
def connect(self) -> None:
"""Open MUM, set up LIN master, attach LIN bus, and power up the ECU."""
pymumclient, pylin = self._resolve_modules()
self._mum = pymumclient.MelexisUniversalMaster()
self._mum.open_all(self.host)
self._power_control = self._mum.get_device(self.power_device)
self._linmaster = self._mum.get_device(self.lin_device)
self._linmaster.setup()
lin_bus = pylin.LinBusManager(self._linmaster)
self._lin_dev = pylin.LinDevice22(lin_bus)
self._lin_dev.baudrate = self.baudrate
# Transport layer is needed for Classic-checksum diagnostic frames.
try:
self._transport_layer = self._lin_dev.get_device("bus/transport_layer")
except Exception:
self._transport_layer = None
# Power up and let the ECU boot before the first frame.
self._power_control.power_up()
if self.boot_settle_seconds > 0:
time.sleep(self.boot_settle_seconds)
self._connected = True
def disconnect(self) -> None:
"""Power down the ECU and tear down the MUM connection (best-effort)."""
if self._power_control is not None:
try:
self._power_control.power_down()
except Exception:
pass
if self._linmaster is not None:
try:
self._linmaster.teardown()
except Exception:
pass
self._connected = False
self._mum = None
self._linmaster = None
self._power_control = None
self._lin_dev = None
self._transport_layer = None
# -----------------------------
# LinInterface contract
# -----------------------------
def send(self, frame: LinFrame) -> None:
"""Publish a master-to-slave frame using Enhanced checksum."""
if not self._connected or self._lin_dev is None:
raise RuntimeError("MUM not connected")
self._lin_dev.send_message(
master_to_slave=True,
frame_id=int(frame.id),
data_length=len(frame.data),
data=list(frame.data),
)
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
    """Trigger a slave-to-master read for `id` and return the response.

    The MUM is master-driven, so a frame ID is required; passing None
    raises NotImplementedError. `timeout` is informational only — the
    underlying pylin call is synchronous and uses its own timing.
    """
    if not self._connected or self._lin_dev is None:
        raise RuntimeError("MUM not connected")
    if id is None:
        raise NotImplementedError(
            "MUM receive requires a frame ID; passive listen is not supported"
        )
    frame_id = int(id)
    # Per-frame length override, falling back to the configured default.
    length = int(self.frame_lengths.get(frame_id, self.default_data_length))
    try:
        payload = self._lin_dev.send_message(
            master_to_slave=False,
            frame_id=frame_id,
            data_length=length,
            data=None,
        )
    except Exception:
        return None  # treat any pylin exception as a timeout / no-data
    if not payload:
        return None
    # LIN frame IDs are 6-bit, hence the 0x3F mask.
    return LinFrame(id=frame_id & 0x3F, data=bytes(payload[:length]))
# -----------------------------
# MUM-specific extras
# -----------------------------
def send_raw(self, data: bytes) -> None:
    """Send a raw LIN frame using LIN 1.x **Classic** checksum.

    Required for BSM-SNPD diagnostic frames (service ID 0xB5) — the
    firmware rejects these if Enhanced checksum is used.

    Raises:
        RuntimeError: if not connected or the MUM exposes no transport layer.
    """
    transport = self._transport_layer
    if not self._connected or transport is None:
        raise RuntimeError("MUM transport layer not available")
    transport.ld_put_raw(data=bytearray(data), baudrate=self.baudrate)
def power_up(self) -> None:
    """Switch the MUM power output on; requires a prior connect()."""
    controller = self._power_control
    if controller is None:
        raise RuntimeError("MUM not connected")
    controller.power_up()
def power_down(self) -> None:
    """Switch the MUM power output off; requires a prior connect()."""
    controller = self._power_control
    if controller is None:
        raise RuntimeError("MUM not connected")
    controller.power_down()
def power_cycle(self, wait: float = 2.0) -> None:
    """Power the ECU down, wait `wait` seconds, then back up.

    After powering up, also honours ``boot_settle_seconds`` so the ECU
    can finish booting before the next frame is sent.
    """
    self.power_down()
    time.sleep(wait)
    self.power_up()
    settle = self.boot_settle_seconds
    if settle > 0:
        time.sleep(settle)

View File

@ -1,13 +1,13 @@
"""Power control helpers for ECU tests. """Power control helpers for ECU tests.
Currently includes Owon PSU serial SCPI controller. Currently includes Owon PSU serial SCPI controller.
""" """
from .owon_psu import SerialParams, OwonPSU, scan_ports, auto_detect from .owon_psu import SerialParams, OwonPSU, scan_ports, auto_detect
__all__ = [ __all__ = [
"SerialParams", "SerialParams",
"OwonPSU", "OwonPSU",
"scan_ports", "scan_ports",
"auto_detect", "auto_detect",
] ]

View File

@ -1,193 +1,193 @@
"""Owon PSU SCPI control over raw serial (pyserial). """Owon PSU SCPI control over raw serial (pyserial).
This module provides a small, programmatic API suitable for tests: This module provides a small, programmatic API suitable for tests:
- OwonPSU: context-manageable controller class - OwonPSU: context-manageable controller class
- scan_ports(): find devices responding to *IDN? - scan_ports(): find devices responding to *IDN?
- auto_detect(): select the first matching device by IDN substring - auto_detect(): select the first matching device by IDN substring
Behavior follows the working quick demo example (serial): Behavior follows the working quick demo example (serial):
- Both commands and queries are terminated with a newline ("\n" by default). - Both commands and queries are terminated with a newline ("\n" by default).
- Queries use readline() to fetch a single-line response. - Queries use readline() to fetch a single-line response.
- Command set uses: 'output 0/1', 'output?', 'SOUR:VOLT <V>', 'SOUR:CURR <A>', 'MEAS:VOLT?', 'MEAS:CURR?', '*IDN?' - Command set uses: 'output 0/1', 'output?', 'SOUR:VOLT <V>', 'SOUR:CURR <A>', 'MEAS:VOLT?', 'MEAS:CURR?', '*IDN?'
""" """
from __future__ import annotations from __future__ import annotations
from dataclasses import dataclass from dataclasses import dataclass
from time import sleep from time import sleep
from typing import Iterable, Optional from typing import Iterable, Optional
import serial import serial
from serial import Serial from serial import Serial
from serial.tools import list_ports from serial.tools import list_ports
@dataclass @dataclass
class SerialParams: class SerialParams:
baudrate: int = 115200 baudrate: int = 115200
timeout: float = 1.0 # seconds timeout: float = 1.0 # seconds
bytesize: int = serial.EIGHTBITS bytesize: int = serial.EIGHTBITS
parity: str = serial.PARITY_NONE parity: str = serial.PARITY_NONE
stopbits: float = serial.STOPBITS_ONE stopbits: float = serial.STOPBITS_ONE
xonxoff: bool = False xonxoff: bool = False
rtscts: bool = False rtscts: bool = False
dsrdtr: bool = False dsrdtr: bool = False
write_timeout: float = 1.0 # seconds write_timeout: float = 1.0 # seconds
class OwonPSU: class OwonPSU:
def __init__(self, port: str, params: SerialParams | None = None, eol: str = "\n") -> None: def __init__(self, port: str, params: SerialParams | None = None, eol: str = "\n") -> None:
self.port = port self.port = port
self.params = params or SerialParams() self.params = params or SerialParams()
self.eol = eol self.eol = eol
self._ser: Optional[Serial] = None self._ser: Optional[Serial] = None
def open(self) -> None: def open(self) -> None:
if self._ser and self._ser.is_open: if self._ser and self._ser.is_open:
return return
ser = Serial() ser = Serial()
ser.port = self.port ser.port = self.port
ser.baudrate = self.params.baudrate ser.baudrate = self.params.baudrate
ser.bytesize = self.params.bytesize ser.bytesize = self.params.bytesize
ser.parity = self.params.parity ser.parity = self.params.parity
ser.stopbits = self.params.stopbits ser.stopbits = self.params.stopbits
ser.xonxoff = self.params.xonxoff ser.xonxoff = self.params.xonxoff
ser.rtscts = self.params.rtscts ser.rtscts = self.params.rtscts
ser.dsrdtr = self.params.dsrdtr ser.dsrdtr = self.params.dsrdtr
ser.timeout = self.params.timeout ser.timeout = self.params.timeout
ser.write_timeout = self.params.write_timeout ser.write_timeout = self.params.write_timeout
ser.open() ser.open()
self._ser = ser self._ser = ser
def close(self) -> None: def close(self) -> None:
if self._ser and self._ser.is_open: if self._ser and self._ser.is_open:
try: try:
self._ser.close() self._ser.close()
finally: finally:
self._ser = None self._ser = None
def __enter__(self) -> "OwonPSU": def __enter__(self) -> "OwonPSU":
self.open() self.open()
return self return self
def __exit__(self, exc_type, exc, tb) -> None: def __exit__(self, exc_type, exc, tb) -> None:
self.close() self.close()
@property @property
def is_open(self) -> bool: def is_open(self) -> bool:
return bool(self._ser and self._ser.is_open) return bool(self._ser and self._ser.is_open)
# ---- low-level ops ---- # ---- low-level ops ----
def write(self, cmd: str) -> None: def write(self, cmd: str) -> None:
"""Write a SCPI command (append eol).""" """Write a SCPI command (append eol)."""
if not self._ser: if not self._ser:
raise RuntimeError("Port is not open") raise RuntimeError("Port is not open")
data = (cmd + self.eol).encode("ascii", errors="ignore") data = (cmd + self.eol).encode("ascii", errors="ignore")
self._ser.write(data) self._ser.write(data)
self._ser.flush() self._ser.flush()
def query(self, q: str) -> str: def query(self, q: str) -> str:
"""Send a query with terminator and return a single-line response using readline().""" """Send a query with terminator and return a single-line response using readline()."""
if not self._ser: if not self._ser:
raise RuntimeError("Port is not open") raise RuntimeError("Port is not open")
# clear buffers to avoid stale data # clear buffers to avoid stale data
try: try:
self._ser.reset_input_buffer() self._ser.reset_input_buffer()
self._ser.reset_output_buffer() self._ser.reset_output_buffer()
except Exception: except Exception:
pass pass
self._ser.write((q + self.eol).encode("ascii", errors="ignore")) self._ser.write((q + self.eol).encode("ascii", errors="ignore"))
self._ser.flush() self._ser.flush()
line = self._ser.readline().strip() line = self._ser.readline().strip()
return line.decode("ascii", errors="ignore") return line.decode("ascii", errors="ignore")
# ---- high-level ops ---- # ---- high-level ops ----
def idn(self) -> str: def idn(self) -> str:
return self.query("*IDN?") return self.query("*IDN?")
def set_voltage(self, channel: int, volts: float) -> None: def set_voltage(self, channel: int, volts: float) -> None:
# Using SOUR:VOLT <V> per working example # Using SOUR:VOLT <V> per working example
self.write(f"SOUR:VOLT {volts:.3f}") self.write(f"SOUR:VOLT {volts:.3f}")
def set_current(self, channel: int, amps: float) -> None: def set_current(self, channel: int, amps: float) -> None:
# Using SOUR:CURR <A> per working example # Using SOUR:CURR <A> per working example
self.write(f"SOUR:CURR {amps:.3f}") self.write(f"SOUR:CURR {amps:.3f}")
def set_output(self, on: bool) -> None: def set_output(self, on: bool) -> None:
# Using 'output 1/0' per working example # Using 'output 1/0' per working example
self.write("output 1" if on else "output 0") self.write("output 1" if on else "output 0")
def output_status(self) -> str: def output_status(self) -> str:
return self.query("output?") return self.query("output?")
def measure_voltage(self) -> str: def measure_voltage(self) -> str:
return self.query("MEAS:VOLT?") return self.query("MEAS:VOLT?")
def measure_current(self) -> str: def measure_current(self) -> str:
return self.query("MEAS:CURR?") return self.query("MEAS:CURR?")
# ------- discovery helpers ------- # ------- discovery helpers -------
def try_idn_on_port(port: str, params: SerialParams) -> str: def try_idn_on_port(port: str, params: SerialParams) -> str:
dev: Optional[Serial] = None dev: Optional[Serial] = None
try: try:
dev = Serial() dev = Serial()
dev.port = port dev.port = port
dev.baudrate = params.baudrate dev.baudrate = params.baudrate
dev.bytesize = params.bytesize dev.bytesize = params.bytesize
dev.parity = params.parity dev.parity = params.parity
dev.stopbits = params.stopbits dev.stopbits = params.stopbits
dev.xonxoff = params.xonxoff dev.xonxoff = params.xonxoff
dev.rtscts = params.rtscts dev.rtscts = params.rtscts
dev.dsrdtr = params.dsrdtr dev.dsrdtr = params.dsrdtr
dev.timeout = params.timeout dev.timeout = params.timeout
dev.write_timeout = params.write_timeout dev.write_timeout = params.write_timeout
dev.open() dev.open()
# Query with newline terminator and read a single line # Query with newline terminator and read a single line
dev.reset_input_buffer(); dev.reset_output_buffer() dev.reset_input_buffer(); dev.reset_output_buffer()
dev.write(b"*IDN?\n"); dev.flush() dev.write(b"*IDN?\n"); dev.flush()
line = dev.readline().strip() line = dev.readline().strip()
return line.decode("ascii", errors="ignore") return line.decode("ascii", errors="ignore")
except Exception: except Exception:
return "" return ""
finally: finally:
if dev and dev.is_open: if dev and dev.is_open:
try: try:
dev.close() dev.close()
except Exception: except Exception:
pass pass
def scan_ports(params: SerialParams | None = None) -> list[tuple[str, str]]: def scan_ports(params: SerialParams | None = None) -> list[tuple[str, str]]:
"""Return [(port, idn_response), ...] for ports that responded.""" """Return [(port, idn_response), ...] for ports that responded."""
params = params or SerialParams() params = params or SerialParams()
results: list[tuple[str, str]] = [] results: list[tuple[str, str]] = []
for p in list_ports.comports(): for p in list_ports.comports():
dev = p.device dev = p.device
resp = try_idn_on_port(dev, params) resp = try_idn_on_port(dev, params)
if resp: if resp:
results.append((dev, resp)) results.append((dev, resp))
return results return results
def auto_detect(params: SerialParams | None = None, idn_substr: str | None = None) -> Optional[str]: def auto_detect(params: SerialParams | None = None, idn_substr: str | None = None) -> Optional[str]:
"""Return the first port whose *IDN? contains idn_substr (case-insensitive), else first responder.""" """Return the first port whose *IDN? contains idn_substr (case-insensitive), else first responder."""
params = params or SerialParams() params = params or SerialParams()
matches = scan_ports(params) matches = scan_ports(params)
if not matches: if not matches:
return None return None
if idn_substr: if idn_substr:
isub = idn_substr.lower() isub = idn_substr.lower()
for port, idn in matches: for port, idn in matches:
if isub in idn.lower(): if isub in idn.lower():
return port return port
return matches[0][0] return matches[0][0]
__all__ = [ __all__ = [
"SerialParams", "SerialParams",
"OwonPSU", "OwonPSU",
"scan_ports", "scan_ports",
"auto_detect", "auto_detect",
] ]

View File

@ -1,30 +1,31 @@
[pytest] [pytest]
# addopts: Default CLI options applied to every pytest run. # addopts: Default CLI options applied to every pytest run.
# -ra → Show extra test summary info for skipped, xfailed, etc. # -ra → Show extra test summary info for skipped, xfailed, etc.
# --junitxml=... → Emit JUnit XML for CI systems (machines can parse it). # --junitxml=... → Emit JUnit XML for CI systems (machines can parse it).
# --html=... → Generate a human-friendly HTML report after each run. # --html=... → Generate a human-friendly HTML report after each run.
# --self-contained-html → Inline CSS/JS in the HTML report for easy sharing. # --self-contained-html → Inline CSS/JS in the HTML report for easy sharing.
# --tb=short → Short tracebacks to keep logs readable. # --tb=short → Short tracebacks to keep logs readable.
# Plugin note: We no longer force-load via `-p conftest_plugin` to avoid ImportError # Plugin note: We no longer force-load via `-p conftest_plugin` to avoid ImportError
# on environments where the file might be missing. Instead, `conftest.py` will # on environments where the file might be missing. Instead, `conftest.py` will
# register the plugin if present. The plugin: # register the plugin if present. The plugin:
# - extracts Title/Description/Requirements/Steps from test docstrings # - extracts Title/Description/Requirements/Steps from test docstrings
# - adds custom columns to the HTML report # - adds custom columns to the HTML report
# - writes requirements_coverage.json and summary.md in reports/ # - writes requirements_coverage.json and summary.md in reports/
addopts = -ra --junitxml=reports/junit.xml --html=reports/report.html --self-contained-html --tb=short --cov=ecu_framework --cov-report=term-missing addopts = -ra --junitxml=reports/junit.xml --html=reports/report.html --self-contained-html --tb=short --cov=ecu_framework --cov-report=term-missing
# markers: Document all custom markers so pytest doesn't warn and so usage is clear. # markers: Document all custom markers so pytest doesn't warn and so usage is clear.
# Use with: pytest -m "markername" # Use with: pytest -m "markername"
markers = markers =
hardware: requires real hardware (BabyLIN device and ECU); excluded by default in mock runs hardware: requires real hardware (LIN master + ECU); excluded by default in mock runs
babylin: tests that use the BabyLIN interface (may require hardware) babylin: tests that use the legacy BabyLIN interface (may require hardware)
unit: fast, isolated tests (no hardware, no external I/O) mum: tests that use the Melexis Universal Master (MUM) interface (requires hardware)
req_001: REQ-001 - Mock interface shall echo transmitted frames for local testing unit: fast, isolated tests (no hardware, no external I/O)
req_002: REQ-002 - Mock interface shall synthesize deterministic responses for request operations req_001: REQ-001 - Mock interface shall echo transmitted frames for local testing
req_003: REQ-003 - Mock interface shall support frame filtering by ID req_002: REQ-002 - Mock interface shall synthesize deterministic responses for request operations
req_004: REQ-004 - Mock interface shall handle timeout scenarios gracefully req_003: REQ-003 - Mock interface shall support frame filtering by ID
smoke: Basic functionality validation tests req_004: REQ-004 - Mock interface shall handle timeout scenarios gracefully
boundary: Boundary condition and edge case tests smoke: Basic functionality validation tests
boundary: Boundary condition and edge case tests
# testpaths: Where pytest looks for tests by default.
testpaths = tests # testpaths: Where pytest looks for tests by default.
testpaths = tests

View File

@ -1,17 +1,17 @@
# Core testing and utilities # Core testing and utilities
pytest>=8,<9 # Test runner and framework (parametrize, fixtures, markers) pytest>=8,<9 # Test runner and framework (parametrize, fixtures, markers)
pyyaml>=6,<7 # Parse YAML config files under ./config/ pyyaml>=6,<7 # Parse YAML config files under ./config/
pyserial>=3,<4 # Serial communication for Owon PSU and hardware tests pyserial>=3,<4 # Serial communication for Owon PSU and hardware tests
# BabyLIN SDK wrapper requires 'six' on some platforms # BabyLIN SDK wrapper requires 'six' on some platforms
six>=1.16,<2 six>=1.16,<2
# Test productivity # Test productivity
pytest-xdist>=3.6,<4 # Parallel test execution (e.g., pytest -n auto) pytest-xdist>=3.6,<4 # Parallel test execution (e.g., pytest -n auto)
pytest-html>=4,<5 # Generate HTML test reports for CI and sharing pytest-html>=4,<5 # Generate HTML test reports for CI and sharing
pytest-cov>=5,<6 # Coverage reports for Python packages pytest-cov>=5,<6 # Coverage reports for Python packages
# Logging and config extras # Logging and config extras
configparser>=6,<7 # Optional INI-based config support if you add .ini configs later configparser>=6,<7 # Optional INI-based config support if you add .ini configs later
colorlog>=6,<7 # Colored logging output for readable test logs colorlog>=6,<7 # Colored logging output for readable test logs
typing-extensions>=4.12,<5 # Typing backports for older Python versions typing-extensions>=4.12,<5 # Typing backports for older Python versions

View File

@ -1,5 +1,5 @@
# Example udev rules for BabyLin-like USB device # Example udev rules for BabyLin-like USB device
# Replace ATTRS{idVendor} and ATTRS{idProduct} with actual values # Replace ATTRS{idVendor} and ATTRS{idProduct} with actual values
# Find values with: lsusb # Find values with: lsusb
SUBSYSTEM=="usb", ATTRS{idVendor}=="1234", ATTRS{idProduct}=="5678", MODE="0660", GROUP="plugdev", TAG+="uaccess" SUBSYSTEM=="usb", ATTRS{idVendor}=="1234", ATTRS{idProduct}=="5678", MODE="0660", GROUP="plugdev", TAG+="uaccess"

View File

@ -1,17 +1,17 @@
[Unit] [Unit]
Description=ECU Tests Runner Description=ECU Tests Runner
After=network-online.target After=network-online.target
Wants=network-online.target Wants=network-online.target
[Service] [Service]
Type=oneshot Type=oneshot
WorkingDirectory=/home/pi/ecu_tests WorkingDirectory=/home/pi/ecu_tests
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
User=pi User=pi
Group=pi Group=pi
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
StandardOutput=append:/home/pi/ecu_tests/reports/service.log StandardOutput=append:/home/pi/ecu_tests/reports/service.log
StandardError=append:/home/pi/ecu_tests/reports/service.err StandardError=append:/home/pi/ecu_tests/reports/service.err
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

View File

@ -1,10 +1,10 @@
[Unit] [Unit]
Description=Schedule ECU Tests Runner Description=Schedule ECU Tests Runner
[Timer] [Timer]
OnBootSec=2min OnBootSec=2min
OnUnitActiveSec=24h OnUnitActiveSec=24h
Persistent=true Persistent=true
[Install] [Install]
WantedBy=timers.target WantedBy=timers.target

View File

@ -1,29 +1,29 @@
# Runs two pytest invocations to generate separate HTML/JUnit reports # Runs two pytest invocations to generate separate HTML/JUnit reports
# - Unit tests → reports/report-unit.html, reports/junit-unit.xml # - Unit tests → reports/report-unit.html, reports/junit-unit.xml
# - All non-unit tests → reports/report-tests.html, reports/junit-tests.xml # - All non-unit tests → reports/report-tests.html, reports/junit-tests.xml
# #
# Usage (from repo root, PowerShell): # Usage (from repo root, PowerShell):
# .\scripts\run_two_reports.ps1 # .\scripts\run_two_reports.ps1
# #
# Notes: # Notes:
# - We override pytest.ini addopts to avoid duplicate --html/--junitxml and explicitly # - We override pytest.ini addopts to avoid duplicate --html/--junitxml and explicitly
# load our custom plugin. # load our custom plugin.
# - Adjust the second marker to exclude hardware if desired (see commented example). # - Adjust the second marker to exclude hardware if desired (see commented example).
# Ensure reports directory exists # Ensure reports directory exists
if (-not (Test-Path -LiteralPath "reports")) { New-Item -ItemType Directory -Path "reports" | Out-Null } if (-not (Test-Path -LiteralPath "reports")) { New-Item -ItemType Directory -Path "reports" | Out-Null }
# 1) Unit tests report # 1) Unit tests report
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html ` pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing ` --cov=ecu_framework --cov-report=term-missing `
--html=reports/report-unit.html ` --html=reports/report-unit.html `
--junitxml=reports/junit-unit.xml ` --junitxml=reports/junit-unit.xml `
-m unit -m unit
# 2) All non-unit tests (integration/smoke/hardware) report # 2) All non-unit tests (integration/smoke/hardware) report
# To exclude hardware here, change the marker expression to: -m "not unit and not hardware" # To exclude hardware here, change the marker expression to: -m "not unit and not hardware"
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html ` pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing ` --cov=ecu_framework --cov-report=term-missing `
--html=reports/report-tests.html ` --html=reports/report-tests.html `
--junitxml=reports/junit-tests.xml ` --junitxml=reports/junit-tests.xml `
-m "not unit" -m "not unit"

View File

@ -1,83 +1,101 @@
import os import os
import pathlib import pathlib
import typing as t import typing as t
import pytest import pytest
from ecu_framework.config import load_config, EcuTestConfig from ecu_framework.config import load_config, EcuTestConfig
from ecu_framework.lin.base import LinInterface from ecu_framework.lin.base import LinInterface
from ecu_framework.lin.mock import MockBabyLinInterface from ecu_framework.lin.mock import MockBabyLinInterface
try: try:
from ecu_framework.lin.babylin import BabyLinInterface # type: ignore from ecu_framework.lin.babylin import BabyLinInterface # type: ignore
except Exception: except Exception:
BabyLinInterface = None # type: ignore BabyLinInterface = None # type: ignore
try:
WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1] from ecu_framework.lin.mum import MumLinInterface # type: ignore
except Exception:
MumLinInterface = None # type: ignore
@pytest.fixture(scope="session")
def config() -> EcuTestConfig:
cfg = load_config(str(WORKSPACE_ROOT)) WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1]
return cfg
@pytest.fixture(scope="session")
@pytest.fixture(scope="session") def config() -> EcuTestConfig:
def lin(config: EcuTestConfig) -> t.Iterator[LinInterface]: cfg = load_config(str(WORKSPACE_ROOT))
iface_type = config.interface.type return cfg
if iface_type == "mock":
lin = MockBabyLinInterface(bitrate=config.interface.bitrate, channel=config.interface.channel)
elif iface_type == "babylin": @pytest.fixture(scope="session")
if BabyLinInterface is None: def lin(config: EcuTestConfig) -> t.Iterator[LinInterface]:
pytest.skip("BabyLin interface not available in this environment") iface_type = config.interface.type
lin = BabyLinInterface( if iface_type == "mock":
dll_path=config.interface.dll_path, lin = MockBabyLinInterface(bitrate=config.interface.bitrate, channel=config.interface.channel)
bitrate=config.interface.bitrate, elif iface_type == "babylin":
channel=config.interface.channel, if BabyLinInterface is None:
node_name=config.interface.node_name, pytest.skip("BabyLin interface not available in this environment")
func_names=config.interface.func_names, lin = BabyLinInterface(
sdf_path=config.interface.sdf_path, dll_path=config.interface.dll_path,
schedule_nr=config.interface.schedule_nr, bitrate=config.interface.bitrate,
) channel=config.interface.channel,
else: node_name=config.interface.node_name,
raise RuntimeError(f"Unknown interface type: {iface_type}") func_names=config.interface.func_names,
sdf_path=config.interface.sdf_path,
lin.connect() schedule_nr=config.interface.schedule_nr,
yield lin )
lin.disconnect() elif iface_type == "mum":
if MumLinInterface is None:
pytest.skip("MUM interface not available in this environment")
@pytest.fixture(scope="session", autouse=False) if not config.interface.host:
def flash_ecu(config: EcuTestConfig, lin: LinInterface) -> None: pytest.skip("interface.host is required when interface.type == 'mum'")
if not config.flash.enabled: lin = MumLinInterface(
pytest.skip("Flashing disabled in config") host=config.interface.host,
# Lazy import to avoid dependency during mock-only runs lin_device=config.interface.lin_device,
from ecu_framework.flashing import HexFlasher power_device=config.interface.power_device,
baudrate=config.interface.bitrate,
if not config.flash.hex_path: boot_settle_seconds=config.interface.boot_settle_seconds,
pytest.skip("No HEX path provided in config") frame_lengths=config.interface.frame_lengths or None,
)
flasher = HexFlasher(lin) else:
ok = flasher.flash_hex(config.flash.hex_path) raise RuntimeError(f"Unknown interface type: {iface_type}")
if not ok:
pytest.fail("ECU flashing failed") lin.connect()
yield lin
lin.disconnect()
@pytest.fixture
def rp(record_property: "pytest.RecordProperty"):
"""Convenience reporter: attaches a key/value as a test property and echoes to captured output. @pytest.fixture(scope="session", autouse=False)
def flash_ecu(config: EcuTestConfig, lin: LinInterface) -> None:
Usage in tests: if not config.flash.enabled:
def test_something(rp): pytest.skip("Flashing disabled in config")
rp("key", value) # Lazy import to avoid dependency during mock-only runs
""" from ecu_framework.flashing import HexFlasher
def _rp(key: str, value):
# Attach property (pytest-html will show in Properties table) if not config.flash.hex_path:
record_property(str(key), value) pytest.skip("No HEX path provided in config")
# Echo to captured output for quick scanning in report details
try: flasher = HexFlasher(lin)
print(f"[prop] {key}={value}") ok = flasher.flash_hex(config.flash.hex_path)
except Exception: if not ok:
pass pytest.fail("ECU flashing failed")
return _rp
@pytest.fixture
def rp(record_property: "pytest.RecordProperty"):
"""Convenience reporter: attaches a key/value as a test property and echoes to captured output.
Usage in tests:
def test_something(rp):
rp("key", value)
"""
def _rp(key: str, value):
# Attach property (pytest-html will show in Properties table)
record_property(str(key), value)
# Echo to captured output for quick scanning in report details
try:
print(f"[prop] {key}={value}")
except Exception:
pass
return _rp

View File

@ -0,0 +1,118 @@
"""End-to-end hardware test on the MUM (Melexis Universal Master).
Power the ECU via MUM's built-in power output, then activate the RGB LED via
the master-published ALM_Req_A frame (ID 0x0A) and verify the slave responds
on ALM_Status (ID 0x11).
Frame layout (from vendor/4SEVEN_color_lib_test.ldf, ALM_Req_A @ 0x0A, 8B):
byte 0 AmbLightColourRed (0..255)
byte 1 AmbLightColourGreen (0..255)
byte 2 AmbLightColourBlue (0..255)
byte 3 AmbLightIntensity (0..255)
byte 4 AmbLightUpdate (bits 0-1) | AmbLightMode (bits 2-7)
byte 5 AmbLightDuration
byte 6 AmbLightLIDFrom
byte 7 AmbLightLIDTo
The ECU answers ALM_Req_A only when AmbLightLIDFrom <= ALMNadNo <= LIDTo, so
we read the current NAD from ALM_Status first and target that NAD exactly.
"""
from __future__ import annotations
import pytest
from ecu_framework.config import EcuTestConfig
from ecu_framework.lin.base import LinFrame, LinInterface
pytestmark = [pytest.mark.hardware, pytest.mark.mum]
ALM_REQ_A_ID = 0x0A
ALM_STATUS_ID = 0x11
DEFAULT_RGB = (0xFF, 0xFF, 0xFF)
DEFAULT_INTENSITY = 0xFF
def _build_alm_req_a_payload(
r: int, g: int, b: int,
intensity: int = DEFAULT_INTENSITY,
update: int = 0,
mode: int = 0,
duration: int = 0,
lid_from: int = 0x01,
lid_to: int = 0xFF,
) -> bytes:
"""Pack RGB+mode signals into the 8-byte ALM_Req_A payload."""
byte4 = (update & 0x03) | ((mode & 0x3F) << 2)
return bytes([
r & 0xFF, g & 0xFF, b & 0xFF,
intensity & 0xFF,
byte4 & 0xFF,
duration & 0xFF,
lid_from & 0xFF,
lid_to & 0xFF,
])
def test_mum_e2e_power_on_then_led_activate(config: EcuTestConfig, lin: LinInterface, rp):
    """
    Title: MUM E2E - Power ECU, Read NAD, Activate RGB LED
    Description:
        Drives the full hardware path through the Melexis Universal Master:
        the `lin` fixture has already powered the ECU via power_out0 and set
        up the LIN bus. This test reads ALM_Status to discover the slave's
        NAD, publishes ALM_Req_A targeting that NAD with full white at full
        intensity, and re-reads ALM_Status to confirm the bus is alive.
    Requirements: REQ-MUM-LED-ACTIVATE
    Test Steps:
        1. Skip unless interface.type == 'mum'
        2. Read ALM_Status (0x11) and extract ALMNadNo (byte 0 lower 8 bits)
        3. Build ALM_Req_A payload with RGB=(0xFF,0xFF,0xFF), intensity=0xFF,
           targeting LIDFrom=LIDTo=current_nad
        4. Publish ALM_Req_A via lin.send()
        5. Re-read ALM_Status and assert it still returns a valid frame
    Expected Result:
        - First ALM_Status read returns a 4-byte frame with a NAD in 0x01..0xFE
        - Second ALM_Status read returns a frame (bus still alive after Tx)
    """
    # Step 1: this scenario only runs on the MUM backend.
    if config.interface.type != "mum":
        pytest.skip("interface.type must be 'mum' for this test")

    # Step 2: read current NAD from ALM_Status
    initial_status = lin.receive(id=ALM_STATUS_ID, timeout=1.0)
    assert initial_status is not None, "No ALM_Status received — check MUM/ECU wiring and power"
    assert len(initial_status.data) >= 1, f"ALM_Status too short: {initial_status.data!r}"

    nad = initial_status.data[0]
    rp("alm_status_data_hex", bytes(initial_status.data).hex())
    rp("current_nad", f"0x{nad:02X}")
    assert 0x01 <= nad <= 0xFE, (
        f"ALMNadNo {nad:#x} is out of valid range; ECU may be unconfigured"
    )

    # Step 3 + 4: target the discovered NAD with full white
    red, green, blue = DEFAULT_RGB
    payload = _build_alm_req_a_payload(
        red, green, blue,
        intensity=DEFAULT_INTENSITY,
        lid_from=nad,
        lid_to=nad,
    )
    rp("tx_id", f"0x{ALM_REQ_A_ID:02X}")
    rp("tx_data_hex", payload.hex())
    rp("rgb", list(DEFAULT_RGB))
    rp("intensity", DEFAULT_INTENSITY)
    lin.send(LinFrame(id=ALM_REQ_A_ID, data=payload))

    # Step 5: confirm bus liveness after the activation frame
    followup_status = lin.receive(id=ALM_STATUS_ID, timeout=1.0)
    rp("post_status_present", followup_status is not None)
    if followup_status is not None:
        rp("post_status_data_hex", bytes(followup_status.data).hex())
    assert followup_status is not None, (
        "ALM_Status not received after publishing ALM_Req_A — ECU may have reset"
    )

View File

@ -0,0 +1,235 @@
"""End-to-end hardware test: power the ECU on via Owon PSU, switch to the
'CCO' schedule, and publish an RGB activation frame on ALM_Req_A (ID 0x0A).
Frame layout (from vendor/4SEVEN_color_lib_test.ldf, ALM_Req_A @ ID 0x0A, 8B):
byte 0 AmbLightColourRed (0..255)
byte 1 AmbLightColourGreen (0..255)
byte 2 AmbLightColourBlue (0..255)
byte 3 AmbLightIntensity (0..255)
byte 4 AmbLightUpdate (bits 0-1) | AmbLightMode (bits 2-7)
byte 5 AmbLightDuration
byte 6 AmbLightLIDFrom
byte 7 AmbLightLIDTo
Schedule 'CCO' polls ALM_Req_A every 10 ms (LDF line 252-263). Updating the
master-published frame data via BLC_mon_set_xmit makes the next CCO slot
publish the new RGB values. The slave answers ALM_Status (ID 0x11) which we
use as evidence the bus is alive.
"""
from __future__ import annotations
import time
import pytest
import serial
from ecu_framework.config import EcuTestConfig
from ecu_framework.lin.base import LinFrame, LinInterface
from ecu_framework.power import OwonPSU, SerialParams
# Run only in hardware jobs that have a BabyLIN device attached.
pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
# Frame IDs from the LDF
ALM_REQ_A_ID = 0x0A # master-published RGB control frame
ALM_STATUS_ID = 0x11 # slave-published status frame
# Default RGB activation: full white at full intensity, immediate setpoint.
DEFAULT_RGB = (0xFF, 0xFF, 0xFF)
DEFAULT_INTENSITY = 0xFF
# Config parity letter -> pyserial parity constant (consumed by _build_serial_params).
_PARITY_MAP = {
    "N": serial.PARITY_NONE,
    "E": serial.PARITY_EVEN,
    "O": serial.PARITY_ODD,
}
# Config stop-bit count -> pyserial stop-bit constant (consumed by _build_serial_params).
_STOPBITS_MAP = {
    1: serial.STOPBITS_ONE,
    2: serial.STOPBITS_TWO,
}
def _build_serial_params(psu_cfg) -> SerialParams:
    """Translate the power-supply config section into pyserial-ready SerialParams.

    Parity letters ('N'/'E'/'O') and integer stop-bit counts from the config are
    mapped to pyserial constants; anything unrecognized falls back to
    PARITY_NONE / STOPBITS_ONE.
    """
    # Normalize the config values once, then look up the pyserial constants.
    parity_key = str(psu_cfg.parity or "N").upper()
    stopbits_key = int(float(psu_cfg.stopbits or 1))
    return SerialParams(
        baudrate=int(psu_cfg.baudrate),
        timeout=float(psu_cfg.timeout),
        parity=_PARITY_MAP.get(parity_key, serial.PARITY_NONE),
        stopbits=_STOPBITS_MAP.get(stopbits_key, serial.STOPBITS_ONE),
        xonxoff=bool(psu_cfg.xonxoff),
        rtscts=bool(psu_cfg.rtscts),
        dsrdtr=bool(psu_cfg.dsrdtr),
    )
def _build_alm_req_a_payload(
    r: int, g: int, b: int,
    intensity: int = DEFAULT_INTENSITY,
    update: int = 0,  # 0 = Immediate color update
    mode: int = 0,  # 0 = Immediate Setpoint
    duration: int = 0,
    lid_from: int = 0,
    lid_to: int = 0,
) -> bytes:
    """Pack RGB-activation signals into the 8-byte ALM_Req_A payload.

    Byte 4 carries AmbLightUpdate in bits 0-1 and AmbLightMode in bits 2-7 per
    the LDF signal offsets; every other signal occupies one full byte. Values
    are masked to their field widths rather than validated.
    """
    payload = bytearray(8)
    payload[0] = r & 0xFF
    payload[1] = g & 0xFF
    payload[2] = b & 0xFF
    payload[3] = intensity & 0xFF
    # Pack the two sub-byte signals into byte 4 (Update in the 2 LSBs).
    payload[4] = (update & 0x03) | ((mode & 0x3F) << 2)
    payload[5] = duration & 0xFF
    payload[6] = lid_from & 0xFF
    payload[7] = lid_to & 0xFF
    return bytes(payload)
def test_e2e_power_on_then_cco_rgb_activate(config: EcuTestConfig, lin: LinInterface, rp):
    """
    Title: E2E - Power ECU, Switch to CCO Schedule, Activate RGB
    Description:
        Powers the ECU via the Owon PSU, switches the BabyLIN master to the
        'CCO' schedule (which polls ALM_Req_A every 10 ms per the LDF), and
        publishes an RGB activation payload on ALM_Req_A (ID 0x0A). Captures
        bus traffic for a short window to confirm activity (typically the
        slave-published ALM_Status at ID 0x11 will appear).
    Requirements: REQ-E2E-CCO-RGB
    Test Steps:
        1. Skip unless interface.type == 'babylin'
        2. Skip unless power_supply is enabled and a port is configured
        3. Open the PSU, IDN check, set V/I, enable output
        4. Wait for ECU boot (boot_settle_seconds, default 1.5 s)
        5. Stop any current schedule and start schedule 'CCO'
        6. Build the ALM_Req_A payload from RGB+intensity+mode+update
        7. Publish the payload via lin.send(LinFrame(0x0A, ...))
        8. Drain RX briefly and collect frames seen during the activation window
        9. Assert at least one frame was observed; report IDs/lengths
        10. Disable PSU output (always)
    Expected Result:
        - PSU comes up, ECU boots, CCO starts without SDK errors
        - At least one LIN frame is observed on the bus during the window
        - PSU output is disabled at end of test
    """
    # Step 1 / 2: gate on hardware availability
    if config.interface.type != "babylin":
        pytest.skip("interface.type must be 'babylin' for this E2E test")
    psu_cfg = config.power_supply
    if not psu_cfg.enabled:
        pytest.skip("Power supply disabled in config.power_supply.enabled")
    if not psu_cfg.port:
        pytest.skip("No power supply 'port' configured (config.power_supply.port)")
    # Fix: removed leftover debug print() statements here — the setpoints are
    # already recorded in the report via rp("set_voltage"/"set_current") below.
    set_v = float(psu_cfg.set_voltage)
    set_i = float(psu_cfg.set_current)
    eol = psu_cfg.eol or "\n"
    port = str(psu_cfg.port).strip()
    boot_settle_s = float(getattr(psu_cfg, "boot_settle_seconds", 1.5))
    activation_window_s = float(getattr(psu_cfg, "activation_window", 1.0))
    # The adapter is hardware-only here; the test is gated on interface.type=='babylin'.
    send_command = getattr(lin, "send_command", None)
    start_schedule = getattr(lin, "start_schedule", None)
    if send_command is None or start_schedule is None:
        pytest.skip("LIN adapter does not expose send_command/start_schedule (need BabyLinInterface)")
    # Fix: DEFAULT_RGB is already the (r, g, b) tuple we publish; no need to
    # rebuild it element by element.
    rgb = DEFAULT_RGB
    rp("interface_type", config.interface.type)
    rp("psu_port", port)
    rp("set_voltage", set_v)
    rp("set_current", set_i)
    rp("schedule", "CCO")
    rp("rgb", list(rgb))
    rp("intensity", DEFAULT_INTENSITY)
    sparams = _build_serial_params(psu_cfg)
    with OwonPSU(port, sparams, eol=eol) as psu:
        # Step 3: bring up PSU
        idn = psu.idn()
        rp("psu_idn", idn)
        assert isinstance(idn, str) and idn != "", "PSU *IDN? returned empty"
        if psu_cfg.idn_substr:
            assert str(psu_cfg.idn_substr).lower() in idn.lower(), (
                f"PSU IDN does not contain expected substring "
                f"{psu_cfg.idn_substr!r}; got {idn!r}"
            )
        psu.set_voltage(1, set_v)
        psu.set_current(1, set_i)
        try:
            psu.set_output(True)
            # Step 4: let ECU boot
            time.sleep(boot_settle_s)
            try:
                rp("measured_voltage", psu.measure_voltage())
                rp("measured_current", psu.measure_current())
            except Exception as meas_err:
                # Measurement is informational only; don't fail the test on it.
                rp("measure_error", repr(meas_err))
            # Step 5: switch to schedule CCO. The BabyLIN firmware only accepts
            # 'start schedule <index>;', so we resolve the name to its SDF index
            # via BLC_SDF_getScheduleNr (handled inside start_schedule).
            try:
                send_command("stop;")
            except Exception as e:
                rp("stop_error", repr(e))
            cco_idx = start_schedule("CCO")
            rp("schedule_index", cco_idx)
            # Step 6 + 7: build and publish the RGB activation frame.
            payload = _build_alm_req_a_payload(*rgb, intensity=DEFAULT_INTENSITY)
            rp("tx_id", f"0x{ALM_REQ_A_ID:02X}")
            rp("tx_data_hex", payload.hex())
            lin.send(LinFrame(id=ALM_REQ_A_ID, data=payload))
            # Step 8: collect frames over the activation window. CCO publishes
            # ALM_Req_A (0x0A) and ALM_Status (0x11) every ~10 ms each.
            try:
                lin.flush()
            except Exception:
                pass
            seen = []
            deadline = time.monotonic() + activation_window_s
            while time.monotonic() < deadline:
                rx = lin.receive(timeout=0.1)
                if rx is None:
                    continue
                seen.append((rx.id, bytes(rx.data)))
            ids = sorted({fid for fid, _ in seen})
            rp("rx_count", len(seen))
            rp("rx_ids", [f"0x{i:02X}" for i in ids])
            if seen:
                last_id, last_data = seen[-1]
                rp("rx_last_id", f"0x{last_id:02X}")
                rp("rx_last_data_hex", last_data.hex())
            # Step 9: minimal liveness assertion. We don't require ALM_Status
            # specifically because absence-of-slave is a separate failure mode
            # to diagnose; we just want to know the bus moved at all.
            assert seen, (
                f"No LIN frames observed during {activation_window_s:.2f}s on schedule CCO. "
                f"Check wiring, SDF, and that 'CCO' exists in the loaded SDF."
            )
            if ALM_STATUS_ID in ids:
                rp("alm_status_seen", True)
            else:
                # Not asserted, but logged so the report shows it clearly.
                rp("alm_status_seen", False)
        finally:
            # Step 10: always cut power
            try:
                psu.set_output(False)
            except Exception as off_err:
                rp("set_output_off_error", repr(off_err))

View File

@ -1,102 +1,102 @@
import time import time
import pytest import pytest
import serial import serial
from ecu_framework.power import OwonPSU, SerialParams from ecu_framework.power import OwonPSU, SerialParams
from ecu_framework.config import EcuTestConfig from ecu_framework.config import EcuTestConfig
pytestmark = [pytest.mark.hardware] pytestmark = [pytest.mark.hardware]
def test_owon_psu_idn_and_optional_set(config: EcuTestConfig, rp): def test_owon_psu_idn_and_optional_set(config: EcuTestConfig, rp):
""" """
Title: Owon PSU - IDN, Output Status, Set/Measure Verification Title: Owon PSU - IDN, Output Status, Set/Measure Verification
Description: Description:
Validates serial SCPI control of an Owon PSU: IDN retrieval, output status query, Validates serial SCPI control of an Owon PSU: IDN retrieval, output status query,
and optional set/measure cycle using values from central configuration. and optional set/measure cycle using values from central configuration.
Test Steps: Test Steps:
1. Load PSU config from EcuTestConfig.power_supply 1. Load PSU config from EcuTestConfig.power_supply
2. Open serial connection and query *IDN? 2. Open serial connection and query *IDN?
3. Query output status (output?) and record initial state 3. Query output status (output?) and record initial state
4. If configured, set voltage/current, enable output briefly, measure V/I, then disable output 4. If configured, set voltage/current, enable output briefly, measure V/I, then disable output
5. Record IDN, output status before/after, set values, and measured values in the report 5. Record IDN, output status before/after, set values, and measured values in the report
Expected Result: Expected Result:
*IDN? returns a non-empty string (containing idn_substr if configured), serial operations succeed, *IDN? returns a non-empty string (containing idn_substr if configured), serial operations succeed,
and, when enabled, the output toggles on then off with measurements returned. and, when enabled, the output toggles on then off with measurements returned.
""" """
psu_cfg = config.power_supply psu_cfg = config.power_supply
if not psu_cfg.enabled: if not psu_cfg.enabled:
pytest.skip("Power supply tests disabled in config.power_supply.enabled") pytest.skip("Power supply tests disabled in config.power_supply.enabled")
if not psu_cfg.port: if not psu_cfg.port:
pytest.skip("No power supply 'port' configured (config.power_supply.port)") pytest.skip("No power supply 'port' configured (config.power_supply.port)")
# Serial params (with sensible defaults via central config) # Serial params (with sensible defaults via central config)
baud = int(psu_cfg.baudrate) baud = int(psu_cfg.baudrate)
timeout = float(psu_cfg.timeout) timeout = float(psu_cfg.timeout)
parity = psu_cfg.parity or "N" parity = psu_cfg.parity or "N"
stopbits = psu_cfg.stopbits or 1 stopbits = psu_cfg.stopbits or 1
xonxoff = bool(psu_cfg.xonxoff) xonxoff = bool(psu_cfg.xonxoff)
rtscts = bool(psu_cfg.rtscts) rtscts = bool(psu_cfg.rtscts)
dsrdtr = bool(psu_cfg.dsrdtr) dsrdtr = bool(psu_cfg.dsrdtr)
eol = psu_cfg.eol or "\n" eol = psu_cfg.eol or "\n"
ps = SerialParams( ps = SerialParams(
baudrate=baud, baudrate=baud,
timeout=timeout, timeout=timeout,
parity={"N": serial.PARITY_NONE, "E": serial.PARITY_EVEN, "O": serial.PARITY_ODD}.get(str(parity).upper(), serial.PARITY_NONE), parity={"N": serial.PARITY_NONE, "E": serial.PARITY_EVEN, "O": serial.PARITY_ODD}.get(str(parity).upper(), serial.PARITY_NONE),
stopbits={1: serial.STOPBITS_ONE, 2: serial.STOPBITS_TWO}.get(int(float(stopbits)), serial.STOPBITS_ONE), stopbits={1: serial.STOPBITS_ONE, 2: serial.STOPBITS_TWO}.get(int(float(stopbits)), serial.STOPBITS_ONE),
xonxoff=xonxoff, xonxoff=xonxoff,
rtscts=rtscts, rtscts=rtscts,
dsrdtr=dsrdtr, dsrdtr=dsrdtr,
) )
want_substr = psu_cfg.idn_substr want_substr = psu_cfg.idn_substr
do_set = bool(psu_cfg.do_set) do_set = bool(psu_cfg.do_set)
set_v = float(psu_cfg.set_voltage) set_v = float(psu_cfg.set_voltage)
set_i = float(psu_cfg.set_current) set_i = float(psu_cfg.set_current)
port = str(psu_cfg.port).strip() port = str(psu_cfg.port).strip()
with OwonPSU(port, ps, eol=eol) as psu: with OwonPSU(port, ps, eol=eol) as psu:
# Step 2: IDN # Step 2: IDN
idn = psu.idn() idn = psu.idn()
rp("psu_idn", idn) rp("psu_idn", idn)
print(f"PSU IDN: {idn}") print(f"PSU IDN: {idn}")
assert isinstance(idn, str) assert isinstance(idn, str)
assert idn != "", "*IDN? returned empty response" assert idn != "", "*IDN? returned empty response"
if want_substr: if want_substr:
assert str(want_substr).lower() in idn.lower(), f"IDN does not contain expected substring: {want_substr}. Got: {idn}" assert str(want_substr).lower() in idn.lower(), f"IDN does not contain expected substring: {want_substr}. Got: {idn}"
# Step 3: Output status before # Step 3: Output status before
out_before = psu.output_status() out_before = psu.output_status()
rp("output_status_before", str(out_before)) rp("output_status_before", str(out_before))
print(f"Output status (before): {out_before}") print(f"Output status (before): {out_before}")
if do_set: if do_set:
# Step 4: Set and measure # Step 4: Set and measure
rp("set_voltage", set_v) rp("set_voltage", set_v)
rp("set_current", set_i) rp("set_current", set_i)
print(f"Setting: voltage={set_v}V, current={set_i}A") print(f"Setting: voltage={set_v}V, current={set_i}A")
psu.set_voltage(1, set_v) psu.set_voltage(1, set_v)
psu.set_current(1, set_i) psu.set_current(1, set_i)
psu.set_output(True) psu.set_output(True)
time.sleep(1.0) # allow settling time.sleep(1.0) # allow settling
try: try:
mv = psu.measure_voltage() mv = psu.measure_voltage()
mi = psu.measure_current() mi = psu.measure_current()
rp("measured_voltage", mv) rp("measured_voltage", mv)
rp("measured_current", mi) rp("measured_current", mi)
print(f"Measured: voltage={mv}V, current={mi}A") print(f"Measured: voltage={mv}V, current={mi}A")
finally: finally:
psu.set_output(False) psu.set_output(False)
out_after = psu.output_status() out_after = psu.output_status()
rp("output_status_after", str(out_after)) rp("output_status_after", str(out_after))
print(f"Output status (after): {out_after}") print(f"Output status (after): {out_after}")

View File

@ -1,61 +1,61 @@
import json import json
from pathlib import Path from pathlib import Path
import pytest import pytest
# Enable access to the built-in 'pytester' fixture # Enable access to the built-in 'pytester' fixture
pytest_plugins = ("pytester",) pytest_plugins = ("pytester",)
@pytest.mark.unit @pytest.mark.unit
def test_plugin_writes_artifacts(pytester): def test_plugin_writes_artifacts(pytester):
# Make the project root importable so '-p conftest_plugin' works inside pytester # Make the project root importable so '-p conftest_plugin' works inside pytester
project_root = Path(__file__).resolve().parents[2] project_root = Path(__file__).resolve().parents[2]
pytester.syspathinsert(str(project_root)) pytester.syspathinsert(str(project_root))
# Create a minimal test file that includes a rich docstring # Create a minimal test file that includes a rich docstring
pytester.makepyfile( pytester.makepyfile(
test_sample=''' test_sample='''
import pytest import pytest
@pytest.mark.req_001 @pytest.mark.req_001
def test_docstring_metadata(): def test_docstring_metadata():
""" """
Title: Example Test Title: Example Test
Description: Description:
Small sample to exercise the reporting plugin. Small sample to exercise the reporting plugin.
Requirements: REQ-001 Requirements: REQ-001
Test Steps: Test Steps:
1. do it 1. do it
Expected Result: Expected Result:
- done - done
""" """
assert True assert True
''' '''
) )
# Run pytest in the temporary test environment, loading our reporting plugin # Run pytest in the temporary test environment, loading our reporting plugin
result = pytester.runpytest( result = pytester.runpytest(
"-q", "-q",
"-p", "-p",
"conftest_plugin", "conftest_plugin",
"--html=reports/report.html", "--html=reports/report.html",
"--self-contained-html", "--self-contained-html",
"--junitxml=reports/junit.xml", "--junitxml=reports/junit.xml",
) )
result.assert_outcomes(passed=1) result.assert_outcomes(passed=1)
# Check for the JSON coverage artifact # Check for the JSON coverage artifact
cov = pytester.path / "reports" / "requirements_coverage.json" cov = pytester.path / "reports" / "requirements_coverage.json"
assert cov.is_file() assert cov.is_file()
data = json.loads(cov.read_text()) data = json.loads(cov.read_text())
# Validate REQ mapping and presence of artifacts # Validate REQ mapping and presence of artifacts
assert "REQ-001" in data["requirements"] assert "REQ-001" in data["requirements"]
assert data["files"]["html"].endswith("report.html") assert data["files"]["html"].endswith("report.html")
assert data["files"]["junit"].endswith("junit.xml") assert data["files"]["junit"].endswith("junit.xml")
# Check that the CI summary exists # Check that the CI summary exists
summary = pytester.path / "reports" / "summary.md" summary = pytester.path / "reports" / "summary.md"
assert summary.is_file() assert summary.is_file()

View File

@ -1,48 +1,48 @@
import os import os
import pathlib import pathlib
import pytest import pytest
# Hardware + babylin + smoke: this is the canonical end-to-end schedule flow # Hardware + babylin + smoke: this is the canonical end-to-end schedule flow
pytestmark = [pytest.mark.hardware, pytest.mark.babylin, pytest.mark.smoke] pytestmark = [pytest.mark.hardware, pytest.mark.babylin, pytest.mark.smoke]
WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1] WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1]
def test_babylin_sdk_example_flow(config, lin, rp): def test_babylin_sdk_example_flow(config, lin, rp):
""" """
Title: BabyLIN SDK Example Flow - Open, Load SDF, Start Schedule, Rx Timeout Title: BabyLIN SDK Example Flow - Open, Load SDF, Start Schedule, Rx Timeout
Description: Description:
Mirrors the vendor example flow: discover/open, load SDF, start a Mirrors the vendor example flow: discover/open, load SDF, start a
schedule, and attempt a receive. Validates that the adapter can perform schedule, and attempt a receive. Validates that the adapter can perform
the essential control sequence without exceptions and that the receive the essential control sequence without exceptions and that the receive
path is operational even if it times out. path is operational even if it times out.
Requirements: REQ-HW-OPEN, REQ-HW-SDF, REQ-HW-SCHEDULE Requirements: REQ-HW-OPEN, REQ-HW-SDF, REQ-HW-SCHEDULE
Preconditions: Preconditions:
- ECU_TESTS_CONFIG points to a hardware YAML with interface.sdf_path and schedule_nr - ECU_TESTS_CONFIG points to a hardware YAML with interface.sdf_path and schedule_nr
- BabyLIN_library.py and native libs placed per vendor/README.md - BabyLIN_library.py and native libs placed per vendor/README.md
Test Steps: Test Steps:
1. Verify hardware config requests the BabyLIN SDK with SDF path 1. Verify hardware config requests the BabyLIN SDK with SDF path
2. Connect via fixture (opens device, loads SDF, starts schedule) 2. Connect via fixture (opens device, loads SDF, starts schedule)
3. Try to receive a frame with a short timeout 3. Try to receive a frame with a short timeout
4. Assert no crash; accept None or a LinFrame (environment-dependent) 4. Assert no crash; accept None or a LinFrame (environment-dependent)
Expected Result: Expected Result:
- No exceptions during open/load/start - No exceptions during open/load/start
- Receive returns None (timeout) or a LinFrame - Receive returns None (timeout) or a LinFrame
""" """
# Step 1: Ensure config is set for hardware with SDK wrapper # Step 1: Ensure config is set for hardware with SDK wrapper
assert config.interface.type == "babylin" assert config.interface.type == "babylin"
assert config.interface.sdf_path is not None assert config.interface.sdf_path is not None
rp("sdf_path", str(config.interface.sdf_path)) rp("sdf_path", str(config.interface.sdf_path))
rp("schedule_nr", int(config.interface.schedule_nr)) rp("schedule_nr", int(config.interface.schedule_nr))
# Step 3: Attempt a short receive to validate RX path while schedule runs # Step 3: Attempt a short receive to validate RX path while schedule runs
rx = lin.receive(timeout=0.2) rx = lin.receive(timeout=0.2)
rp("receive_result", "timeout" if rx is None else "frame") rp("receive_result", "timeout" if rx is None else "frame")
# Step 4: Accept timeout or a valid frame object depending on bus activity # Step 4: Accept timeout or a valid frame object depending on bus activity
assert rx is None or hasattr(rx, "id") assert rx is None or hasattr(rx, "id")

View File

@ -1,34 +1,34 @@
import pytest import pytest
# Mark entire module as hardware + babylin so it's easy to select/deselect via -m # Mark entire module as hardware + babylin so it's easy to select/deselect via -m
pytestmark = [pytest.mark.hardware, pytest.mark.babylin] pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
def test_babylin_connect_receive_timeout(lin, rp): def test_babylin_connect_receive_timeout(lin, rp):
""" """
Title: BabyLIN Hardware Smoke - Connect and Timed Receive Title: BabyLIN Hardware Smoke - Connect and Timed Receive
Description: Description:
Minimal hardware sanity check that relies on the configured fixtures to Minimal hardware sanity check that relies on the configured fixtures to
connect to a BabyLIN device and perform a short receive call. connect to a BabyLIN device and perform a short receive call.
The test is intentionally permissive: it accepts either a valid LinFrame The test is intentionally permissive: it accepts either a valid LinFrame
or a None (timeout) as success, focusing on verifying that the adapter or a None (timeout) as success, focusing on verifying that the adapter
is functional and not crashing. is functional and not crashing.
Requirements: REQ-HW-SMOKE Requirements: REQ-HW-SMOKE
Test Steps: Test Steps:
1. Use the 'lin' fixture to connect to the BabyLIN SDK adapter 1. Use the 'lin' fixture to connect to the BabyLIN SDK adapter
2. Call receive() with a short timeout 2. Call receive() with a short timeout
3. Assert the outcome is either a LinFrame or None (timeout) 3. Assert the outcome is either a LinFrame or None (timeout)
Expected Result: Expected Result:
- No exceptions are raised - No exceptions are raised
- Return value is None (timeout) or an object with an 'id' attribute - Return value is None (timeout) or an object with an 'id' attribute
""" """
# Step 2: Perform a short receive to verify operability # Step 2: Perform a short receive to verify operability
rx = lin.receive(timeout=0.2) rx = lin.receive(timeout=1.0) # 1 second timeout
rp("receive_result", "timeout" if rx is None else "frame") rp("receive_result", "timeout" if rx is None else "frame")
# Step 3: Accept either a timeout (None) or a frame-like object # Step 3: Accept either a timeout (None) or a frame-like object
assert rx is None or hasattr(rx, "id") assert rx is None or hasattr(rx, "id")

View File

@ -1,145 +1,145 @@
import pytest import pytest
from ecu_framework.lin.base import LinFrame from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.babylin import BabyLinInterface from ecu_framework.lin.babylin import BabyLinInterface
# Inject the pure-Python mock wrapper to run SDK adapter tests without hardware # Inject the pure-Python mock wrapper to run SDK adapter tests without hardware
from vendor import mock_babylin_wrapper as mock_bl from vendor import mock_babylin_wrapper as mock_bl
class _MockBytesOnly: class _MockBytesOnly:
"""Shim exposing BLC_sendRawMasterRequest(bytes) only, to test bytes signature. """Shim exposing BLC_sendRawMasterRequest(bytes) only, to test bytes signature.
We wrap the existing mock but override BLC_sendRawMasterRequest to accept We wrap the existing mock but override BLC_sendRawMasterRequest to accept
only the bytes payload form. The response still uses the deterministic pattern only the bytes payload form. The response still uses the deterministic pattern
implied by the payload length (zeros are fine; we assert by length here). implied by the payload length (zeros are fine; we assert by length here).
""" """
@staticmethod @staticmethod
def create_BabyLIN(): def create_BabyLIN():
base = mock_bl.create_BabyLIN() base = mock_bl.create_BabyLIN()
def bytes_only(channel, frame_id, payload): def bytes_only(channel, frame_id, payload):
# Delegate to the base mock's bytes variant by ensuring we pass bytes # Delegate to the base mock's bytes variant by ensuring we pass bytes
if not isinstance(payload, (bytes, bytearray)): if not isinstance(payload, (bytes, bytearray)):
raise TypeError("expected bytes payload") raise TypeError("expected bytes payload")
return base.BLC_sendRawMasterRequest(channel, frame_id, bytes(payload)) return base.BLC_sendRawMasterRequest(channel, frame_id, bytes(payload))
# Monkey-patch the method to raise TypeError when a length is provided # Monkey-patch the method to raise TypeError when a length is provided
def patched_raw_req(*args): def patched_raw_req(*args):
# Expected signature: (channel, frame_id, payload_bytes) # Expected signature: (channel, frame_id, payload_bytes)
if len(args) != 3 or not isinstance(args[2], (bytes, bytearray)): if len(args) != 3 or not isinstance(args[2], (bytes, bytearray)):
raise TypeError("bytes signature only") raise TypeError("bytes signature only")
return bytes_only(*args) return bytes_only(*args)
base.BLC_sendRawMasterRequest = patched_raw_req base.BLC_sendRawMasterRequest = patched_raw_req
return base return base
@pytest.mark.babylin @pytest.mark.babylin
@pytest.mark.smoke @pytest.mark.smoke
@pytest.mark.req_001 @pytest.mark.req_001
def test_babylin_sdk_adapter_with_mock_wrapper(rp): def test_babylin_sdk_adapter_with_mock_wrapper(rp):
""" """
Title: SDK Adapter - Send/Receive with Mock Wrapper Title: SDK Adapter - Send/Receive with Mock Wrapper
Description: Description:
Validate that the BabyLIN SDK-based adapter can send and receive using Validate that the BabyLIN SDK-based adapter can send and receive using
a mocked wrapper exposing BLC_* APIs. The mock implements loopback by a mocked wrapper exposing BLC_* APIs. The mock implements loopback by
echoing transmitted frames into the receive queue. echoing transmitted frames into the receive queue.
Requirements: REQ-001 Requirements: REQ-001
Test Steps: Test Steps:
1. Construct BabyLinInterface with injected mock wrapper 1. Construct BabyLinInterface with injected mock wrapper
2. Connect (discovers port, opens, loads SDF, starts schedule) 2. Connect (discovers port, opens, loads SDF, starts schedule)
3. Send a frame via BLC_mon_set_xmit 3. Send a frame via BLC_mon_set_xmit
4. Receive the same frame via BLC_getNextFrameTimeout 4. Receive the same frame via BLC_getNextFrameTimeout
5. Disconnect 5. Disconnect
Expected Result: Expected Result:
- Received frame matches sent frame (ID and payload) - Received frame matches sent frame (ID and payload)
""" """
# Step 1-2: Create adapter with wrapper injection and connect # Step 1-2: Create adapter with wrapper injection and connect
lin = BabyLinInterface(sdf_path="./vendor/Example.sdf", schedule_nr=0, wrapper_module=mock_bl) lin = BabyLinInterface(sdf_path="./vendor/Example.sdf", schedule_nr=0, wrapper_module=mock_bl)
rp("wrapper", "mock_bl") rp("wrapper", "mock_bl")
lin.connect() lin.connect()
try: try:
# Step 3: Transmit a known payload on a chosen ID # Step 3: Transmit a known payload on a chosen ID
tx = LinFrame(id=0x12, data=bytes([0xAA, 0x55, 0x01])) tx = LinFrame(id=0x12, data=bytes([0xAA, 0x55, 0x01]))
lin.send(tx) lin.send(tx)
# Step 4: Receive from the mock's RX queue (loopback) # Step 4: Receive from the mock's RX queue (loopback)
rx = lin.receive(timeout=0.1) rx = lin.receive(timeout=0.1)
rp("tx_id", f"0x{tx.id:02X}") rp("tx_id", f"0x{tx.id:02X}")
rp("tx_data", list(tx.data)) rp("tx_data", list(tx.data))
rp("rx_present", rx is not None) rp("rx_present", rx is not None)
# Step 5: Validate ID and payload integrity # Step 5: Validate ID and payload integrity
assert rx is not None, "Expected a frame from mock loopback" assert rx is not None, "Expected a frame from mock loopback"
assert rx.id == tx.id assert rx.id == tx.id
assert rx.data == tx.data assert rx.data == tx.data
finally: finally:
# Always disconnect to leave the mock in a clean state # Always disconnect to leave the mock in a clean state
lin.disconnect() lin.disconnect()
@pytest.mark.babylin @pytest.mark.babylin
@pytest.mark.smoke @pytest.mark.smoke
@pytest.mark.req_001 @pytest.mark.req_001
@pytest.mark.parametrize("wrapper,expect_pattern", [ @pytest.mark.parametrize("wrapper,expect_pattern", [
(mock_bl, True), # length signature available: expect deterministic pattern (mock_bl, True), # length signature available: expect deterministic pattern
(_MockBytesOnly, False), # bytes-only signature: expect zeros of requested length (_MockBytesOnly, False), # bytes-only signature: expect zeros of requested length
]) ])
def test_babylin_master_request_with_mock_wrapper(wrapper, expect_pattern, rp): def test_babylin_master_request_with_mock_wrapper(wrapper, expect_pattern, rp):
""" """
Title: SDK Adapter - Master Request using Mock Wrapper Title: SDK Adapter - Master Request using Mock Wrapper
Description: Description:
Verify that request() prefers the SDK's BLC_sendRawMasterRequest when Verify that request() prefers the SDK's BLC_sendRawMasterRequest when
available. The mock wrapper enqueues a deterministic response where available. The mock wrapper enqueues a deterministic response where
data[i] = (id + i) & 0xFF, allowing predictable assertions. data[i] = (id + i) & 0xFF, allowing predictable assertions.
Requirements: REQ-001 Requirements: REQ-001
Test Steps: Test Steps:
1. Construct BabyLinInterface with injected mock wrapper 1. Construct BabyLinInterface with injected mock wrapper
2. Connect (mock open/initialize) 2. Connect (mock open/initialize)
3. Issue a master request for a specific ID and length 3. Issue a master request for a specific ID and length
4. Receive the response frame 4. Receive the response frame
5. Validate ID and deterministic payload pattern 5. Validate ID and deterministic payload pattern
Expected Result: Expected Result:
- Response frame ID matches request ID - Response frame ID matches request ID
- Response data length matches requested length - Response data length matches requested length
- Response data follows deterministic pattern - Response data follows deterministic pattern
""" """
# Step 1-2: Initialize mock-backed adapter # Step 1-2: Initialize mock-backed adapter
lin = BabyLinInterface(wrapper_module=wrapper) lin = BabyLinInterface(wrapper_module=wrapper)
rp("wrapper", getattr(wrapper, "__name__", str(wrapper))) rp("wrapper", getattr(wrapper, "__name__", str(wrapper)))
lin.connect() lin.connect()
try: try:
# Step 3: Request 4 bytes for ID 0x22 # Step 3: Request 4 bytes for ID 0x22
req_id = 0x22 req_id = 0x22
length = 4 length = 4
rp("req_id", f"0x{req_id:02X}") rp("req_id", f"0x{req_id:02X}")
rp("req_len", length) rp("req_len", length)
rx = lin.request(id=req_id, length=length, timeout=0.1) rx = lin.request(id=req_id, length=length, timeout=0.1)
# Step 4-5: Validate response # Step 4-5: Validate response
assert rx is not None, "Expected a response from mock master request" assert rx is not None, "Expected a response from mock master request"
assert rx.id == req_id assert rx.id == req_id
if expect_pattern: if expect_pattern:
# length-signature mock returns deterministic pattern # length-signature mock returns deterministic pattern
expected = bytes(((req_id + i) & 0xFF) for i in range(length)) expected = bytes(((req_id + i) & 0xFF) for i in range(length))
rp("expected_data", list(expected)) rp("expected_data", list(expected))
rp("rx_data", list(rx.data)) rp("rx_data", list(rx.data))
assert rx.data == expected assert rx.data == expected
else: else:
# bytes-only mock returns exactly the bytes we sent (zeros of requested length) # bytes-only mock returns exactly the bytes we sent (zeros of requested length)
expected = bytes([0] * length) expected = bytes([0] * length)
rp("expected_data", list(expected)) rp("expected_data", list(expected))
rp("rx_data", list(rx.data)) rp("rx_data", list(rx.data))
assert rx.data == expected assert rx.data == expected
finally: finally:
lin.disconnect() lin.disconnect()

View File

@ -1,19 +1,19 @@
import pytest import pytest
# This module is gated by 'hardware' and 'babylin' markers to only run in hardware jobs # This module is gated by 'hardware' and 'babylin' markers to only run in hardware jobs
pytestmark = [pytest.mark.hardware, pytest.mark.babylin] pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
def test_babylin_placeholder(): def test_babylin_placeholder():
""" """
Title: Hardware Test Placeholder Title: Hardware Test Placeholder
Description: Description:
Minimal placeholder to verify hardware selection and CI plumbing. It Minimal placeholder to verify hardware selection and CI plumbing. It
ensures that -m hardware pipelines and marker-based selection work as ensures that -m hardware pipelines and marker-based selection work as
expected even when no specific hardware assertions are needed. expected even when no specific hardware assertions are needed.
Expected Result: Expected Result:
- Always passes. - Always passes.
""" """
assert True assert True

View File

@ -1,190 +1,202 @@
import pytest import pytest
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mock import MockBabyLinInterface
class TestMockLinInterface:
"""Test suite validating the pure-Python mock LIN interface behavior.
@pytest.fixture(scope="module")
Coverage goals: def lin():
- REQ-001: Echo loopback for local testing (send -> receive same frame) """Module-local override: these tests are explicitly mock-only and must
- REQ-002: Deterministic master request responses (no randomness) not depend on whatever real-hardware interface the central config selects."""
- REQ-003: Frame ID filtering in receive() iface = MockBabyLinInterface(bitrate=19200, channel=0)
- REQ-004: Graceful handling of timeout when no frame is available iface.connect()
yield iface
Notes: iface.disconnect()
- These tests run entirely without hardware and should be fast and stable.
- The injected mock interface enqueues frames on transmit to emulate a bus.
- Deterministic responses allow exact byte-for-byte assertions. class TestMockLinInterface:
""" """Test suite validating the pure-Python mock LIN interface behavior.
@pytest.mark.smoke Coverage goals:
@pytest.mark.req_001 - REQ-001: Echo loopback for local testing (send -> receive same frame)
@pytest.mark.req_003 - REQ-002: Deterministic master request responses (no randomness)
def test_mock_send_receive_echo(self, lin, rp): - REQ-003: Frame ID filtering in receive()
""" - REQ-004: Graceful handling of timeout when no frame is available
Title: Mock LIN Interface - Send/Receive Echo Test
Notes:
Description: - These tests run entirely without hardware and should be fast and stable.
Validates that the mock LIN interface correctly echoes frames sent on the bus, - The injected mock interface enqueues frames on transmit to emulate a bus.
enabling loopback testing without hardware dependencies. - Deterministic responses allow exact byte-for-byte assertions.
"""
Requirements: REQ-001, REQ-003
@pytest.mark.smoke
Test Steps: @pytest.mark.req_001
1. Create a LIN frame with specific ID and data payload @pytest.mark.req_003
2. Send the frame via the mock interface def test_mock_send_receive_echo(self, lin, rp):
3. Attempt to receive the echoed frame with ID filtering """
4. Verify the received frame matches the transmitted frame exactly Title: Mock LIN Interface - Send/Receive Echo Test
Expected Result: Description:
- Frame is successfully echoed by mock interface Validates that the mock LIN interface correctly echoes frames sent on the bus,
- Received frame ID matches transmitted frame ID (0x12) enabling loopback testing without hardware dependencies.
- Received frame data payload matches transmitted data [1, 2, 3]
""" Requirements: REQ-001, REQ-003
# Step 1: Create test frame with known ID and payload
test_frame = LinFrame(id=0x12, data=bytes([1, 2, 3])) Test Steps:
rp("lin_type", "mock") 1. Create a LIN frame with specific ID and data payload
rp("tx_id", f"0x{test_frame.id:02X}") 2. Send the frame via the mock interface
rp("tx_data", list(test_frame.data)) 3. Attempt to receive the echoed frame with ID filtering
4. Verify the received frame matches the transmitted frame exactly
# Step 2: Transmit frame via mock interface (mock will enqueue to RX)
lin.send(test_frame) Expected Result:
- Frame is successfully echoed by mock interface
# Step 3: Receive echoed frame with ID filtering and timeout - Received frame ID matches transmitted frame ID (0x12)
received_frame = lin.receive(id=0x12, timeout=0.5) - Received frame data payload matches transmitted data [1, 2, 3]
rp("rx_present", received_frame is not None) """
if received_frame is not None: # Step 1: Create test frame with known ID and payload
rp("rx_id", f"0x{received_frame.id:02X}") test_frame = LinFrame(id=0x12, data=bytes([1, 2, 3]))
rp("rx_data", list(received_frame.data)) rp("lin_type", "mock")
rp("tx_id", f"0x{test_frame.id:02X}")
# Step 4: Validate echo functionality and payload integrity rp("tx_data", list(test_frame.data))
assert received_frame is not None, "Mock interface should echo transmitted frames"
assert received_frame.id == test_frame.id, f"Expected ID {test_frame.id:#x}, got {received_frame.id:#x}" # Step 2: Transmit frame via mock interface (mock will enqueue to RX)
assert received_frame.data == test_frame.data, f"Expected data {test_frame.data!r}, got {received_frame.data!r}" lin.send(test_frame)
@pytest.mark.smoke # Step 3: Receive echoed frame with ID filtering and timeout
@pytest.mark.req_002 received_frame = lin.receive(id=0x12, timeout=0.5)
def test_mock_request_synthesized_response(self, lin, rp): rp("rx_present", received_frame is not None)
""" if received_frame is not None:
Title: Mock LIN Interface - Master Request Response Test rp("rx_id", f"0x{received_frame.id:02X}")
rp("rx_data", list(received_frame.data))
Description:
Validates that the mock interface synthesizes deterministic responses # Step 4: Validate echo functionality and payload integrity
for master request operations, simulating slave node behavior. assert received_frame is not None, "Mock interface should echo transmitted frames"
assert received_frame.id == test_frame.id, f"Expected ID {test_frame.id:#x}, got {received_frame.id:#x}"
Requirements: REQ-002 assert received_frame.data == test_frame.data, f"Expected data {test_frame.data!r}, got {received_frame.data!r}"
Test Steps: @pytest.mark.smoke
1. Issue a master request for specific frame ID and data length @pytest.mark.req_002
2. Verify mock interface generates a response frame def test_mock_request_synthesized_response(self, lin, rp):
3. Validate response frame ID matches request ID """
4. Verify response data length matches requested length Title: Mock LIN Interface - Master Request Response Test
5. Confirm response data is deterministic (not random)
Description:
Expected Result: Validates that the mock interface synthesizes deterministic responses
- Mock interface generates response within timeout period for master request operations, simulating slave node behavior.
- Response frame ID matches request ID (0x21)
- Response data length equals requested length (4 bytes) Requirements: REQ-002
- Response data follows deterministic pattern: [id+0, id+1, id+2, id+3]
""" Test Steps:
# Step 1: Issue master request with specific parameters 1. Issue a master request for specific frame ID and data length
request_id = 0x21 2. Verify mock interface generates a response frame
requested_length = 4 3. Validate response frame ID matches request ID
4. Verify response data length matches requested length
# Step 2: Execute request operation; mock synthesizes deterministic bytes 5. Confirm response data is deterministic (not random)
rp("lin_type", "mock")
rp("req_id", f"0x{request_id:02X}") Expected Result:
rp("req_len", requested_length) - Mock interface generates response within timeout period
response_frame = lin.request(id=request_id, length=requested_length, timeout=0.5) - Response frame ID matches request ID (0x21)
- Response data length equals requested length (4 bytes)
# Step 3: Validate response generation - Response data follows deterministic pattern: [id+0, id+1, id+2, id+3]
assert response_frame is not None, "Mock interface should generate response for master requests" """
# Step 1: Issue master request with specific parameters
# Step 4: Verify response frame properties (ID and length) request_id = 0x21
assert response_frame.id == request_id, f"Response ID {response_frame.id:#x} should match request ID {request_id:#x}" requested_length = 4
assert len(response_frame.data) == requested_length, f"Response length {len(response_frame.data)} should match requested length {requested_length}"
# Step 2: Execute request operation; mock synthesizes deterministic bytes
# Step 5: Validate deterministic response pattern rp("lin_type", "mock")
expected_data = bytes((request_id + i) & 0xFF for i in range(requested_length)) rp("req_id", f"0x{request_id:02X}")
rp("rx_data", list(response_frame.data) if response_frame else None) rp("req_len", requested_length)
rp("expected_data", list(expected_data)) response_frame = lin.request(id=request_id, length=requested_length, timeout=0.5)
assert response_frame.data == expected_data, f"Response data {response_frame.data!r} should follow deterministic pattern {expected_data!r}"
# Step 3: Validate response generation
@pytest.mark.smoke assert response_frame is not None, "Mock interface should generate response for master requests"
@pytest.mark.req_004
def test_mock_receive_timeout_behavior(self, lin, rp): # Step 4: Verify response frame properties (ID and length)
""" assert response_frame.id == request_id, f"Response ID {response_frame.id:#x} should match request ID {request_id:#x}"
Title: Mock LIN Interface - Receive Timeout Test assert len(response_frame.data) == requested_length, f"Response length {len(response_frame.data)} should match requested length {requested_length}"
Description: # Step 5: Validate deterministic response pattern
Validates that the mock interface properly handles timeout scenarios expected_data = bytes((request_id + i) & 0xFF for i in range(requested_length))
when no matching frames are available for reception. rp("rx_data", list(response_frame.data) if response_frame else None)
rp("expected_data", list(expected_data))
Requirements: REQ-004 assert response_frame.data == expected_data, f"Response data {response_frame.data!r} should follow deterministic pattern {expected_data!r}"
Test Steps: @pytest.mark.smoke
1. Attempt to receive a frame with non-existent ID @pytest.mark.req_004
2. Use short timeout to avoid blocking test execution def test_mock_receive_timeout_behavior(self, lin, rp):
3. Verify timeout behavior returns None rather than blocking indefinitely """
Title: Mock LIN Interface - Receive Timeout Test
Expected Result:
- Receive operation returns None when no matching frames available Description:
- Operation completes within specified timeout period Validates that the mock interface properly handles timeout scenarios
- No exceptions or errors during timeout scenario when no matching frames are available for reception.
"""
# Step 1: Attempt to receive frame with ID that hasn't been transmitted Requirements: REQ-004
non_existent_id = 0xFF
short_timeout = 0.1 # 100ms timeout Test Steps:
1. Attempt to receive a frame with non-existent ID
# Step 2: Execute receive with timeout (should return None quickly) 2. Use short timeout to avoid blocking test execution
rp("lin_type", "mock") 3. Verify timeout behavior returns None rather than blocking indefinitely
rp("rx_id", f"0x{non_existent_id:02X}")
rp("timeout_s", short_timeout) Expected Result:
result = lin.receive(id=non_existent_id, timeout=short_timeout) - Receive operation returns None when no matching frames available
rp("rx_present", result is not None) - Operation completes within specified timeout period
- No exceptions or errors during timeout scenario
# Step 3: Verify proper timeout behavior (no exceptions, returns None) """
assert result is None, "Receive operation should return None when no matching frames available" # Step 1: Attempt to receive frame with ID that hasn't been transmitted
non_existent_id = 0xFF
@pytest.mark.boundary short_timeout = 0.1 # 100ms timeout
@pytest.mark.req_001
@pytest.mark.req_003 # Step 2: Execute receive with timeout (should return None quickly)
@pytest.mark.parametrize("frame_id,data_payload", [ rp("lin_type", "mock")
(0x01, bytes([0x55])), rp("rx_id", f"0x{non_existent_id:02X}")
(0x3F, bytes([0xAA, 0x55])), rp("timeout_s", short_timeout)
(0x20, bytes([0x01, 0x02, 0x03, 0x04, 0x05])), result = lin.receive(id=non_existent_id, timeout=short_timeout)
(0x15, bytes([0xFF, 0x00, 0xCC, 0x33, 0xF0, 0x0F, 0xA5, 0x5A])), rp("rx_present", result is not None)
])
def test_mock_frame_validation_boundaries(self, lin, rp, frame_id, data_payload): # Step 3: Verify proper timeout behavior (no exceptions, returns None)
""" assert result is None, "Receive operation should return None when no matching frames available"
Title: Mock LIN Interface - Frame Validation Boundaries Test
@pytest.mark.boundary
Description: @pytest.mark.req_001
Validates mock interface handling of various frame configurations @pytest.mark.req_003
including boundary conditions for frame IDs and data lengths. @pytest.mark.parametrize("frame_id,data_payload", [
(0x01, bytes([0x55])),
Requirements: REQ-001, REQ-003 (0x3F, bytes([0xAA, 0x55])),
(0x20, bytes([0x01, 0x02, 0x03, 0x04, 0x05])),
Test Steps: (0x15, bytes([0xFF, 0x00, 0xCC, 0x33, 0xF0, 0x0F, 0xA5, 0x5A])),
1. Test various valid frame ID values (0x01 to 0x3F) ])
2. Test different data payload lengths (1 to 8 bytes) def test_mock_frame_validation_boundaries(self, lin, rp, frame_id, data_payload):
3. Verify proper echo behavior for all valid combinations """
Title: Mock LIN Interface - Frame Validation Boundaries Test
Expected Result:
- All valid frame configurations are properly echoed Description:
- Frame ID and data integrity preserved across echo operation Validates mock interface handling of various frame configurations
""" including boundary conditions for frame IDs and data lengths.
# Step 1: Create frame with parameterized values
test_frame = LinFrame(id=frame_id, data=data_payload) Requirements: REQ-001, REQ-003
rp("lin_type", "mock")
rp("tx_id", f"0x{frame_id:02X}") Test Steps:
rp("tx_len", len(data_payload)) 1. Test various valid frame ID values (0x01 to 0x3F)
2. Test different data payload lengths (1 to 8 bytes)
# Step 2: Send and receive frame 3. Verify proper echo behavior for all valid combinations
lin.send(test_frame)
received_frame = lin.receive(id=frame_id, timeout=0.5) Expected Result:
- All valid frame configurations are properly echoed
# Step 3: Validate frame integrity across IDs and payload sizes - Frame ID and data integrity preserved across echo operation
assert received_frame is not None, f"Frame with ID {frame_id:#x} should be echoed" """
assert received_frame.id == frame_id, f"Frame ID should be preserved: expected {frame_id:#x}" # Step 1: Create frame with parameterized values
assert received_frame.data == data_payload, f"Frame data should be preserved for ID {frame_id:#x}" test_frame = LinFrame(id=frame_id, data=data_payload)
rp("lin_type", "mock")
rp("tx_id", f"0x{frame_id:02X}")
rp("tx_len", len(data_payload))
# Step 2: Send and receive frame
lin.send(test_frame)
received_frame = lin.receive(id=frame_id, timeout=0.5)
# Step 3: Validate frame integrity across IDs and payload sizes
assert received_frame is not None, f"Frame with ID {frame_id:#x} should be echoed"
assert received_frame.id == frame_id, f"Frame ID should be preserved: expected {frame_id:#x}"
assert received_frame.data == data_payload, f"Frame data should be preserved for ID {frame_id:#x}"

View File

@ -1,22 +1,22 @@
import pytest import pytest
from ecu_framework.lin.babylin import BabyLinInterface from ecu_framework.lin.babylin import BabyLinInterface
from vendor import mock_babylin_wrapper as mock_bl from vendor import mock_babylin_wrapper as mock_bl
class _ErrMock: class _ErrMock:
@staticmethod @staticmethod
def create_BabyLIN(): def create_BabyLIN():
bl = mock_bl.create_BabyLIN() bl = mock_bl.create_BabyLIN()
# Force loadSDF to return a non-OK code # Force loadSDF to return a non-OK code
def fail_load(*args, **kwargs): def fail_load(*args, **kwargs):
return 1 # non BL_OK return 1 # non BL_OK
bl.BLC_loadSDF = fail_load bl.BLC_loadSDF = fail_load
return bl return bl
@pytest.mark.unit @pytest.mark.unit
def test_connect_sdf_error_raises(): def test_connect_sdf_error_raises():
lin = BabyLinInterface(sdf_path="dummy.sdf", wrapper_module=_ErrMock) lin = BabyLinInterface(sdf_path="dummy.sdf", wrapper_module=_ErrMock)
with pytest.raises(RuntimeError): with pytest.raises(RuntimeError):
lin.connect() lin.connect()

View File

@ -1,40 +1,40 @@
import os import os
import json import json
import pathlib import pathlib
import pytest import pytest
from ecu_framework.config import load_config from ecu_framework.config import load_config
@pytest.mark.unit @pytest.mark.unit
def test_config_precedence_env_overrides(monkeypatch, tmp_path, rp): def test_config_precedence_env_overrides(monkeypatch, tmp_path, rp):
# Create a YAML file to use via env var # Create a YAML file to use via env var
yaml_path = tmp_path / "cfg.yaml" yaml_path = tmp_path / "cfg.yaml"
yaml_path.write_text("interface:\n type: babylin\n channel: 7\n") yaml_path.write_text("interface:\n type: babylin\n channel: 7\n")
# Point ECU_TESTS_CONFIG to env YAML # Point ECU_TESTS_CONFIG to env YAML
monkeypatch.setenv("ECU_TESTS_CONFIG", str(yaml_path)) monkeypatch.setenv("ECU_TESTS_CONFIG", str(yaml_path))
# Apply overrides on top # Apply overrides on top
cfg = load_config(workspace_root=str(tmp_path), overrides={"interface": {"channel": 9}}) cfg = load_config(workspace_root=str(tmp_path), overrides={"interface": {"channel": 9}})
rp("config_source", "env+overrides") rp("config_source", "env+overrides")
rp("interface_type", cfg.interface.type) rp("interface_type", cfg.interface.type)
rp("interface_channel", cfg.interface.channel) rp("interface_channel", cfg.interface.channel)
# Env file applied # Env file applied
assert cfg.interface.type == "babylin" assert cfg.interface.type == "babylin"
# Overrides win # Overrides win
assert cfg.interface.channel == 9 assert cfg.interface.channel == 9
@pytest.mark.unit @pytest.mark.unit
def test_config_defaults_when_no_file(monkeypatch, rp): def test_config_defaults_when_no_file(monkeypatch, rp):
# Ensure no env path # Ensure no env path
monkeypatch.delenv("ECU_TESTS_CONFIG", raising=False) monkeypatch.delenv("ECU_TESTS_CONFIG", raising=False)
cfg = load_config(workspace_root=None) cfg = load_config(workspace_root=None)
rp("config_source", "defaults") rp("config_source", "defaults")
rp("interface_type", cfg.interface.type) rp("interface_type", cfg.interface.type)
rp("flash_enabled", cfg.flash.enabled) rp("flash_enabled", cfg.flash.enabled)
assert cfg.interface.type == "mock" assert cfg.interface.type == "mock"
assert cfg.flash.enabled is False assert cfg.flash.enabled is False

View File

@ -1,32 +1,32 @@
import pytest import pytest
from ecu_framework.flashing.hex_flasher import HexFlasher from ecu_framework.flashing.hex_flasher import HexFlasher
from ecu_framework.lin.base import LinFrame from ecu_framework.lin.base import LinFrame
class _StubLin: class _StubLin:
def __init__(self): def __init__(self):
self.sent = [] self.sent = []
def connect(self): def connect(self):
pass pass
def disconnect(self): def disconnect(self):
pass pass
def send(self, frame: LinFrame): def send(self, frame: LinFrame):
self.sent.append(frame) self.sent.append(frame)
def receive(self, id=None, timeout=1.0): def receive(self, id=None, timeout=1.0):
return None return None
@pytest.mark.unit @pytest.mark.unit
def test_hex_flasher_sends_basic_sequence(tmp_path, rp): def test_hex_flasher_sends_basic_sequence(tmp_path, rp):
# Minimal valid Intel HEX file (EOF record) # Minimal valid Intel HEX file (EOF record)
hex_path = tmp_path / "fw.hex" hex_path = tmp_path / "fw.hex"
hex_path.write_text(":00000001FF\n") hex_path.write_text(":00000001FF\n")
lin = _StubLin() lin = _StubLin()
flasher = HexFlasher(lin) flasher = HexFlasher(lin)
flasher.flash_hex(str(hex_path)) flasher.flash_hex(str(hex_path))
rp("hex_path", str(hex_path)) rp("hex_path", str(hex_path))
rp("sent_count", len(lin.sent)) rp("sent_count", len(lin.sent))
# Placeholder assertion; refine as the flasher gains functionality # Placeholder assertion; refine as the flasher gains functionality
assert isinstance(lin.sent, list) assert isinstance(lin.sent, list)

View File

@ -1,25 +1,25 @@
import pytest import pytest
from ecu_framework.lin.base import LinFrame from ecu_framework.lin.base import LinFrame
@pytest.mark.unit @pytest.mark.unit
def test_linframe_accepts_valid_ranges(record_property: "pytest.RecordProperty"): # type: ignore[name-defined] def test_linframe_accepts_valid_ranges(record_property: "pytest.RecordProperty"): # type: ignore[name-defined]
f = LinFrame(id=0x3F, data=bytes([0] * 8)) f = LinFrame(id=0x3F, data=bytes([0] * 8))
record_property("valid_id", f"0x{f.id:02X}") record_property("valid_id", f"0x{f.id:02X}")
record_property("data_len", len(f.data)) record_property("data_len", len(f.data))
assert f.id == 0x3F and len(f.data) == 8 assert f.id == 0x3F and len(f.data) == 8
@pytest.mark.unit @pytest.mark.unit
@pytest.mark.parametrize("bad_id", [-1, 0x40]) @pytest.mark.parametrize("bad_id", [-1, 0x40])
def test_linframe_invalid_id_raises(bad_id, record_property: "pytest.RecordProperty"): # type: ignore[name-defined] def test_linframe_invalid_id_raises(bad_id, record_property: "pytest.RecordProperty"): # type: ignore[name-defined]
record_property("bad_id", bad_id) record_property("bad_id", bad_id)
with pytest.raises(ValueError): with pytest.raises(ValueError):
LinFrame(id=bad_id, data=b"\x00") LinFrame(id=bad_id, data=b"\x00")
@pytest.mark.unit @pytest.mark.unit
def test_linframe_too_long_raises(record_property: "pytest.RecordProperty"): # type: ignore[name-defined] def test_linframe_too_long_raises(record_property: "pytest.RecordProperty"): # type: ignore[name-defined]
record_property("data_len", 9) record_property("data_len", 9)
with pytest.raises(ValueError): with pytest.raises(ValueError):
LinFrame(id=0x01, data=bytes(range(9))) LinFrame(id=0x01, data=bytes(range(9)))

View File

@ -0,0 +1,242 @@
"""Unit tests for the MUM LIN adapter using fake pylin/pymumclient modules.
These tests don't talk to real hardware — they inject lightweight fakes via
the adapter's `mum_module` / `pylin_module` constructor args to validate the
adapter's plumbing (connect/disconnect, send, receive, send_raw, power_*).
"""
from __future__ import annotations
import pytest
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mum import MumLinInterface
# ---- fakes ---------------------------------------------------------------
class _FakePower:
def __init__(self):
self.up_calls = 0
self.down_calls = 0
def power_up(self):
self.up_calls += 1
def power_down(self):
self.down_calls += 1
class _FakeTransport:
def __init__(self):
self.raw_frames = []
def ld_put_raw(self, data, baudrate):
self.raw_frames.append((bytes(data), int(baudrate)))
class _FakeLinDev:
def __init__(self, transport):
self.baudrate = 0
self.tx = []
self._transport = transport
# Pre-canned slave responses keyed by frame_id
self.slave_responses = {0x11: [0x07, 0x00, 0x00, 0x00]}
self.fail_on_recv_id = None
def get_device(self, name):
if name == "bus/transport_layer":
return self._transport
raise KeyError(name)
def send_message(self, master_to_slave, frame_id, data_length, data=None):
if master_to_slave:
self.tx.append((int(frame_id), int(data_length), list(data or [])))
return None
# slave-to-master
if self.fail_on_recv_id == int(frame_id):
raise RuntimeError("simulated rx timeout")
return self.slave_responses.get(int(frame_id))
class _FakeLinMaster:
def __init__(self):
self.setup_calls = 0
self.teardown_calls = 0
def setup(self):
self.setup_calls += 1
def teardown(self):
self.teardown_calls += 1
class _FakeMUM:
    """Stand-in for pymumclient.MelexisUniversalMaster()."""

    def __init__(self):
        self.opened_with = None  # host passed to open_all(); None until then
        self._lin_master = _FakeLinMaster()
        self._power = _FakePower()
        self._transport = _FakeTransport()
        self._lin_dev = _FakeLinDev(self._transport)

    def open_all(self, host):
        # Remember the host so tests can assert the connection target.
        self.opened_with = host

    def get_device(self, name):
        devices = {"lin0": self._lin_master, "power_out0": self._power}
        if name not in devices:
            raise KeyError(name)
        return devices[name]
class _FakeMumModule:
    """Fake `pymumclient` module: a factory that remembers its last product."""

    def __init__(self):
        self.last = None  # most recently constructed _FakeMUM, if any

    def MelexisUniversalMaster(self):  # noqa: N802 - matches vendor API
        instance = _FakeMUM()
        self.last = instance
        return instance
class _FakePylinModule:
"""Stand-in for pylin: provides LinBusManager and LinDevice22."""
def __init__(self, lin_dev_factory):
# lin_dev_factory(lin_bus) returns an object with the .get_device,
# .send_message and .baudrate API used by MumLinInterface.
self._lin_dev_factory = lin_dev_factory
def LinBusManager(self, linmaster): # noqa: N802
return ("bus_for", linmaster)
def LinDevice22(self, lin_bus): # noqa: N802
return self._lin_dev_factory(lin_bus)
# ---- helpers -------------------------------------------------------------
def _build_iface(boot_settle=0.0):
    """Construct a MumLinInterface wired to fake modules; return (iface, fakes)."""
    fake_mum_module = _FakeMumModule()
    captured = {}

    # Pylin's LinDevice22 must hand back the very FakeLinDev attached to the
    # FakeMUM built for this test so that assertions can inspect its tx log.
    def lin_dev_factory(lin_bus):
        # connect() calls mum.get_device('lin0') before pylin.LinDevice22(),
        # so .last is already populated here; reuse its FakeLinDev, which
        # exposes the same API pylin's device would.
        fake_dev = fake_mum_module.last._lin_dev
        captured["lin_dev"] = fake_dev
        return fake_dev

    iface = MumLinInterface(
        host="10.0.0.1",
        boot_settle_seconds=boot_settle,
        mum_module=fake_mum_module,
        pylin_module=_FakePylinModule(lin_dev_factory),
    )
    return iface, fake_mum_module, captured
# ---- tests ---------------------------------------------------------------
@pytest.mark.unit
def test_connect_opens_mum_and_powers_up():
    """connect() must open the MUM, set up lin0, power up, and set baudrate."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    try:
        mum = fake_mum_module.last
        assert mum.opened_with == "10.0.0.1"
        assert mum._lin_master.setup_calls == 1
        assert mum._power.up_calls == 1
        assert iface._lin_dev.baudrate == 19200
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_disconnect_powers_down_and_tears_down():
    """disconnect() must power the DUT down and tear the LIN master down."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    iface.disconnect()
    mum = fake_mum_module.last
    assert mum._power.down_calls == 1
    assert mum._lin_master.teardown_calls == 1
@pytest.mark.unit
def test_send_publishes_master_frame():
    """send() must forward the frame as one master->slave message."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    try:
        payload = bytes([1, 2, 3, 4, 5, 6, 7, 8])
        iface.send(LinFrame(id=0x0A, data=payload))
        assert fake_mum_module.last._lin_dev.tx == [(0x0A, 8, list(payload))]
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_receive_uses_frame_lengths_default():
    """receive() must use the default frame_lengths map (0x11 -> 4 bytes)."""
    iface, _, _ = _build_iface()
    iface.connect()
    try:
        frame = iface.receive(id=0x11, timeout=0.1)
        assert frame is not None
        # Default frame_lengths maps 0x11 -> 4; the fake responds 0x07 first.
        assert (frame.id, len(frame.data), frame.data[0]) == (0x11, 4, 0x07)
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_receive_returns_none_on_pylin_exception():
    """A pylin receive error must surface as None, not as an exception."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    try:
        fake_mum_module.last._lin_dev.fail_on_recv_id = 0x11
        result = iface.receive(id=0x11, timeout=0.1)
        assert result is None
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_receive_without_id_raises():
    """receive() without an explicit frame id is unsupported on MUM."""
    iface, _, _ = _build_iface()
    iface.connect()
    try:
        with pytest.raises(NotImplementedError):
            iface.receive(id=None)
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_send_raw_uses_classic_checksum_path():
    """send_raw() must push the exact bytes to the transport at 19200 baud."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    try:
        payload = b"\x7f\x06\xb5\xff\x7f\x01\x02\xff"
        iface.send_raw(payload)
        recorded = fake_mum_module.last._transport.raw_frames
        assert recorded == [(payload, 19200)]
    finally:
        iface.disconnect()
@pytest.mark.unit
def test_power_cycle_calls_down_then_up():
    """power_cycle() must drop power and restore it on top of the connect."""
    iface, fake_mum_module, _ = _build_iface()
    iface.connect()
    try:
        iface.power_cycle(wait=0.0)
    finally:
        iface.disconnect()
    power = fake_mum_module.last._power
    assert power.up_calls >= 2  # initial connect + cycle
    assert power.down_calls >= 1

405
vendor/4SEVEN_color_lib_test.ldf vendored Normal file
View File

@ -0,0 +1,405 @@
LIN_description_file;
LIN_protocol_version = "2.1";
LIN_language_version = "2.1";
LIN_speed = 19.2 kbps;
Nodes {
Master: Master_Node, 5 ms, 0.5 ms ;
Slaves: ALM_Node ;
}
Signals {
AmbLightColourRed:8,0x00,Master_Node,ALM_Node;
AmbLightColourGreen:8,0x00,Master_Node,ALM_Node;
AmbLightColourBlue:8,0x00,Master_Node,ALM_Node;
AmbLightIntensity:8,0x00,Master_Node,ALM_Node;
AmbLightUpdate:2,0x0,Master_Node,ALM_Node;
AmbLightMode:6,0x0,Master_Node,ALM_Node;
AmbLightDuration:8,0x00,Master_Node,ALM_Node;
AmbLightLIDFrom:8,0x00,Master_Node,ALM_Node;
AmbLightLIDTo:8,0x00,Master_Node,ALM_Node;
ALMNVMStatus:4,0x0,ALM_Node,Master_Node;
ALMThermalStatus:4,0x0,ALM_Node,Master_Node;
ALMNadNo:8,0x00,ALM_Node,Master_Node;
SigCommErr:1,0x0,ALM_Node,Master_Node;
ALMVoltageStatus:4,0x0,ALM_Node,Master_Node;
ALMLEDState:2,0x0,ALM_Node,Master_Node;
ColorConfigFrameRed_X: 16, 5665, Master_Node, ALM_Node ;
ColorConfigFrameRed_Y: 16, 2396, Master_Node, ALM_Node ;
ColorConfigFrameRed_Z: 16, 0, Master_Node, ALM_Node ;
ColorConfigFrameGreen_X: 16, 1094, Master_Node, ALM_Node ;
ColorConfigFrameGreen_Y: 16, 5534, Master_Node, ALM_Node ;
ColorConfigFrameGreen_Z: 16, 996, Master_Node, ALM_Node ;
ColorConfigFrameBlue_X: 16, 9618, Master_Node, ALM_Node ;
ColorConfigFrameBlue_Y: 16, 0, Master_Node, ALM_Node ;
ColorConfigFrameBlue_Z: 16, 51922, Master_Node, ALM_Node ;
PWM_Frame_Red: 16, 0, ALM_Node, Master_Node ;
PWM_Frame_Green: 16, 0, ALM_Node, Master_Node ;
PWM_Frame_Blue1: 16, 0, ALM_Node, Master_Node ;
ConfigFrame_Calibration: 1, 0, Master_Node, ALM_Node ;
PWM_Frame_Blue2: 16, 0, ALM_Node, Master_Node ;
ColorConfigFrameRed_Vf_Cal: 16, 2031, Master_Node, ALM_Node ;
ColorConfigFrameGreen_VfCal: 16, 2903, Master_Node, ALM_Node ;
ColorConfigFrameBlue_VfCal: 16, 2950, Master_Node, ALM_Node ;
VF_Frame_Red_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_Green_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_Blue1_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_VLED: 16, 0, ALM_Node, Master_Node ;
VF_Frame_VS: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Red: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Green: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Blue: 16, 0, ALM_Node, Master_Node ;
ConfigFrame_MaxLM: 16, 3840, Master_Node, ALM_Node ;
Calibration_status: 1, 0, ALM_Node, Master_Node ;
Tj_Frame_NTC: 15, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Red: 16, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Green: 16, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Blue: 16, 0, ALM_Node, Master_Node ;
NVM_Static_Valid: 16, 0, ALM_Node, Master_Node ;
NVM_Static_Rev: 16, 0, ALM_Node, Master_Node ;
NVM_Calib_Version: 8, 0, ALM_Node, Master_Node ;
NVM_OADCCAL: 8, 0, ALM_Node, Master_Node ;
NVM_GainADCLowCal: 8, 0, ALM_Node, Master_Node ;
NVM_GainADCHighCal: 8, 0, ALM_Node, Master_Node ;
ConfigFrame_EnableDerating: 1, 1, Master_Node, ALM_Node ;
ConfigFrame_EnableCompensation: 1, 1, Master_Node, ALM_Node ;
}
Diagnostic_signals {
MasterReqB0: 8, 0 ;
MasterReqB1: 8, 0 ;
MasterReqB2: 8, 0 ;
MasterReqB3: 8, 0 ;
MasterReqB4: 8, 0 ;
MasterReqB5: 8, 0 ;
MasterReqB6: 8, 0 ;
MasterReqB7: 8, 0 ;
SlaveRespB0: 8, 0 ;
SlaveRespB1: 8, 0 ;
SlaveRespB2: 8, 0 ;
SlaveRespB3: 8, 0 ;
SlaveRespB4: 8, 0 ;
SlaveRespB5: 8, 0 ;
SlaveRespB6: 8, 0 ;
SlaveRespB7: 8, 0 ;
}
Frames {
ALM_Req_A:0x0A,Master_Node,8{
AmbLightColourRed,0;
AmbLightColourGreen,8;
AmbLightColourBlue,16;
AmbLightIntensity,24;
AmbLightUpdate,32;
AmbLightMode,34;
AmbLightDuration,40;
AmbLightLIDFrom,48;
AmbLightLIDTo,56;
}
ALM_Status:0x11,ALM_Node,4{
ALMNVMStatus,16;
SigCommErr,24;
ALMLEDState,20;
ALMVoltageStatus,8;
ALMNadNo,0;
ALMThermalStatus,12;
}
ColorConfigFrameRed: 3, Master_Node, 8 {
ColorConfigFrameRed_X, 0 ;
ColorConfigFrameRed_Y, 16 ;
ColorConfigFrameRed_Z, 32 ;
ColorConfigFrameRed_Vf_Cal, 48 ;
}
ColorConfigFrameGreen: 4, Master_Node, 8 {
ColorConfigFrameGreen_X, 0 ;
ColorConfigFrameGreen_Y, 16 ;
ColorConfigFrameGreen_Z, 32 ;
ColorConfigFrameGreen_VfCal, 48 ;
}
ColorConfigFrameBlue: 5, Master_Node, 8 {
ColorConfigFrameBlue_X, 0 ;
ColorConfigFrameBlue_Y, 16 ;
ColorConfigFrameBlue_Z, 32 ;
ColorConfigFrameBlue_VfCal, 48 ;
}
PWM_Frame: 18, ALM_Node, 8 {
PWM_Frame_Red, 0 ;
PWM_Frame_Green, 16 ;
PWM_Frame_Blue1, 32 ;
PWM_Frame_Blue2, 48 ;
}
ConfigFrame: 6, Master_Node, 3 {
ConfigFrame_Calibration, 0 ;
ConfigFrame_MaxLM, 3 ;
ConfigFrame_EnableDerating, 1 ;
ConfigFrame_EnableCompensation, 2 ;
}
VF_Frame: 19, ALM_Node, 8 {
VF_Frame_Red_VF, 0 ;
VF_Frame_Green_VF, 16 ;
VF_Frame_Blue1_VF, 32 ;
VF_Frame_VLED, 48 ;
}
Tj_Frame: 20, ALM_Node, 8 {
Tj_Frame_Red, 0 ;
Tj_Frame_Green, 16 ;
Tj_Frame_Blue, 32 ;
Calibration_status, 63 ;
Tj_Frame_NTC, 48 ;
}
PWM_wo_Comp: 21, ALM_Node, 8 {
PWM_wo_Comp_Red, 0 ;
PWM_wo_Comp_Green, 16 ;
PWM_wo_Comp_Blue, 32 ;
VF_Frame_VS, 48 ;
}
NVM_Debug: 22, ALM_Node, 8 {
NVM_Static_Valid, 0 ;
NVM_Static_Rev, 16 ;
NVM_Calib_Version, 32 ;
NVM_OADCCAL, 40 ;
NVM_GainADCLowCal, 48 ;
NVM_GainADCHighCal, 56 ;
}
}
Diagnostic_frames {
MasterReq: 0x3c {
MasterReqB0, 0 ;
MasterReqB1, 8 ;
MasterReqB2, 16 ;
MasterReqB3, 24 ;
MasterReqB4, 32 ;
MasterReqB5, 40 ;
MasterReqB6, 48 ;
MasterReqB7, 56 ;
}
SlaveResp: 0x3d {
SlaveRespB0, 0 ;
SlaveRespB1, 8 ;
SlaveRespB2, 16 ;
SlaveRespB3, 24 ;
SlaveRespB4, 32 ;
SlaveRespB5, 40 ;
SlaveRespB6, 48 ;
SlaveRespB7, 56 ;
}
}
Node_attributes {
ALM_Node {
LIN_protocol = 2.1 ;
configured_NAD = 0x01 ;
initial_NAD = 0x02 ;
product_id = 0x0013, 0x0003, 1 ;
response_error = SigCommErr ;
P2_min = 50.0000 ms ;
ST_min = 20.0000 ms ;
configurable_frames {
ALM_Req_A;
ALM_Status;
ColorConfigFrameRed ;
ColorConfigFrameGreen ;
ColorConfigFrameBlue ;
PWM_Frame ;
ConfigFrame ;
VF_Frame ;
Tj_Frame ;
PWM_wo_Comp ;
NVM_Debug ;
}
}
}
Schedule_tables {
LIN_AA {
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x1, 0x2, 0xFF } delay 50 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x1 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x2 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x3 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x4 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x5 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x6 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x7 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x8 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x9 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xA } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xB } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xC } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xD } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xE } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xF } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x10 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x3, 0x2, 0xFF } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x4, 0x2, 0xFF } delay 20 ms ;
}
User_serv {
ALM_Req_A delay 10.0000 ms ;
}
Pub_serv {
ALM_Status delay 20.0000 ms ;
}
RequestResponse {
ALM_Req_A delay 10 ms ;
ALM_Status delay 10 ms ;
}
CCO {
ALM_Req_A delay 10 ms ;
ALM_Status delay 10 ms ;
ConfigFrame delay 10 ms ;
ColorConfigFrameRed delay 10 ms ;
ColorConfigFrameGreen delay 10 ms ;
ColorConfigFrameBlue delay 10 ms ;
VF_Frame delay 10 ms ;
PWM_Frame delay 10 ms ;
Tj_Frame delay 10 ms ;
PWM_wo_Comp delay 10 ms ;
}
calib {
NVM_Debug delay 10 ms ;
}
}
Signal_encoding_types {
Red {
physical_value,0,255,1.0000,0.0000,"Red" ;
}
Green {
physical_value,0,255,1.0000,0.0000,"Green" ;
}
Blue {
physical_value,0,255,1.0000,0.0000,"Blue" ;
}
Intensity {
physical_value,0,255,1.0000,0.0000,"Intensity" ;
}
Update {
logical_value,0x00,"Immediate color Update" ;
logical_value,0x01,"Color memorization" ;
logical_value,0x02,"Apply memorized color" ;
logical_value,0x03,"Discard memorized color" ;
}
Mode {
logical_value,0x00,"Immediate Setpoint" ;
logical_value,0x01,"Fading effect 1 (color and intensity fade)" ;
logical_value,0x02,"Fading effect 2 (intensity fade only; color changes immediately)" ;
logical_value,0x03,"TBD" ;
logical_value,0x04,"TBD" ;
physical_value,5,63,1.0000,0.0000,"Not Used" ;
}
Duration {
physical_value,0,255,0.2000,0.0000,"s" ;
}
ModuleID {
physical_value,0,255,1.0000,0.0000,"ModuleID" ;
}
NVMStatus {
logical_value,0x00,"NVM OK" ;
logical_value,0x01,"NVM NOK" ;
logical_value,0x02,"Reserved" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
VoltageStatus {
logical_value,0x00,"Normal Voltage" ;
logical_value,0x01,"Power UnderVoltage" ;
logical_value,0x02,"Power OverVoltage" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
ThermalStatus {
logical_value,0x00,"Normal Temperature" ;
logical_value,0x01,"Thermal derating" ;
logical_value,0x02,"Thermal shutdown" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
LED_State {
logical_value,0x00,"LED OFF" ;
logical_value,0x01,"LED ANIMATING" ;
logical_value,0x02,"LED ON" ;
logical_value,0x03,"Reserved" ;
}
NVM_Static_Valid_Encoding {
logical_value, 0, "NVM Corrupted/Zero" ;
logical_value, 42331, "NVM Valid (0xA55B)" ;
logical_value, 65535, "NVM Empty/Erased" ;
}
NVM_Static_Rev_Encoding {
logical_value, 0, "Invalid Revision" ;
logical_value, 1, "Revision 1 (Current)" ;
logical_value, 65535, "Not Programmed" ;
}
NVM_Calib_Version_Encoding {
physical_value, 0, 255, 1, 0, "Factory Calib Version (>=1 valid)" ;
}
NVM_OADCCAL_Encoding {
physical_value, 0, 255, 1, 0, "ADC Offset Cal (signed 8-bit)" ;
}
NVM_GainADCLowCal_Encoding {
physical_value, 0, 255, 1, 0, "ADC Gain Low Temp (signed 8-bit)" ;
}
NVM_GainADCHighCal_Encoding {
physical_value, 0, 255, 1, 0, "ADC Gain High Temp (signed 8-bit)" ;
}
}
Signal_representation {
Red:AmbLightColourRed;
Green:AmbLightColourGreen;
Blue:AmbLightColourBlue;
Intensity:AmbLightIntensity;
Update:AmbLightUpdate;
Mode:AmbLightMode;
Duration:AmbLightDuration;
ModuleID:AmbLightLIDFrom,AmbLightLIDTo;
NVMStatus:ALMNVMStatus;
LED_State:ALMLEDState;
NVM_Calib_Version_Encoding: NVM_Calib_Version ;
NVM_GainADCHighCal_Encoding: NVM_GainADCHighCal ;
NVM_GainADCLowCal_Encoding: NVM_GainADCLowCal ;
NVM_OADCCAL_Encoding: NVM_OADCCAL ;
NVM_Static_Rev_Encoding: NVM_Static_Rev ;
NVM_Static_Valid_Encoding: NVM_Static_Valid ;
}

Binary file not shown.

BIN
vendor/4SEVEN_color_lib_test.sdf vendored Normal file

Binary file not shown.

Binary file not shown.

View File

@ -1,95 +1,95 @@
"""Owon PSU quick demo (optimized to use ecu_framework.power.owon_psu). """Owon PSU quick demo (optimized to use ecu_framework.power.owon_psu).
This script reads configuration from OWON_PSU_CONFIG (YAML) or ./config/owon_psu.yaml, This script reads configuration from OWON_PSU_CONFIG (YAML) or ./config/owon_psu.yaml,
prints discovered ports responding to *IDN?, then connects to the configured port prints discovered ports responding to *IDN?, then connects to the configured port
and performs a small sequence (IDN, optional V/I set, toggle output, measure V/I). and performs a small sequence (IDN, optional V/I set, toggle output, measure V/I).
No CLI flags; edit YAML to change behavior. No CLI flags; edit YAML to change behavior.
""" """
from __future__ import annotations from __future__ import annotations
import os import os
import time import time
from pathlib import Path from pathlib import Path
import yaml import yaml
try: try:
from ecu_framework.power import OwonPSU, SerialParams, scan_ports from ecu_framework.power import OwonPSU, SerialParams, scan_ports
except ModuleNotFoundError: except ModuleNotFoundError:
# Ensure repository root is on sys.path when running this file directly # Ensure repository root is on sys.path when running this file directly
import sys import sys
repo_root = Path(__file__).resolve().parents[2] repo_root = Path(__file__).resolve().parents[2]
if str(repo_root) not in sys.path: if str(repo_root) not in sys.path:
sys.path.insert(0, str(repo_root)) sys.path.insert(0, str(repo_root))
from ecu_framework.power import OwonPSU, SerialParams, scan_ports from ecu_framework.power import OwonPSU, SerialParams, scan_ports
def _load_yaml_config() -> dict: def _load_yaml_config() -> dict:
cfg_path = str(Path("config") / "owon_psu.yaml") cfg_path = str(Path("config") / "owon_psu.yaml")
p = Path(cfg_path).resolve() p = Path(cfg_path).resolve()
print("Using config path:", str(p)) print("Using config path:", str(p))
if not p.is_file(): if not p.is_file():
return {} return {}
with p.open("r", encoding="utf-8") as f: with p.open("r", encoding="utf-8") as f:
data = yaml.safe_load(f) or {} data = yaml.safe_load(f) or {}
return data if isinstance(data, dict) else {} return data if isinstance(data, dict) else {}
def run_demo() -> int: def run_demo() -> int:
cfg = _load_yaml_config() cfg = _load_yaml_config()
if not cfg or "port" not in cfg: if not cfg or "port" not in cfg:
print("Config not found or missing 'port'. Set OWON_PSU_CONFIG or create ./config/owon_psu.yaml") print("Config not found or missing 'port'. Set OWON_PSU_CONFIG or create ./config/owon_psu.yaml")
return 2 return 2
print("Scanning ports (responding to *IDN?):") print("Scanning ports (responding to *IDN?):")
for dev, idn in scan_ports(SerialParams(baudrate=int(cfg.get("baudrate", 115200)), timeout=float(cfg.get("timeout", 1.0)))): for dev, idn in scan_ports(SerialParams(baudrate=int(cfg.get("baudrate", 115200)), timeout=float(cfg.get("timeout", 1.0)))):
print(f" {dev} -> {idn}") print(f" {dev} -> {idn}")
# Serial params # Serial params
baud = int(cfg.get("baudrate", 115200)) baud = int(cfg.get("baudrate", 115200))
timeout = float(cfg.get("timeout", 1.0)) timeout = float(cfg.get("timeout", 1.0))
eol = cfg.get("eol", "\n") eol = cfg.get("eol", "\n")
from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD, STOPBITS_ONE, STOPBITS_TWO from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD, STOPBITS_ONE, STOPBITS_TWO
parity = {"N": PARITY_NONE, "E": PARITY_EVEN, "O": PARITY_ODD}.get(str(cfg.get("parity", "N")).upper(), PARITY_NONE) parity = {"N": PARITY_NONE, "E": PARITY_EVEN, "O": PARITY_ODD}.get(str(cfg.get("parity", "N")).upper(), PARITY_NONE)
stopbits = {1: STOPBITS_ONE, 2: STOPBITS_TWO}.get(int(float(cfg.get("stopbits", 1))), STOPBITS_ONE) stopbits = {1: STOPBITS_ONE, 2: STOPBITS_TWO}.get(int(float(cfg.get("stopbits", 1))), STOPBITS_ONE)
xonxoff = bool(cfg.get("xonxoff", False)) xonxoff = bool(cfg.get("xonxoff", False))
rtscts = bool(cfg.get("rtscts", False)) rtscts = bool(cfg.get("rtscts", False))
dsrdtr = bool(cfg.get("dsrdtr", False)) dsrdtr = bool(cfg.get("dsrdtr", False))
ps = SerialParams( ps = SerialParams(
baudrate=baud, baudrate=baud,
timeout=timeout, timeout=timeout,
parity=parity, parity=parity,
stopbits=stopbits, stopbits=stopbits,
xonxoff=xonxoff, xonxoff=xonxoff,
rtscts=rtscts, rtscts=rtscts,
dsrdtr=dsrdtr, dsrdtr=dsrdtr,
) )
port = str(cfg["port"]).strip() port = str(cfg["port"]).strip()
do_set = bool(cfg.get("do_set", False)) do_set = bool(cfg.get("do_set", False))
set_v = float(cfg.get("set_voltage", 1.0)) set_v = float(cfg.get("set_voltage", 1.0))
set_i = float(cfg.get("set_current", 0.1)) set_i = float(cfg.get("set_current", 0.1))
with OwonPSU(port, ps, eol=eol) as psu: with OwonPSU(port, ps, eol=eol) as psu:
idn = psu.idn() idn = psu.idn()
print(f"IDN: {idn}") print(f"IDN: {idn}")
print(f"Output status: {psu.output_status()}") print(f"Output status: {psu.output_status()}")
if do_set: if do_set:
# psu.set_output(True) # psu.set_output(True)
time.sleep(0.8) time.sleep(0.8)
psu.set_voltage(1, set_v) psu.set_voltage(1, set_v)
psu.set_current(1, set_i) psu.set_current(1, set_i)
time.sleep(0.75) time.sleep(0.75)
print(f"Measured V: {psu.measure_voltage()} V") print(f"Measured V: {psu.measure_voltage()} V")
print(f"Measured I: {psu.measure_current()} A") print(f"Measured I: {psu.measure_current()} A")
time.sleep(0.5) time.sleep(0.5)
# psu.set_output(False) # psu.set_output(False)
return 0 return 0
if __name__ == "__main__": if __name__ == "__main__":
raise SystemExit(run_demo()) raise SystemExit(run_demo())

118
vendor/README.md vendored
View File

@ -1,59 +1,59 @@
# BabyLIN SDK placement # BabyLIN SDK placement
Place the SDK's Python wrapper and platform-specific libraries here so the test framework can import and use them. Place the SDK's Python wrapper and platform-specific libraries here so the test framework can import and use them.
## Required files ## Required files
- BabyLIN_library.py - BabyLIN_library.py
- BabyLIN library/ (directory provided by the SDK containing platform-specific binaries) - BabyLIN library/ (directory provided by the SDK containing platform-specific binaries)
- Windows: `BabyLIN library/Windows/x64/*.dll` - Windows: `BabyLIN library/Windows/x64/*.dll`
- Linux x86_64: `BabyLIN library/Linux/x86_64/*.so` - Linux x86_64: `BabyLIN library/Linux/x86_64/*.so`
- Raspberry Pi (ARM): `BabyLIN library/Linux/armv7/*.so` (or as provided by your SDK) - Raspberry Pi (ARM): `BabyLIN library/Linux/armv7/*.so` (or as provided by your SDK)
- Optional: Example SDF file (e.g., `Example.sdf`) - Optional: Example SDF file (e.g., `Example.sdf`)
Folder structure example: Folder structure example:
``` ```
vendor/ vendor/
├─ BabyLIN_library.py ├─ BabyLIN_library.py
├─ Example.sdf ├─ Example.sdf
└─ BabyLIN library/ └─ BabyLIN library/
├─ Windows/ ├─ Windows/
│ └─ x64/ │ └─ x64/
│ ├─ BabyLIN.dll │ ├─ BabyLIN.dll
│ ├─ BabyLIN_FTDI.dll │ ├─ BabyLIN_FTDI.dll
│ └─ ... (other DLLs from SDK) │ └─ ... (other DLLs from SDK)
├─ Linux/ ├─ Linux/
│ ├─ x86_64/ │ ├─ x86_64/
│ │ └─ libBabyLIN.so │ │ └─ libBabyLIN.so
│ └─ armv7/ │ └─ armv7/
│ └─ libBabyLIN.so │ └─ libBabyLIN.so
└─ ... └─ ...
``` ```
Notes: Notes:
- Keep the directory names and casing exactly as the SDK expects (often referenced in `BabyLIN_library.py`). - Keep the directory names and casing exactly as the SDK expects (often referenced in `BabyLIN_library.py`).
- Ensure your Python environment architecture matches the binaries (e.g., 64-bit Python with 64-bit DLLs). - Ensure your Python environment architecture matches the binaries (e.g., 64-bit Python with 64-bit DLLs).
- On Linux/RPi, you may need to set `LD_LIBRARY_PATH` to include the directory with the shared libraries. - On Linux/RPi, you may need to set `LD_LIBRARY_PATH` to include the directory with the shared libraries.
## Configuration ## Configuration
Point your config to the SDF and schedule: Point your config to the SDF and schedule:
```yaml ```yaml
interface: interface:
type: babylin type: babylin
channel: 0 channel: 0
sdf_path: ./vendor/Example.sdf sdf_path: ./vendor/Example.sdf
schedule_nr: 0 schedule_nr: 0
``` ```
## Troubleshooting ## Troubleshooting
- ImportError: BabyLIN_library not found - ImportError: BabyLIN_library not found
- Ensure `vendor/BabyLIN_library.py` exists or add the vendor folder to `PYTHONPATH`. - Ensure `vendor/BabyLIN_library.py` exists or add the vendor folder to `PYTHONPATH`.
- DLL/SO not found - DLL/SO not found
- On Windows, ensure the DLLs are in PATH or next to `BabyLIN_library.py` per SDK instructions. - On Windows, ensure the DLLs are in PATH or next to `BabyLIN_library.py` per SDK instructions.
- On Linux/RPi, export `LD_LIBRARY_PATH` to the folder with the `.so` files. - On Linux/RPi, export `LD_LIBRARY_PATH` to the folder with the `.so` files.
- Device not found - Device not found
- Check USB connection, drivers, and that no other tool holds the device open. - Check USB connection, drivers, and that no other tool holds the device open.

321
vendor/automated_lin_test/README.md vendored Normal file
View File

@ -0,0 +1,321 @@
# LIN Automated Test Scripts
Automated test scripts for LIN bus communication and auto-addressing functionality using the Melexis Universal Master (MUM) hardware.
## Purpose
This folder contains Python scripts to automate LIN bus testing without requiring manual tool switching between MUM and babylin. The scripts provide:
- **LIN Auto-Addressing Test**: Automated BSM-SNPD (Bus Shunt Method - Slave Node Position Detection) auto-addressing
- **LED Control Test**: Verify LIN communication by controlling the board LED
- **Power Cycle Utility**: Power cycle the ECU through MUM
- **Dependency Installation**: Automated setup of required Python packages
## Hardware Setup
### Required Hardware
1. **Melexis Universal Master (MUM)**
- BeagleBone-based LIN master device
- Default IP: 192.168.7.2
- LIN interface: lin0
- Power control: power_out0
2. **ALM Platform MLX81124 Board**
- Target ECU with LIN auto-addressing support
- RGB LED for visual feedback
### Hardware Connections
```
┌─────────────────┐ ┌──────────────────┐
│ MUM │ │ ALM Platform │
│ (192.168.7.2) │ │ MLX81124 │
├─────────────────┤ ├──────────────────┤
│ │ │ │
│ LIN (lin0) ├────────────────────┤ LIN │
│ │ │ │
│ Power ├────────────────────┤ VCC/GND │
│ (power_out0) │ │ │
│ │ │ RGB LED │
└─────────────────┘ └──────────────────┘
```
### Connection Details
1. **LIN Bus**: Connect MUM LIN0 to ALM Platform LIN pin
2. **Power**: Connect MUM power_out0 to ALM Platform power (controlled by scripts)
3. **Ground**: Common ground between MUM and ALM Platform
## Files
### Scripts
- **`test_auto_addressing.py`** - Main auto-addressing test
- **`test_led_control.py`** - LED control verification test
- **`power_cycle.py`** - ECU power cycle utility
- **`install_packages.sh`** - Dependency installer
### Configuration
- **`config.py`** - Hardware and protocol configuration
- MUM connection settings
- LIN bus parameters
- BSM-SNPD protocol constants
- Test defaults
## Dependencies
### Python Packages
The scripts require these Python packages:
- `pylin` - LIN bus communication library
- `pymumclient` - Melexis Universal Master client library
### Installation
Run the installer script to set up dependencies:
```bash
./install_packages.sh
```
Or manually install:
```bash
pip3 install pylin pymumclient
```
## Usage
### 1. Auto-Addressing Test
Tests LIN auto-addressing using BSM-SNPD protocol. Automatically selects a target NAD different from the current NAD.
**Basic usage:**
```bash
python3 test_auto_addressing.py
```
**With options:**
```bash
python3 test_auto_addressing.py --iterations 1 --check-interval 1
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--iterations` - Number of auto-addressing iterations (default: 1)
- `--check-interval` - Check status every N iterations (0 = only at end)
**What it does:**
1. Connects to MUM
2. Reads current NAD from ECU
3. Selects target NAD (automatically different from current)
4. Sends BSM-SNPD sequence:
- INIT (0x01) - Initialize auto-addressing
- ASSIGN (0x02) - Assign NAD (16 frames)
- STORE (0x03) - Store to NVM
- FINALIZE (0x04) - Exit auto-addressing mode
5. Polls status frames between iterations
6. Verifies NAD change
**Expected output:**
```
Initial NAD: 0x07
Target NAD: 0x01
SUCCESS! NAD changed from 0x07 to 0x01
```
### 2. LED Control Test
Verifies LIN communication by controlling the RGB LED through color fades.
**Basic usage:**
```bash
python3 test_led_control.py
```
**With options:**
```bash
python3 test_led_control.py --nad 0x02 --cycles 3 --duration 3.0
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--nad` - Node address to control (default: 0x01)
- `--cycles` - Number of fade cycles (default: 3)
- `--duration` - Duration per color in seconds (default: 3.0)
**What it does:**
1. Connects to MUM
2. Reads current NAD from ECU
3. Fades LED through Red → Green → Blue
4. Each color fades in and out smoothly
**Expected output:**
```
Current NAD: 0x02
Fading Red...
Fading Green...
Fading Blue...
LED test complete
```
### 3. Power Cycle Utility
Power cycles the ECU through MUM power control.
**Basic usage:**
```bash
python3 power_cycle.py
```
**With options:**
```bash
python3 power_cycle.py --wait 3.0
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--wait` - Wait time after power down/up in seconds (default: 2.0)
**What it does:**
1. Powers down ECU
2. Waits specified duration
3. Powers up ECU
4. Waits for ECU to boot
## Configuration
All hardware-specific settings are centralized in [`config.py`](config.py). Edit this file to match your setup:
### Common Settings to Modify
```python
# MUM Configuration
MUM_HOST = '192.168.7.2' # Change if MUM has different IP
# LIN Bus Configuration
LIN_BAUDRATE = 19200 # Change if using different baudrate
# Test Parameters
AUTOADDRESSING_DEFAULT_ITERATIONS = 1 # Default test iterations
LED_DEFAULT_NAD = 0x01 # Default NAD for LED test
```
## Firmware Requirements
The firmware must have auto-addressing enabled with twist detection disabled for single-node MUM testing:
**File:** `02-Software/02-Source-Code/code/src/03-HAL/LAA/cfg/HAL_LAA_cfg.h`
```c
#define HAL_LAA_LINAATWISTDETECTDISABLE (1u)
```
This allows the `LASTSLAVE` flag to be set directly without requiring multi-node hardware setup.
## Troubleshooting
### MUM Connection Issues
**Problem:** Cannot connect to MUM
```
Error: Connection to 192.168.7.2 failed
```
**Solution:**
1. Check MUM is powered and connected via USB
2. Verify IP address with `ip addr show` or `ifconfig`
3. Ping MUM: `ping 192.168.7.2`
4. Check USB connection is recognized: `lsusb`
### No Response from ECU
**Problem:** ECU not responding to LIN frames
```
Error: S2M frame receiving failed with error code: 3 - Rx timeout error
```
**Solution:**
1. Check LIN bus connections
2. Verify ECU is powered (use power_cycle.py)
3. Check baudrate matches (19200)
4. Verify NAD is correct
### NAD Not Changing
**Problem:** Auto-addressing completes but NAD doesn't change
**Solution:**
1. Verify firmware has `HAL_LAA_LINAATWISTDETECTDISABLE = 1`
2. Rebuild and flash firmware
3. Check initial NAD is in valid range (0x01-0x10)
4. Run test with `--check-interval 1` to see intermediate status
### LED Not Changing
**Problem:** LED control test doesn't change LED color
**Solution:**
1. Verify NAD parameter matches ECU NAD
2. Check `ALM_Req_A` frame ID is 0x0A in the LDF (`ALM_Req_A:0x0A`)
3. Run auto-addressing test first to verify communication
4. Check LED connections on hardware
## Integration with Build/Flash Pipeline
These tests integrate with the automated firmware development pipeline:
```bash
# 1. Modify firmware
vim 02-Software/02-Source-Code/code/src/...
# 2. Build
./00-Tools/migrate_mlx_tools_linux/build_linux.sh
# 3. Flash
./00-Tools/migrate_mlx_tools_linux/flash_linux.sh
# 4. Test auto-addressing
python3 00-Tools/automated_lin_test/test_auto_addressing.py
# 5. Verify LED control
python3 00-Tools/automated_lin_test/test_led_control.py
```
## Technical Details
### LIN Frame IDs
- `0x3C` - MasterReq (diagnostic frames)
- `0x11` - ALM_Status (4 bytes, contains NAD in byte 0)
- `0x0A` - ALM_Req_A (8 bytes, LED control)
### BSM-SNPD Protocol
Auto-addressing uses diagnostic service 0xB5 with subfunctions:
- `0x01` - INIT: Enable auto-addressing mode
- `0x02` - ASSIGN: Assign NAD to node
- `0x03` - STORE: Save NAD to NVM
- `0x04` - FINALIZE: Exit auto-addressing mode
Frame structure:
```
Byte 0: NAD = 0x7F (broadcast)
Byte 1: PCI = 0x06 (6 data bytes)
Byte 2: SID = 0xB5 (BSM-SNPD service)
Byte 3: Supplier ID LSB = 0xFF
Byte 4: Supplier ID MSB = 0x7F
Byte 5: Subfunction
Byte 6: Parameter 1
Byte 7: Parameter 2
```
### Checksum Requirements
**Critical:** BSM frames must use **LIN 1.x Classic checksum**. The scripts use `ld_put_raw()` to ensure Classic checksum. Using `send_message()` with Enhanced checksum will cause frames to be rejected by firmware.
## License
Part of the ALM Platform MLX81124 project.

Binary file not shown.

190
vendor/automated_lin_test/config.py vendored Normal file
View File

@ -0,0 +1,190 @@
#!/usr/bin/env python3
"""
Configuration file for LIN automated test scripts
This file contains all hardware-specific settings and tool dependencies.
Modify these values to match your test setup.
"""
# ============================================================================
# Hardware Configuration
# ============================================================================
# MUM (Melexis Universal Master) Configuration
MUM_HOST = '192.168.7.2'  # Default MUM IP address on BeagleBone
MUM_LIN_DEVICE = 'lin0'  # LIN interface name on MUM
MUM_POWER_DEVICE = 'power_out0'  # Power control device name
# LIN Bus Configuration
LIN_BAUDRATE = 19200  # LIN bus baudrate in bps
# Valid NAD range for auto-addressing
# (Python range upper bound is exclusive: covers NADs 0x01 through 0x10)
VALID_NAD_RANGE = range(0x01, 0x11)  # NADs 0x01 through 0x10
# ============================================================================
# External Tool Dependencies
# ============================================================================
# Python packages required (install with: pip3 install <package>)
REQUIRED_PACKAGES = [
    'pylin',        # LIN bus communication library
    'pymumclient',  # Melexis Universal Master client library
]
# ============================================================================
# Test Parameters
# ============================================================================
# Auto-addressing test defaults
AUTOADDRESSING_DEFAULT_ITERATIONS = 1  # Number of BSM iterations
AUTOADDRESSING_POLL_DURATION = 2.0  # Status polling duration between iterations (seconds)
AUTOADDRESSING_STATUS_POLL_INTERVAL = 0.020  # Status frame poll interval (20ms)
# LED control test defaults
LED_DEFAULT_NAD = 0x01  # Default NAD for LED control test
# Power cycle defaults
POWER_CYCLE_WAIT_TIME = 2.0  # Wait time after power down/up (seconds)
# ============================================================================
# Frame IDs (from 4SEVEN_color_lib_test.ldf)
# ============================================================================
LIN_FRAME_ID_MASTERREQ = 0x3C  # Diagnostic master request frame
LIN_FRAME_ID_ALM_STATUS = 0x11  # ALM_Status (slave-to-master, 4 bytes)
LIN_FRAME_ID_ALM_REQ_A = 0x0A  # ALM_Req_A (master-to-slave, 8 bytes, LED control)
LIN_FRAME_ID_CONFIG_FRAME = 0x06  # ConfigFrame (master-to-slave, 3 bytes)
LIN_FRAME_ID_VF_FRAME = 0x13  # VF_Frame (slave-to-master, 8 bytes, LED forward voltages + VLED)
LIN_FRAME_ID_PWM_WO_COMP = 0x15  # PWM_wo_Comp (slave-to-master, 8 bytes, PWM values + VS)
# ============================================================================
# Frame Definitions (from 4SEVEN_color_lib_test.ldf)
# ============================================================================
# Each entry mirrors the LDF Frames section. The signal tuple is:
#   'SignalName': (start_bit, width_in_bits)
# where start_bit comes from the LDF Frames block and width comes from
# the LDF Signals section. To update after an LDF change, copy the new
# Frames entry here and adjust widths from the Signals section.
#
# NAD selection for ALM_Req_A:
#   node responds if AmbLightLIDFrom <= ALMNadNo <= AmbLightLIDTo
#   single node -> set both to the target NAD
#   broadcast   -> AmbLightLIDFrom=0x01, AmbLightLIDTo=0xFF
# ALM_Req_A: 0x0A, Master_Node, 8
ALM_REQ_A_FRAME = {
    'frame_id': LIN_FRAME_ID_ALM_REQ_A,
    'length': 8,  # bytes
    'signals': {
        'AmbLightColourRed': (0, 8),    # AmbLightColourRed, 0;
        'AmbLightColourGreen': (8, 8),  # AmbLightColourGreen, 8;
        'AmbLightColourBlue': (16, 8),  # AmbLightColourBlue, 16;
        'AmbLightIntensity': (24, 8),   # AmbLightIntensity, 24;
        'AmbLightUpdate': (32, 2),      # AmbLightUpdate, 32;
        'AmbLightMode': (34, 6),        # AmbLightMode, 34;
        'AmbLightDuration': (40, 8),    # AmbLightDuration, 40;
        'AmbLightLIDFrom': (48, 8),     # AmbLightLIDFrom, 48;
        'AmbLightLIDTo': (56, 8),       # AmbLightLIDTo, 56;
    },
}
# ALM_Status: 0x11, ALM_Node, 4
# NAD is carried in byte 0 (ALMNadNo) -- used by the auto-addressing test
# to verify the node's address before/after a BSM sequence.
ALM_STATUS_FRAME = {
    'frame_id': LIN_FRAME_ID_ALM_STATUS,
    'length': 4,  # bytes
    'signals': {
        'ALMNadNo': (0, 8),           # ALMNadNo, 0;
        'ALMVoltageStatus': (8, 4),   # ALMVoltageStatus, 8;
        'ALMThermalStatus': (12, 4),  # ALMThermalStatus, 12;
        'ALMNVMStatus': (16, 4),      # ALMNVMStatus, 16;
        'ALMLEDState': (20, 2),       # ALMLEDState, 20;
        'SigCommErr': (24, 1),        # SigCommErr, 24;
    },
}
# ConfigFrame: 6, Master_Node, 3
# Note: ConfigFrame_MaxLM is a 16-bit field starting at bit 3 (not
# byte-aligned), exactly as laid out in the LDF Frames block.
CONFIG_FRAME = {
    'frame_id': LIN_FRAME_ID_CONFIG_FRAME,
    'length': 3,  # bytes
    'signals': {
        'ConfigFrame_Calibration': (0, 1),         # ConfigFrame_Calibration, 0;
        'ConfigFrame_EnableDerating': (1, 1),      # ConfigFrame_EnableDerating, 1;
        'ConfigFrame_EnableCompensation': (2, 1),  # ConfigFrame_EnableCompensation, 2;
        'ConfigFrame_MaxLM': (3, 16),              # ConfigFrame_MaxLM, 3;
    },
}
# VF_Frame: 19 (0x13), ALM_Node, 8
VF_FRAME = {
    'frame_id': LIN_FRAME_ID_VF_FRAME,
    'length': 8,  # bytes
    'signals': {
        'VF_Frame_Red_VF': (0, 16),     # VF_Frame_Red_VF, 0;
        'VF_Frame_Green_VF': (16, 16),  # VF_Frame_Green_VF, 16;
        'VF_Frame_Blue1_VF': (32, 16),  # VF_Frame_Blue1_VF, 32;
        'VF_Frame_VLED': (48, 16),      # VF_Frame_VLED, 48;
    },
}
# PWM_wo_Comp: 21 (0x15), ALM_Node, 8
PWM_WO_COMP_FRAME = {
    'frame_id': LIN_FRAME_ID_PWM_WO_COMP,
    'length': 8,  # bytes
    'signals': {
        'PWM_wo_Comp_Red': (0, 16),     # PWM_wo_Comp_Red, 0;
        'PWM_wo_Comp_Green': (16, 16),  # PWM_wo_Comp_Green, 16;
        'PWM_wo_Comp_Blue': (32, 16),   # PWM_wo_Comp_Blue, 32;
        'VF_Frame_VS': (48, 16),        # VF_Frame_VS, 48; (VS supply voltage rides in this frame)
    },
}
def pack_frame(frame_def, **signals):
    """Encode signal values into a raw frame byte list.

    Signals that are not supplied default to 0. Bit numbering follows
    the LDF/LIN convention: bit 0 of a signal lands at its start_bit
    position in the frame, packed little-endian within each byte.
    Values wider than the signal's declared width are masked down.
    """
    payload = bytearray(frame_def['length'])
    layout = frame_def['signals']
    for sig_name, raw_value in signals.items():
        start_bit, width = layout[sig_name]
        # Truncate to the declared signal width before placing bits.
        remaining = int(raw_value) & ((1 << width) - 1)
        offset = 0
        while remaining:
            if remaining & 1:
                pos = start_bit + offset
                payload[pos >> 3] |= 1 << (pos & 7)
            remaining >>= 1
            offset += 1
    return list(payload)
def unpack_frame(frame_def, data):
    """Decode a received byte sequence into ``{signal_name: value}``.

    Inverse of :func:`pack_frame`: bit 0 of each signal is read from its
    start_bit position, little-endian within each byte.
    """
    def _extract(start_bit, width):
        # Gather the signal's bits LSB-first from the raw bytes.
        acc = 0
        for offset in range(width):
            pos = start_bit + offset
            if data[pos >> 3] & (1 << (pos & 7)):
                acc |= 1 << offset
        return acc

    return {
        sig_name: _extract(start_bit, width)
        for sig_name, (start_bit, width) in frame_def['signals'].items()
    }
# ============================================================================
# BSM-SNPD Protocol Constants
# ============================================================================
# Bus Shunt Method - Slave Node Position Detection auto-addressing.
# These bytes form the diagnostic MasterReq (0x3C) payload:
#   [NAD, PCI, SID, SupplierID_LSB, SupplierID_MSB, subfunction, p1, p2]
BSM_NAD_BROADCAST = 0x7F  # Broadcast NAD for BSM frames
BSM_PCI = 0x06  # Protocol Control Information (6 data bytes)
BSM_SID = 0xB5  # Service ID for BSM-SNPD
BSM_SUPPLIER_ID_LSB = 0xFF  # Supplier ID LSB (broadcast)
BSM_SUPPLIER_ID_MSB = 0x7F  # Supplier ID MSB (broadcast)
# BSM Subfunctions
BSM_SUBF_INIT = 0x01  # Initialize auto-addressing
BSM_SUBF_ASSIGN = 0x02  # Assign NAD
BSM_SUBF_STORE = 0x03  # Store to NVM
BSM_SUBF_FINALIZE = 0x04  # Finalize auto-addressing
# Timing parameters (matching babylin behavior)
BSM_INIT_DELAY = 0.050  # Delay after INIT subfunction (50ms)
BSM_FRAME_DELAY = 0.020  # Delay between frames (20ms)
Binary file not shown.

View File

@ -0,0 +1,71 @@
#!/bin/bash
# Install Melexis Python packages to system Python
#
# Copies pylin / pylinframe / pymumclient (and every other package found in
# the Melexis IDE plugin's bundled site-packages) into the system Python's
# site-packages directory.  Falls back to sudo when the destination is not
# writable by the current user.  Exits 1 when the source directory is missing
# or the destination cannot be determined.
echo "Installing Melexis LIN packages to system Python..."
MELEXIS_SITE_PACKAGES="/mnt/WINDRV/InstalledPrograms/Melexis IDE/plugins/com.melexis.mlxide.python_1.2.0.202408130945/python/Lib/site-packages"
# Try to install from Melexis packages
if [ -d "$MELEXIS_SITE_PACKAGES" ]; then
    echo "Found Melexis packages at: $MELEXIS_SITE_PACKAGES"
    # Copy packages to system site-packages
    SYSTEM_SITE_PACKAGES=$(python3 -c "import site; print(site.getsitepackages()[0])" 2>/dev/null)
    if [ -z "$SYSTEM_SITE_PACKAGES" ]; then
        echo "Error: Could not determine system site-packages directory"
        exit 1
    fi
    echo "System site-packages: $SYSTEM_SITE_PACKAGES"
    # Check if we have write permissions
    if [ ! -w "$SYSTEM_SITE_PACKAGES" ]; then
        echo "Note: You may need sudo to install packages system-wide"
        SUDO="sudo"
    else
        SUDO=""
    fi
    # Copy packages
    # NOTE(review): the three explicit copies below are also covered by the
    # "all dependencies" loop further down; kept for explicit error output.
    echo "Copying pylin..."
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylin" "$SYSTEM_SITE_PACKAGES/"
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylin-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
    echo "Copying pylinframe..."
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylinframe" "$SYSTEM_SITE_PACKAGES/"
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylinframe-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
    echo "Copying pymumclient..."
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pymumclient" "$SYSTEM_SITE_PACKAGES/"
    $SUDO cp -r "$MELEXIS_SITE_PACKAGES/pymumclient-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
    # Copy all dependencies
    echo "Copying all Melexis dependencies..."
    for pkg_dir in "$MELEXIS_SITE_PACKAGES"/*; do
        pkg=$(basename "$pkg_dir")
        # Skip dist-info directories and __pycache__
        if [[ "$pkg" == *".dist-info" ]] || [[ "$pkg" == "__pycache__" ]]; then
            continue
        fi
        # Only copy directories (packages)
        if [ -d "$pkg_dir" ]; then
            echo " - $pkg"
            $SUDO cp -r "$pkg_dir" "$SYSTEM_SITE_PACKAGES/"
            # Copy corresponding .dist-info if exists
            $SUDO cp -r "$MELEXIS_SITE_PACKAGES/${pkg}-"*".dist-info" "$SYSTEM_SITE_PACKAGES/" 2>/dev/null || true
        fi
    done
    echo ""
    echo "Installation complete!"
    echo ""
    echo "Verifying installation..."
    python3 -c "import pylin; import pymumclient; print('✓ Packages imported successfully')" && echo "Success!" || echo "Failed - some packages missing"
else
    echo "Error: Melexis packages not found"
    exit 1
fi

Binary file not shown.

View File

@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""
Power cycle the ECU via MUM
"""
import argparse
import time
from pymumclient import MelexisUniversalMaster
from config import MUM_HOST, MUM_POWER_DEVICE, POWER_CYCLE_WAIT_TIME
def main():
    """Parse CLI options and perform a MUM-controlled ECU power cycle.

    Opens the MUM at ``--host``, powers the ECU down, waits ``--wait``
    seconds, powers it back up, then waits the same time for boot.
    """
    parser = argparse.ArgumentParser(description='Power cycle ECU via MUM')
    parser.add_argument('--host', default=MUM_HOST,
                        help=f'MUM IP address (default: {MUM_HOST})')
    parser.add_argument('--wait', type=float, default=POWER_CYCLE_WAIT_TIME,
                        help=f'Wait time in seconds (default: {POWER_CYCLE_WAIT_TIME})')
    args = parser.parse_args()

    print(f"Connecting to MUM at {args.host}...")
    mum = MelexisUniversalMaster()
    mum.open_all(args.host)
    power = mum.get_device(MUM_POWER_DEVICE)

    print("Powering down ECU...")
    power.power_down()
    print(f"Waiting {args.wait} seconds...")
    time.sleep(args.wait)

    print("Powering up ECU...")
    power.power_up()
    print(f"Waiting {args.wait} seconds for ECU to boot...")
    time.sleep(args.wait)
    print("Power cycle complete!")
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,493 @@
#!/usr/bin/env python3
"""
LIN ADC Measurement Verification Test
This test reads ADC measurement values from the ECU over LIN and verifies
they are within expected ranges across multiple LED states.
Test cases:
1. All LEDs off
2. Only Red on (color=255, intensity=255)
3. Only Green on (color=255, intensity=255)
4. Only Blue on (color=255, intensity=255)
5. All LEDs on (color=255, intensity=255)
Verified signals:
- VF_Frame_VS: Supply voltage (expected ~12V = ~12000 mV)
- VF_Frame_VLED: DC-DC converter output voltage feeding LEDs (expected ~5V = ~5000 mV)
- VF_Frame_Red_VF: Red LED forward voltage (0 when off, ~1500-3500 mV when on)
- VF_Frame_Green_VF: Green LED forward voltage (0 when off, ~1500-3500 mV when on)
- VF_Frame_Blue1_VF: Blue LED forward voltage (0 when off, ~1500-3500 mV when on)
Frame structures:
ALM_Req_A (ID=0x0A, master-to-slave, 8 bytes):
- Byte 0: AmbLightColourRed (0-255)
- Byte 1: AmbLightColourGreen (0-255)
- Byte 2: AmbLightColourBlue (0-255)
- Byte 3: AmbLightIntensity (0-255)
- Byte 4: AmbLightUpdate[1:0] | (AmbLightMode[5:0] << 2)
- Byte 5: AmbLightDuration (0-255)
- Byte 6: AmbLightLIDFrom (NAD range start set equal to LIDTo to target one node)
- Byte 7: AmbLightLIDTo (NAD range end)
PWM_wo_Comp (ID=0x15, slave-to-master, 8 bytes):
- Byte 0-1: PWM_wo_Comp_Red (16-bit, little-endian)
- Byte 2-3: PWM_wo_Comp_Green (16-bit, little-endian)
- Byte 4-5: PWM_wo_Comp_Blue (16-bit, little-endian)
- Byte 6-7: VF_Frame_VS (16-bit, little-endian, value in mV)
VF_Frame (ID=0x13, slave-to-master, 8 bytes):
- Byte 0-1: VF_Frame_Red_VF (16-bit, little-endian, value in mV)
- Byte 2-3: VF_Frame_Green_VF (16-bit, little-endian, value in mV)
- Byte 4-5: VF_Frame_Blue1_VF (16-bit, little-endian, value in mV)
- Byte 6-7: VF_Frame_VLED (16-bit, little-endian, value in mV)
"""
import argparse
import logging
import time
import sys
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
# ADC measurement expected ranges (in mV)
VS_EXPECTED_MIN_MV = 10000  # 10.0V minimum
VS_EXPECTED_MAX_MV = 14000  # 14.0V maximum
VS_EXPECTED_NOMINAL_MV = 12000  # 12.0V nominal
VLED_EXPECTED_MIN_MV = 4000  # 4.0V minimum
VLED_EXPECTED_MAX_MV = 6000  # 6.0V maximum
VLED_EXPECTED_NOMINAL_MV = 5000  # 5.0V nominal
# LED forward voltage ranges when LEDs are off
LED_VF_OFF_MIN_MV = 0  # 0V minimum (off)
LED_VF_OFF_MAX_MV = 500  # 0.5V maximum (off, allowing some noise)
# LED forward voltage ranges when LEDs are on
LED_VF_ON_MIN_MV = 1500  # 1.5V minimum (on)
LED_VF_ON_MAX_MV = 3500  # 3.5V maximum (on)
# Settle time after changing LED state (seconds)
LED_SETTLE_TIME = 1.0
def read_alm_status(lin_dev):
    """Read the ALM_Status frame; return (ALMNadNo, raw_bytes) or (None, None)."""
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=ALM_STATUS_FRAME['frame_id'],
            data_length=ALM_STATUS_FRAME['length'],
            data=None,
        )
        if not raw or len(raw) < ALM_STATUS_FRAME['length']:
            return None, None
        decoded = unpack_frame(ALM_STATUS_FRAME, raw)
        return decoded['ALMNadNo'], raw
    except Exception as e:
        logger.error(f"Failed to read ALM_Status: {e}")
        return None, None
def send_config_frame(lin_dev, calibration=0, enable_derating=1,
                      enable_compensation=1, max_lm=3840):
    """Transmit ConfigFrame with calibration, derating and compensation settings."""
    payload = pack_frame(
        CONFIG_FRAME,
        ConfigFrame_Calibration=calibration,
        ConfigFrame_EnableDerating=enable_derating,
        ConfigFrame_EnableCompensation=enable_compensation,
        ConfigFrame_MaxLM=max_lm,
    )
    lin_dev.send_message(
        master_to_slave=True,
        frame_id=CONFIG_FRAME['frame_id'],
        data_length=CONFIG_FRAME['length'],
        data=payload,
    )
def set_led_color(lin_dev, nad, red, green, blue, intensity):
    """Command one node (LIDFrom = LIDTo = nad) to the given RGB + intensity."""
    payload = pack_frame(
        ALM_REQ_A_FRAME,
        AmbLightColourRed=red,
        AmbLightColourGreen=green,
        AmbLightColourBlue=blue,
        AmbLightIntensity=intensity,
        AmbLightLIDFrom=nad,
        AmbLightLIDTo=nad,
    )
    lin_dev.send_message(
        master_to_slave=True,
        frame_id=ALM_REQ_A_FRAME['frame_id'],
        data_length=ALM_REQ_A_FRAME['length'],
        data=payload,
    )
def read_pwm_wo_comp_frame(lin_dev):
    """
    Read the PWM_wo_Comp frame from the slave.
    Returns:
        tuple: (raw_bytes, parsed_dict) or (None, None) on failure.
        parsed_dict keys: pwm_red, pwm_green, pwm_blue, vs_mv
    """
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=PWM_WO_COMP_FRAME['frame_id'],
            data_length=PWM_WO_COMP_FRAME['length'],
            data=None,
        )
        if raw and len(raw) >= PWM_WO_COMP_FRAME['length']:
            decoded = unpack_frame(PWM_WO_COMP_FRAME, raw)
            return raw, {
                'pwm_red': decoded['PWM_wo_Comp_Red'],
                'pwm_green': decoded['PWM_wo_Comp_Green'],
                'pwm_blue': decoded['PWM_wo_Comp_Blue'],
                'vs_mv': decoded['VF_Frame_VS'],
            }
        return None, None
    except Exception as e:
        logger.error(f"Failed to read PWM_wo_Comp frame: {e}")
        return None, None
def read_vf_frame(lin_dev):
    """
    Read the VF_Frame from the slave.
    Returns:
        tuple: (raw_bytes, parsed_dict) or (None, None) on failure.
        parsed_dict keys: red_vf_mv, green_vf_mv, blue_vf_mv, vled_mv
    """
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=VF_FRAME['frame_id'],
            data_length=VF_FRAME['length'],
            data=None,
        )
        if raw and len(raw) >= VF_FRAME['length']:
            decoded = unpack_frame(VF_FRAME, raw)
            return raw, {
                'red_vf_mv': decoded['VF_Frame_Red_VF'],
                'green_vf_mv': decoded['VF_Frame_Green_VF'],
                'blue_vf_mv': decoded['VF_Frame_Blue1_VF'],
                'vled_mv': decoded['VF_Frame_VLED'],
            }
        return None, None
    except Exception as e:
        logger.error(f"Failed to read VF_Frame: {e}")
        return None, None
def sample_signal(lin_dev, signal_name, read_func, signal_key,
                  expected_min, expected_max, num_samples=5, sample_interval=0.1):
    """
    Read a signal multiple times and range-check every reading.

    Args:
        lin_dev: LinDevice22 instance
        signal_name: Display name for the signal
        read_func: Frame reader called as read_func(lin_dev) -> (raw, parsed)
        signal_key: Key in the parsed dict holding the signal value
        expected_min: Minimum expected value in mV
        expected_max: Maximum expected value in mV
        num_samples: Number of samples to read
        sample_interval: Delay between samples in seconds
    Returns:
        tuple: (passed, avg_voltage_mv, samples)
    """
    samples = []
    passed = True
    for i in range(num_samples):
        _raw, parsed = read_func(lin_dev)
        if parsed is None:
            # No sleep here (matches original): retry immediately.
            logger.warning(f" Sample {i+1}/{num_samples}: No response")
            continue
        value_mv = parsed[signal_key]
        samples.append(value_mv)
        in_range = expected_min <= value_mv <= expected_max
        status = "OK" if in_range else "FAIL"
        logger.info(f" Sample {i+1}/{num_samples}: {signal_name} = {value_mv} mV ({value_mv/1000:.2f} V) [{status}]")
        passed = passed and in_range
        if i < num_samples - 1:
            time.sleep(sample_interval)
    if not samples:
        logger.error(f" No valid samples received for {signal_name}")
        return False, 0, samples
    return passed, sum(samples) / len(samples), samples
def log_signal_summary(signal_name, passed, avg_mv, samples):
    """Emit average/min/max/result lines for one verified signal."""
    if not samples:
        logger.error(f" Result: FAIL (no data)")
        return
    s_min = min(samples)
    s_max = max(samples)
    logger.info(f" Average: {avg_mv:.0f} mV ({avg_mv/1000:.2f} V)")
    logger.info(f" Min: {s_min} mV ({s_min/1000:.2f} V)")
    logger.info(f" Max: {s_max} mV ({s_max/1000:.2f} V)")
    logger.info(f" Result: {'PASS' if passed else 'FAIL'}")
def verify_adc_signals(lin_dev, num_samples, sample_interval,
                       expected_red_vf, expected_green_vf, expected_blue_vf):
    """
    Verify VS, VLED and the three LED forward voltages for the current state.

    Args:
        lin_dev: LinDevice22 instance
        num_samples: Number of samples per signal
        sample_interval: Delay between samples in seconds
        expected_red_vf: Tuple (min_mv, max_mv) for Red forward voltage
        expected_green_vf: Tuple (min_mv, max_mv) for Green forward voltage
        expected_blue_vf: Tuple (min_mv, max_mv) for Blue forward voltage
    Returns:
        bool: True only when every signal stays inside its expected band.
    """
    all_passed = True

    logger.info(f" --- VS (Supply Voltage) ---")
    ok, avg, vals = sample_signal(
        lin_dev, "VS", read_pwm_wo_comp_frame, 'vs_mv',
        VS_EXPECTED_MIN_MV, VS_EXPECTED_MAX_MV,
        num_samples=num_samples, sample_interval=sample_interval
    )
    log_signal_summary("VS", ok, avg, vals)
    all_passed = all_passed and ok

    logger.info(f" --- VLED (DC-DC Voltage) ---")
    ok, avg, vals = sample_signal(
        lin_dev, "VLED", read_vf_frame, 'vled_mv',
        VLED_EXPECTED_MIN_MV, VLED_EXPECTED_MAX_MV,
        num_samples=num_samples, sample_interval=sample_interval
    )
    log_signal_summary("VLED", ok, avg, vals)
    all_passed = all_passed and ok

    # The three LED forward voltages all come from VF_Frame.
    for signal_name, signal_key, (exp_min, exp_max) in (
        ("Red_VF", 'red_vf_mv', expected_red_vf),
        ("Green_VF", 'green_vf_mv', expected_green_vf),
        ("Blue_VF", 'blue_vf_mv', expected_blue_vf),
    ):
        logger.info(f" --- {signal_name} (expected {exp_min}-{exp_max} mV) ---")
        ok, avg, vals = sample_signal(
            lin_dev, signal_name, read_vf_frame, signal_key,
            exp_min, exp_max,
            num_samples=num_samples, sample_interval=sample_interval
        )
        log_signal_summary(signal_name, ok, avg, vals)
        all_passed = all_passed and ok
    return all_passed
def main():
    """Run the full ADC verification sequence against one LED node.

    Flow: connect to the MUM, power the ECU, detect the node NAD from
    ALM_Status, disable derating/compensation via ConfigFrame, then for
    each LED state in ``test_cases`` set the color and verify all ADC
    signals.  Exits 0 when every test passes, 1 on failure/error, 130
    when interrupted.

    Fixes vs previous revision: the KeyboardInterrupt cleanup used a bare
    ``except:`` and could raise NameError when the interrupt arrived
    before ``lin_dev``/``linmaster`` were created; both are now pre-bound
    to None and the handler catches only Exception.
    """
    parser = argparse.ArgumentParser(description='LIN ADC Measurement Verification Test')
    parser.add_argument('--host', default=MUM_HOST,
                        help=f'MUM IP address (default: {MUM_HOST})')
    parser.add_argument('--nad', type=lambda x: int(x, 0), default=LED_DEFAULT_NAD,
                        help=f'Node address (default: 0x{LED_DEFAULT_NAD:02X})')
    parser.add_argument('--samples', type=int, default=10,
                        help='Number of samples to read per signal (default: 10)')
    parser.add_argument('--interval', type=float, default=0.1,
                        help='Interval between samples in seconds (default: 0.1)')
    parser.add_argument('--settle-time', type=float, default=LED_SETTLE_TIME,
                        help=f'Settle time after LED state change (default: {LED_SETTLE_TIME}s)')
    args = parser.parse_args()
    # Define test cases: (name, red, green, blue, intensity,
    # expected_red_vf, expected_green_vf, expected_blue_vf)
    test_cases = [
        (
            "All LEDs OFF",
            0, 0, 0, 0,
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
        ),
        (
            "Red ON (255/255)",
            255, 0, 0, 255,
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
        ),
        (
            "Green ON (255/255)",
            0, 255, 0, 255,
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
        ),
        (
            "Blue ON (255/255)",
            0, 0, 255, 255,
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
        ),
        (
            "All LEDs ON (255/255)",
            255, 255, 255, 255,
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
            (LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
        ),
    ]
    test_results = {}
    nad = args.nad  # may be updated below after reading ALM_Status
    # Pre-bind so the KeyboardInterrupt cleanup can safely test for
    # "never created" instead of raising NameError.
    lin_dev = None
    linmaster = None
    try:
        logger.info(f"Connecting to MUM at {args.host}...")
        mum = MelexisUniversalMaster()
        mum.open_all(args.host)
        power_control = mum.get_device(MUM_POWER_DEVICE)
        linmaster = mum.get_device(MUM_LIN_DEVICE)
        linmaster.setup()
        lin_bus = LinBusManager(linmaster)
        lin_dev = LinDevice22(lin_bus)
        lin_dev.baudrate = LIN_BAUDRATE
        lin_dev.nad = args.nad
        power_control.power_up()
        time.sleep(0.5)
        logger.info("MUM connected and LIN bus ready")
        logger.info("=" * 70)
        logger.info("ADC MEASUREMENT VERIFICATION TEST")
        logger.info(f"Samples: {args.samples}, Interval: {args.interval}s, "
                    f"Settle: {args.settle_time}s")
        logger.info("=" * 70)
        # Wait for ADC to settle after power-up
        logger.info("Waiting for ADC to settle after power-up...")
        time.sleep(1.0)
        # Read the actual NAD from the node. Using args.nad directly risks
        # a silent miss if the node was assigned a different NAD (e.g. via
        # auto-addressing), because AmbLightLIDFrom/LIDTo must equal ALMNadNo.
        logger.info("Reading node NAD from ALM_Status...")
        detected_nad, status_data = read_alm_status(lin_dev)
        if detected_nad is not None:
            nad = detected_nad
            data_hex = ' '.join(f'{b:02X}' for b in status_data)
            logger.info(f"Detected NAD: 0x{nad:02X} (Status frame: {data_hex})")
        else:
            nad = args.nad
            logger.warning(f"Could not read NAD, falling back to 0x{nad:02X}")
        logger.info("=" * 70)
        # Configure: disable derating and compensation so PWM output directly
        # reflects the requested color/brightness.
        logger.info("Sending ConfigFrame: Calibration=1, Derating=0, Compensation=0")
        send_config_frame(lin_dev, calibration=1, enable_derating=0,
                          enable_compensation=0)
        time.sleep(0.1)
        # Ensure LEDs are off before starting
        set_led_color(lin_dev, nad, 0, 0, 0, 0)
        time.sleep(args.settle_time)
        for idx, (name, red, green, blue, intensity,
                  exp_red, exp_green, exp_blue) in enumerate(test_cases, 1):
            logger.info("")
            logger.info("-" * 70)
            logger.info(f"TEST {idx}/{len(test_cases)}: {name}")
            logger.info(f" Command: R={red} G={green} B={blue} I={intensity}"
                        f" -> NAD 0x{nad:02X}")
            logger.info("-" * 70)
            # Set LED state
            set_led_color(lin_dev, nad, red, green, blue, intensity)
            logger.info(f" Waiting {args.settle_time}s for ADC to settle...")
            time.sleep(args.settle_time)
            # Verify all ADC signals
            passed = verify_adc_signals(
                lin_dev, args.samples, args.interval,
                exp_red, exp_green, exp_blue
            )
            test_results[name] = passed
            logger.info(f" >> TEST {idx} {'PASS' if passed else 'FAIL'}")
        # Turn LEDs off at the end
        set_led_color(lin_dev, nad, 0, 0, 0, 0)
        # Summary
        logger.info("")
        logger.info("=" * 70)
        logger.info("TEST SUMMARY")
        logger.info("=" * 70)
        all_passed = True
        for name, passed in test_results.items():
            status = "PASS" if passed else "FAIL"
            logger.info(f" {status} - {name}")
            if not passed:
                all_passed = False
        logger.info("-" * 70)
        if all_passed:
            logger.info("RESULT: ALL TESTS PASSED")
        else:
            logger.info("RESULT: SOME TESTS FAILED")
        logger.info("=" * 70)
        logger.info("Tearing down...")
        linmaster.teardown()
        logger.info("Done (ECU still powered)")
        sys.exit(0 if all_passed else 1)
    except KeyboardInterrupt:
        logger.info("")
        logger.info("Interrupted by user")
        try:
            # Best-effort cleanup: only touch objects that were created.
            if lin_dev is not None:
                set_led_color(lin_dev, nad, 0, 0, 0, 0)
            if linmaster is not None:
                linmaster.teardown()
        except Exception:
            pass
        sys.exit(130)
    except Exception as e:
        logger.error(f"Error: {e}", exc_info=True)
        sys.exit(1)
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,543 @@
#!/usr/bin/env python3
"""
Interactive BABYLIN animation validation for ALM_Req_A.
This script executes the requirement-oriented checks step-by-step and pauses
after each action so the tester can verify physical LED behavior.
Covered checks:
1) AmbLightMode behavior (0 immediate, 1 fade RGBI, 2 immediate color + fade I)
2) AmbLightUpdate save/apply/discard
3) AmbLightDuration scaling (0.2 s/LSB)
4) LID range selection (single-node, broadcast, invalid From>To)
"""
import argparse
import logging
import time
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)-15s %(levelname)-8s %(message)s",
)
logger = logging.getLogger(__name__)
# Visual rules used by banner()/section() for console output.
SEPARATOR = "=" * 78
SUB = "-" * 78
# ALM_Status.ALMLedState values
LED_STATE_OFF = 0
LED_STATE_ANIMATING = 1
LED_STATE_ON = 2
# Human-readable names for observe_state() logging.
LED_STATE_NAMES = {
    LED_STATE_OFF: "OFF",
    LED_STATE_ANIMATING: "ANIMATING",
    LED_STATE_ON: "ON",
}
def pause(msg):
    """Block until the tester presses Enter, padding the prompt with blank lines."""
    print()
    input(">>> " + msg)
    print()
def banner(title):
    """Log *title* framed by two heavy separator rules."""
    for text in (SEPARATOR, title, SEPARATOR):
        logger.info(text)
def section(title):
    """Log *title* framed by two thin separator rules."""
    for text in (SUB, title, SUB):
        logger.info(text)
def read_alm_status(lin_dev):
    """Return (parsed_dict, raw_bytes) for ALM_Status, or (None, None)."""
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=ALM_STATUS_FRAME["frame_id"],
            data_length=ALM_STATUS_FRAME["length"],
            data=None,
        )
        if raw and len(raw) >= ALM_STATUS_FRAME["length"]:
            return unpack_frame(ALM_STATUS_FRAME, raw), raw
        return None, None
    except Exception as exc:
        logger.error("Failed reading ALM_Status: %s", exc)
        return None, None
def read_led_state(lin_dev):
    """Return the current ALMLEDState, or -1 when status cannot be read."""
    parsed, _raw = read_alm_status(lin_dev)
    return -1 if parsed is None else parsed.get("ALMLEDState", -1)
def read_nad(lin_dev, fallback):
    """Detect the node NAD from ALM_Status, using *fallback* on failure."""
    parsed, raw = read_alm_status(lin_dev)
    if parsed is None:
        logger.warning("Could not read ALM_Status, fallback NAD=0x%02X", fallback)
        return fallback
    nad = parsed.get("ALMNadNo", fallback)
    raw_hex = " ".join(f"{b:02X}" for b in raw)
    logger.info("Detected ALMNadNo=0x%02X (raw: %s)", nad, raw_hex)
    return nad
def send_req(
    lin_dev,
    *,
    red,
    green,
    blue,
    intensity,
    update,
    mode,
    duration,
    lid_from,
    lid_to,
):
    """Pack and transmit one ALM_Req_A command frame (keyword-only API)."""
    payload = pack_frame(
        ALM_REQ_A_FRAME,
        AmbLightColourRed=red,
        AmbLightColourGreen=green,
        AmbLightColourBlue=blue,
        AmbLightIntensity=intensity,
        AmbLightUpdate=update,
        AmbLightMode=mode,
        AmbLightDuration=duration,
        AmbLightLIDFrom=lid_from,
        AmbLightLIDTo=lid_to,
    )
    lin_dev.send_message(
        master_to_slave=True,
        frame_id=ALM_REQ_A_FRAME["frame_id"],
        data_length=ALM_REQ_A_FRAME["length"],
        data=payload,
    )
def observe_state(lin_dev, seconds):
    """Poll ALMLEDState for *seconds* at 0.25 s intervals, logging transitions."""
    logger.info("Observing for %.1f s...", seconds)
    deadline = time.time() + seconds
    previous = None
    while time.time() < deadline:
        st = read_led_state(lin_dev)
        if st != previous:
            name = LED_STATE_NAMES.get(st, f"UNKNOWN({st})")
            logger.info(" ALMLEDState -> %s", name)
            previous = st
        time.sleep(0.25)
def guided_step(lin_dev, title, expectation_lines, command_kwargs, observe_s):
    """Announce one step, wait for Enter, send the command, then observe.

    *command_kwargs* is expanded into send_req(); *observe_s* is how long
    observe_state() polls afterwards.
    """
    section(title)
    logger.info("What you should see:")
    for expectation in expectation_lines:
        logger.info(" - %s", expectation)
    pause("Press Enter to send this command...")
    send_req(lin_dev, **command_kwargs)
    observe_state(lin_dev, observe_s)
    pause("Verify visually, then press Enter for the next step...")
def main():
    """Run the interactive ALM_Req_A validation sequence.

    Connects to the MUM, powers the ECU, detects the node NAD, then walks
    the tester through guided steps covering AmbLightMode behavior,
    AmbLightUpdate save/apply/discard, AmbLightDuration scaling and LID
    range selection, pausing for visual confirmation after each step.
    Always attempts a final OFF command and LIN teardown.
    """
    parser = argparse.ArgumentParser(description="Interactive ALM animation checks for BABYLIN")
    parser.add_argument("--host", default=MUM_HOST, help=f"MUM IP (default: {MUM_HOST})")
    parser.add_argument(
        "--nad",
        type=lambda x: int(x, 0),
        default=LED_DEFAULT_NAD,
        help=f"Fallback NAD if ALM_Status read fails (default: 0x{LED_DEFAULT_NAD:02X})",
    )
    parser.add_argument(
        "--slow-factor",
        type=float,
        default=1.0,
        help="Multiply wait/observe durations (default: 1.0)",
    )
    args = parser.parse_args()
    # Pre-bound to None so the finally-block can tell "never created" apart.
    mum = None
    linmaster = None
    lin_dev = None
    try:
        banner("Connecting to MUM / LIN")
        mum = MelexisUniversalMaster()
        mum.open_all(args.host)
        power_control = mum.get_device(MUM_POWER_DEVICE)
        linmaster = mum.get_device(MUM_LIN_DEVICE)
        linmaster.setup()
        lin_bus = LinBusManager(linmaster)
        lin_dev = LinDevice22(lin_bus)
        lin_dev.baudrate = LIN_BAUDRATE
        lin_dev.nad = args.nad
        power_control.power_up()
        time.sleep(0.5 * args.slow_factor)
        nad = read_nad(lin_dev, args.nad)
        lin_dev.nad = nad
        banner("Interactive Requirement Validation")
        logger.info("Target NAD: 0x%02X", nad)
        logger.info("Slow factor: %.2f", args.slow_factor)
        logger.info("You will be prompted before and after every test step.")
        pause("Press Enter to start from a known OFF baseline...")
        # Step 0: Baseline OFF
        guided_step(
            lin_dev,
            "Step 0 - Baseline OFF",
            [
                "LED should turn OFF quickly.",
                "ALMLEDState should become OFF.",
            ],
            {
                "red": 0,
                "green": 0,
                "blue": 0,
                "intensity": 0,
                "update": 0,
                "mode": 0,
                "duration": 0,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.0 * args.slow_factor,
        )
        # 1) Mode behavior checks
        guided_step(
            lin_dev,
            "Step 1 - Mode 0 Immediate Setpoint",
            [
                "Color/intensity should change immediately.",
                "No visible fade; direct jump to requested setpoint.",
            ],
            {
                "red": 0,
                "green": 180,
                "blue": 80,
                "intensity": 200,
                "update": 0,
                "mode": 0,
                "duration": 10,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 2 - Mode 1 Fade RGB + Intensity (2.0 s)",
            [
                "RGB and intensity should both transition smoothly.",
                "Transition duration should be close to 2.0 s (Duration=10).",
            ],
            {
                "red": 255,
                "green": 40,
                "blue": 0,
                "intensity": 220,
                "update": 0,
                "mode": 1,
                "duration": 10,
                "lid_from": nad,
                "lid_to": nad,
            },
            3.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 3 - Mode 2 Immediate Color + Faded Intensity (2.0 s)",
            [
                "Color should jump immediately to the new RGB target.",
                "Only intensity should ramp over ~2.0 s.",
            ],
            {
                "red": 0,
                "green": 0,
                "blue": 255,
                "intensity": 50,
                "update": 0,
                "mode": 2,
                "duration": 10,
                "lid_from": nad,
                "lid_to": nad,
            },
            3.0 * args.slow_factor,
        )
        # 2) Update save/apply/discard checks
        guided_step(
            lin_dev,
            "Step 4 - Update=1 Save (must NOT apply)",
            [
                "LED output should remain unchanged after this command.",
                "No visible color/intensity change should occur.",
            ],
            {
                "red": 0,
                "green": 255,
                "blue": 0,
                "intensity": 255,
                "update": 1,
                "mode": 1,
                "duration": 10,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.5 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 5 - Update=2 Apply Saved",
            [
                "Saved command from Step 4 should execute now.",
                "Payload in this Apply frame should be ignored by ECU logic.",
                "You should see saved behavior (mode/duration/RGBI from Step 4).",
            ],
            {
                "red": 7,
                "green": 7,
                "blue": 7,
                "intensity": 7,
                "update": 2,
                "mode": 0,
                "duration": 0,
                "lid_from": nad,
                "lid_to": nad,
            },
            3.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 6 - Update=3 Discard Saved",
            [
                "Saved buffer should be cleared.",
                "This discard command itself should not change output.",
            ],
            {
                "red": 0,
                "green": 0,
                "blue": 0,
                "intensity": 0,
                "update": 3,
                "mode": 0,
                "duration": 0,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.5 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 7 - Update=2 After Discard",
            [
                "No saved command should exist now.",
                "Apply should behave like a no-op (no new visible action).",
            ],
            {
                "red": 123,
                "green": 12,
                "blue": 45,
                "intensity": 200,
                "update": 2,
                "mode": 1,
                "duration": 5,
                "lid_from": nad,
                "lid_to": nad,
            },
            2.0 * args.slow_factor,
        )
        # 3) Duration scaling checks
        guided_step(
            lin_dev,
            "Step 8 - Duration=1 (expect ~0.2 s)",
            [
                "Transition should complete very quickly (~0.2 s).",
            ],
            {
                "red": 255,
                "green": 0,
                "blue": 0,
                "intensity": 200,
                "update": 0,
                "mode": 1,
                "duration": 1,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.5 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 9 - Duration=5 (expect ~1.0 s)",
            [
                "Transition should take around 1.0 s.",
                "Visibly slower than Step 8.",
            ],
            {
                "red": 0,
                "green": 255,
                "blue": 0,
                "intensity": 200,
                "update": 0,
                "mode": 1,
                "duration": 5,
                "lid_from": nad,
                "lid_to": nad,
            },
            2.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 10 - Duration=10 (expect ~2.0 s)",
            [
                "Transition should take around 2.0 s.",
                "Visibly slower than Step 9.",
            ],
            {
                "red": 0,
                "green": 0,
                "blue": 255,
                "intensity": 200,
                "update": 0,
                "mode": 1,
                "duration": 10,
                "lid_from": nad,
                "lid_to": nad,
            },
            3.0 * args.slow_factor,
        )
        # 4) LID selection checks
        guided_step(
            lin_dev,
            "Step 11 - LID Single-Node Select (From=To=NAD)",
            [
                "This node should react (it is explicitly selected).",
            ],
            {
                "red": 255,
                "green": 120,
                "blue": 0,
                "intensity": 180,
                "update": 0,
                "mode": 0,
                "duration": 0,
                "lid_from": nad,
                "lid_to": nad,
            },
            1.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 12 - LID Broadcast Select (From=0, To=255)",
            [
                "This node should react (broadcast range).",
            ],
            {
                "red": 120,
                "green": 0,
                "blue": 255,
                "intensity": 180,
                "update": 0,
                "mode": 0,
                "duration": 0,
                "lid_from": 0,
                "lid_to": 255,
            },
            1.0 * args.slow_factor,
        )
        guided_step(
            lin_dev,
            "Step 13 - LID Invalid Range (From > To)",
            [
                "Node should ignore this command.",
                "No visible output change is expected.",
            ],
            {
                "red": 255,
                "green": 255,
                "blue": 255,
                "intensity": 255,
                "update": 0,
                "mode": 0,
                "duration": 0,
                "lid_from": 20,
                "lid_to": 10,
            },
            1.5 * args.slow_factor,
        )
        pause("All checks done. Press Enter to send final OFF cleanup...")
        send_req(
            lin_dev,
            red=0,
            green=0,
            blue=0,
            intensity=0,
            update=0,
            mode=0,
            duration=0,
            lid_from=nad,
            lid_to=nad,
        )
        observe_state(lin_dev, 1.0 * args.slow_factor)
        banner("Test sequence completed")
    except KeyboardInterrupt:
        logger.info("Interrupted by user")
    finally:
        try:
            if lin_dev is not None:
                # Best effort: leave node OFF
                send_req(
                    lin_dev,
                    red=0,
                    green=0,
                    blue=0,
                    intensity=0,
                    update=0,
                    mode=0,
                    duration=0,
                    lid_from=lin_dev.nad,
                    lid_to=lin_dev.nad,
                )
        except Exception:
            pass
        try:
            if linmaster is not None:
                linmaster.teardown()
        except Exception:
            pass
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,260 @@
#!/usr/bin/env python3
"""
LIN Auto-Addressing Test - Matching babylin behavior
This test replicates the exact babylin sequence that successfully changed NAD.
Key observations from babylin log:
1. Uses FreeFormat frame (ID 0x3C)
2. Frame structure: [NAD, PCI, SID, SupID_LSB, SupID_MSB, Subf, Param1, Param2]
3. Uses LIN 1.x Classic checksum
4. Loops the auto-addressing schedule multiple times (6+ iterations in babylin log)
5. NAD change happens after several iterations
"""
import argparse
import logging
import time
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
# Timestamped console logging for the whole auto-addressing run.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
def read_status(lin_dev):
    """Read the ALM_Status frame; return (first_byte, raw_bytes) or (None, None)."""
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=LIN_FRAME_ID_ALM_STATUS,
            data_length=4,
            data=None
        )
        if raw and len(raw) > 0:
            # Byte 0 carries the NAD in this status frame.
            return raw[0], raw
        return None, None
    except Exception as e:
        logger.error(f"Failed to read status: {e}")
        return None, None
def send_bsm_frame(transport_layer, subfunction, param1, param2):
    """
    Send one BSM-SNPD diagnostic frame with LIN 1.x Classic checksum.
    ld_put_raw() is used because send_message() applies the Enhanced
    checksum, which the firmware rejects for these frames (matches babylin).
    Returns True on success, False on any failure.
    """
    try:
        frame = bytearray((
            BSM_NAD_BROADCAST,
            BSM_PCI,
            BSM_SID,
            BSM_SUPPLIER_ID_LSB,
            BSM_SUPPLIER_ID_MSB,
            subfunction,
            param1,
            param2,
        ))
        transport_layer.ld_put_raw(data=frame, baudrate=LIN_BAUDRATE)
        return True
    except Exception as e:
        logger.error(f"Failed to send BSM frame: {e}")
        return False
def poll_status_frames(lin_dev, duration_seconds=AUTOADDRESSING_POLL_DURATION):
    """
    Poll status frames for *duration_seconds*, mirroring babylin's Pub_serv
    schedule.  Acts as a bus keepalive and gives the ECU time to process.
    """
    deadline = time.time() + duration_seconds
    poll_count = 0
    while time.time() < deadline:
        try:
            lin_dev.send_message(
                master_to_slave=False,
                frame_id=LIN_FRAME_ID_ALM_STATUS,
                data_length=4,
                data=[]
            )
            poll_count += 1
        except Exception:
            # Timeouts while polling are expected; keep going.
            pass
        time.sleep(AUTOADDRESSING_STATUS_POLL_INTERVAL)
    logger.debug(f" Polled status {poll_count} times over {duration_seconds:.1f}s")
def run_auto_addressing_sequence(transport_layer, target_nad):
    """
    Execute one complete babylin-equivalent auto-addressing pass.
    Sequence: INIT (0x01), 50 ms wait, one ASSIGN (0x02) per NAD in the
    valid range (target NAD first when it is in range), then STORE (0x03)
    and FINALIZE (0x04), each followed by the inter-frame delay.
    Args:
        transport_layer: LIN transport layer for sending frames
        target_nad: Target NAD to assign (placed first in the sequence)
    Returns:
        bool: True when every frame was sent, False on the first failure.
    """
    # Step 1: Initialize auto-addressing mode
    logger.debug(" INIT (0x01)")
    if not send_bsm_frame(transport_layer, BSM_SUBF_INIT, 0x02, 0xFF):
        return False
    time.sleep(BSM_INIT_DELAY)

    # Step 2: Assign frames — target NAD first so it is the one that sticks,
    # but only reorder when the target is actually inside the valid range.
    nad_sequence = list(VALID_NAD_RANGE)
    if target_nad in nad_sequence:
        nad_sequence = [target_nad] + [n for n in nad_sequence if n != target_nad]
    for nad in nad_sequence:
        logger.debug(f" ASSIGN NAD 0x{nad:02X} (0x02)")
        if not send_bsm_frame(transport_layer, BSM_SUBF_ASSIGN, 0x02, nad):
            return False
        time.sleep(BSM_FRAME_DELAY)

    # Steps 3 & 4: persist the assignment, then leave auto-addressing mode.
    for label, subf in ((" STORE (0x03)", BSM_SUBF_STORE),
                        (" FINALIZE (0x04)", BSM_SUBF_FINALIZE)):
        logger.debug(label)
        if not send_bsm_frame(transport_layer, subf, 0x02, 0xFF):
            return False
        time.sleep(BSM_FRAME_DELAY)
    return True
def main():
    """CLI entry point for the LIN auto-addressing test.

    Connects to the MUM, powers the ECU, reads the initial NAD, then runs the
    requested number of BSM auto-addressing iterations (polling status frames
    between iterations, like babylin's schedule switching) and finally reports
    whether the NAD changed.
    """
    parser = argparse.ArgumentParser(description='LIN Auto-Addressing Test')
    parser.add_argument('--host', default=MUM_HOST,
                        help=f'MUM IP address (default: {MUM_HOST})')
    parser.add_argument('--iterations', type=int, default=AUTOADDRESSING_DEFAULT_ITERATIONS,
                        help=f'Number of auto-addressing iterations (default: {AUTOADDRESSING_DEFAULT_ITERATIONS})')
    parser.add_argument('--check-interval', type=int, default=0,
                        help='Check status every N iterations (0=only at end)')
    args = parser.parse_args()
    # Pre-declared so the KeyboardInterrupt handler can tear down safely even
    # when the interrupt arrives before the LIN device has been opened.
    linmaster = None
    try:
        logger.info(f"Connecting to MUM at {args.host}...")
        # Initialize MUM
        mum = MelexisUniversalMaster()
        mum.open_all(args.host)
        power_control = mum.get_device(MUM_POWER_DEVICE)
        linmaster = mum.get_device(MUM_LIN_DEVICE)
        linmaster.setup()
        # Initialize LIN
        lin_bus = LinBusManager(linmaster)
        lin_dev = LinDevice22(lin_bus)
        lin_dev.baudrate = LIN_BAUDRATE
        # Get transport layer for sending with Classic checksum
        transport_layer = lin_dev.get_device("bus/transport_layer")
        # Power up and give the ECU time to boot.
        power_control.power_up()
        time.sleep(0.5)
        logger.info("=" * 70)
        logger.info("MUM connected, LIN bus ready")
        # Read initial status
        initial_nad, _ = read_status(lin_dev)
        if initial_nad:
            logger.info(f"Initial NAD: 0x{initial_nad:02X}")
        # Calculate target NAD (different from initial NAD)
        valid_nads = list(VALID_NAD_RANGE)
        if initial_nad and initial_nad in valid_nads:
            valid_nads.remove(initial_nad)
        target_nad = valid_nads[0]  # Pick the first available NAD
        logger.info(f"Target NAD: 0x{target_nad:02X}")
        logger.info("=" * 70)
        logger.info(f"Running {args.iterations} auto-addressing iterations...")
        logger.info("(Like babylin: iterate multiple times, then check result)")
        logger.info("=" * 70)
        # Run iterations with status polling (like babylin's schedule switching)
        for iteration in range(1, args.iterations + 1):
            logger.info(f"Iteration {iteration}/{args.iterations}")
            # Run BSM sequence (like babylin's LIN_AA schedule)
            if not run_auto_addressing_sequence(transport_layer, target_nad):
                logger.error("Auto-addressing sequence failed")
                break
            # Poll status frames between iterations (like babylin's Pub_serv schedule)
            # This gives ECU time to process and keeps communication alive
            logger.debug(f"  Status polling between iterations...")
            poll_status_frames(lin_dev, duration_seconds=2.0)
            # Check status at intervals if requested
            if args.check_interval > 0 and iteration % args.check_interval == 0:
                nad, _ = read_status(lin_dev)
                if nad:
                    logger.info(f"  After iteration {iteration}: NAD = 0x{nad:02X}")
                    if initial_nad and nad != initial_nad:
                        logger.info("=" * 70)
                        logger.info(f"SUCCESS! NAD changed from 0x{initial_nad:02X} to 0x{nad:02X}")
                        logger.info(f"Change occurred after {iteration} iterations")
                        logger.info("=" * 70)
                        break
        # Final status check
        logger.info("=" * 70)
        logger.info("Checking final status...")
        time.sleep(1.0)
        final_nad, final_data = read_status(lin_dev)
        if final_nad:
            data_hex = ' '.join(f'{b:02X}' for b in final_data)
            logger.info(f"Final NAD: 0x{final_nad:02X}, Data: {data_hex}")
            if initial_nad and final_nad != initial_nad:
                logger.info("=" * 70)
                logger.info(f"SUCCESS! NAD changed from 0x{initial_nad:02X} to 0x{final_nad:02X}")
                logger.info("=" * 70)
            else:
                logger.info(f"NAD unchanged (still 0x{final_nad:02X})")
        logger.info("=" * 70)
        linmaster.teardown()
        logger.info("Done")
    except KeyboardInterrupt:
        logger.info("\nInterrupted by user")
        # Best-effort teardown: linmaster may never have been opened, and the
        # hardware link may already be gone, so ignore any cleanup failure.
        if linmaster is not None:
            try:
                linmaster.teardown()
            except Exception:
                pass
    except Exception as e:
        logger.error(f"Error: {e}", exc_info=True)
# Script entry point: run the auto-addressing test when executed directly.
if __name__ == "__main__":
    main()

Binary file not shown.

View File

@ -0,0 +1,230 @@
#!/usr/bin/env python3
"""
LIN LED Control Test
This test verifies LIN communication by controlling the LED on the board.
It will fade through different colors (Red, Green, Blue) to verify that
frames are being received correctly.
Frame structure (ALM_Req_A, ID=0x0A, 8 bytes):
- Byte 0: AmbLightColourRed (0-255)
- Byte 1: AmbLightColourGreen (0-255)
- Byte 2: AmbLightColourBlue (0-255)
- Byte 3: AmbLightIntensity (0-255)
- Byte 4: AmbLightUpdate[1:0] | (AmbLightMode[5:0] << 2)
- Byte 5: AmbLightDuration (0-255)
- Byte 6: AmbLightLIDFrom (NAD range start set equal to LIDTo to target one node)
- Byte 7: AmbLightLIDTo (NAD range end)
"""
import argparse
import logging
import time
import math
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
# Console logging: timestamped, level-tagged messages for test tracing.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
def read_alm_status(lin_dev):
    """Poll the ALM_Status frame once.

    Returns:
        (ALMNadNo, raw_bytes) on success, (None, None) when the frame is
        missing, too short, or the read raised.
    """
    try:
        raw = lin_dev.send_message(
            master_to_slave=False,
            frame_id=ALM_STATUS_FRAME['frame_id'],
            data_length=ALM_STATUS_FRAME['length'],
            data=None
        )
        if not raw or len(raw) < ALM_STATUS_FRAME['length']:
            return None, None
        decoded = unpack_frame(ALM_STATUS_FRAME, raw)
        return decoded['ALMNadNo'], raw
    except Exception as e:
        logger.error(f"Failed to read ALM_Status: {e}")
        return None, None
def set_led_color(lin_dev, nad, red, green, blue, intensity,
                  update=0, mode=0, duration=0):
    """Send one ALM_Req_A frame setting colour/intensity for a single node.

    A node acts on the frame only when
    AmbLightLIDFrom <= ALMNadNo <= AmbLightLIDTo; both bounds are set to
    *nad* so exactly one node is addressed.

    Returns:
        True when the frame was packed and transmitted, False otherwise.
    """
    try:
        payload = pack_frame(
            ALM_REQ_A_FRAME,
            AmbLightColourRed=red,
            AmbLightColourGreen=green,
            AmbLightColourBlue=blue,
            AmbLightIntensity=intensity,
            AmbLightUpdate=update,
            AmbLightMode=mode,
            AmbLightDuration=duration,
            AmbLightLIDFrom=nad,
            AmbLightLIDTo=nad,
        )
        lin_dev.send_message(
            master_to_slave=True,
            frame_id=ALM_REQ_A_FRAME['frame_id'],
            data_length=ALM_REQ_A_FRAME['length'],
            data=payload,
        )
    except Exception as e:
        logger.error(f"Failed to set LED color: {e}")
        return False
    return True
def fade_test(lin_dev, nad, duration_per_color=5.0):
    """
    Fade through Red, Green, and Blue colors.

    Each colour is faded in and back out with a sine-shaped brightness curve,
    which looks smoother than a linear ramp.

    Args:
        lin_dev: LinDevice22 instance
        nad: Node address
        duration_per_color: How long to fade each color (seconds)
    """
    colors = [
        ("Red", 255, 0, 0),
        ("Green", 0, 255, 0),
        ("Blue", 0, 0, 255),
    ]
    steps = 50  # Number of fade steps
    delay = duration_per_color / steps
    for color_name, r_max, g_max, b_max in colors:
        logger.info(f"Fading {color_name}...")
        # One combined step sequence (0..steps then steps..0) replaces the two
        # previously duplicated fade-in/fade-out loop bodies; the emitted
        # frames and timing are unchanged.
        for step in list(range(steps + 1)) + list(range(steps, -1, -1)):
            progress = step / steps
            # Use sine wave for smoother fade
            brightness = math.sin(progress * math.pi / 2)
            red = int(r_max * brightness)
            green = int(g_max * brightness)
            blue = int(b_max * brightness)
            intensity = int(100 * brightness)
            set_led_color(lin_dev, nad, red, green, blue, intensity)
            time.sleep(delay)
def main():
    """CLI entry point for the LIN LED control test.

    Connects to the MUM, reads the node's current NAD from ALM_Status (falling
    back to the command-line NAD), runs the requested number of RGB fade
    cycles, and switches the LED off again before tearing down.
    """
    parser = argparse.ArgumentParser(description='LIN LED Control Test')
    parser.add_argument('--host', default=MUM_HOST,
                        help=f'MUM IP address (default: {MUM_HOST})')
    parser.add_argument('--nad', type=lambda x: int(x, 0), default=LED_DEFAULT_NAD,
                        help=f'Node address to control (default: 0x{LED_DEFAULT_NAD:02X})')
    parser.add_argument('--cycles', type=int, default=3,
                        help='Number of fade cycles (default: 3)')
    parser.add_argument('--duration', type=float, default=3.0,
                        help='Duration per color in seconds (default: 3.0)')
    args = parser.parse_args()
    # Pre-declared so the KeyboardInterrupt handler can clean up safely even
    # when the interrupt arrives before the devices have been opened.
    linmaster = None
    lin_dev = None
    try:
        logger.info(f"Connecting to MUM at {args.host}...")
        # Setup MUM and LIN
        mum = MelexisUniversalMaster()
        mum.open_all(args.host)
        power_control = mum.get_device(MUM_POWER_DEVICE)
        linmaster = mum.get_device(MUM_LIN_DEVICE)
        linmaster.setup()
        lin_bus = LinBusManager(linmaster)
        lin_dev = LinDevice22(lin_bus)
        lin_dev.baudrate = LIN_BAUDRATE
        lin_dev.nad = args.nad
        # Power up and give the ECU time to boot.
        power_control.power_up()
        time.sleep(0.5)
        logger.info("MUM connected and LIN bus ready")
        logger.info("=" * 70)
        # Read current NAD
        logger.info("Reading current NAD from ALM_Status...")
        current_nad, status_data = read_alm_status(lin_dev)
        if current_nad is not None:
            data_hex = ' '.join(f'{b:02X}' for b in status_data)
            logger.info(f"Current NAD: 0x{current_nad:02X}")
            logger.info(f"Full status data: {data_hex}")
        else:
            logger.warning("Could not read NAD, using command-line NAD")
            current_nad = args.nad
        logger.info("=" * 70)
        logger.info(f"LED FADE TEST")
        logger.info(f"Controlling NAD: 0x{current_nad:02X}")
        logger.info(f"LIDFrom: 0x{current_nad:02X}, LIDTo: 0x{current_nad:02X}")
        logger.info(f"Fade cycles: {args.cycles}")
        logger.info(f"Duration per color: {args.duration}s")
        logger.info("=" * 70)
        # Turn LED off initially
        logger.info("Turning LED off...")
        set_led_color(lin_dev, current_nad, 0, 0, 0, 0)
        time.sleep(1.0)
        # Run fade test
        for cycle in range(1, args.cycles + 1):
            logger.info(f"\nCycle {cycle}/{args.cycles}")
            fade_test(lin_dev, current_nad, args.duration)
            if cycle < args.cycles:
                logger.info("Pausing between cycles...")
                time.sleep(1.0)
        # Turn LED off at the end
        logger.info("\nTurning LED off...")
        set_led_color(lin_dev, current_nad, 0, 0, 0, 0)
        logger.info("=" * 70)
        logger.info("✓ LED TEST COMPLETED")
        logger.info("=" * 70)
        logger.info("Tearing down...")
        linmaster.teardown()
        logger.info("Done (ECU still powered)")
    except KeyboardInterrupt:
        logger.info("")
        logger.info("Interrupted by user")
        logger.info("Turning LED off...")
        # Best-effort cleanup: either device may never have been opened.
        try:
            if lin_dev is not None:
                set_led_color(lin_dev, args.nad, 0, 0, 0, 0)
            if linmaster is not None:
                linmaster.teardown()
        except Exception:
            pass
    except Exception as e:
        logger.error(f"Error: {e}", exc_info=True)
# Script entry point: run the LED fade test when executed directly.
if __name__ == "__main__":
    main()

Binary file not shown.

View File

@ -1,116 +1,116 @@
"""Mock implementation of the BabyLIN SDK wrapper API used by our adapter. """Mock implementation of the BabyLIN SDK wrapper API used by our adapter.
This module provides create_BabyLIN() returning an object with BLC_* methods, This module provides create_BabyLIN() returning an object with BLC_* methods,
so the real adapter can be exercised without hardware. so the real adapter can be exercised without hardware.
Design notes: Design notes:
- We simulate a single device with one channel and an RX queue per channel. - We simulate a single device with one channel and an RX queue per channel.
- Transmit (BLC_mon_set_xmit) echoes payload into the RX queue to mimic loopback. - Transmit (BLC_mon_set_xmit) echoes payload into the RX queue to mimic loopback.
- Master request (BLC_sendRawMasterRequest) enqueues a deterministic response so - Master request (BLC_sendRawMasterRequest) enqueues a deterministic response so
tests can validate request/response logic without randomness. tests can validate request/response logic without randomness.
""" """
from dataclasses import dataclass
from typing import List

BL_OK = 0  # Success code matching the real SDK convention
@dataclass
class BLC_FRAME:
    """Minimal frame structure to mirror the SDK's BLC_FRAME used by the adapter."""
    frameId: int    # Protected LIN frame identifier
    lenOfData: int  # Number of payload bytes
    frameData: bytes  # Raw payload
class _MockChannel: class _MockChannel:
"""Represents a BabyLIN channel with a simple RX queue.""" """Represents a BabyLIN channel with a simple RX queue."""
def __init__(self): def __init__(self):
self.rx: List[BLC_FRAME] = [] # FIFO for received frames self.rx: List[BLC_FRAME] = [] # FIFO for received frames
class _MockBL:
    """BabyLIN mock exposing the subset of BLC_* APIs our adapter calls."""

    def __init__(self):
        self.BL_OK = BL_OK
        self._ports = ["MOCK_PORT"]        # Simulate one discoverable device
        self._handle = object()            # Opaque handle placeholder
        self._channels = [_MockChannel()]  # Single channel system

    # -----------------------------
    # Discovery/open/close
    # -----------------------------
    def BLC_getBabyLinPorts(self, timeout_ms: int):
        """Return a list of mock ports; timeout not used in mock."""
        return list(self._ports)

    def BLC_openPort(self, port: str):
        """Return an opaque handle for the given port name."""
        return self._handle

    def BLC_closeAll(self):
        """Pretend to close; always succeeds."""
        return BL_OK

    # -----------------------------
    # SDF and channel handling
    # -----------------------------
    def BLC_loadSDF(self, handle, sdf_path: str, download: int):
        """No-op in mock; assume success."""
        return BL_OK

    def BLC_getChannelCount(self, handle):
        """Report number of channels (1 in mock)."""
        return len(self._channels)

    def BLC_getChannelHandle(self, handle, idx: int):
        """Return the channel object acting as its own handle."""
        return self._channels[idx]

    def BLC_sendCommand(self, channel, command: str):
        """Accept any command (e.g., start schedule); always succeed."""
        return BL_OK

    # -----------------------------
    # Transmit/Receive primitives
    # -----------------------------
    def BLC_mon_set_xmit(self, channel: "_MockChannel", frame_id: int, data: bytes, slot_time: int):
        """Echo transmitted payload back to RX to simulate a bus loopback."""
        channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=bytes(data)))
        return BL_OK

    def BLC_getNextFrameTimeout(self, channel: "_MockChannel", timeout_ms: int):
        """Pop next frame from RX queue; return None on timeout (empty queue)."""
        if channel.rx:
            return channel.rx.pop(0)
        # Simulate timeout -> real wrapper may raise; we return None for simplicity
        return None

    def BLC_sendRawMasterRequest(self, channel: "_MockChannel", frame_id: int, payload_or_length):
        """Simulate a slave response for a master request.

        Supports two call forms to mirror SDK variations:
        - (channel, frame_id, bytes): use bytes as the response payload
        - (channel, frame_id, length): synthesize payload with a deterministic pattern
        """
        if isinstance(payload_or_length, (bytes, bytearray)):
            data = bytes(payload_or_length)
        else:
            length = int(payload_or_length)
            # Deterministic pattern: response[i] = (frame_id + i) & 0xFF
            data = bytes(((frame_id + i) & 0xFF) for i in range(max(0, min(8, length))))
        # Enqueue the response frame as if the slave published it on the bus
        channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=data))
        return BL_OK

    def BLC_getDetailedErrorString(self, rc: int):
        """Provide a friendly error string for non-OK return codes."""
        return f"Mock error rc={rc}"
def create_BabyLIN():
    """Factory method matching the real SDK to construct the mock instance."""
    return _MockBL()