Add MUM support in the testing framework

Hosam-Eldin Mostafa 2026-04-28 23:37:53 +02:00
parent 58aa7350e6
commit b8f52bea39
84 changed files with 8860 additions and 4851 deletions

.gitignore

@ -1,182 +1,182 @@
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# --- Project specific ---
# Test run artifacts
reports/
!reports/.gitkeep
# Vendor binaries (keep headers/docs and keep .dll from the SDK for now)
vendor/**/*.lib
vendor/**/*.pdb
# Optional firmware blobs (uncomment if you don't want to track)
# firmware/

README.md

@ -1,285 +1,326 @@
# ECU Tests Framework
Python-based ECU testing framework built on pytest, with a pluggable LIN communication layer (Mock and BabyLin), configuration via YAML, and enhanced HTML/XML reporting with rich test metadata.
## Highlights
- Mock LIN adapter for fast, hardware-free development
- Real BabyLIN adapter using the SDK's official Python wrapper (BabyLIN_library.py)
- Hex flashing scaffold you can wire to UDS
- Rich pytest fixtures and example tests
- Self-contained HTML report with Title, Requirements, Steps, and Expected Results extracted from test docstrings
- JUnit XML report for CI/CD
## Quick links
- Using the framework (common runs, markers, CI, Pi): `docs/12_using_the_framework.md`
- Plugin overview (reporting, hooks, artifacts): `docs/11_conftest_plugin_overview.md`
- Power supply (Owon) usage and troubleshooting: `docs/14_power_supply.md`
- Report properties cheatsheet (standard keys): `docs/15_report_properties_cheatsheet.md`
## TL;DR quick start (copy/paste)
Mock (no hardware):
```powershell
python -m venv .venv; .\.venv\Scripts\Activate.ps1; pip install -r requirements.txt; pytest -m "not hardware" -v
```
Hardware (BabyLIN SDK):
```powershell
# Place BabyLIN_library.py and native libs under .\vendor per vendor/README.md first
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"; pytest -m "hardware and babylin" -v
```
## Quick start (Windows PowerShell)
1) Create a virtual environment and install dependencies
```powershell
python -m venv .venv
.\.venv\Scripts\Activate.ps1
pip install -r requirements.txt
```
2) Run the mock test suite (default interface)
```powershell
python.exe -m pytest -m "not hardware" -v
```
3) View the reports
- HTML: `reports/report.html`
- JUnit XML: `reports/junit.xml`
Tip: You can change output via `--html` and `--junitxml` CLI options.
## Reporting: Metadata in HTML
We extract these fields from each test's docstring and render them in the HTML report:
- Title
- Description
- Requirements (e.g., REQ-001)
- Test Steps
- Expected Result
Markers like `smoke`, `hardware`, and `req_00x` are also displayed.
Example docstring format used by the plugin:
```python
"""
Title: Mock LIN Interface - Send/Receive Echo Test
Description: Validates basic send/receive functionality using the mock LIN interface with echo behavior.
Requirements: REQ-001, REQ-003
Test Steps:
1. Connect to mock interface
2. Send frame ID 0x01 with data [0x55]
3. Receive the echo within 100ms
4. Assert ID and data integrity
Expected Result:
- Echoed frame matches sent frame
"""
```
## Configuration
Default config is `config/test_config.yaml`. Override via the `ECU_TESTS_CONFIG` environment variable.
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\test_config.yaml)
```
BabyLIN configuration template: `config/babylin.example.yaml`
```yaml
interface:
  type: babylin                   # or "mock"
  channel: 0                      # Channel index used by the SDK wrapper
  bitrate: 19200                  # Usually determined by SDF
  sdf_path: ./vendor/Example.sdf
  schedule_nr: 0                  # Start this schedule on connect
```
Switch to hardware profile and run only hardware tests:
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\babylin.example.yaml)
python.exe -m pytest -m hardware -v
```
## Project structure
```
ecu_tests/
├── ecu_framework/
│ ├── config.py # YAML config loader
│ ├── power/
│ │ └── owon_psu.py # Owon PSU serial SCPI controller (library)
│ ├── lin/
│ │ ├── base.py # LinInterface + LinFrame
│ │ ├── mock.py # Mock LIN adapter
│ │ └── babylin.py # BabyLIN SDK-wrapper adapter (uses BabyLIN_library.py)
│ └── flashing/
│ └── hex_flasher.py # Hex flashing scaffold
├── tests/
│ ├── conftest.py # Shared fixtures
│ ├── test_smoke_mock.py # Mock interface smoke and boundary tests
│ ├── test_babylin_hardware_smoke.py # Hardware smoke tests
│ ├── test_babylin_hardware_schedule_smoke.py # Hardware schedule flow
│ ├── test_babylin_wrapper_mock.py # SDK adapter with mock wrapper
│ ├── plugin/
│ │ └── test_conftest_plugin_artifacts.py # Plugin self-test (reports artifacts)
│ ├── unit/
│ │ ├── test_config_loader.py # Config loader unit tests
│ │ ├── test_linframe.py # LIN frame dataclass/logic
│ │ ├── test_hex_flasher.py # Hex flasher scaffolding
│ │ └── test_babylin_adapter_mocked.py # BabyLIN adapter with mocks
│ └── hardware/
│ └── test_owon_psu.py # Owon PSU hardware test (uses central config)
├── config/
│ ├── test_config.yaml # Default config
│ ├── babylin.example.yaml # BabyLIN hardware template
│ ├── owon_psu.example.yaml # Owon PSU example (copy to owon_psu.yaml)
│ └── owon_psu.yaml # Optional machine-specific PSU config
├── vendor/ # Place SDK wrapper and platform libs here
│ ├── Owon/
│ │ └── owon_psu_quick_demo.py # Quick PSU demo using the library & YAML
│ ├── BabyLIN_library.py # Official SDK Python wrapper
│ └── BabyLIN library/ # Platform-specific binaries from SDK (DLL/.so)
├── reports/ # Generated reports
│ ├── report.html
│ └── junit.xml
├── conftest_plugin.py # HTML metadata extraction & rendering
├── pytest.ini # Markers and default addopts
├── requirements.txt
└── README.md
```
## Usage recipes
- Run everything (mock and any non-hardware tests):
```powershell
python.exe -m pytest -v
```
- Run by marker:
```powershell
python.exe -m pytest -m "smoke" -v
python.exe -m pytest -m "req_001" -v
```
- Run in parallel:
```powershell
python.exe -m pytest -n auto -v
```
- Run the plugin self-test (verifies reporting artifacts under `reports/`):
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
- Generate separate HTML/JUnit reports for unit vs non-unit tests:
```powershell
./scripts/run_two_reports.ps1
```
## BabyLIN adapter notes
The `ecu_framework/lin/babylin.py` implementation uses the official `BabyLIN_library.py` wrapper from the SDK. Put `BabyLIN_library.py` under `vendor/` (or on `PYTHONPATH`) along with the SDK's platform-specific libraries. Configure `sdf_path` and `schedule_nr` to load an SDF and start a schedule during connect. The adapter sends frames via `BLC_mon_set_xmit` and receives via `BLC_getNextFrameTimeout`.
## Docs and references
- Guide: `TESTING_FRAMEWORK_GUIDE.md` (deep dive with examples and step-by-step flows)
- Reports: `reports/report.html` and `reports/junit.xml` (generated on each run)
- CI summary: `reports/summary.md` (machine-friendly run summary)
- Requirements coverage: `reports/requirements_coverage.json` (requirement → tests mapping)
- Tip: Open the HTML report on Windows with: `start .\reports\report.html`
- Configs: `config/test_config.yaml`, `config/babylin.example.yaml` (copy and modify for your environment)
- BabyLIN SDK placement and notes: `vendor/README.md`
- Docs index: `docs/README.md` (run sequence, config resolution, reporting, call flows)
- Raspberry Pi deployment: `docs/09_raspberry_pi_deployment.md`
- Build custom Pi image: `docs/10_build_custom_image.md`
- Pi scripts: `scripts/pi_install.sh`, `scripts/ecu-tests.service`, `scripts/ecu-tests.timer`, `scripts/run_tests.sh`
## Troubleshooting
- HTML report missing columns: ensure `pytest.ini` includes `-p conftest_plugin` in `addopts`.
- ImportError for BabyLIN_library: verify `vendor/BabyLIN_library.py` placement and that required native libraries (DLL/.so) from the SDK are available on PATH/LD_LIBRARY_PATH.
- Permission errors in PowerShell: invoke Python via the venv's full path or adjust ExecutionPolicy for scripts.
- Import errors: activate the venv and reinstall `requirements.txt`.
## Owon Power Supply (SCPI) — library, config, tests, and quick demo
We provide a reusable pyserial-based library, a hardware test integrated with the central config,
and a minimal quick demo script.
- Library: `ecu_framework/power/owon_psu.py` (class `OwonPSU`, `SerialParams`, `scan_ports`)
- Central config: `config/test_config.yaml` (`power_supply` section)
- Optionally merge `config/owon_psu.yaml` or set `OWON_PSU_CONFIG` to a YAML path
- Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` is true)
- Quick demo: `vendor/Owon/owon_psu_quick_demo.py` (reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml`)
Quick setup (Windows PowerShell):
```powershell
# Ensure dependencies
pip install -r .\requirements.txt
# Option A: configure centrally in test_config.yaml
# Edit config\test_config.yaml and set:
# power_supply.enabled: true
# power_supply.port: COM4
# Option B: use a separate machine-specific YAML
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port and options in .\config\owon_psu.yaml
# Run the hardware PSU test (skips if disabled or missing port)
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
# Run the quick demo script
python .\vendor\Owon\owon_psu_quick_demo.py
```
YAML keys supported by `power_supply`:
```yaml
power_supply:
  enabled: true
  port: COM4           # or /dev/ttyUSB0
  baudrate: 115200
  timeout: 1.0
  eol: "\n"            # or "\r\n"
  parity: N            # N|E|O
  stopbits: 1          # 1|2
  xonxoff: false
  rtscts: false
  dsrdtr: false
  idn_substr: OWON
  do_set: false
  set_voltage: 5.0
  set_current: 0.1
```
Troubleshooting:
- If `*IDN?` is empty, confirm port, parity/stopbits, and `eol` (try `\r\n`).
- On Windows, if COM>9, use `\\.\COM10` style in some tools; here plain `COM10` usually works.
- Ensure only one program opens the COM port at a time.
## Next steps
- Replace `HexFlasher` with a production flashing routine (UDS)
- Expand tests for end-to-end ECU workflows and requirement coverage
# ECU Tests Framework
Python-based ECU testing framework built on pytest, with a pluggable LIN communication layer (Mock, MUM, and legacy BabyLIN), configuration via YAML, and enhanced HTML/XML reporting with rich test metadata.
## Highlights
- **MUM (Melexis Universal Master) adapter** — current default for hardware tests; networked LIN master with built-in power control
- Mock LIN adapter for fast, hardware-free development
- BabyLIN adapter (legacy) using the vendor SDK's Python wrapper
- Hex flashing scaffold you can wire to UDS
- Rich pytest fixtures and example tests
- Self-contained HTML report with Title, Requirements, Steps, and Expected Results extracted from test docstrings
- JUnit XML report for CI/CD
## Quick links
- Using the framework (common runs, markers, CI, Pi): `docs/12_using_the_framework.md`
- Plugin overview (reporting, hooks, artifacts): `docs/11_conftest_plugin_overview.md`
- Power supply (Owon) usage and troubleshooting: `docs/14_power_supply.md`
- Report properties cheatsheet (standard keys): `docs/15_report_properties_cheatsheet.md`
- MUM source scripts (vendor reference): [vendor/automated_lin_test/README.md](vendor/automated_lin_test/README.md)
## TL;DR quick start (copy/paste)
Mock (no hardware):
```powershell
python -m venv .venv; .\.venv\Scripts\Activate.ps1; pip install -r requirements.txt; pytest -m "not hardware" -v
```
Hardware via MUM (current default):
```powershell
# 1. Install Melexis 'pylin' and 'pymumclient' (see vendor/automated_lin_test/install_packages.sh)
# 2. Make sure the MUM is reachable (default IP 192.168.7.2)
$env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"; pytest -m "hardware and mum" -v
```
Hardware via BabyLIN (legacy):
```powershell
# Place BabyLIN_library.py and native libs under .\vendor per vendor/README.md first
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"; pytest -m "hardware and babylin" -v
```
## Quick start (Windows PowerShell)
1) Create a virtual environment and install dependencies
```powershell
python -m venv .venv
.\.venv\Scripts\Activate.ps1
pip install -r requirements.txt
```
2) Run the mock test suite (default interface)
```powershell
python.exe -m pytest -m "not hardware" -v
```
3) View the reports
- HTML: `reports/report.html`
- JUnit XML: `reports/junit.xml`
Tip: You can change output via `--html` and `--junitxml` CLI options.
## Reporting: Metadata in HTML
We extract these fields from each test's docstring and render them in the HTML report:
- Title
- Description
- Requirements (e.g., REQ-001)
- Test Steps
- Expected Result
Markers like `smoke`, `hardware`, and `req_00x` are also displayed.
Example docstring format used by the plugin:
```python
"""
Title: Mock LIN Interface - Send/Receive Echo Test
Description: Validates basic send/receive functionality using the mock LIN interface with echo behavior.
Requirements: REQ-001, REQ-003
Test Steps:
1. Connect to mock interface
2. Send frame ID 0x01 with data [0x55]
3. Receive the echo within 100ms
4. Assert ID and data integrity
Expected Result:
- Echoed frame matches sent frame
"""
```
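The extraction itself is plain string parsing of those labelled sections. The sketch below shows one way such fields could be pulled out of a docstring; it is an illustration of the idea, not the actual code in `conftest_plugin.py`, and the helper name is hypothetical.
```python
import re
from typing import Dict

# Labels the report cares about (per the docstring format above).
FIELDS = ("Title", "Description", "Requirements", "Test Steps", "Expected Result")

def extract_metadata(docstring: str) -> Dict[str, str]:
    """Split a test docstring into the labelled sections listed in FIELDS (sketch)."""
    meta: Dict[str, str] = {}
    if not docstring:
        return meta
    labels = "|".join(re.escape(f) for f in FIELDS)
    pattern = re.compile(rf"^\s*({labels}):", re.MULTILINE)
    matches = list(pattern.finditer(docstring))
    for i, m in enumerate(matches):
        end = matches[i + 1].start() if i + 1 < len(matches) else len(docstring)
        meta[m.group(1)] = docstring[m.end():end].strip()
    return meta

# e.g. extract_metadata(test_func.__doc__).get("Requirements")  ->  "REQ-001, REQ-003"
```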
## Configuration
Default config is `config/test_config.yaml`. Override via the `ECU_TESTS_CONFIG` environment variable.
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\test_config.yaml)
```
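Resolution of the override is simple: if `ECU_TESTS_CONFIG` is set, that file is loaded, otherwise the default path is used. A minimal sketch of that logic (the real loader lives in `ecu_framework/config.py` and may validate more):
```python
import os
from pathlib import Path

import yaml  # PyYAML, already listed in requirements.txt

DEFAULT_CONFIG = Path("config") / "test_config.yaml"

def load_config() -> dict:
    """Load the YAML config, honouring the ECU_TESTS_CONFIG override (sketch)."""
    path = Path(os.environ.get("ECU_TESTS_CONFIG", DEFAULT_CONFIG))
    with path.open("r", encoding="utf-8") as fh:
        return yaml.safe_load(fh) or {}
```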
### MUM configuration (default for hardware)
Template: `config/mum.example.yaml`
```yaml
interface:
  type: mum
  host: 192.168.7.2            # MUM IP (USB-RNDIS default)
  lin_device: lin0             # MUM LIN device name
  power_device: power_out0     # MUM power-control device (built-in PSU)
  bitrate: 19200               # LIN baudrate
  boot_settle_seconds: 0.5     # Wait after power-up before sending the first frame
  frame_lengths:
    0x0A: 8                    # ALM_Req_A
    0x11: 4                    # ALM_Status
```
The MUM has its own power output, so `power_supply.enabled: false` is the typical setting when using MUM. The Owon PSU support remains for over/under-voltage scenarios but is independent of the LIN interface.
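Whichever profile you point `ECU_TESTS_CONFIG` at, the adapter is selected from `interface.type`. Conceptually the wiring looks like the factory below; class names, module paths, and constructor arguments are assumptions for illustration, and the fixtures in `tests/conftest.py` are the authoritative wiring.
```python
# Sketch only: class names, module paths, and constructor arguments are assumed.
def make_interface(cfg: dict):
    iface = cfg.get("interface", {})
    kind = iface.get("type", "mock")
    if kind == "mum":
        from ecu_framework.lin.mum import MumLinInterface        # assumed module path
        return MumLinInterface(host=iface.get("host", "192.168.7.2"),
                               lin_device=iface.get("lin_device", "lin0"),
                               power_device=iface.get("power_device", "power_out0"))
    if kind == "babylin":
        from ecu_framework.lin.babylin import BabyLinInterface   # assumed class name
        return BabyLinInterface(channel=iface.get("channel", 0),
                                sdf_path=iface.get("sdf_path"),
                                schedule_nr=iface.get("schedule_nr", 0))
    from ecu_framework.lin.mock import MockLinInterface          # assumed class name
    return MockLinInterface()
```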
### BabyLIN configuration (legacy)
Template: `config/babylin.example.yaml`
```yaml
interface:
  type: babylin                   # or "mock", or "mum"
  channel: 0                      # Channel index used by the SDK wrapper
  bitrate: 19200                  # Usually determined by SDF
  sdf_path: ./vendor/Example.sdf
  schedule_nr: 0                  # Start this schedule on connect (-1 to skip)
```
### LIN adapter capabilities
| Adapter | Power control | Diagnostic frames (Classic checksum) | Passive listen |
| --- | --- | --- | --- |
| `mock` | n/a | n/a | yes (queue-based) |
| `mum` | yes (`power_out0`) | yes (`MumLinInterface.send_raw()` → `ld_put_raw`) | no — `receive(id)` triggers a slave read |
| `babylin` | external (Owon PSU) | via SDF / `BLC_sendCommand` | yes (frame queue) |
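The adapters share the `LinFrame`/`LinInterface` contract from `ecu_framework/lin/base.py`, which is what keeps tests portable across them. A simplified sketch of that contract follows; method names beyond `connect`/`send`/`receive` and the dataclass fields are assumptions, with the real `base.py` being authoritative.
```python
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class LinFrame:
    # Simplified stand-in for the real dataclass in ecu_framework/lin/base.py.
    frame_id: int
    data: List[int] = field(default_factory=list)

class LinInterface(ABC):
    """Contract the mock, mum, and babylin adapters implement (sketch)."""

    @abstractmethod
    def connect(self) -> None: ...

    @abstractmethod
    def disconnect(self) -> None: ...

    @abstractmethod
    def send(self, frame: LinFrame) -> None: ...

    @abstractmethod
    def receive(self, frame_id: int, timeout: float = 1.0) -> Optional[LinFrame]:
        """mock/babylin pop from a receive queue; mum actively schedules a slave read."""
```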
Switch to hardware profile and run only hardware tests:
```powershell
$env:ECU_TESTS_CONFIG = (Resolve-Path .\config\babylin.example.yaml)
python.exe -m pytest -m hardware -v
```
## Project structure
```
ecu_tests/
├── ecu_framework/
│ ├── config.py # YAML config loader
│ ├── power/
│ │ └── owon_psu.py # Owon PSU serial SCPI controller (library)
│ ├── lin/
│ │ ├── base.py # LinInterface + LinFrame
│ │ ├── mock.py # Mock LIN adapter
│ │ └── babylin.py # BabyLIN SDK-wrapper adapter (uses BabyLIN_library.py)
│ └── flashing/
│ └── hex_flasher.py # Hex flashing scaffold
├── tests/
│ ├── conftest.py # Shared fixtures
│ ├── test_smoke_mock.py # Mock interface smoke and boundary tests
│ ├── test_babylin_hardware_smoke.py # Hardware smoke tests
│ ├── test_babylin_hardware_schedule_smoke.py # Hardware schedule flow
│ ├── test_babylin_wrapper_mock.py # SDK adapter with mock wrapper
│ ├── plugin/
│ │ └── test_conftest_plugin_artifacts.py # Plugin self-test (reports artifacts)
│ ├── unit/
│ │ ├── test_config_loader.py # Config loader unit tests
│ │ ├── test_linframe.py # LIN frame dataclass/logic
│ │ ├── test_hex_flasher.py # Hex flasher scaffolding
│ │ └── test_babylin_adapter_mocked.py # BabyLIN adapter with mocks
│ └── hardware/
│ └── test_owon_psu.py # Owon PSU hardware test (uses central config)
├── config/
│ ├── test_config.yaml # Default config
│ ├── babylin.example.yaml # BabyLIN hardware template
│ ├── owon_psu.example.yaml # Owon PSU example (copy to owon_psu.yaml)
│ └── owon_psu.yaml # Optional machine-specific PSU config
├── vendor/ # Place SDK wrapper and platform libs here
│ ├── Owon/
│ │ └── owon_psu_quick_demo.py # Quick PSU demo using the library & YAML
│ ├── BabyLIN_library.py # Official SDK Python wrapper
│ └── BabyLIN library/ # Platform-specific binaries from SDK (DLL/.so)
├── reports/ # Generated reports
│ ├── report.html
│ └── junit.xml
├── conftest_plugin.py # HTML metadata extraction & rendering
├── pytest.ini # Markers and default addopts
├── requirements.txt
└── README.md
```
## Usage recipes
- Run everything (mock and any non-hardware tests):
```powershell
python.exe -m pytest -v
```
- Run by marker:
```powershell
python.exe -m pytest -m "smoke" -v
python.exe -m pytest -m "req_001" -v
```
- Run in parallel:
```powershell
python.exe -m pytest -n auto -v
```
- Run the plugin self-test (verifies reporting artifacts under `reports/`):
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
- Generate separate HTML/JUnit reports for unit vs non-unit tests:
```powershell
./scripts/run_two_reports.ps1
```
## BabyLIN adapter notes
The `ecu_framework/lin/babylin.py` implementation uses the official `BabyLIN_library.py` wrapper from the SDK. Put `BabyLIN_library.py` under `vendor/` (or on `PYTHONPATH`) along with the SDK's platform-specific libraries. Configure `sdf_path` and `schedule_nr` to load an SDF and start a schedule during connect. The adapter sends frames via `BLC_mon_set_xmit` and receives via `BLC_getNextFrameTimeout`.
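For unit tests the wrapper does not need to be installed at all; it can be stubbed before the adapter is imported. A minimal sketch with pytest's `monkeypatch` (the import name comes from the note above, everything else is assumed; see `tests/unit/test_babylin_adapter_mocked.py` for the framework's actual approach):
```python
import importlib
import sys
from unittest.mock import MagicMock

def test_babylin_adapter_importable_without_hardware(monkeypatch):
    # Stand in for the vendor wrapper so importing the adapter needs no SDK or hardware.
    fake_sdk = MagicMock()
    fake_sdk.BLC_getBabyLinPorts.return_value = []   # pretend no devices are attached
    monkeypatch.setitem(sys.modules, "BabyLIN_library", fake_sdk)

    adapter_module = importlib.import_module("ecu_framework.lin.babylin")
    assert adapter_module is not None
```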
## Docs and references
- Guide: `TESTING_FRAMEWORK_GUIDE.md` (deep dive with examples and step-by-step flows)
- Reports: `reports/report.html` and `reports/junit.xml` (generated on each run)
- CI summary: `reports/summary.md` (machine-friendly run summary)
- Requirements coverage: `reports/requirements_coverage.json` (requirement → tests mapping)
- Tip: Open the HTML report on Windows with: `start .\reports\report.html`
- Configs: `config/test_config.yaml`, `config/babylin.example.yaml` (copy and modify for your environment)
- BabyLIN SDK placement and notes: `vendor/README.md`
- Docs index: `docs/README.md` (run sequence, config resolution, reporting, call flows)
- Raspberry Pi deployment: `docs/09_raspberry_pi_deployment.md`
- Build custom Pi image: `docs/10_build_custom_image.md`
- Pi scripts: `scripts/pi_install.sh`, `scripts/ecu-tests.service`, `scripts/ecu-tests.timer`, `scripts/run_tests.sh`
## Troubleshooting
- HTML report missing columns: ensure `pytest.ini` includes `-p conftest_plugin` in `addopts`.
- ImportError for BabyLIN_library: verify `vendor/BabyLIN_library.py` placement and that required native libraries (DLL/.so) from the SDK are available on PATH/LD_LIBRARY_PATH.
- Permission errors in PowerShell: invoke Python via the venv's full path or adjust ExecutionPolicy for scripts.
- Import errors: activate the venv and reinstall `requirements.txt`.
## Owon Power Supply (SCPI) — library, config, tests, and quick demo
We provide a reusable pyserial-based library, a hardware test integrated with the central config,
and a minimal quick demo script.
- Library: `ecu_framework/power/owon_psu.py` (class `OwonPSU`, `SerialParams`, `scan_ports`)
- Central config: `config/test_config.yaml` (`power_supply` section)
- Optionally merge `config/owon_psu.yaml` or set `OWON_PSU_CONFIG` to a YAML path
- Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` is true)
- Quick demo: `vendor/Owon/owon_psu_quick_demo.py` (reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml`)
Quick setup (Windows PowerShell):
```powershell
# Ensure dependencies
pip install -r .\requirements.txt
# Option A: configure centrally in test_config.yaml
# Edit config\test_config.yaml and set:
# power_supply.enabled: true
# power_supply.port: COM4
# Option B: use a separate machine-specific YAML
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port and options in .\config\owon_psu.yaml
# Run the hardware PSU test (skips if disabled or missing port)
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
# Run the quick demo script
python .\vendor\Owon\owon_psu_quick_demo.py
```
YAML keys supported by `power_supply`:
```yaml
power_supply:
  enabled: true
  port: COM4           # or /dev/ttyUSB0
  baudrate: 115200
  timeout: 1.0
  eol: "\n"            # or "\r\n"
  parity: N            # N|E|O
  stopbits: 1          # 1|2
  xonxoff: false
  rtscts: false
  dsrdtr: false
  idn_substr: OWON
  do_set: false
  set_voltage: 5.0
  set_current: 0.1
```
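Under the hood the library speaks plain SCPI over a serial port via pyserial. The stripped-down sketch below shows the kind of `*IDN?` round-trip the `OwonPSU` class performs; it is for illustration and troubleshooting only, and the library remains the supported entry point.
```python
import serial  # pyserial, already in requirements.txt

def query_idn(port: str = "COM4", baudrate: int = 115200, eol: str = "\n") -> str:
    """Send *IDN? and return the instrument's reply (illustrative sketch)."""
    with serial.Serial(port, baudrate=baudrate, timeout=1.0) as ser:
        ser.write(("*IDN?" + eol).encode("ascii"))
        return ser.readline().decode("ascii", errors="replace").strip()

# print(query_idn())  # expect a string containing "OWON" if the idn_substr check is to pass
```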
Troubleshooting:
- If `*IDN?` is empty, confirm port, parity/stopbits, and `eol` (try `\r\n`).
- On Windows, if COM>9, use `\\.\COM10` style in some tools; here plain `COM10` usually works.
- Ensure only one program opens the COM port at a time.
## Next steps
- Replace `HexFlasher` with a production flashing routine (UDS)
- Expand tests for end-to-end ECU workflows and requirement coverage

TESTING_FRAMEWORK_GUIDE.md

@ -1,359 +1,359 @@
# ECU Testing Framework - Complete Guide
## Overview
This comprehensive ECU Testing Framework provides a robust solution for testing Electronic Control Units (ECUs) using pytest with BabyLIN LIN bus communication. The framework includes detailed test documentation, enhanced reporting, mock interfaces for development, and real hardware integration capabilities.
## Framework Features
### ✅ **Complete Implementation Status**
- **✅ pytest-based testing framework** with custom plugins
- **✅ BabyLIN LIN communication integration** via the official SDK Python wrapper (`BabyLIN_library.py`)
- **✅ Mock interface for hardware-independent development**
- **✅ Enhanced HTML/XML reporting with test metadata**
- **✅ Detailed test documentation extraction**
- **✅ Configuration management with YAML**
- **✅ Hex file flashing capabilities (scaffold)**
- **✅ Custom pytest markers for requirement traceability**
## Enhanced Reporting System
### Test Metadata Integration
The framework automatically extracts detailed test information from docstrings and integrates it into reports:
**HTML Report Features:**
- **Title Column**: Clear test descriptions extracted from docstrings
- **Requirements Column**: Requirement traceability (REQ-001, REQ-002, etc.)
- **Enhanced Test Details**: Description, test steps, and expected results
- **Marker Integration**: Custom pytest markers for categorization
**Example Test Documentation Format:**
```python
@pytest.mark.smoke
@pytest.mark.req_001
def test_mock_send_receive_echo(self, mock_interface):
    """
    Title: Mock LIN Interface - Send/Receive Echo Test
    Description: Validates basic send/receive functionality using the mock
        LIN interface with echo behavior for development testing.
    Requirements: REQ-001, REQ-003
    Test Steps:
        1. Connect to mock LIN interface
        2. Send a test frame with ID 0x01 and data [0x55]
        3. Receive the echoed frame within 100ms timeout
        4. Verify frame ID and data integrity
    Expected Result:
        - Frame should be echoed back successfully
        - Received data should match sent data exactly
        - Operation should complete within timeout period
    """
```
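One plausible way a plugin turns that metadata into extra report columns is via pytest-html's table hooks, sketched below. The hook names come from pytest-html (>= 4.x); the parsing helper and report attribute names are illustrative, not a copy of `conftest_plugin.py`.
```python
import pytest

def _field(doc: str, label: str) -> str:
    # Naive single-line lookup, e.g. "Title: ..." or "Requirements: ..." (sketch).
    for line in (doc or "").splitlines():
        if line.strip().startswith(label + ":"):
            return line.split(":", 1)[1].strip()
    return ""

@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    outcome = yield
    report = outcome.get_result()
    doc = item.function.__doc__ or ""
    report.title = _field(doc, "Title")
    report.requirements = _field(doc, "Requirements")

def pytest_html_results_table_header(cells):
    cells.insert(2, "<th>Title</th>")
    cells.insert(3, "<th>Requirements</th>")

def pytest_html_results_table_row(report, cells):
    cells.insert(2, f"<td>{getattr(report, 'title', '')}</td>")
    cells.insert(3, f"<td>{getattr(report, 'requirements', '')}</td>")
```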
### Report Generation
**HTML Report (`reports/report.html`):**
- Interactive table with sortable columns
- Test titles and requirements clearly visible
- Execution duration and status tracking
- Enhanced metadata from docstrings
**XML Report (`reports/junit.xml`):**
- Standard JUnit XML format for CI/CD integration
- Test execution data and timing information
- Compatible with most CI systems (Jenkins, GitLab CI, etc.)
## Project Structure
```
ecu_tests/
├── ecu_framework/ # Core framework package
│ ├── config.py # YAML configuration management
│ ├── lin/ # LIN communication interfaces
│ │ ├── base.py # Abstract LinInterface definition
│ │ ├── mock.py # Mock interface for development
│ │ └── babylin.py # Real BabyLin hardware interface
│ └── flashing/ # Hex file flashing capabilities
│ └── hex_flasher.py # ECU flash programming
├── tests/ # Test suite
│ ├── conftest.py # pytest fixtures and configuration
│ ├── test_smoke_mock.py # Mock interface validation tests
│ ├── test_babylin_hardware_smoke.py # Hardware smoke tests
│ └── test_hardware_placeholder.py # Future hardware tests
├── config/ # Configuration files
│ ├── test_config.yaml # Main test configuration
│ └── babylin.example.yaml # BabyLin configuration template
├── vendor/ # BabyLIN SDK placement
| ├── BabyLIN_library.py # Official SDK Python wrapper
| └── platform libs # OS-specific native libs (DLL/.so/.dylib)
├── reports/ # Generated test reports
│ ├── report.html # Enhanced HTML report
│ └── junit.xml # JUnit XML report
├── conftest_plugin.py # Custom pytest plugin for enhanced reporting
├── pytest.ini # pytest configuration with custom markers
├── requirements.txt # Python dependencies
└── README.md # Project documentation
```
## Running Tests
### Basic Test Execution
```powershell
# Run all tests with verbose output
python -m pytest -v
# Run specific test suite
python -m pytest tests\test_smoke_mock.py -v
# Run tests with specific markers
python -m pytest -m "smoke" -v
python -m pytest -m "req_001" -v
# Run hardware tests (requires BabyLIN hardware); join with adapter marker
python -m pytest -m "hardware and babylin" -v
```
### Unit Tests (fast, no hardware)
Run only unit tests using the dedicated marker or by path:
```powershell
# By marker
python -m pytest -m unit -q
# By path
python -m pytest tests\unit -q
# Plugin self-tests (verifies reporting artifacts)
python -m pytest tests\plugin -q
```
Reports still go to `reports/` (HTML and JUnit per defaults). Open the HTML on Windows with:
```powershell
start .\reports\report.html
```
Coverage: enabled by default via pytest.ini. To disable locally:
```powershell
python -m pytest -q -o addopts=""
```
Optional HTML coverage:
```powershell
python -m pytest --cov=ecu_framework --cov-report=html -q
start .\htmlcov\index.html
```
See also: `docs/13_unit_testing_guide.md` for more details and examples.
### Report Generation
Tests automatically generate enhanced reports:
- **HTML Report**: `reports/report.html` - Interactive report with metadata
- **XML Report**: `reports/junit.xml` - CI/CD compatible format
## Configuration
### Test Configuration (`config/test_config.yaml`)
```yaml
interface:
  type: mock                  # or babylin for hardware
  timeout: 1.0
flash:
  hex_file_path: firmware/ecu_firmware.hex
  flash_timeout: 30.0
ecu:
  name: Test ECU
  lin_id_range: [0x01, 0x3F]
```
### BabyLIN Configuration (`config/babylin.example.yaml`)
```yaml
interface:
  type: babylin
  channel: 0                      # channel index used by the SDK wrapper
  bitrate: 19200                  # typically set by SDF
  sdf_path: ./vendor/Example.sdf
  schedule_nr: 0                  # schedule to start on connect
```
## Test Categories
### 1. Mock Interface Tests (`test_smoke_mock.py`)
**Purpose**: Hardware-independent development and validation
- ✅ Send/receive echo functionality
- ✅ Master request/response testing
- ✅ Timeout behavior validation
- ✅ Frame validation boundary testing
- ✅ Parameterized boundary tests for comprehensive coverage
**Status**: **7 tests passing** - Complete implementation
### 2. Hardware Smoke Tests (`test_babylin_hardware_smoke.py`)
**Purpose**: Basic BabyLIN hardware connectivity validation
- ✅ SDK wrapper import and device open
- ✅ Interface connection establishment
- ✅ Basic send/receive operations
- ✅ Error handling and cleanup
**Status**: Ready for hardware testing
### 3. Hardware Integration Tests (`test_hardware_placeholder.py`)
**Purpose**: Full ECU testing workflow with real hardware
- ECU flashing with hex files
- Communication protocol validation
- Diagnostic command testing
- Performance and stress testing
**Status**: Framework ready, awaiting ECU specifications
## Custom Pytest Markers
The framework includes custom markers for test categorization and requirement traceability:
```ini
# In pytest.ini
markers =
    smoke: Basic functionality tests
    integration: Integration tests requiring hardware
    hardware: Tests requiring physical BabyLin hardware
    babylin: Tests targeting the BabyLIN SDK adapter
    unit: Fast unit tests (no hardware)
    boundary: Boundary condition and edge case tests
    req_001: Tests validating requirement REQ-001 (LIN Interface Basic Operations)
    req_002: Tests validating requirement REQ-002 (Master Request/Response)
    req_003: Tests validating requirement REQ-003 (Frame Validation)
    req_004: Tests validating requirement REQ-004 (Timeout Handling)
```
## BabyLIN Integration Details
### SDK Python wrapper
The framework uses the official SDK Python wrapper `BabyLIN_library.py` (placed under `vendor/`) and calls its BLC_* APIs.
Key calls in the adapter (`ecu_framework/lin/babylin.py`):
- `BLC_getBabyLinPorts`, `BLC_openPort` — discovery and open
- `BLC_loadSDF`, `BLC_getChannelHandle`, `BLC_sendCommand('start schedule N;')` — SDF + scheduling
- `BLC_mon_set_xmit` — transmit
- `BLC_getNextFrameTimeout` — receive
- `BLC_sendRawMasterRequest` — master request (length then bytes)
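Strung together, the connect flow is roughly: discover and open a port, load the SDF, resolve the channel handle, then optionally start a schedule. The sketch below only illustrates that order; argument shapes and return values are assumptions, so consult `BabyLIN_library.py` and `ecu_framework/lin/babylin.py` for the real signatures.
```python
import BabyLIN_library as bl  # vendor wrapper; must be importable (e.g. from vendor/)

def connect_babylin(sdf_path: str, channel: int = 0, schedule_nr: int = 0):
    """Illustrative connect flow; argument and return shapes are assumptions."""
    ports = bl.BLC_getBabyLinPorts()                  # discovery (assumed: returns a port list)
    device = bl.BLC_openPort(ports[0])                # open the first BabyLIN device (assumed)
    bl.BLC_loadSDF(device, sdf_path)                  # load the LIN description file
    chan = bl.BLC_getChannelHandle(device, channel)   # resolve the channel handle
    if schedule_nr >= 0:
        bl.BLC_sendCommand(chan, f"start schedule {schedule_nr};")
    return chan
```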
## Development Workflow
### 1. Development Phase
```powershell
# Use mock interface for development
python -m pytest tests\test_smoke_mock.py -v
```
### 2. Hardware Integration Phase
```powershell
# Test with real BabyLIN hardware
python -m pytest -m "hardware and babylin" -v
```
### 3. Full System Testing
```powershell
# Complete test suite including ECU flashing
python -m pytest -v
```
## Enhanced Reporting Output Example
The enhanced HTML report includes:
| Result | Test | Title | Requirements | Duration | Links |
|--------|------|-------|--------------|----------|--------|
| ✅ Passed | test_mock_send_receive_echo | Mock LIN Interface - Send/Receive Echo Test | REQ-001, REQ-003 | 1 ms | |
| ✅ Passed | test_mock_request_synthesized_response | Mock LIN Interface - Master Request Response Test | REQ-002 | 0 ms | |
| ✅ Passed | test_mock_receive_timeout_behavior | Mock LIN Interface - Receive Timeout Test | REQ-004 | 106 ms | |
## Framework Validation Results
**Current Status**: ✅ **All core features implemented and tested**
**Mock Interface Tests**: 7/7 passing (0.14s execution time)
- Send/receive operations: ✅ Working
- Timeout handling: ✅ Working
- Frame validation: ✅ Working
- Boundary testing: ✅ Working
**Enhanced Reporting**: ✅ **Fully functional**
- HTML report with metadata: ✅ Working
- XML report generation: ✅ Working
- Custom pytest plugin: ✅ Working
- Docstring metadata extraction: ✅ Working
**Configuration System**: ✅ **Complete**
- YAML configuration loading: ✅ Working
- Environment variable override: ✅ Working
- BabyLIN SDF/schedule configuration: ✅ Working
- Power supply (PSU) configuration: ✅ Working (see `config/test_config.yaml` → `power_supply`)
## Owon Power Supply (PSU) Integration
The framework includes a serial SCPI controller for Owon PSUs and a hardware test wired to the central config.
- Library: `ecu_framework/power/owon_psu.py` (pyserial)
- Config: `config/test_config.yaml` (`power_supply` section)
- Optionally merge machine-specific settings from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG`
- Hardware test: `tests/hardware/test_owon_psu.py` (skips unless `power_supply.enabled` and `port` present)
- Quick demo: `vendor/Owon/owon_psu_quick_demo.py`
Quick run:
```powershell
pip install -r .\requirements.txt
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port in .\config\owon_psu.yaml
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
python .\vendor\Owon\owon_psu_quick_demo.py
```
Common config keys:
```yaml
power_supply:
  enabled: true
  port: COM4
  baudrate: 115200
  timeout: 1.0
  eol: "\n"
  parity: N
  stopbits: 1
  idn_substr: OWON
```
## Next Steps
1. **Hardware Testing**: Connect BabyLin hardware and validate hardware smoke tests
2. **ECU Integration**: Define ECU-specific communication protocols and diagnostic commands
3. **Hex Flashing**: Implement complete hex file flashing workflow
4. **CI/CD Integration**: Set up automated testing pipeline with generated reports
## Dependencies
```
pytest>=8.4.2
pytest-html>=4.1.1
pytest-xdist>=3.8.0
pyyaml>=6.0.2
```
This framework provides a complete, production-ready testing solution for ECU development with BabyLIN communication, featuring enhanced documentation, traceability, and reporting capabilities.

config/babylin.example.yaml

@ -1,11 +1,11 @@
# Example configuration for BabyLIN hardware runs (SDK Python wrapper)
interface:
  type: babylin
  channel: 0                        # Channel index (0-based) as used by the SDK
  bitrate: 19200                    # Usually defined by the SDF, kept for reference
  node_name: ECU_TEST_NODE
  sdf_path: .\vendor\Example.sdf    # Path to your SDF file
  schedule_nr: 0                    # Schedule number to start on connect
flash:
  enabled: true
  hex_path: C:\\Path\\To\\firmware.hex  # TODO: update

config/examples.yaml

@ -1,50 +1,50 @@
# Examples: Mock-only and BabyLIN hardware configurations
#
# How to use (Windows PowerShell):
# # Point the framework to a specific config file
# $env:ECU_TESTS_CONFIG = ".\config\examples.yaml"
# # Run only mock tests
# pytest -m "not hardware" -v
# # Switch to the BabyLIN profile by moving it under the 'active' key or by
# # exporting a different file path containing only the desired profile.
#
# This file shows both profiles in one place; typically you'll copy the relevant
# section into its own YAML file (e.g., config/mock.yaml, config/babylin.yaml).
# --- MOCK PROFILE -----------------------------------------------------------
mock_profile:
  interface:
    type: mock
    channel: 1
    bitrate: 19200
  flash:
    enabled: false
    hex_path:
# --- BABYLIN PROFILE --------------------------------------------------------
# Requires: vendor/BabyLIN_library.py and platform libraries placed per vendor/README.md
babylin_profile:
  interface:
    type: babylin
    channel: 0                      # SDK channel index (0-based)
    bitrate: 19200                  # Informational; SDF usually defines effective timing
    node_name: ECU_TEST_NODE        # Optional label
    sdf_path: .\vendor\Example.sdf  # Update to your real SDF path
    schedule_nr: 0                  # Start this schedule on connect
  flash:
    enabled: true
    hex_path: C:\\Path\\To\\firmware.hex  # Update as needed
# --- ACTIVE SELECTION -------------------------------------------------------
# To use one of the profiles above, copy it under the 'active' key below or
# include only that profile in a separate file. The loader expects the top-level
# keys 'interface' and 'flash' by default. For convenience, we expose a shape
# that mirrors that directly. Here is a self-contained active selection:
active:
  interface:
    type: mock
    channel: 1
    bitrate: 19200
  flash:
    enabled: false
    hex_path:

config/mum.example.yaml

@ -0,0 +1,29 @@
# MUM (Melexis Universal Master) interface example.
# Copy to test_config.yaml or point ECU_TESTS_CONFIG at this file.
#
# Prerequisites:
# - MUM is reachable over IP (default 192.168.7.2 over USB-RNDIS).
# - Melexis Python packages 'pylin' and 'pymumclient' are importable.
# See vendor/automated_lin_test/install_packages.sh.
interface:
  type: mum
  host: 192.168.7.2              # MUM IP address
  lin_device: lin0               # MUM LIN device name
  power_device: power_out0       # MUM power-control device
  bitrate: 19200                 # LIN baudrate
  boot_settle_seconds: 0.5       # Delay after power-up before first frame
  # Optional: per-frame-id data lengths. Defaults cover the 4SEVEN library
  # (ALM_Status=4, ALM_Req_A=8, etc.) — only override if your ECU differs.
  frame_lengths:
    0x0A: 8                      # ALM_Req_A
    0x11: 4                      # ALM_Status
flash:
  enabled: false
  hex_path:
# The Owon PSU is unused on the MUM flow (MUM provides power on power_out0).
# Leave disabled unless you also want to drive the Owon for a separate test.
power_supply:
  enabled: false

config/owon_psu.example.yaml

@ -1,18 +1,18 @@
# Example configuration for Owon PSU hardware test
# Copy to config/owon_psu.yaml and adjust values for your setup
port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux
baudrate: 115200 # default 115200
timeout: 1.0 # seconds
# eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required
# parity: N # N|E|O (default N)
# stopbits: 1 # 1 or 2 (default 1)
# xonxoff: false
# rtscts: false
# dsrdtr: false
# Optional assertions/behavior
# idn_substr: OWON # require this substring in *IDN?
# do_set: true # briefly set V/I and toggle output
# set_voltage: 1.0 # volts when do_set is true
# set_current: 0.1 # amps when do_set is true

config/owon_psu.yaml

@ -1,18 +1,18 @@
# Example configuration for Owon PSU hardware test
# Copy to config/owon_psu.yaml and adjust values for your setup
port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux
baudrate: 115200 # default 115200
timeout: 1.0 # seconds
eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required
parity: N # N|E|O (default N)
stopbits: 1 # 1 or 2 (default 1)
xonxoff: false
rtscts: false
dsrdtr: false
# Optional assertions/behavior
idn_substr: OWON # require this substring in *IDN?
do_set: true # briefly set V/I and toggle output
set_voltage: 10.0 # volts when do_set is true
set_current: 0.1 # amps when do_set is true
# Example configuration for Owon PSU hardware test
# Copy to config/owon_psu.yaml and adjust values for your setup
port: COM4 # e.g., COM4 on Windows, /dev/ttyUSB0 on Linux
baudrate: 115200 # default 115200
timeout: 1.0 # seconds
eol: "\n" # write/query line termination (default "\n"); use "\r\n" if required
parity: N # N|E|O (default N)
stopbits: 1 # 1 or 2 (default 1)
xonxoff: false
rtscts: false
dsrdtr: false
# Optional assertions/behavior
idn_substr: OWON # require this substring in *IDN?
do_set: true # briefly set V/I and toggle output
set_voltage: 13.0 # volts when do_set is true
set_current: 1.0 # amps when do_set is true (raise above ECU draw to stay in CV mode)

View File

@ -1,18 +1,34 @@
interface:
type: mock
channel: 1
bitrate: 19200
# MUM (Melexis Universal Master) is the current default. Switch type to
# 'babylin' for the legacy SDK flow, or 'mock' for hardware-free runs.
type: mum
host: 192.168.7.2 # MUM IP (USB-RNDIS default)
lin_device: lin0 # MUM LIN device name
power_device: power_out0 # MUM power-control device (built-in PSU)
bitrate: 19200 # LIN baudrate
boot_settle_seconds: 0.5 # Wait after power-up before sending the first frame
frame_lengths:
0x0A: 8 # ALM_Req_A (master-published, RGB control)
0x11: 4 # ALM_Status (slave-published)
# --- BabyLIN (legacy) settings, used only when type: babylin ---
channel: 0
node_name: ECU_TEST_NODE
sdf_path: .\vendor\4SEVEN_color_lib_test.sdf
schedule_nr: -1 # -1 = don't auto-start a schedule
flash:
enabled: false
hex_path:
# Optional: central power supply config used by hardware tests/demos
# You can also place machine-specific values in config/owon_psu.yaml or set OWON_PSU_CONFIG
# Owon PSU is independent of the LIN interface. The MUM provides its own
# power on power_out0, so leave the PSU disabled unless you specifically
# need to drive an external supply for over/under-voltage scenarios.
power_supply:
enabled: true
enabled: false
# port: COM4
baudrate: 115200
timeout: 1.0
timeout: 2.0
eol: "\n"
parity: N
stopbits: 1
@ -21,5 +37,5 @@ power_supply:
dsrdtr: false
# idn_substr: OWON
do_set: false
set_voltage: 1.0
set_current: 0.1
set_voltage: 13.0
set_current: 1.0

View File

@ -1,27 +1,27 @@
"""
Pytest configuration for this repository.
Purpose:
- Optionally register the local plugin in `conftest_plugin.py` if present.
- Avoid hard failures on environments where that file isn't available.
"""
from __future__ import annotations
import importlib
import sys
from typing import Any
def pytest_configure(config: Any) -> None:
try:
plugin = importlib.import_module("conftest_plugin")
except Exception as e:
# Soft warning only; tests can still run without the extra report features.
sys.stderr.write(f"[pytest] conftest_plugin not loaded: {e}\n")
return
# Register the plugin module so its hooks are active.
try:
config.pluginmanager.register(plugin, name="conftest_plugin")
except Exception as reg_err:
sys.stderr.write(f"[pytest] failed to register conftest_plugin: {reg_err}\n")
"""
Pytest configuration for this repository.
Purpose:
- Optionally register the local plugin in `conftest_plugin.py` if present.
- Avoid hard failures on environments where that file isn't available.
"""
from __future__ import annotations
import importlib
import sys
from typing import Any
def pytest_configure(config: Any) -> None:
try:
plugin = importlib.import_module("conftest_plugin")
except Exception as e:
# Soft warning only; tests can still run without the extra report features.
sys.stderr.write(f"[pytest] conftest_plugin not loaded: {e}\n")
return
# Register the plugin module so its hooks are active.
try:
config.pluginmanager.register(plugin, name="conftest_plugin")
except Exception as reg_err:
sys.stderr.write(f"[pytest] failed to register conftest_plugin: {reg_err}\n")

View File

@ -1,261 +1,261 @@
"""
Custom pytest plugin to enhance test reports with detailed metadata.
Why we need this plugin:
- Surface business-facing info (Title, Description, Requirements, Steps, Expected Result) in the HTML report for quick review.
- Map tests to requirement IDs and produce a requirements coverage JSON artifact for traceability.
- Emit a compact CI summary (summary.md) for dashboards and PR comments.
How it works (high level):
- During collection, we track all test nodeids for later "unmapped" reporting.
- During test execution, we parse the test function's docstring and markers to extract metadata and requirement IDs; we attach these as user_properties on the report.
- We add custom columns (Title, Requirements) to the HTML table.
- At the end of the run, we write two artifacts into reports/: requirements_coverage.json and summary.md.
"""
import os
import re
import json
import datetime as _dt
import pytest
# -----------------------------
# Session-scoped state for reports
# -----------------------------
# Track all collected tests (nodeids) so we can later highlight tests that had no requirement mapping.
_ALL_COLLECTED_TESTS: set[str] = set()
# Map requirement ID (e.g., REQ-001) -> set of nodeids that cover it.
_REQ_TO_TESTS: dict[str, set[str]] = {}
# Nodeids that did map to at least one requirement.
_MAPPED_TESTS: set[str] = set()
def _normalize_req_id(token: str) -> str | None:
"""Normalize requirement token to REQ-XXX form.
Accepts markers like 'req_001' or strings like 'REQ-001'.
Returns None if not a recognizable requirement. This provides a single
canonical format for coverage mapping and reporting.
"""
token = token.strip()
m1 = re.fullmatch(r"req_(\d{1,3})", token, re.IGNORECASE)
if m1:
return f"REQ-{int(m1.group(1)):03d}"
m2 = re.fullmatch(r"REQ[-_ ]?(\d{1,3})", token, re.IGNORECASE)
if m2:
return f"REQ-{int(m2.group(1)):03d}"
return None
def _extract_req_ids_from_docstring(docstring: str) -> list[str]:
"""Parse the 'Requirements:' line in the docstring and return REQ-XXX tokens.
Supports comma- or whitespace-separated tokens and normalizes them.
"""
reqs: list[str] = []
req_match = re.search(r"Requirements:\s*(.+)", docstring)
if req_match:
raw = req_match.group(1)
# split by comma or whitespace
parts = re.split(r"[\s,]+", raw)
for p in parts:
rid = _normalize_req_id(p)
if rid:
reqs.append(rid)
return list(dict.fromkeys(reqs)) # dedupe, preserve order
def pytest_configure(config):
# Ensure reports directory exists early so downstream hooks can write artifacts safely
os.makedirs("reports", exist_ok=True)
def pytest_collection_modifyitems(session, config, items):
# Track all collected tests for unmapped detection (for the final coverage JSON)
for item in items:
_ALL_COLLECTED_TESTS.add(item.nodeid)
# (Legacy makereport implementation removed in favor of the hookwrapper below.)
def pytest_html_results_table_header(cells):
"""Add custom columns to HTML report table.
Why: Make the most important context (Title and Requirements) visible at a glance
in the HTML report table without opening each test details section.
"""
cells.insert(2, '<th class="sortable" data-column-type="text">Title</th>')
cells.insert(3, '<th class="sortable" data-column-type="text">Requirements</th>')
def pytest_html_results_table_row(report, cells):
"""Add custom data to HTML report table rows.
We pull the user_properties attached during makereport and render the
Title and Requirements columns for each test row.
"""
# Get title from user properties
title = ""
requirements = ""
for prop in getattr(report, 'user_properties', []):
if prop[0] == "title":
title = prop[1]
elif prop[0] == "requirements":
requirements = prop[1]
cells.insert(2, f'<td class="col-title">{title}</td>')
cells.insert(3, f'<td class="col-requirements">{requirements}</td>')
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
"""Active hook: attach metadata to reports and build requirement coverage.
Why hook at makereport:
- We want to attach metadata to the test report object so it shows up in
the HTML and JUnit outputs via user_properties.
- We also build the requirements mapping here because we have both markers
and docstrings available on the test item.
"""
outcome = yield
report = outcome.get_result()
if call.when == "call" and hasattr(item, "function"):
# Add test metadata from docstring: parse Title, Description, Requirements,
# Test Steps, and Expected Result. Each is optional and extracted if present.
if item.function.__doc__:
docstring = item.function.__doc__.strip()
# Extract and add all metadata
metadata: dict[str, str] = {}
# Title
title_match = re.search(r"Title:\s*(.+)", docstring)
if title_match:
metadata["title"] = title_match.group(1).strip()
# Description
desc_match = re.search(r"Description:\s*(.+?)(?=\n\s*(?:Requirements|Test Steps|Expected Result))", docstring, re.DOTALL)
if desc_match:
metadata["description"] = " ".join(desc_match.group(1).strip().split())
# Requirements
req_match = re.search(r"Requirements:\s*(.+)", docstring)
if req_match:
metadata["requirements"] = req_match.group(1).strip()
# Test steps
steps_match = re.search(r"Test Steps:\s*(.+?)(?=\n\s*Expected Result)", docstring, re.DOTALL)
if steps_match:
steps = steps_match.group(1).strip()
steps_clean = re.sub(r"\n\s*\d+\.\s*", " | ", steps)
metadata["test_steps"] = steps_clean.strip(" |")
# Expected result
result_match = re.search(r"Expected Result:\s*(.+?)(?=\n\s*\"\"\"|\Z)", docstring, re.DOTALL)
if result_match:
expected = " ".join(result_match.group(1).strip().split())
metadata["expected_result"] = expected.replace("- ", "")
# Add all metadata as user properties (HTML plugin reads these)
if metadata:
if not hasattr(report, "user_properties"):
report.user_properties = []
for key, value in metadata.items():
report.user_properties.append((key, value))
# Build requirement coverage mapping
nodeid = item.nodeid
req_ids: set[str] = set()
# From markers: allow @pytest.mark.req_001 style to count toward coverage
for mark in item.iter_markers():
rid = _normalize_req_id(mark.name)
if rid:
req_ids.add(rid)
# From docstring line 'Requirements:'
for rid in _extract_req_ids_from_docstring(docstring):
req_ids.add(rid)
# Update global maps for coverage JSON
if req_ids:
_MAPPED_TESTS.add(nodeid)
for rid in req_ids:
bucket = _REQ_TO_TESTS.setdefault(rid, set())
bucket.add(nodeid)
def pytest_terminal_summary(terminalreporter, exitstatus):
"""Write CI-friendly summary and requirements coverage JSON.
Why we write these artifacts:
- requirements_coverage.json: Machine-readable traceability matrix for CI dashboards.
- summary.md: Quick textual summary that can be surfaced in PR checks or CI job logs.
"""
# Compute stats
stats = terminalreporter.stats
def _count(key):
return len(stats.get(key, []))
results = {
"passed": _count("passed"),
"failed": _count("failed"),
"skipped": _count("skipped"),
"error": _count("error"),
"xfailed": _count("xfailed"),
"xpassed": _count("xpassed"),
"rerun": _count("rerun"),
"total": sum(len(v) for v in stats.values()),
"collected": getattr(terminalreporter, "_numcollected", None),
}
# Prepare JSON payload for requirements coverage and quick links to artifacts
coverage = {
"generated_at": _dt.datetime.now().astimezone().isoformat(),
"results": results,
"requirements": {rid: sorted(list(nodes)) for rid, nodes in sorted(_REQ_TO_TESTS.items())},
"unmapped_tests": sorted(list(_ALL_COLLECTED_TESTS - _MAPPED_TESTS)),
"files": {
"html": "reports/report.html",
"junit": "reports/junit.xml",
"summary_md": "reports/summary.md",
},
}
# Write JSON coverage file
json_path = os.path.join("reports", "requirements_coverage.json")
try:
with open(json_path, "w", encoding="utf-8") as f:
json.dump(coverage, f, indent=2)
except Exception as e:
terminalreporter.write_line(f"[conftest_plugin] Failed to write {json_path}: {e}")
# Write Markdown summary for CI consumption
md_path = os.path.join("reports", "summary.md")
try:
lines = [
"# Test Run Summary",
"",
f"Generated: {coverage['generated_at']}",
"",
f"- Collected: {results.get('collected')}",
f"- Passed: {results['passed']}",
f"- Failed: {results['failed']}",
f"- Skipped: {results['skipped']}",
f"- Errors: {results['error']}",
f"- XFailed: {results['xfailed']}",
f"- XPassed: {results['xpassed']}",
f"- Rerun: {results['rerun']}",
"",
"## Artifacts",
"- HTML Report: ./report.html",
"- JUnit XML: ./junit.xml",
"- Requirements Coverage (JSON): ./requirements_coverage.json",
]
with open(md_path, "w", encoding="utf-8") as f:
f.write("\n".join(lines) + "\n")
except Exception as e:
"""
Custom pytest plugin to enhance test reports with detailed metadata.
Why we need this plugin:
- Surface business-facing info (Title, Description, Requirements, Steps, Expected Result) in the HTML report for quick review.
- Map tests to requirement IDs and produce a requirements coverage JSON artifact for traceability.
- Emit a compact CI summary (summary.md) for dashboards and PR comments.
How it works (high level):
- During collection, we track all test nodeids for later "unmapped" reporting.
- During test execution, we parse the test function's docstring and markers to extract metadata and requirement IDs; we attach these as user_properties on the report.
- We add custom columns (Title, Requirements) to the HTML table.
- At the end of the run, we write two artifacts into reports/: requirements_coverage.json and summary.md.
"""
import os
import re
import json
import datetime as _dt
import pytest
# -----------------------------
# Session-scoped state for reports
# -----------------------------
# Track all collected tests (nodeids) so we can later highlight tests that had no requirement mapping.
_ALL_COLLECTED_TESTS: set[str] = set()
# Map requirement ID (e.g., REQ-001) -> set of nodeids that cover it.
_REQ_TO_TESTS: dict[str, set[str]] = {}
# Nodeids that did map to at least one requirement.
_MAPPED_TESTS: set[str] = set()
def _normalize_req_id(token: str) -> str | None:
"""Normalize requirement token to REQ-XXX form.
Accepts markers like 'req_001' or strings like 'REQ-001'.
Returns None if not a recognizable requirement. This provides a single
canonical format for coverage mapping and reporting.
"""
token = token.strip()
m1 = re.fullmatch(r"req_(\d{1,3})", token, re.IGNORECASE)
if m1:
return f"REQ-{int(m1.group(1)):03d}"
m2 = re.fullmatch(r"REQ[-_ ]?(\d{1,3})", token, re.IGNORECASE)
if m2:
return f"REQ-{int(m2.group(1)):03d}"
return None
def _extract_req_ids_from_docstring(docstring: str) -> list[str]:
"""Parse the 'Requirements:' line in the docstring and return REQ-XXX tokens.
Supports comma- or whitespace-separated tokens and normalizes them.
"""
reqs: list[str] = []
req_match = re.search(r"Requirements:\s*(.+)", docstring)
if req_match:
raw = req_match.group(1)
# split by comma or whitespace
parts = re.split(r"[\s,]+", raw)
for p in parts:
rid = _normalize_req_id(p)
if rid:
reqs.append(rid)
return list(dict.fromkeys(reqs)) # dedupe, preserve order
def pytest_configure(config):
# Ensure reports directory exists early so downstream hooks can write artifacts safely
os.makedirs("reports", exist_ok=True)
def pytest_collection_modifyitems(session, config, items):
# Track all collected tests for unmapped detection (for the final coverage JSON)
for item in items:
_ALL_COLLECTED_TESTS.add(item.nodeid)
# (Legacy makereport implementation removed in favor of the hookwrapper below.)
def pytest_html_results_table_header(cells):
"""Add custom columns to HTML report table.
Why: Make the most important context (Title and Requirements) visible at a glance
in the HTML report table without opening each test details section.
"""
cells.insert(2, '<th class="sortable" data-column-type="text">Title</th>')
cells.insert(3, '<th class="sortable" data-column-type="text">Requirements</th>')
def pytest_html_results_table_row(report, cells):
"""Add custom data to HTML report table rows.
We pull the user_properties attached during makereport and render the
Title and Requirements columns for each test row.
"""
# Get title from user properties
title = ""
requirements = ""
for prop in getattr(report, 'user_properties', []):
if prop[0] == "title":
title = prop[1]
elif prop[0] == "requirements":
requirements = prop[1]
cells.insert(2, f'<td class="col-title">{title}</td>')
cells.insert(3, f'<td class="col-requirements">{requirements}</td>')
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
"""Active hook: attach metadata to reports and build requirement coverage.
Why hook at makereport:
- We want to attach metadata to the test report object so it shows up in
the HTML and JUnit outputs via user_properties.
- We also build the requirements mapping here because we have both markers
and docstrings available on the test item.
"""
outcome = yield
report = outcome.get_result()
if call.when == "call" and hasattr(item, "function"):
# Add test metadata from docstring: parse Title, Description, Requirements,
# Test Steps, and Expected Result. Each is optional and extracted if present.
if item.function.__doc__:
docstring = item.function.__doc__.strip()
# Extract and add all metadata
metadata: dict[str, str] = {}
# Title
title_match = re.search(r"Title:\s*(.+)", docstring)
if title_match:
metadata["title"] = title_match.group(1).strip()
# Description
desc_match = re.search(r"Description:\s*(.+?)(?=\n\s*(?:Requirements|Test Steps|Expected Result))", docstring, re.DOTALL)
if desc_match:
metadata["description"] = " ".join(desc_match.group(1).strip().split())
# Requirements
req_match = re.search(r"Requirements:\s*(.+)", docstring)
if req_match:
metadata["requirements"] = req_match.group(1).strip()
# Test steps
steps_match = re.search(r"Test Steps:\s*(.+?)(?=\n\s*Expected Result)", docstring, re.DOTALL)
if steps_match:
steps = steps_match.group(1).strip()
steps_clean = re.sub(r"\n\s*\d+\.\s*", " | ", steps)
metadata["test_steps"] = steps_clean.strip(" |")
# Expected result
result_match = re.search(r"Expected Result:\s*(.+?)(?=\n\s*\"\"\"|\Z)", docstring, re.DOTALL)
if result_match:
expected = " ".join(result_match.group(1).strip().split())
metadata["expected_result"] = expected.replace("- ", "")
# Add all metadata as user properties (HTML plugin reads these)
if metadata:
if not hasattr(report, "user_properties"):
report.user_properties = []
for key, value in metadata.items():
report.user_properties.append((key, value))
# Build requirement coverage mapping
nodeid = item.nodeid
req_ids: set[str] = set()
# From markers: allow @pytest.mark.req_001 style to count toward coverage
for mark in item.iter_markers():
rid = _normalize_req_id(mark.name)
if rid:
req_ids.add(rid)
# From docstring line 'Requirements:'
for rid in _extract_req_ids_from_docstring(docstring):
req_ids.add(rid)
# Update global maps for coverage JSON
if req_ids:
_MAPPED_TESTS.add(nodeid)
for rid in req_ids:
bucket = _REQ_TO_TESTS.setdefault(rid, set())
bucket.add(nodeid)
def pytest_terminal_summary(terminalreporter, exitstatus):
"""Write CI-friendly summary and requirements coverage JSON.
Why we write these artifacts:
- requirements_coverage.json: Machine-readable traceability matrix for CI dashboards.
- summary.md: Quick textual summary that can be surfaced in PR checks or CI job logs.
"""
# Compute stats
stats = terminalreporter.stats
def _count(key):
return len(stats.get(key, []))
results = {
"passed": _count("passed"),
"failed": _count("failed"),
"skipped": _count("skipped"),
"error": _count("error"),
"xfailed": _count("xfailed"),
"xpassed": _count("xpassed"),
"rerun": _count("rerun"),
"total": sum(len(v) for v in stats.values()),
"collected": getattr(terminalreporter, "_numcollected", None),
}
# Prepare JSON payload for requirements coverage and quick links to artifacts
coverage = {
"generated_at": _dt.datetime.now().astimezone().isoformat(),
"results": results,
"requirements": {rid: sorted(list(nodes)) for rid, nodes in sorted(_REQ_TO_TESTS.items())},
"unmapped_tests": sorted(list(_ALL_COLLECTED_TESTS - _MAPPED_TESTS)),
"files": {
"html": "reports/report.html",
"junit": "reports/junit.xml",
"summary_md": "reports/summary.md",
},
}
# Write JSON coverage file
json_path = os.path.join("reports", "requirements_coverage.json")
try:
with open(json_path, "w", encoding="utf-8") as f:
json.dump(coverage, f, indent=2)
except Exception as e:
terminalreporter.write_line(f"[conftest_plugin] Failed to write {json_path}: {e}")
# Write Markdown summary for CI consumption
md_path = os.path.join("reports", "summary.md")
try:
lines = [
"# Test Run Summary",
"",
f"Generated: {coverage['generated_at']}",
"",
f"- Collected: {results.get('collected')}",
f"- Passed: {results['passed']}",
f"- Failed: {results['failed']}",
f"- Skipped: {results['skipped']}",
f"- Errors: {results['error']}",
f"- XFailed: {results['xfailed']}",
f"- XPassed: {results['xpassed']}",
f"- Rerun: {results['rerun']}",
"",
"## Artifacts",
"- HTML Report: ./report.html",
"- JUnit XML: ./junit.xml",
"- Requirements Coverage (JSON): ./requirements_coverage.json",
]
with open(md_path, "w", encoding="utf-8") as f:
f.write("\n".join(lines) + "\n")
except Exception as e:
terminalreporter.write_line(f"[conftest_plugin] Failed to write {md_path}: {e}")

View File

@ -1,124 +1,129 @@
# Run Sequence: What Happens When You Start Tests
This document walks through the exact order of operations when you run the framework with pytest, what gets called, and where configuration/data is fetched from.
## High-level flow
1. You run pytest from PowerShell
2. pytest reads `pytest.ini` and loads configured plugins (including our custom `conftest_plugin`)
3. Test discovery collects tests under `tests/`
4. Session fixtures run:
- `config()` loads YAML configuration
- `lin()` selects and connects the LIN interface (Mock or BabyLin)
- `flash_ecu()` optionally flashes the ECU (if enabled)
5. Tests execute using fixtures and call interface methods
6. Our plugin extracts test metadata (Title, Requirements, Steps) from docstrings
7. Reports are written to `reports/report.html` and `reports/junit.xml`
## Detailed call sequence
```mermaid
sequenceDiagram
autonumber
participant U as User (PowerShell)
participant P as pytest
participant PI as pytest.ini
participant PL as conftest_plugin.py
participant T as Test Discovery (tests/*)
participant F as Fixtures (conftest.py)
participant C as Config Loader (ecu_framework/config.py)
participant PS as Power Supply (optional)
participant L as LIN Adapter (mock/BabyLIN SDK)
participant X as HexFlasher (optional)
participant R as Reports (HTML/JUnit)
U->>P: python -m pytest [args]
P->>PI: Read addopts, markers, plugins
P->>PL: Load custom plugin hooks
P->>T: Collect tests
P->>F: Init session fixtures
F->>C: load_config(workspace_root)
C-->>F: EcuTestConfig (merged dataclasses)
F->>L: Create interface (mock or BabyLIN SDK)
L-->>F: Instance ready
F->>L: connect()
alt flash.enabled and hex_path provided
F->>X: HexFlasher(lin).flash_hex(hex_path)
X-->>F: Flash result (ok/fail)
end
opt power_supply.enabled and port provided
Note over PS: owon_psu_quick_demo may open PSU via ecu_framework.power.owon_psu
end
loop for each test
P->>PL: runtest_makereport(item, call)
Note over PL: Parse docstring and attach metadata
P->>L: send()/receive()/request()
L-->>P: Frames or None (timeout)
end
P->>R: Write HTML (with metadata columns)
P->>R: Write JUnit XML
```
```text
PowerShell → python -m pytest
pytest loads pytest.ini
- addopts: --junitxml, --html, --self-contained-html, -p conftest_plugin
- markers registered
pytest collects tests in tests/
Session fixture: config()
→ calls ecu_framework.config.load_config(workspace_root)
→ determines config file path by precedence
→ merges YAML + overrides into dataclasses (EcuTestConfig)
→ optionally merges config/owon_psu.yaml (or OWON_PSU_CONFIG) into power_supply
Session fixture: lin(config)
→ chooses interface by config.interface.type
- mock → ecu_framework.lin.mock.MockBabyLinInterface(...)
- babylin → ecu_framework.lin.babylin.BabyLinInterface(...)
→ lin.connect()
Optional session fixture: flash_ecu(config, lin)
→ if config.flash.enabled and hex_path set
→ ecu_framework.flashing.HexFlasher(lin).flash_hex(hex_path)
Test functions execute
→ use the lin fixture to send/receive/request
Reporting plugin (conftest_plugin.py)
→ pytest_runtest_makereport parses test docstring
→ attaches user_properties: title, requirements, steps, expected_result
→ pytest-html hooks add Title and Requirements columns
Reports written
→ reports/report.html (HTML with metadata columns)
→ reports/junit.xml (JUnit XML for CI)
```
## Where information is fetched from
- pytest configuration: `pytest.ini`
- YAML config (default): `config/test_config.yaml`
- YAML override via env var: `ECU_TESTS_CONFIG`
- BabyLIN SDK wrapper and SDF path: `interface.sdf_path` and `interface.schedule_nr` in YAML
- Test metadata: parsed from each test's docstring
- Markers: declared in `pytest.ini`, attached in tests via `@pytest.mark.*`
## Key components involved
- `tests/conftest.py`: defines `config`, `lin`, and `flash_ecu` fixtures
- `ecu_framework/config.py`: loads and merges configuration into dataclasses
- `ecu_framework/lin/base.py`: abstract LIN interface contract and frame shape
- `ecu_framework/lin/mock.py`: mock behavior for send/receive/request
- `ecu_framework/lin/babylin.py`: BabyLIN SDK wrapper adapter (real hardware via BabyLIN_library.py)
- `ecu_framework/flashing/hex_flasher.py`: placeholder flashing logic
- `conftest_plugin.py`: report customization and metadata extraction
## Edge cases and behavior
- If `interface.type` is `babylin` but the SDK wrapper or libraries cannot be loaded, hardware tests are skipped
- If `flash.enabled` is true but `hex_path` is missing, flashing fixture skips
- Timeouts are honored in `receive()` and `request()` implementations
- Invalid frame IDs (outside 0x00–0x3F) or data > 8 bytes will raise in `LinFrame`
# Run Sequence: What Happens When You Start Tests
This document walks through the exact order of operations when you run the framework with pytest, what gets called, and where configuration/data is fetched from.
## High-level flow
1. You run pytest from PowerShell
2. pytest reads `pytest.ini` and loads configured plugins (including our custom `conftest_plugin`)
3. Test discovery collects tests under `tests/`
4. Session fixtures run:
- `config()` loads YAML configuration
- `lin()` selects and connects the LIN interface (Mock, MUM, or legacy BabyLIN)
- `flash_ecu()` optionally flashes the ECU (if enabled)
5. Tests execute using fixtures and call interface methods
6. Our plugin extracts test metadata (Title, Requirements, Steps) from docstrings
7. Reports are written to `reports/report.html` and `reports/junit.xml`
## Detailed call sequence
```mermaid
sequenceDiagram
autonumber
participant U as User (PowerShell)
participant P as pytest
participant PI as pytest.ini
participant PL as conftest_plugin.py
participant T as Test Discovery (tests/*)
participant F as Fixtures (conftest.py)
participant C as Config Loader (ecu_framework/config.py)
participant PS as Power Supply (optional)
participant L as LIN Adapter (mock/MUM/BabyLIN)
participant X as HexFlasher (optional)
participant R as Reports (HTML/JUnit)
U->>P: python -m pytest [args]
P->>PI: Read addopts, markers, plugins
P->>PL: Load custom plugin hooks
P->>T: Collect tests
P->>F: Init session fixtures
F->>C: load_config(workspace_root)
C-->>F: EcuTestConfig (merged dataclasses)
F->>L: Create interface (mock, MUM, or BabyLIN SDK)
L-->>F: Instance ready
F->>L: connect()
alt flash.enabled and hex_path provided
F->>X: HexFlasher(lin).flash_hex(hex_path)
X-->>F: Flash result (ok/fail)
end
opt power_supply.enabled and port provided
Note over PS: owon_psu_quick_demo may open PSU via ecu_framework.power.owon_psu
end
loop for each test
P->>PL: runtest_makereport(item, call)
Note over PL: Parse docstring and attach metadata
P->>L: send()/receive()/request()
L-->>P: Frames or None (timeout)
end
P->>R: Write HTML (with metadata columns)
P->>R: Write JUnit XML
```
```text
PowerShell → python -m pytest
pytest loads pytest.ini
- addopts: --junitxml, --html, --self-contained-html, -p conftest_plugin
- markers registered
pytest collects tests in tests/
Session fixture: config()
→ calls ecu_framework.config.load_config(workspace_root)
→ determines config file path by precedence
→ merges YAML + overrides into dataclasses (EcuTestConfig)
→ optionally merges config/owon_psu.yaml (or OWON_PSU_CONFIG) into power_supply
Session fixture: lin(config)
→ chooses interface by config.interface.type
- mock → ecu_framework.lin.mock.MockBabyLinInterface(...)
- mum → ecu_framework.lin.mum.MumLinInterface(host, lin_device, power_device, ...)
- babylin → ecu_framework.lin.babylin.BabyLinInterface(...) [legacy]
→ lin.connect()
- MUM connect() also powers up the ECU via power_out0 and waits boot_settle_seconds
Optional session fixture: flash_ecu(config, lin)
→ if config.flash.enabled and hex_path set
→ ecu_framework.flashing.HexFlasher(lin).flash_hex(hex_path)
Test functions execute
→ use the lin fixture to send/receive/request
Reporting plugin (conftest_plugin.py)
→ pytest_runtest_makereport parses test docstring
→ attaches user_properties: title, requirements, steps, expected_result
→ pytest-html hooks add Title and Requirements columns
Reports written
→ reports/report.html (HTML with metadata columns)
→ reports/junit.xml (JUnit XML for CI)
```
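The optional flash step in the flow above can be sketched as a session fixture (a sketch only, assuming `HexFlasher` is importable from `ecu_framework.flashing` as used in this document; the real `flash_ecu` fixture in `tests/conftest.py` is authoritative):

```python
import pytest

from ecu_framework.flashing import HexFlasher  # import path as used in this document


@pytest.fixture(scope="session")
def flash_ecu(config, lin):
    """Flash the ECU once per session when flashing is requested."""
    if not config.flash.enabled:
        return None  # flashing not requested for this run
    if not config.flash.hex_path:
        pytest.skip("flash.enabled is true but no hex_path is configured")
    return HexFlasher(lin).flash_hex(config.flash.hex_path)
```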
## Where information is fetched from
- pytest configuration: `pytest.ini`
- YAML config (default): `config/test_config.yaml`
- YAML override via env var: `ECU_TESTS_CONFIG`
- BabyLIN SDK wrapper and SDF path: `interface.sdf_path` and `interface.schedule_nr` in YAML
- Test metadata: parsed from each test's docstring
- Markers: declared in `pytest.ini`, attached in tests via `@pytest.mark.*`
## Key components involved
- `tests/conftest.py`: defines `config`, `lin`, and `flash_ecu` fixtures
- `ecu_framework/config.py`: loads and merges configuration into dataclasses
- `ecu_framework/lin/base.py`: abstract LIN interface contract and frame shape
- `ecu_framework/lin/mock.py`: mock behavior for send/receive/request
- `ecu_framework/lin/mum.py`: MUM adapter (Melexis Universal Master via pylin + pymumclient)
- `ecu_framework/lin/babylin.py`: BabyLIN SDK wrapper adapter (legacy real hardware via BabyLIN_library.py)
- `ecu_framework/flashing/hex_flasher.py`: placeholder flashing logic
- `conftest_plugin.py`: report customization and metadata extraction
## Edge cases and behavior
- If `interface.type` is `babylin` but the SDK wrapper or libraries cannot be loaded, hardware tests are skipped
- If `interface.type` is `mum` but `pylin` / `pymumclient` aren't importable, or `interface.host` is unset, hardware tests are skipped with a clear message (see the sketch after this list)
- If `flash.enabled` is true but `hex_path` is missing, flashing fixture skips
- Timeouts are honored in `receive()` and `request()` implementations
- Invalid frame IDs (outside 0x00–0x3F) or data > 8 bytes will raise in `LinFrame`
- MUM `receive()` is master-driven: it requires a frame ID; `receive(id=None)` raises NotImplementedError. Diagnostic frames needing LIN 1.x Classic checksum should use `MumLinInterface.send_raw()`.
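The MUM prerequisite check mentioned in the list above could be implemented along these lines (a sketch only; `require_mum` is a hypothetical helper name, and the real `lin` fixture may do this differently):

```python
import importlib.util

import pytest


def require_mum(config) -> None:
    """Skip the calling test/fixture with a clear message if MUM prerequisites are missing."""
    missing = [
        name
        for name in ("pylin", "pymumclient")
        if importlib.util.find_spec(name) is None
    ]
    if missing:
        pytest.skip(f"MUM selected but packages not importable: {', '.join(missing)}")
    if not getattr(config.interface, "host", None):
        pytest.skip("MUM selected but interface.host is not set")
```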

View File

@ -1,124 +1,147 @@
# Configuration Resolution: What is read and when
This document explains how configuration is loaded, merged, and provided to tests and interfaces.
## Sources and precedence
From highest to lowest precedence:
1. In-code overrides (if `load_config(..., overrides=...)` is used)
2. Environment variable `ECU_TESTS_CONFIG` (absolute/relative path to YAML)
3. `config/test_config.yaml` (if present under the workspace root)
4. Built-in defaults
## Data model (dataclasses)
- `EcuTestConfig`
- `interface: InterfaceConfig`
- `type`: `mock` or `babylin`
- `channel`: LIN channel index (0-based in SDK wrapper)
- `bitrate`: LIN bitrate (e.g., 19200); usually defined by SDF
- `sdf_path`: Path to SDF file (hardware; required for typical operation)
- `schedule_nr`: Schedule number to start on connect (hardware)
- `node_name`: Optional node identifier (informational)
- `dll_path`, `func_names`: Legacy fields from the old ctypes adapter; not used with the SDK wrapper
- `flash: FlashConfig`
- `enabled`: whether to flash before tests
- `hex_path`: path to HEX file
- `power_supply: PowerSupplyConfig`
- `enabled`: whether PSU features/tests are active
- `port`: Serial device (e.g., `COM4`, `/dev/ttyUSB0`)
- `baudrate`, `timeout`, `eol`: line settings (e.g., `"\n"` or `"\r\n"`)
- `parity`: `N|E|O`
- `stopbits`: `1` or `2`
- `xonxoff`, `rtscts`, `dsrdtr`: flow control flags
- `idn_substr`: optional substring to assert in `*IDN?`
- `do_set`, `set_voltage`, `set_current`: optional demo/test actions
## YAML examples
Minimal mock configuration (default):
```yaml
interface:
type: mock
channel: 1
bitrate: 19200
flash:
enabled: false
```
Hardware (BabyLIN SDK wrapper) configuration:
```yaml
interface:
type: babylin
channel: 0 # 0-based channel index
bitrate: 19200 # optional; typically driven by SDF
node_name: "ECU_TEST_NODE"
sdf_path: "./vendor/Example.sdf"
schedule_nr: 0
flash:
enabled: true
hex_path: "firmware/ecu_firmware.hex"
```
Power supply configuration (either inline or merged from a dedicated YAML):
```yaml
power_supply:
enabled: true
port: COM4 # or /dev/ttyUSB0 on Linux
baudrate: 115200
timeout: 1.0
eol: "\n" # or "\r\n" if your device requires CRLF
parity: N # N|E|O
stopbits: 1 # 1|2
xonxoff: false
rtscts: false
dsrdtr: false
idn_substr: OWON
do_set: false
set_voltage: 5.0
set_current: 0.1
```
## Load flow
```text
tests/conftest.py: config() fixture
→ load_config(workspace_root)
→ check env var ECU_TESTS_CONFIG
→ else check config/test_config.yaml
→ else use defaults
→ convert dicts to EcuTestConfig dataclasses
→ provide to other fixtures/tests
Additionally, if present, a dedicated PSU YAML is merged into `power_supply`:
- Environment variable `OWON_PSU_CONFIG` (path to YAML), else
- `config/owon_psu.yaml` under the workspace root
This lets you keep machine-specific serial settings separate while still having
central defaults in `config/test_config.yaml`.
```
## How tests and adapters consume config
- `lin` fixture picks `mock` or `babylin` based on `interface.type`
- Mock adapter uses `bitrate` and `channel` to simulate timing/behavior
- BabyLIN adapter (SDK wrapper) uses `sdf_path`, `schedule_nr`, `channel` to open the device, load the SDF, and start a schedule. `bitrate` is informational unless explicitly applied via commands/SDF.
- `flash_ecu` uses `flash.enabled` and `flash.hex_path`
- PSU-related tests or utilities read `config.power_supply` for serial parameters
and optional actions (IDN assertions, on/off toggle, set/measure). The reference
implementation is `ecu_framework/power/owon_psu.py`, with a hardware test in
`tests/hardware/test_owon_psu.py` and a quick demo script in `vendor/Owon/owon_psu_quick_demo.py`.
## Tips
- Keep multiple YAMLs and switch via `ECU_TESTS_CONFIG`
- Check path validity for `sdf_path` and `hex_path` before running hardware tests
- Ensure `vendor/BabyLIN_library.py` and the platform-specific libraries from the SDK are available on `PYTHONPATH`
- Use environment-specific YAML files for labs vs. CI
- For PSU, prefer `OWON_PSU_CONFIG` or `config/owon_psu.yaml` to avoid committing
local COM port settings. Central defaults can live in `config/test_config.yaml`.
# Configuration Resolution: What is read and when
This document explains how configuration is loaded, merged, and provided to tests and interfaces.
## Sources and precedence
From highest to lowest precedence:
1. In-code overrides (if `load_config(..., overrides=...)` is used)
2. Environment variable `ECU_TESTS_CONFIG` (absolute/relative path to YAML)
3. `config/test_config.yaml` (if present under the workspace root)
4. Built-in defaults
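A minimal sketch of that precedence (illustrative helper names; the real logic lives in `ecu_framework/config.py` and PyYAML is assumed for parsing):

```python
import os
from pathlib import Path

import yaml  # PyYAML assumed


def resolve_config_path(workspace_root: str) -> Path | None:
    """Return the YAML path per the documented precedence, or None for built-in defaults."""
    env_path = os.environ.get("ECU_TESTS_CONFIG")
    if env_path:
        return Path(env_path)                      # 2. environment variable override
    default = Path(workspace_root) / "config" / "test_config.yaml"
    return default if default.exists() else None   # 3. workspace default, else 4. defaults


def load_raw_config(workspace_root: str, overrides: dict | None = None) -> dict:
    path = resolve_config_path(workspace_root)
    data: dict = {}
    if path is not None:
        with open(path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
    if overrides:
        data.update(overrides)                     # 1. in-code overrides win over everything
    return data
```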
## Data model (dataclasses)
- `EcuTestConfig`
- `interface: InterfaceConfig`
- `type`: `mock`, `mum`, or `babylin`
- `channel`: LIN channel index (0-based in SDK wrapper) — BabyLIN-specific
- `bitrate`: LIN baudrate (e.g., 19200). The MUM uses this directly; BabyLIN typically takes it from the SDF
- `sdf_path`: Path to SDF file (BabyLIN; required for typical operation)
- `schedule_nr`: Schedule number to start on connect (BabyLIN). `-1` = skip
- `node_name`: Optional node identifier (informational)
- `dll_path`, `func_names`: Legacy fields from the old ctypes adapter; not used with the SDK wrapper
- `host`: MUM IP address (MUM-only). Required when `type: mum`
- `lin_device`: MUM LIN device name (MUM-only, default `lin0`)
- `power_device`: MUM power-control device (MUM-only, default `power_out0`)
- `boot_settle_seconds`: Delay after MUM power-up before sending the first frame (default 0.5)
- `frame_lengths`: Optional `{frame_id: data_length}` map for the MUM adapter to drive slave-published reads. Hex keys like `0x0A` are supported in YAML
- `flash: FlashConfig`
- `enabled`: whether to flash before tests
- `hex_path`: path to HEX file
- `power_supply: PowerSupplyConfig`
- `enabled`: whether PSU features/tests are active
- `port`: Serial device (e.g., `COM4`, `/dev/ttyUSB0`)
- `baudrate`, `timeout`, `eol`: line settings (e.g., `"\n"` or `"\r\n"`)
- `parity`: `N|E|O`
- `stopbits`: `1` or `2`
- `xonxoff`, `rtscts`, `dsrdtr`: flow control flags
- `idn_substr`: optional substring to assert in `*IDN?`
- `do_set`, `set_voltage`, `set_current`: optional demo/test actions
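For orientation, a sketch of what `InterfaceConfig` could look like with the MUM fields added (field names mirror the YAML keys above; the actual dataclasses in `ecu_framework/config.py` are authoritative and also carry the legacy fields):

```python
from dataclasses import dataclass, field


@dataclass
class InterfaceConfig:
    type: str = "mock"                    # "mock" | "mum" | "babylin"
    channel: int = 0                      # BabyLIN channel index
    bitrate: int = 19200                  # used directly by the MUM; informational for BabyLIN
    sdf_path: str | None = None           # BabyLIN SDF file
    schedule_nr: int = -1                 # BabyLIN schedule; -1 = don't auto-start
    node_name: str | None = None
    host: str | None = None               # MUM IP address (required when type is "mum")
    lin_device: str = "lin0"              # MUM LIN device name
    power_device: str = "power_out0"      # MUM power-control device
    boot_settle_seconds: float = 0.5      # delay after MUM power-up
    frame_lengths: dict[int, int] = field(default_factory=dict)  # frame_id -> data length
```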
## YAML examples
Minimal mock configuration (default):
```yaml
interface:
type: mock
channel: 1
bitrate: 19200
flash:
enabled: false
```
Hardware via MUM (current default) — see also `config/mum.example.yaml`:
```yaml
interface:
type: mum
host: 192.168.7.2 # MUM IP address (USB-RNDIS default)
lin_device: lin0 # MUM LIN device name
power_device: power_out0 # MUM power-control device
bitrate: 19200 # LIN baudrate
boot_settle_seconds: 0.5 # Delay after power-up before first frame
frame_lengths:
0x0A: 8 # ALM_Req_A
0x11: 4 # ALM_Status
flash:
enabled: false
```
Hardware (BabyLIN SDK wrapper) configuration:
```yaml
interface:
type: babylin
channel: 0 # 0-based channel index
bitrate: 19200 # optional; typically driven by SDF
node_name: "ECU_TEST_NODE"
sdf_path: "./vendor/Example.sdf"
schedule_nr: 0
flash:
enabled: true
hex_path: "firmware/ecu_firmware.hex"
```
Power supply configuration (either inline or merged from a dedicated YAML):
```yaml
power_supply:
enabled: true
port: COM4 # or /dev/ttyUSB0 on Linux
baudrate: 115200
timeout: 1.0
eol: "\n" # or "\r\n" if your device requires CRLF
parity: N # N|E|O
stopbits: 1 # 1|2
xonxoff: false
rtscts: false
dsrdtr: false
idn_substr: OWON
do_set: false
set_voltage: 5.0
set_current: 0.1
```
## Load flow
```text
tests/conftest.py: config() fixture
→ load_config(workspace_root)
→ check env var ECU_TESTS_CONFIG
→ else check config/test_config.yaml
→ else use defaults
→ convert dicts to EcuTestConfig dataclasses
→ provide to other fixtures/tests
Additionally, if present, a dedicated PSU YAML is merged into `power_supply`:
- Environment variable `OWON_PSU_CONFIG` (path to YAML), else
- `config/owon_psu.yaml` under the workspace root
This lets you keep machine-specific serial settings separate while still having
central defaults in `config/test_config.yaml`.
```
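The PSU merge step can be sketched like this (the helper name is illustrative; `OWON_PSU_CONFIG` and `config/owon_psu.yaml` are the sources listed in the flow above):

```python
import os
from pathlib import Path

import yaml  # PyYAML assumed


def merge_psu_config(workspace_root: str, power_supply: dict) -> dict:
    """Overlay machine-specific PSU settings onto the central power_supply section."""
    candidate = os.environ.get("OWON_PSU_CONFIG")
    path = Path(candidate) if candidate else Path(workspace_root) / "config" / "owon_psu.yaml"
    if not path.exists():
        return power_supply
    with open(path, "r", encoding="utf-8") as f:
        machine_specific = yaml.safe_load(f) or {}
    return {**power_supply, **machine_specific}  # machine-specific values win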
## How tests and adapters consume config
- `lin` fixture picks `mock`, `mum`, or `babylin` based on `interface.type`
- Mock adapter uses `bitrate` and `channel` to simulate timing/behavior
- MUM adapter uses `host`, `lin_device`, `power_device`, `bitrate`, `boot_settle_seconds`, and `frame_lengths` to open the MUM, set up the LIN bus, and power up the ECU on connect
- BabyLIN adapter (SDK wrapper) uses `sdf_path`, `schedule_nr`, `channel` to open the device, load the SDF, and start a schedule. `bitrate` is informational unless explicitly applied via commands/SDF
- `flash_ecu` uses `flash.enabled` and `flash.hex_path`
- PSU-related tests or utilities read `config.power_supply` for serial parameters
and optional actions (IDN assertions, on/off toggle, set/measure). The reference
implementation is `ecu_framework/power/owon_psu.py`, with a hardware test in
`tests/hardware/test_owon_psu.py` and a quick demo script in `vendor/Owon/owon_psu_quick_demo.py`.
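Put together, the interface selection can be pictured like this (a sketch; constructor signatures are assumptions based on the keys documented above, and the real logic lives in the `lin` fixture in `tests/conftest.py`):

```python
def create_lin_interface(cfg):
    """Instantiate the LIN adapter named by cfg.interface.type."""
    iface = cfg.interface
    if iface.type == "mock":
        from ecu_framework.lin.mock import MockBabyLinInterface
        return MockBabyLinInterface(channel=iface.channel, bitrate=iface.bitrate)
    if iface.type == "mum":
        from ecu_framework.lin.mum import MumLinInterface
        return MumLinInterface(
            host=iface.host,
            lin_device=iface.lin_device,
            power_device=iface.power_device,
            bitrate=iface.bitrate,
            boot_settle_seconds=iface.boot_settle_seconds,
            frame_lengths=iface.frame_lengths,
        )
    if iface.type == "babylin":
        from ecu_framework.lin.babylin import BabyLinInterface
        return BabyLinInterface(
            channel=iface.channel,
            sdf_path=iface.sdf_path,
            schedule_nr=iface.schedule_nr,
        )
    raise ValueError(f"Unknown interface.type: {iface.type!r}")
```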
## Tips
- Keep multiple YAMLs and switch via `ECU_TESTS_CONFIG`
- Check path validity for `sdf_path` and `hex_path` before running hardware tests
- Ensure `vendor/BabyLIN_library.py` and the platform-specific libraries from the SDK are available on `PYTHONPATH`
- Use environment-specific YAML files for labs vs. CI
- For PSU, prefer `OWON_PSU_CONFIG` or `config/owon_psu.yaml` to avoid committing
local COM port settings. Central defaults can live in `config/test_config.yaml`.

View File

@ -1,109 +1,109 @@
# Reporting and Metadata: How your docs show up in reports
This document describes how test documentation is extracted and rendered into the HTML report, and what appears in JUnit XML.
## What the plugin does
File: `conftest_plugin.py`
- Hooks into `pytest_runtest_makereport` to parse the test's docstring
- Extracts the following fields:
- Title
- Description
- Requirements
- Test Steps
- Expected Result
- Attaches them as `user_properties` on the test report
- Customizes the HTML results table to include Title and Requirements columns
## Docstring format to use
```python
"""
Title: Short, human-readable test name
Description: What is this test proving and why does it matter.
Requirements: REQ-001, REQ-00X
Test Steps:
1. Describe the first step
2. Next step
3. etc.
Expected Result:
- Primary outcome
- Any additional acceptance criteria
"""
```
## What appears in reports
- HTML (`reports/report.html`):
- Title and Requirements appear as columns in the table
- Other fields are available in the report payload and can be surfaced with minor tweaks
- JUnit XML (`reports/junit.xml`):
- Standard test results and timing
- Note: By default, the XML is compact and does not include custom properties; if you need properties in XML, we can extend the plugin to emit a custom JUnit format or produce an additional JSON artifact for traceability.
Open the HTML report on Windows PowerShell:
```powershell
start .\reports\report.html
```
Related artifacts written by the plugin:
- `reports/requirements_coverage.json` — requirement → test nodeids map and unmapped tests
- `reports/summary.md` — compact pass/fail/error/skip totals, environment info
To generate separate HTML/JUnit reports for unit vs non-unit test sets, use the helper script:
```powershell
./scripts/run_two_reports.ps1
```
## Parameterized tests and metadata
When using `@pytest.mark.parametrize`, each parameter set is treated as a distinct test case with its own nodeid, e.g.:
```
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper0-True]
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper1-False]
```
Metadata handling:
- The docstring on the test function is parsed once per case; the same Title/Requirements are attached to each parameterized instance.
- Requirement mapping (coverage JSON) records each parameterized nodeid under the normalized requirement keys, enabling fine-grained coverage.
- In the HTML table, you will see a row per parameterized instance with identical Title/Requirements but differing nodeids (and potentially differing outcomes if parameters influence behavior).
## Markers
Declared in `pytest.ini` and used via `@pytest.mark.<name>` in tests. They also appear in the HTML payload for each test (as user properties) and can be added as a column with a small change if desired.
## Extensibility
- Add more columns to HTML by updating `pytest_html_results_table_header/row`
- Persist full metadata (steps, expected) to a JSON file after the run for audit trails
- Populate requirement coverage map by scanning markers and aggregating results
## Runtime properties (record_property) and the `rp` helper fixture
Beyond static docstrings, you can attach dynamic key/value properties during a test.
- Built-in: `record_property("key", value)` in any test
- Convenience: use the shared `rp` fixture which wraps `record_property` and also prints a short line to captured output for quick scanning.
Example usage:
```python
def test_example(rp):
rp("device", "mock")
rp("tx_id", "0x12")
rp("rx_present", True)
```
Where they show up:
- HTML report: expand a test row to see a Properties table listing all recorded key/value pairs
- Captured output: look for lines like `[prop] key=value` emitted by the `rp` helper
Suggested standardized keys across suites live in `docs/15_report_properties_cheatsheet.md`.
# Reporting and Metadata: How your docs show up in reports
This document describes how test documentation is extracted and rendered into the HTML report, and what appears in JUnit XML.
## What the plugin does
File: `conftest_plugin.py`
- Hooks into `pytest_runtest_makereport` to parse the test's docstring
- Extracts the following fields:
- Title
- Description
- Requirements
- Test Steps
- Expected Result
- Attaches them as `user_properties` on the test report
- Customizes the HTML results table to include Title and Requirements columns
## Docstring format to use
```python
"""
Title: Short, human-readable test name
Description: What is this test proving and why does it matter.
Requirements: REQ-001, REQ-00X
Test Steps:
1. Describe the first step
2. Next step
3. etc.
Expected Result:
- Primary outcome
- Any additional acceptance criteria
"""
```
## What appears in reports
- HTML (`reports/report.html`):
- Title and Requirements appear as columns in the table
- Other fields are available in the report payload and can be surfaced with minor tweaks
- JUnit XML (`reports/junit.xml`):
- Standard test results and timing
- Note: By default, the XML is compact and does not include custom properties; if you need properties in XML, we can extend the plugin to emit a custom JUnit format or produce an additional JSON artifact for traceability.
Open the HTML report on Windows PowerShell:
```powershell
start .\reports\report.html
```
Related artifacts written by the plugin:
- `reports/requirements_coverage.json` — requirement → test nodeids map and unmapped tests
- `reports/summary.md` — compact pass/fail/error/skip totals, environment info
To generate separate HTML/JUnit reports for unit vs non-unit test sets, use the helper script:
```powershell
./scripts/run_two_reports.ps1
```
## Parameterized tests and metadata
When using `@pytest.mark.parametrize`, each parameter set is treated as a distinct test case with its own nodeid, e.g.:
```
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper0-True]
tests/test_babylin_wrapper_mock.py::test_babylin_master_request_with_mock_wrapper[wrapper1-False]
```
Metadata handling:
- The docstring on the test function is parsed once per case; the same Title/Requirements are attached to each parameterized instance.
- Requirement mapping (coverage JSON) records each parameterized nodeid under the normalized requirement keys, enabling fine-grained coverage.
- In the HTML table, you will see a row per parameterized instance with identical Title/Requirements but differing nodeids (and potentially differing outcomes if parameters influence behavior).
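For example, a parameterized test whose docstring is shared across both instances (names and IDs here are illustrative; each parameter set gets its own nodeid and report row):

```python
import pytest


@pytest.mark.parametrize("frame_id, expect_response", [(0x11, True), (0x3E, False)])
def test_status_request(lin, frame_id, expect_response):
    """
    Title: Status frame responds only on expected IDs
    Description: Request a frame and check whether a response arrives.
    Requirements: REQ-001
    Test Steps:
        1. Request the given frame ID from the ECU.
        2. Check the presence or absence of a response.
    Expected Result:
        - A response is returned exactly when expect_response is True.
    """
    response = lin.request(frame_id, length=4, timeout=1.0)
    assert (response is not None) == expect_response
```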
## Markers
Declared in `pytest.ini` and used via `@pytest.mark.<name>` in tests. They also appear in the HTML payload for each test (as user properties) and can be added as a column with a small change if desired.
## Extensibility
- Add more columns to HTML by updating `pytest_html_results_table_header/row`
- Persist full metadata (steps, expected) to a JSON file after the run for audit trails
- Populate requirement coverage map by scanning markers and aggregating results
## Runtime properties (record_property) and the `rp` helper fixture
Beyond static docstrings, you can attach dynamic key/value properties during a test.
- Built-in: `record_property("key", value)` in any test
- Convenience: use the shared `rp` fixture which wraps `record_property` and also prints a short line to captured output for quick scanning.
Example usage:
```python
def test_example(rp):
rp("device", "mock")
rp("tx_id", "0x12")
rp("rx_present", True)
```
Where they show up:
- HTML report: expand a test row to see a Properties table listing all recorded key/value pairs
- Captured output: look for lines like `[prop] key=value` emitted by the `rp` helper
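For reference, a minimal sketch of what such an `rp` helper could look like (an assumption about its implementation; the fixture actually shipped with the suite is authoritative):

```python
import pytest


@pytest.fixture
def rp(record_property):
    """Record a key/value property and echo it to captured output."""
    def _record(key, value):
        record_property(key, value)
        print(f"[prop] {key}={value}")  # shows up in captured stdout
    return _record
```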
Suggested standardized keys across suites live in `docs/15_report_properties_cheatsheet.md`.

View File

@ -1,58 +1,89 @@
# LIN Interface Call Flow
This document explains how LIN operations flow through the abstraction for both Mock and BabyLin adapters.
## Contract (base)
File: `ecu_framework/lin/base.py`
- `connect()` / `disconnect()`
- `send(frame: LinFrame)`
- `receive(id: int | None = None, timeout: float = 1.0) -> LinFrame | None`
- `request(id: int, length: int, timeout: float = 1.0) -> LinFrame | None`
- `flush()`
`LinFrame` validates:
- ID is 0x00–0x3F (6-bit LIN ID)
- Data length ≤ 8 bytes
## Mock adapter flow
File: `ecu_framework/lin/mock.py`
- `connect()`: initialize buffers and state
- `send(frame)`: enqueues the frame and (for echo behavior) schedules it for RX
- `receive(timeout)`: waits up to timeout for a frame in RX buffer
- `request(id, length, timeout)`: synthesizes a deterministic response of the given length for predictability
- `disconnect()`: clears state
Use cases:
- Fast local dev, deterministic responses, no hardware
- Timeout and boundary behavior validation
## BabyLIN adapter flow (SDK wrapper)
File: `ecu_framework/lin/babylin.py`
- `connect()`: import SDK `BabyLIN_library.py`, discover ports, open first, optionally `BLC_loadSDF`, get channel handle, and `BLC_sendCommand("start schedule N;")`
- `send(frame)`: calls `BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)`
- `receive(timeout)`: calls `BLC_getNextFrameTimeout(channelHandle, timeout_ms)` and converts returned `BLC_FRAME` to `LinFrame`
- `request(id, length, timeout)`: prefers `BLC_sendRawMasterRequest(channel, id, length)`; falls back to `(channel, id, bytes)`; if unavailable, sends a header and waits on `receive()`
- `disconnect()`: calls `BLC_closeAll()`
- Error handling: uses `BLC_getDetailedErrorString` (if available)
Configuration:
- `interface.sdf_path` locates the SDF to load
- `interface.schedule_nr` sets the schedule to start upon connect
- `interface.channel` selects the channel index
## Edge considerations
- Ensure the correct architecture (x86/x64) of the DLL matches Python
- Channel/bitrate must match your network configuration
- Some SDKs require initialization/scheduling steps before transmit/receive
- Time synchronization and timestamp units vary per SDK — convert as needed
Note on master requests:
- Our mock wrapper returns a deterministic byte pattern when called with the `length` signature.
- When only the bytes signature is available, zeros of the requested length are used in tests.
# LIN Interface Call Flow
This document explains how LIN operations flow through the abstraction for the Mock, MUM, and legacy BabyLIN adapters.
## Contract (base)
File: `ecu_framework/lin/base.py`
- `connect()` / `disconnect()`
- `send(frame: LinFrame)`
- `receive(id: int | None = None, timeout: float = 1.0) -> LinFrame | None`
- `request(id: int, length: int, timeout: float = 1.0) -> LinFrame | None`
- `flush()`
`LinFrame` validates:
- ID is 0x00–0x3F (6-bit LIN ID)
- Data length ≤ 8 bytes
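A minimal sketch consistent with those two rules (the real class in `ecu_framework/lin/base.py` may carry additional fields such as timestamps):

```python
from dataclasses import dataclass, field


@dataclass
class LinFrame:
    id: int
    data: bytes = field(default_factory=bytes)

    def __post_init__(self) -> None:
        if not 0x00 <= self.id <= 0x3F:
            raise ValueError(f"LIN frame ID out of range (0x00-0x3F): {self.id:#x}")
        if len(self.data) > 8:
            raise ValueError(f"LIN data too long ({len(self.data)} > 8 bytes)")
```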
## Mock adapter flow
File: `ecu_framework/lin/mock.py`
- `connect()`: initialize buffers and state
- `send(frame)`: enqueues the frame and (for echo behavior) schedules it for RX
- `receive(timeout)`: waits up to timeout for a frame in RX buffer
- `request(id, length, timeout)`: synthesizes a deterministic response of the given length for predictability
- `disconnect()`: clears state
Use cases:
- Fast local dev, deterministic responses, no hardware
- Timeout and boundary behavior validation
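A typical hardware-free exchange against the mock could look like this (constructor arguments are assumptions based on the configuration keys; check `ecu_framework/lin/mock.py` for the actual signature):

```python
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mock import MockBabyLinInterface

lin = MockBabyLinInterface(channel=1, bitrate=19200)
lin.connect()
try:
    lin.send(LinFrame(id=0x0A, data=bytes([0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])))
    echoed = lin.receive(timeout=0.5)      # mock echo behavior schedules sent frames for RX
    status = lin.request(0x11, length=4)   # deterministic synthesized response
    assert echoed is not None and status is not None and len(status.data) == 4
finally:
    lin.disconnect()
```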
## MUM adapter flow (Melexis Universal Master)
File: `ecu_framework/lin/mum.py`
The MUM is a networked LIN master (default IP `192.168.7.2`) with built-in
power control on `power_out0`. It is **master-driven**: there is no passive
listen — to read a slave-published frame, the master triggers a header on
that frame ID. Diagnostic frames (BSM-SNPD, service ID 0xB5) require LIN 1.x
**Classic** checksum and are sent through the transport layer's
`ld_put_raw`, not the regular `send_message`.
- `connect()`: lazy-imports `pymumclient` + `pylin`; opens MUM
(`MelexisUniversalMaster.open_all(host)`), gets the LIN device
(`linmaster`) and power device (`power_control`), runs `linmaster.setup()`,
builds `LinBusManager` + `LinDevice22`, sets `lin_dev.baudrate`, fetches
the transport layer (`get_device("bus/transport_layer")`), and finally
`power_control.power_up()` followed by a `boot_settle_seconds` sleep
- `send(frame)`: `lin_dev.send_message(master_to_slave=True, frame_id, data_length, data)`
- `receive(id, timeout)`: `lin_dev.send_message(master_to_slave=False, frame_id=id, data_length=frame_lengths.get(id, default_data_length))`
— pylin returns the response bytes (or raises on timeout, which we treat as `None`).
`id=None` raises `NotImplementedError` because the MUM cannot listen passively.
- `disconnect()`: best-effort `power_control.power_down()` followed by `linmaster.teardown()`
- MUM-only extras: `send_raw(bytes)` (Classic checksum via `ld_put_raw`),
`power_up()`, `power_down()`, `power_cycle(wait)`
Configuration:
- `interface.host` is required; `interface.lin_device` and `interface.power_device` default to MUM conventions
- `interface.bitrate` is the actual LIN baudrate the MUM drives
- `interface.frame_lengths` lets you map slave frame IDs to their fixed data lengths so `receive(id)` can fetch the correct number of bytes; built-in defaults cover ALM_Status (4) and ALM_Req_A (8)
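Condensed, the master-driven read path described above looks roughly like this (a sketch of the flow using the calls named in the bullets, not the exact adapter code):

```python
class MumReceiveSketch:
    """Illustrates the master-driven receive() flow; not the real adapter."""

    def __init__(self, lin_dev, frame_lengths, default_data_length=8):
        self.lin_dev = lin_dev
        self.frame_lengths = frame_lengths
        self.default_data_length = default_data_length

    def receive(self, id=None, timeout=1.0):
        if id is None:
            raise NotImplementedError("The MUM cannot listen passively; a frame ID is required")
        length = self.frame_lengths.get(id, self.default_data_length)
        try:
            data = self.lin_dev.send_message(
                master_to_slave=False,   # header only; the slave publishes the data
                frame_id=id,
                data_length=length,
            )
        except Exception:
            return None                  # treat timeouts/bus errors as "no frame"
        return bytes(data)
```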
## BabyLIN adapter flow (SDK wrapper)
File: `ecu_framework/lin/babylin.py`
- `connect()`: import SDK `BabyLIN_library.py`, discover ports, open first, optionally `BLC_loadSDF`, get channel handle, and `BLC_sendCommand("start schedule N;")`
- `send(frame)`: calls `BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)`
- `receive(timeout)`: calls `BLC_getNextFrameTimeout(channelHandle, timeout_ms)` and converts returned `BLC_FRAME` to `LinFrame`
- `request(id, length, timeout)`: prefers `BLC_sendRawMasterRequest(channel, id, length)`; falls back to `(channel, id, bytes)`; if unavailable, sends a header and waits on `receive()`
- `disconnect()`: calls `BLC_closeAll()`
- Error handling: uses `BLC_getDetailedErrorString` (if available)
Configuration:
- `interface.sdf_path` locates the SDF to load
- `interface.schedule_nr` sets the schedule to start upon connect
- `interface.channel` selects the channel index
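The `request()` fallback chain described above, sketched over the SDK calls named in this section (exact signatures come from `BabyLIN_library.py`; the helper name is illustrative):

```python
def master_request(sdk, channel, frame_id, length):
    """Prefer BLC_sendRawMasterRequest; report when the SDK build lacks it."""
    fn = getattr(sdk, "BLC_sendRawMasterRequest", None)
    if fn is None:
        # The real adapter falls back to sending a bare header and waiting on receive().
        raise NotImplementedError("SDK has no BLC_sendRawMasterRequest; use header + receive()")
    try:
        return fn(channel, frame_id, length)         # preferred signature: (channel, id, length)
    except TypeError:
        return fn(channel, frame_id, bytes(length))  # alternate signature: (channel, id, zero bytes)
```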
## Edge considerations
- Ensure the correct architecture (x86/x64) of the DLL matches Python
- Channel/bitrate must match your network configuration
- Some SDKs require initialization/scheduling steps before transmit/receive
- Time synchronization and timestamp units vary per SDK — convert as needed
Note on master requests:
- Our mock wrapper returns a deterministic byte pattern when called with the `length` signature.
- When only the bytes signature is available, zeros of the requested length are used in tests.

View File

@ -1,77 +1,82 @@
# Architecture Overview
This document provides a high-level view of the framework's components and how they interact, plus a Mermaid diagram for quick orientation.
## Components
- Tests (pytest) — test modules and functions under `tests/`
- Fixtures — defined in `tests/conftest.py` (config, lin, flash_ecu)
- Config Loader — `ecu_framework/config.py` (YAML → dataclasses)
- LIN Abstraction — `ecu_framework/lin/base.py` (`LinInterface`, `LinFrame`)
- Mock LIN Adapter — `ecu_framework/lin/mock.py`
- BabyLIN Adapter — `ecu_framework/lin/babylin.py` (SDK wrapper → BabyLIN_library.py)
- Flasher — `ecu_framework/flashing/hex_flasher.py`
- Power Supply (PSU) control — `ecu_framework/power/owon_psu.py` (serial SCPI)
- PSU quick demo script — `vendor/Owon/owon_psu_quick_demo.py`
- Reporting Plugin — `conftest_plugin.py` (docstring → report metadata)
- Reports — `reports/report.html`, `reports/junit.xml`
## Mermaid architecture diagram
```mermaid
flowchart TB
subgraph Tests & Pytest
T[tests/*]
CF[tests/conftest.py]
PL[conftest_plugin.py]
end
subgraph Framework
CFG[ecu_framework/config.py]
BASE[ecu_framework/lin/base.py]
MOCK[ecu_framework/lin/mock.py]
BABY[ecu_framework/lin/babylin.py]
FLASH[ecu_framework/flashing/hex_flasher.py]
POWER[ecu_framework/power/owon_psu.py]
end
subgraph Artifacts
REP[reports/report.html<br/>reports/junit.xml]
YAML[config/*.yaml<br/>babylin.example.yaml<br/>test_config.yaml]
PSU_YAML[config/owon_psu.yaml<br/>OWON_PSU_CONFIG]
SDK[vendor/BabyLIN_library.py<br/>platform-specific libs]
OWON[vendor/Owon/owon_psu_quick_demo.py]
end
T --> CF
CF --> CFG
CF --> BASE
CF --> MOCK
CF --> BABY
CF --> FLASH
T --> POWER
PL --> REP
CFG --> YAML
CFG --> PSU_YAML
BABY --> SDK
T --> OWON
T --> REP
```
## Data and control flow summary
- Tests use fixtures to obtain config and a connected LIN adapter
- Config loader reads YAML (or env override), returns typed dataclasses
- LIN calls are routed through the interface abstraction to the selected adapter
- Flasher (optional) uses the same interface to program the ECU
- Power supply control (optional) uses `ecu_framework/power/owon_psu.py` and reads
`config.power_supply` (merged with `config/owon_psu.yaml` or `OWON_PSU_CONFIG` when present);
the quick demo script under `vendor/Owon/` provides a quick manual flow
- Reporting plugin parses docstrings and enriches the HTML report
## Extending the architecture
- Add new bus adapters by implementing `LinInterface`
- Add new report sinks (e.g., JSON or a DB) by extending the plugin
- Add new fixtures for diagnostics or measurement tools (Scopes, power supplies, etc.)
# Architecture Overview
This document provides a high-level view of the framework's components and how they interact, plus a Mermaid diagram for quick orientation.
## Components
- Tests (pytest) — test modules and functions under `tests/`
- Fixtures — defined in `tests/conftest.py` (config, lin, flash_ecu)
- Config Loader — `ecu_framework/config.py` (YAML → dataclasses)
- LIN Abstraction — `ecu_framework/lin/base.py` (`LinInterface`, `LinFrame`)
- Mock LIN Adapter — `ecu_framework/lin/mock.py`
- MUM LIN Adapter — `ecu_framework/lin/mum.py` (Melexis Universal Master via `pylin` + `pymumclient`)
- BabyLIN Adapter — `ecu_framework/lin/babylin.py` (SDK wrapper → BabyLIN_library.py; legacy)
- Flasher — `ecu_framework/flashing/hex_flasher.py`
- Power Supply (PSU) control — `ecu_framework/power/owon_psu.py` (serial SCPI)
- PSU quick demo script — `vendor/Owon/owon_psu_quick_demo.py`
- Reporting Plugin — `conftest_plugin.py` (docstring → report metadata)
- Reports — `reports/report.html`, `reports/junit.xml`
## Mermaid architecture diagram
```mermaid
flowchart TB
subgraph Tests & Pytest
T[tests/*]
CF[tests/conftest.py]
PL[conftest_plugin.py]
end
subgraph Framework
CFG[ecu_framework/config.py]
BASE[ecu_framework/lin/base.py]
MOCK[ecu_framework/lin/mock.py]
MUM[ecu_framework/lin/mum.py]
BABY[ecu_framework/lin/babylin.py]
FLASH[ecu_framework/flashing/hex_flasher.py]
POWER[ecu_framework/power/owon_psu.py]
end
subgraph Artifacts
REP[reports/report.html<br/>reports/junit.xml]
YAML[config/*.yaml<br/>test_config.yaml<br/>mum.example.yaml<br/>babylin.example.yaml]
PSU_YAML[config/owon_psu.yaml<br/>OWON_PSU_CONFIG]
MELEXIS[Melexis pylin + pymumclient<br/>MUM @ 192.168.7.2]
SDK[vendor/BabyLIN_library.py<br/>platform-specific libs]
OWON[vendor/Owon/owon_psu_quick_demo.py]
end
T --> CF
CF --> CFG
CF --> BASE
CF --> MOCK
CF --> MUM
CF --> BABY
CF --> FLASH
T --> POWER
PL --> REP
CFG --> YAML
CFG --> PSU_YAML
MUM --> MELEXIS
BABY --> SDK
T --> OWON
T --> REP
```
## Data and control flow summary
- Tests use fixtures to obtain config and a connected LIN adapter
- Config loader reads YAML (or env override), returns typed dataclasses
- LIN calls are routed through the interface abstraction to the selected adapter
- Flasher (optional) uses the same interface to program the ECU
- Power supply control (optional) uses `ecu_framework/power/owon_psu.py` and reads
`config.power_supply` (merged with `config/owon_psu.yaml` or `OWON_PSU_CONFIG` when present);
the quick demo script under `vendor/Owon/` provides a quick manual flow
- Reporting plugin parses docstrings and enriches the HTML report
## Extending the architecture
- Add new bus adapters by implementing `LinInterface` (see the skeleton sketch after this list)
- Add new report sinks (e.g., JSON or a DB) by extending the plugin
- Add new fixtures for diagnostics or measurement tools (Scopes, power supplies, etc.)
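A skeleton for a new bus adapter might look like the sketch below; the method names mirror the `LinInterface` operations described in these docs, but the exact signatures are an assumption, so check `ecu_framework/lin/base.py` before implementing:
```python
from typing import Optional

from ecu_framework.lin.base import LinFrame, LinInterface


class MyBusInterface(LinInterface):
    """Sketch of a new adapter; fill in vendor-specific calls."""

    def connect(self) -> None:
        ...  # open the device, start schedules if needed

    def send(self, frame: LinFrame) -> None:
        ...  # transmit a single frame

    def receive(self, timeout: float = 1.0) -> Optional[LinFrame]:
        ...  # next frame from the bus, or None on timeout

    def request(self, frame_id: int, length: int = 8, timeout: float = 1.0) -> Optional[bytes]:
        ...  # master request: send header, return response bytes or None

    def disconnect(self) -> None:
        ...  # release the device
```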


@ -1,60 +1,60 @@
# Requirement Traceability
This document shows how requirements map to tests via pytest markers and docstrings, plus how to visualize coverage.
## Conventions
- Requirement IDs: `REQ-xxx`
- Use markers in tests: `@pytest.mark.req_001`, `@pytest.mark.req_002`, etc.
- Include readable requirement list in the test docstring under `Requirements:`
## Example
```python
import pytest

@pytest.mark.req_001
@pytest.mark.req_003
def test_mock_send_receive_echo(lin):
    """
    Title: Mock LIN Interface - Send/Receive Echo Test
    Requirements: REQ-001, REQ-003
    """
```
## Mermaid: Requirement → Tests map
Note: This is illustrative; maintain it as your suite grows.
```mermaid
flowchart LR
R1[REQ-001: LIN Basic Ops]
R2[REQ-002: Master Request/Response]
R3[REQ-003: Frame Validation]
R4[REQ-004: Timeout Handling]
T1[test_mock_send_receive_echo]
T2[test_mock_request_synthesized_response]
T3[test_mock_receive_timeout_behavior]
T4[test_mock_frame_validation_boundaries]
R1 --> T1
R3 --> T1
R2 --> T2
R4 --> T3
R1 --> T4
R3 --> T4
```
## Generating a live coverage artifact (optional)
You can extend `conftest_plugin.py` to emit a JSON file with requirement-to-test mapping at the end of a run by scanning markers and docstrings. This can fuel dashboards or CI gates.
Suggested JSON shape:
```json
{
"requirements": {
"REQ-001": ["tests/test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo", "..."]
},
"uncovered": ["REQ-010", "REQ-012"]
}
```
# Requirement Traceability
This document shows how requirements map to tests via pytest markers and docstrings, plus how to visualize coverage.
## Conventions
- Requirement IDs: `REQ-xxx`
- Use markers in tests: `@pytest.mark.req_001`, `@pytest.mark.req_002`, etc.
- Include readable requirement list in the test docstring under `Requirements:`
## Example
```python
import pytest

@pytest.mark.req_001
@pytest.mark.req_003
def test_mock_send_receive_echo(lin):
    """
    Title: Mock LIN Interface - Send/Receive Echo Test
    Requirements: REQ-001, REQ-003
    """
```
## Mermaid: Requirement → Tests map
Note: This is illustrative; maintain it as your suite grows.
```mermaid
flowchart LR
R1[REQ-001: LIN Basic Ops]
R2[REQ-002: Master Request/Response]
R3[REQ-003: Frame Validation]
R4[REQ-004: Timeout Handling]
T1[test_mock_send_receive_echo]
T2[test_mock_request_synthesized_response]
T3[test_mock_receive_timeout_behavior]
T4[test_mock_frame_validation_boundaries]
R1 --> T1
R3 --> T1
R2 --> T2
R4 --> T3
R1 --> T4
R3 --> T4
```
## Generating a live coverage artifact (optional)
You can extend `conftest_plugin.py` to emit a JSON file with requirement-to-test mapping at the end of a run by scanning markers and docstrings. This can fuel dashboards or CI gates.
Suggested JSON shape:
```json
{
"requirements": {
"REQ-001": ["tests/test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo", "..."]
},
"uncovered": ["REQ-010", "REQ-012"]
}
```
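A minimal sketch of such an extension is shown below. It only collects `req_*` markers (docstring scanning and the `uncovered` list would need a known requirement inventory, so they are left out), and it uses standard pytest hooks:
```python
import json
import re
from collections import defaultdict
from pathlib import Path

_REQ_TO_TESTS = defaultdict(list)

def pytest_collection_modifyitems(session, config, items):
    # Map normalized requirement IDs (from req_* markers) to test nodeids
    for item in items:
        for mark in item.iter_markers():
            m = re.fullmatch(r"req[_-]?(\d+)", mark.name, re.IGNORECASE)
            if m:
                _REQ_TO_TESTS[f"REQ-{int(m.group(1)):03d}"].append(item.nodeid)

def pytest_terminal_summary(terminalreporter, exitstatus, config):
    # Write the traceability matrix next to the other report artifacts
    out = Path("reports") / "requirements_coverage.json"
    out.parent.mkdir(exist_ok=True)
    out.write_text(json.dumps({"requirements": _REQ_TO_TESTS}, indent=2))
```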


@ -1,57 +1,57 @@
# Flashing Sequence (ECU Programming)
This document outlines the expected flashing workflow using the `HexFlasher` scaffold over the LIN interface and where you can plug in your production flasher (UDS).
## Overview
- Flashing is controlled by configuration (`flash.enabled`, `flash.hex_path`)
- The `flash_ecu` session fixture invokes the flasher before tests
- The flasher uses the same `LinInterface` as tests
## Mermaid sequence
```mermaid
sequenceDiagram
autonumber
participant P as pytest
participant F as flash_ecu fixture
participant H as HexFlasher
participant L as LinInterface (mock/babylin)
participant E as ECU
P->>F: Evaluate flashing precondition
alt flash.enabled == true and hex_path provided
F->>H: HexFlasher(lin).flash_hex(hex_path)
H->>L: connect (ensure session ready)
H->>E: Enter programming session (UDS)
H->>E: Erase memory (as required)
loop For each block in HEX
H->>L: Transfer block via LIN frames
L-->>H: Acks / flow control
end
H->>E: Verify checksum / signature
H->>E: Exit programming, reset if needed
H-->>F: Return success/failure
else
F-->>P: Skip flashing
end
```
## Implementation notes
- `ecu_framework/flashing/hex_flasher.py` is a stub — replace with your protocol implementation (UDS)
- Validate timing requirements and chunk sizes per ECU
- Consider power-cycle/reset hooks via a programmable power supply.
## Error handling
- On failure, the fixture calls `pytest.fail("ECU flashing failed")`
- Make flashing idempotent when possible (can retry or detect current version)
## Configuration example
```yaml
flash:
enabled: true
hex_path: "firmware/ecu_firmware.hex"
```
# Flashing Sequence (ECU Programming)
This document outlines the expected flashing workflow using the `HexFlasher` scaffold over the LIN interface and where you can plug in your production flasher (UDS).
## Overview
- Flashing is controlled by configuration (`flash.enabled`, `flash.hex_path`)
- The `flash_ecu` session fixture invokes the flasher before tests
- The flasher uses the same `LinInterface` as tests
## Mermaid sequence
```mermaid
sequenceDiagram
autonumber
participant P as pytest
participant F as flash_ecu fixture
participant H as HexFlasher
participant L as LinInterface (mock/mum/babylin)
participant E as ECU
P->>F: Evaluate flashing precondition
alt flash.enabled == true and hex_path provided
F->>H: HexFlasher(lin).flash_hex(hex_path)
H->>L: connect (ensure session ready)
H->>E: Enter programming session (UDS)
H->>E: Erase memory (as required)
loop For each block in HEX
H->>L: Transfer block via LIN frames
L-->>H: Acks / flow control
end
H->>E: Verify checksum / signature
H->>E: Exit programming, reset if needed
H-->>F: Return success/failure
else
F-->>P: Skip flashing
end
```
## Implementation notes
- `ecu_framework/flashing/hex_flasher.py` is a stub — replace with your protocol implementation (UDS)
- Validate timing requirements and chunk sizes per ECU
- Consider power-cycle/reset hooks via a programmable power supply.
## Error handling
- On failure, the fixture calls `pytest.fail("ECU flashing failed")`
- Make flashing idempotent when possible (can retry or detect current version)
## Configuration example
```yaml
flash:
enabled: true
hex_path: "firmware/ecu_firmware.hex"
```
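Tying the pieces above together, the `flash_ecu` fixture can be as small as the sketch below. This is a sketch only: the attribute names assume the dataclass config, and `flash_hex` is assumed to return a success flag.
```python
import pytest

from ecu_framework.flashing.hex_flasher import HexFlasher


@pytest.fixture(scope="session")
def flash_ecu(config, lin):
    # Precondition: flashing must be enabled and a HEX file configured
    if not (config.flash.enabled and config.flash.hex_path):
        return False  # skip flashing, run against the firmware already on the ECU
    if not HexFlasher(lin).flash_hex(config.flash.hex_path):
        pytest.fail("ECU flashing failed")
    return True
```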


@ -1,103 +1,103 @@
# BabyLIN Adapter Internals (SDK Python wrapper)
This document describes how the real hardware adapter binds to the BabyLIN SDK via the official Python wrapper `BabyLIN_library.py` and how frames move across the boundary.
## Overview
- Location: `ecu_framework/lin/babylin.py`
- Uses the SDK's `BabyLIN_library.py` (place under `vendor/` or on `PYTHONPATH`)
- Discovers and opens a BabyLIN device using `BLC_getBabyLinPorts` and `BLC_openPort`
- Optionally loads an SDF via `BLC_loadSDF(handle, sdf_path, 1)` and starts a schedule with `BLC_sendCommand("start schedule N;")`
- Converts between Python `LinFrame` and the wrapper's `BLC_FRAME` structure for receive
## Mermaid: SDK connect sequence
```mermaid
sequenceDiagram
autonumber
participant T as Tests/Fixture
participant A as BabyLinInterface (SDK)
participant BL as BabyLIN_library (BLC_*)
T->>A: connect()
A->>BL: BLC_getBabyLinPorts(100)
BL-->>A: [port0, ...]
A->>BL: BLC_openPort(port0)
A->>BL: BLC_loadSDF(handle, sdf_path, 1)
A->>BL: BLC_getChannelHandle(handle, channelIndex)
A->>BL: start schedule N
A-->>T: connected
```
## Mermaid: Binding and call flow
```mermaid
sequenceDiagram
autonumber
participant T as Test
participant L as LinInterface (BabyLin)
participant D as BabyLIN_library (BLC_*)
T->>L: connect()
L->>D: BLC_getBabyLinPorts()
L->>D: BLC_openPort(port)
D-->>L: handle/ok
T->>L: send(frame)
L->>D: BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)
D-->>L: code (0=ok)
T->>L: receive(timeout)
L->>D: BLC_getNextFrameTimeout(channelHandle, timeout_ms)
D-->>L: code, frame
L->>L: convert BLC_FRAME to LinFrame
L-->>T: LinFrame or None
T->>L: disconnect()
L->>D: BLC_closeAll()
```
## Master request behavior
When performing a master request, the adapter tries the SDK method in this order:
1. `BLC_sendRawMasterRequest(channel, id, length)` — preferred
2. `BLC_sendRawMasterRequest(channel, id, dataBytes)` — fallback
3. Send a header with zeros and wait on `receive()` — last resort
Mock behavior notes:
- The provided mock (`vendor/mock_babylin_wrapper.py`) synthesizes a deterministic response for the `length` signature (e.g., data[i] = (id + i) & 0xFF).
- For the bytes-only signature, the adapter sends zero-filled bytes of the requested length and validates by length.
## Wrapper usage highlights
```python
from BabyLIN_library import create_BabyLIN
bl = create_BabyLIN()
ports = bl.BLC_getBabyLinPorts(100)
h = bl.BLC_openPort(ports[0])
bl.BLC_loadSDF(h, "Example.sdf", 1)
ch = bl.BLC_getChannelHandle(h, 0)
bl.BLC_sendCommand(ch, "start schedule 0;")
# Transmit and receive
bl.BLC_mon_set_xmit(ch, 0x10, bytes([1,2,3,4]), 0)
frm = bl.BLC_getNextFrameTimeout(ch, 100)
print(frm.frameId, list(frm.frameData)[:frm.lenOfData])
bl.BLC_closeAll()
```
## Notes and pitfalls
- Architecture: Ensure Python (x86/x64) matches the platform library bundled with the SDK
- Timeouts: SDKs typically want milliseconds; convert Python seconds accordingly
- Error handling: On non-zero return codes, use `BLC_getDetailedErrorString` (if available) for human-readable messages
- Threading: If you use background receive threads, protect buffers with locks
- Performance: Avoid excessive allocations in tight loops; reuse frame structs when possible
## Extending
- Add bitrate/channel setup functions as exposed by the SDK
- Implement schedule tables or diagnostics passthrough if provided by the SDK
- Wrap more SDK errors into typed Python exceptions for clarity
# BabyLIN Adapter Internals (SDK Python wrapper)
This document describes how the real hardware adapter binds to the BabyLIN SDK via the official Python wrapper `BabyLIN_library.py` and how frames move across the boundary.
## Overview
- Location: `ecu_framework/lin/babylin.py`
- Uses the SDK's `BabyLIN_library.py` (place under `vendor/` or on `PYTHONPATH`)
- Discovers and opens a BabyLIN device using `BLC_getBabyLinPorts` and `BLC_openPort`
- Optionally loads an SDF via `BLC_loadSDF(handle, sdf_path, 1)` and starts a schedule with `BLC_sendCommand("start schedule N;")`
- Converts between Python `LinFrame` and the wrapper's `BLC_FRAME` structure for receive
## Mermaid: SDK connect sequence
```mermaid
sequenceDiagram
autonumber
participant T as Tests/Fixture
participant A as BabyLinInterface (SDK)
participant BL as BabyLIN_library (BLC_*)
T->>A: connect()
A->>BL: BLC_getBabyLinPorts(100)
BL-->>A: [port0, ...]
A->>BL: BLC_openPort(port0)
A->>BL: BLC_loadSDF(handle, sdf_path, 1)
A->>BL: BLC_getChannelHandle(handle, channelIndex)
A->>BL: start schedule N
A-->>T: connected
```
## Mermaid: Binding and call flow
```mermaid
sequenceDiagram
autonumber
participant T as Test
participant L as LinInterface (BabyLin)
participant D as BabyLIN_library (BLC_*)
T->>L: connect()
L->>D: BLC_getBabyLinPorts()
L->>D: BLC_openPort(port)
D-->>L: handle/ok
T->>L: send(frame)
L->>D: BLC_mon_set_xmit(channelHandle, frameId, data, slotTime=0)
D-->>L: code (0=ok)
T->>L: receive(timeout)
L->>D: BLC_getNextFrameTimeout(channelHandle, timeout_ms)
D-->>L: code, frame
L->>L: convert BLC_FRAME to LinFrame
L-->>T: LinFrame or None
T->>L: disconnect()
L->>D: BLC_closeAll()
```
## Master request behavior
When performing a master request, the adapter tries the SDK method in this order:
1. `BLC_sendRawMasterRequest(channel, id, length)` — preferred
2. `BLC_sendRawMasterRequest(channel, id, dataBytes)` — fallback
3. Send a header with zeros and wait on `receive()` — last resort
Mock behavior notes:
- The provided mock (`vendor/mock_babylin_wrapper.py`) synthesizes a deterministic response for the `length` signature (e.g., data[i] = (id + i) & 0xFF).
- For the bytes-only signature, the adapter sends zero-filled bytes of the requested length and validates by length.
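The fallback order above roughly corresponds to the sketch below; probing the signature via `TypeError` is illustrative, and the real adapter may inspect the wrapper instead:
```python
def master_request(bl, channel, frame_id, length, receive):
    # 1. Preferred: length signature
    try:
        return bl.BLC_sendRawMasterRequest(channel, frame_id, length)
    except (TypeError, AttributeError):
        pass
    # 2. Fallback: bytes signature, zero-filled to the requested length
    try:
        return bl.BLC_sendRawMasterRequest(channel, frame_id, bytes(length))
    except (TypeError, AttributeError):
        pass
    # 3. Last resort: transmit a zero-filled frame and wait for the slave response
    bl.BLC_mon_set_xmit(channel, frame_id, bytes(length), 0)
    return receive(timeout=1.0)
```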
## Wrapper usage highlights
```python
from BabyLIN_library import create_BabyLIN
bl = create_BabyLIN()
ports = bl.BLC_getBabyLinPorts(100)
h = bl.BLC_openPort(ports[0])
bl.BLC_loadSDF(h, "Example.sdf", 1)
ch = bl.BLC_getChannelHandle(h, 0)
bl.BLC_sendCommand(ch, "start schedule 0;")
# Transmit and receive
bl.BLC_mon_set_xmit(ch, 0x10, bytes([1,2,3,4]), 0)
frm = bl.BLC_getNextFrameTimeout(ch, 100)
print(frm.frameId, list(frm.frameData)[:frm.lenOfData])
bl.BLC_closeAll()
```
## Notes and pitfalls
- Architecture: Ensure Python (x86/x64) matches the platform library bundled with the SDK
- Timeouts: SDKs typically want milliseconds; convert Python seconds accordingly
- Error handling: On non-zero return codes, use `BLC_getDetailedErrorString` (if available) for human-readable messages
- Threading: If you use background receive threads, protect buffers with locks
- Performance: Avoid excessive allocations in tight loops; reuse frame structs when possible
## Extending
- Add bitrate/channel setup functions as exposed by the SDK
- Implement schedule tables or diagnostics passthrough if provided by the SDK
- Wrap more SDK errors into typed Python exceptions for clarity


@ -1,144 +1,171 @@
# Raspberry Pi Deployment Guide
This guide explains how to run the ECU testing framework on a Raspberry Pi (Debian/Raspberry Pi OS). It covers environment setup, optional BabyLin hardware integration, running tests headless, and installing as a systemd service.
> Note: If you plan to use BabyLin hardware on a Pi, verify vendor driver support for ARM Linux. If BabyLin provides only Windows DLLs, use the Mock interface on Pi or deploy a different hardware interface that supports Linux/ARM.
## 1) Choose your interface
- Mock (recommended for headless/dev on Pi): `interface.type: mock`
- BabyLIN (only if ARM/Linux support is available): `interface.type: babylin` and ensure the SDK's `BabyLIN_library.py` and corresponding Linux/ARM shared libraries are available under `vendor/` or on PYTHONPATH/LD_LIBRARY_PATH.
## 2) Install prerequisites
```bash
sudo apt update
sudo apt install -y python3 python3-venv python3-pip git
```
Optional (for BabyLin or USB tools):
```bash
sudo apt install -y libusb-1.0-0 udev
```
## 3) Clone and set up
```bash
# clone your repo
git clone <your-repo-url> ~/ecu_tests
cd ~/ecu_tests
# create venv
python3 -m venv .venv
source .venv/bin/activate
# install deps
pip install -r requirements.txt
```
## 4) Configure
Create or edit `config/test_config.yaml`:
```yaml
interface:
type: mock # or babylin (if supported on ARM/Linux)
channel: 1
bitrate: 19200
flash:
enabled: false
```
Optionally point to another config file via env var:
```bash
export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
```
If using BabyLIN on Linux/ARM with the SDK wrapper, set:
```yaml
interface:
type: babylin
channel: 0
sdf_path: "/home/pi/ecu_tests/vendor/Example.sdf"
schedule_nr: 0
```
## 5) Run tests on Pi
```bash
source .venv/bin/activate
python -m pytest -m "not hardware" -v
```
Artifacts are in `reports/` (HTML, JUnit, JSON, summary MD).
## 6) Run as a systemd service (headless)
This section lets the Pi run the test suite on boot or on demand.
### Create a runner script
Create `scripts/run_tests.sh`:
```bash
#!/usr/bin/env bash
set -euo pipefail
cd "$(dirname "$0")/.."
source .venv/bin/activate
# optionally set custom config
# export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
python -m pytest -v
```
Make it executable:
```bash
chmod +x scripts/run_tests.sh
```
### Create a systemd unit
Create `scripts/ecu-tests.service`:
```ini
[Unit]
Description=ECU Tests Runner
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
WorkingDirectory=/home/pi/ecu_tests
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
User=pi
Group=pi
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
# Capture output to a log file
StandardOutput=append:/home/pi/ecu_tests/reports/service.log
StandardError=append:/home/pi/ecu_tests/reports/service.err
[Install]
WantedBy=multi-user.target
```
Install and run:
```bash
sudo mkdir -p /home/pi/ecu_tests/reports
sudo cp scripts/ecu-tests.service /etc/systemd/system/ecu-tests.service
sudo systemctl daemon-reload
sudo systemctl enable ecu-tests.service
# Start manually
sudo systemctl start ecu-tests.service
# Check status
systemctl status ecu-tests.service
```
## 7) USB and permissions (if using hardware)
- Create udev rules for your device (if required by vendor)
- Add user to dialout or plugdev groups if serial/USB access is needed
- Confirm your hardware library is found by Python and the dynamic linker:
- Ensure `vendor/BabyLIN_library.py` is importable (add `vendor/` to PYTHONPATH if needed)
- Ensure `.so` files are discoverable (e.g., place in `/usr/local/lib` and run `sudo ldconfig`, or set `LD_LIBRARY_PATH`)
## 8) Tips
- Use the mock interface on Pi for quick smoke tests and documentation/report generation
- For full HIL, ensure vendor SDK supports Linux/ARM and provide a shared object (`.so`) and headers
- If only Windows is supported, run the hardware suite on a Windows host and use the Pi for lightweight tasks (archiving, reporting, quick checks)
# Raspberry Pi Deployment Guide
This guide explains how to run the ECU testing framework on a Raspberry Pi (Debian/Raspberry Pi OS). It covers environment setup, hardware integration via MUM (recommended) or BabyLin (legacy), running tests headless, and installing as a systemd service.
> Note: The MUM (Melexis Universal Master) is **networked**, so the Pi only
> needs IP reachability to the MUM (default `192.168.7.2`) — there are no
> Pi-side native libs to worry about. BabyLin needs ARM Linux native
> libraries; if those aren't available, use Mock or MUM on the Pi instead.
## 1) Choose your interface
- **MUM (recommended for hardware on Pi)**: `interface.type: mum`. Requires Melexis `pylin` + `pymumclient` (see `vendor/automated_lin_test/install_packages.sh`) and IP reachability to the MUM device.
- Mock (recommended for headless/dev on Pi): `interface.type: mock`
- BabyLIN (only if ARM/Linux support is available): `interface.type: babylin` and ensure the SDK's `BabyLIN_library.py` and corresponding Linux/ARM shared libraries are available under `vendor/` or on PYTHONPATH/LD_LIBRARY_PATH.
## 2) Install prerequisites
```bash
sudo apt update
sudo apt install -y python3 python3-venv python3-pip git
```
Optional (for BabyLin or USB tools):
```bash
sudo apt install -y libusb-1.0-0 udev
```
## 3) Clone and set up
```bash
# clone your repo
git clone <your-repo-url> ~/ecu_tests
cd ~/ecu_tests
# create venv
python3 -m venv .venv
source .venv/bin/activate
# install deps
pip install -r requirements.txt
```
## 4) Configure
Create or edit `config/test_config.yaml`:
```yaml
interface:
type: mock # or babylin (if supported on ARM/Linux)
channel: 1
bitrate: 19200
flash:
enabled: false
```
Optionally point to another config file via env var:
```bash
export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
```
If using the MUM on the Pi, set:
```yaml
interface:
type: mum
host: 192.168.7.2 # adjust to your MUM IP
lin_device: lin0
power_device: power_out0
bitrate: 19200
boot_settle_seconds: 0.5
frame_lengths:
0x0A: 8
0x11: 4
```
Confirm reachability before running tests:
```bash
ping -c 2 192.168.7.2
```
If using BabyLIN on Linux/ARM with the SDK wrapper, set:
```yaml
interface:
type: babylin
channel: 0
sdf_path: "/home/pi/ecu_tests/vendor/Example.sdf"
schedule_nr: 0
```
## 5) Run tests on Pi
```bash
source .venv/bin/activate
python -m pytest -m "not hardware" -v
```
Artifacts are in `reports/` (HTML, JUnit, JSON, summary MD).
## 6) Run as a systemd service (headless)
This section lets the Pi run the test suite on boot or on demand.
### Create a runner script
Create `scripts/run_tests.sh`:
```bash
#!/usr/bin/env bash
set -euo pipefail
cd "$(dirname "$0")/.."
source .venv/bin/activate
# optionally set custom config
# export ECU_TESTS_CONFIG=$(pwd)/config/test_config.yaml
python -m pytest -v
```
Make it executable:
```bash
chmod +x scripts/run_tests.sh
```
### Create a systemd unit
Create `scripts/ecu-tests.service`:
```ini
[Unit]
Description=ECU Tests Runner
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
WorkingDirectory=/home/pi/ecu_tests
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
User=pi
Group=pi
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
# Capture output to a log file
StandardOutput=append:/home/pi/ecu_tests/reports/service.log
StandardError=append:/home/pi/ecu_tests/reports/service.err
[Install]
WantedBy=multi-user.target
```
Install and run:
```bash
sudo mkdir -p /home/pi/ecu_tests/reports
sudo cp scripts/ecu-tests.service /etc/systemd/system/ecu-tests.service
sudo systemctl daemon-reload
sudo systemctl enable ecu-tests.service
# Start manually
sudo systemctl start ecu-tests.service
# Check status
systemctl status ecu-tests.service
```
## 7) USB and permissions (if using hardware)
- Create udev rules for your device (if required by vendor)
- Add user to dialout or plugdev groups if serial/USB access is needed
- Confirm your hardware library is found by Python and the dynamic linker:
- Ensure `vendor/BabyLIN_library.py` is importable (add `vendor/` to PYTHONPATH if needed)
- Ensure `.so` files are discoverable (e.g., place in `/usr/local/lib` and run `sudo ldconfig`, or set `LD_LIBRARY_PATH`)
## 8) Tips
- Use the mock interface on Pi for quick smoke tests and documentation/report generation
- For full HIL on Pi, the **MUM is the easiest path** — it's IP-reachable so the Pi doesn't need vendor-specific native libraries, just the Melexis Python packages (`pylin`, `pymumclient`)
- For BabyLIN HIL, ensure vendor SDK supports Linux/ARM and provide a shared object (`.so`) and headers
- If only Windows is supported by your hardware path, run the hardware suite on a Windows host and use the Pi for lightweight tasks (archiving, reporting, quick checks)


@ -1,80 +1,86 @@
# Build a Custom Raspberry Pi Image with ECU Tests
This guide walks you through building your own Raspberry Pi OS image that already contains this framework, dependencies, config, and services. It uses the official pi-gen tool (used by Raspberry Pi OS) or the simpler pi-gen-lite alternatives.
> Important: BabyLin support on ARM/Linux depends on vendor SDKs. If no `.so` is provided for ARM, either use the Mock interface on the Pi, or keep hardware tests on Windows.
## Approach A: Using pi-gen (official)
1. Prepare a build host (Debian/Ubuntu)
```bash
sudo apt update && sudo apt install -y git coreutils quilt parted qemu-user-static debootstrap zerofree \
pxz zip dosfstools libcap2-bin grep rsync xz-utils file bc curl jq
```
2. Clone pi-gen
```bash
git clone https://github.com/RPi-Distro/pi-gen.git
cd pi-gen
```
3. Create a custom stage for ECU Tests (e.g., `stage2/02-ecu-tests/`):
- `00-packages` (optional OS deps like python3, libusb-1.0-0)
- `01-run.sh` to clone your repo, create venv, install deps, and set up systemd units
Example `01-run.sh` contents:
```bash
#!/bin/bash -e
REPO_DIR=/home/pi/ecu_tests
sudo -u pi git clone <your-repo-url> "$REPO_DIR"
cd "$REPO_DIR"
sudo -u pi python3 -m venv .venv
sudo -u pi bash -lc "source .venv/bin/activate && pip install --upgrade pip && pip install -r requirements.txt"
sudo mkdir -p "$REPO_DIR/reports"
sudo chown -R pi:pi "$REPO_DIR/reports"
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.service" /etc/systemd/system/ecu-tests.service
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.timer" /etc/systemd/system/ecu-tests.timer
sudo systemctl enable ecu-tests.service
sudo systemctl enable ecu-tests.timer || true
# Optional udev rules
if [ -f "$REPO_DIR/scripts/99-babylin.rules" ]; then
sudo install -Dm644 "$REPO_DIR/scripts/99-babylin.rules" /etc/udev/rules.d/99-babylin.rules
fi
```
4. Configure build options (`config` file in pi-gen root):
```bash
IMG_NAME=ecu-tests-os
ENABLE_SSH=1
STAGE_LIST="stage0 stage1 stage2" # include your custom stage2 additions
```
5. Build
```bash
sudo ./build.sh
```
6. Flash the resulting `.img` to SD card with `Raspberry Pi Imager` or `dd`.
## Approach B: Preseed on first boot (lighter)
- Ship a minimal Raspberry Pi OS image and a cloud-init/user-data or first-boot script that pulls your repo and runs `scripts/pi_install.sh`.
- Pros: Faster iteration; you control repo URL at install time.
- Cons: Requires internet on first boot.
## CI Integration (optional)
- You can automate image builds with GitHub Actions or GitLab CI using a Docker runner that executes pi-gen.
- Upload the `.img` as a release asset or pipeline artifact.
- Optionally, bake environment-specific `config/test_config.yaml` or keep it external and set `ECU_TESTS_CONFIG` in the systemd unit.
## Hardware Notes
- If using BabyLin, ensure: `.so` for ARM, udev rules, and any kernel modules.
- Validate the SDK wrapper and libraries are present under `/opt/ecu_tests/vendor/` (or your chosen path). Ensure `.so` files are on the linker path (run `sudo ldconfig`) and `BabyLIN_library.py` is importable.
## Boot-time Behavior
- The `ecu-tests.timer` can schedule daily or hourly test runs; edit `OnUnitActiveSec` as needed.
- Logs are written to `reports/service.log` and `reports/service.err` on the Pi.
## Security
- Consider read-only root filesystem for robustness.
- Use a dedicated user with limited privileges for test execution.
- Keep secrets (if any) injected via environment and not committed.
# Build a Custom Raspberry Pi Image with ECU Tests
This guide walks you through building your own Raspberry Pi OS image that already contains this framework, dependencies, config, and services. It uses the official pi-gen tool (used by Raspberry Pi OS) or the simpler pi-gen-lite alternatives.
> Important: For full HIL on the Pi, the **MUM (Melexis Universal Master)** is
> the recommended hardware path — it's IP-reachable so the Pi only needs the
> Melexis Python packages (`pylin`, `pymumclient`), no native libraries. Bake
> those into the image's site-packages from the Melexis IDE bundle. BabyLin
> support on ARM/Linux depends on vendor SDKs; if no `.so` is provided for
> ARM, either use the Mock or MUM interface on the Pi, or keep BabyLIN
> hardware tests on Windows.
## Approach A: Using pi-gen (official)
1. Prepare a build host (Debian/Ubuntu)
```bash
sudo apt update && sudo apt install -y git coreutils quilt parted qemu-user-static debootstrap zerofree \
pxz zip dosfstools libcap2-bin grep rsync xz-utils file bc curl jq
```
2. Clone pi-gen
```bash
git clone https://github.com/RPi-Distro/pi-gen.git
cd pi-gen
```
3. Create a custom stage for ECU Tests (e.g., `stage2/02-ecu-tests/`):
- `00-packages` (optional OS deps like python3, libusb-1.0-0)
- `01-run.sh` to clone your repo, create venv, install deps, and set up systemd units
Example `01-run.sh` contents:
```bash
#!/bin/bash -e
REPO_DIR=/home/pi/ecu_tests
sudo -u pi git clone <your-repo-url> "$REPO_DIR"
cd "$REPO_DIR"
sudo -u pi python3 -m venv .venv
sudo -u pi bash -lc "source .venv/bin/activate && pip install --upgrade pip && pip install -r requirements.txt"
sudo mkdir -p "$REPO_DIR/reports"
sudo chown -R pi:pi "$REPO_DIR/reports"
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.service" /etc/systemd/system/ecu-tests.service
sudo install -Dm644 "$REPO_DIR/scripts/ecu-tests.timer" /etc/systemd/system/ecu-tests.timer
sudo systemctl enable ecu-tests.service
sudo systemctl enable ecu-tests.timer || true
# Optional udev rules
if [ -f "$REPO_DIR/scripts/99-babylin.rules" ]; then
sudo install -Dm644 "$REPO_DIR/scripts/99-babylin.rules" /etc/udev/rules.d/99-babylin.rules
fi
```
4. Configure build options (`config` file in pi-gen root):
```bash
IMG_NAME=ecu-tests-os
ENABLE_SSH=1
STAGE_LIST="stage0 stage1 stage2" # include your custom stage2 additions
```
5. Build
```bash
sudo ./build.sh
```
6. Flash the resulting `.img` to SD card with `Raspberry Pi Imager` or `dd`.
## Approach B: Preseed on first boot (lighter)
- Ship a minimal Raspberry Pi OS image and a cloud-init/user-data or first-boot script that pulls your repo and runs `scripts/pi_install.sh`.
- Pros: Faster iteration; you control repo URL at install time.
- Cons: Requires internet on first boot.
## CI Integration (optional)
- You can automate image builds with GitHub Actions or GitLab CI using a Docker runner that executes pi-gen.
- Upload the `.img` as a release asset or pipeline artifact.
- Optionally, bake environment-specific `config/test_config.yaml` or keep it external and set `ECU_TESTS_CONFIG` in the systemd unit.
## Hardware Notes
- If using BabyLin, ensure: `.so` for ARM, udev rules, and any kernel modules.
- Validate the SDK wrapper and libraries are present under `/opt/ecu_tests/vendor/` (or your chosen path). Ensure `.so` files are on the linker path (run `sudo ldconfig`) and `BabyLIN_library.py` is importable.
## Boot-time Behavior
- The `ecu-tests.timer` can schedule daily or hourly test runs; edit `OnUnitActiveSec` as needed.
- Logs are written to `reports/service.log` and `reports/service.err` on the Pi.
## Security
- Consider read-only root filesystem for robustness.
- Use a dedicated user with limited privileges for test execution.
- Keep secrets (if any) injected via environment and not committed.


@ -1,91 +1,91 @@
# Pytest Plugin: Reporting & Traceability Overview
This guide explains the custom pytest plugin in `conftest_plugin.py` that enriches reports with business-facing metadata and builds requirements traceability artifacts.
## What it does
- Extracts metadata (Title, Description, Requirements, Test Steps, Expected Result) from test docstrings and markers.
- Attaches this metadata as `user_properties` on each test report.
- Adds custom columns (Title, Requirements) to the HTML report.
- Produces two artifacts under `reports/` at the end of the run:
- `requirements_coverage.json`: a traceability matrix mapping requirement IDs to test nodeids, plus unmapped tests.
- `summary.md`: a compact summary of results suitable for CI dashboards or PR comments.
## Inputs and sources
- Prefixed lines in test docstrings:
- `Title:` one-line title
- `Description:` free-form text until the next section
- `Requirements:` comma- or space-separated tokens such as `REQ-001`, `req_002`
- `Test Steps:` numbered list (1., 2., 3., ...)
- `Expected Result:` free-form text
- Pytest markers on tests: `@pytest.mark.req_001` etc. are normalized to `REQ-001`.
## Normalization logic
Requirement IDs are normalized to the canonical form `REQ-XYZ` using:
- `req_001``REQ-001`
- `REQ-1` / `REQ-001` / `REQ_001``REQ-001`
This ensures consistent keys in the coverage JSON and HTML.
## Hook call sequence
Below is the high-level call sequence of relevant plugin hooks during a typical run:
```mermaid
sequenceDiagram
autonumber
participant Pytest
participant Plugin as conftest_plugin
participant FS as File System
Pytest->>Plugin: pytest_configure(config)
Note right of Plugin: Ensure ./reports exists
Pytest->>Plugin: pytest_collection_modifyitems(session, config, items)
Note right of Plugin: Track all collected nodeids for unmapped detection
loop For each test phase
Pytest->>Plugin: pytest_runtest_makereport(item, call)
Note right of Plugin: hookwrapper
Plugin-->>Pytest: yield to get report
Plugin->>Plugin: parse docstring & markers
Plugin->>Plugin: attach user_properties (Title, Requirements, ...)
Plugin->>Plugin: update _REQ_TO_TESTS, _MAPPED_TESTS
end
Pytest->>Plugin: pytest_terminal_summary(terminalreporter, exitstatus)
Plugin->>Plugin: compile stats, coverage map, unmapped tests
Plugin->>FS: write reports/requirements_coverage.json
Plugin->>FS: write reports/summary.md
```
## HTML report integration
- `pytest_html_results_table_header`: inserts Title and Requirements columns.
- `pytest_html_results_table_row`: fills in values from `report.user_properties`.
The HTML plugin reads `user_properties` to render the extra metadata per test row.
## Artifacts
- `reports/requirements_coverage.json`
- `generated_at`: ISO timestamp
- `results`: counts of passed/failed/skipped/etc.
- `requirements`: map of `REQ-XXX` to an array of test nodeids
- `unmapped_tests`: tests with no requirement mapping
- `files`: relative locations of key artifacts
- `reports/summary.md`
- Human-readable summary with counts and quick artifact links
## Error handling
Artifact writes are wrapped in try/except to avoid failing the test run if the filesystem is read-only or unavailable. Any write failure is logged to the terminal.
## Extensibility ideas
- Add more normalized marker families (e.g., `capability_*`, `risk_*`).
- Emit CSV or Excel in addition to JSON/Markdown.
- Include per-test durations and flakiness stats in the summary.
- Support a `--requirement` CLI filter that selects tests by normalized req IDs.
# Pytest Plugin: Reporting & Traceability Overview
This guide explains the custom pytest plugin in `conftest_plugin.py` that enriches reports with business-facing metadata and builds requirements traceability artifacts.
## What it does
- Extracts metadata (Title, Description, Requirements, Test Steps, Expected Result) from test docstrings and markers.
- Attaches this metadata as `user_properties` on each test report.
- Adds custom columns (Title, Requirements) to the HTML report.
- Produces two artifacts under `reports/` at the end of the run:
- `requirements_coverage.json`: a traceability matrix mapping requirement IDs to test nodeids, plus unmapped tests.
- `summary.md`: a compact summary of results suitable for CI dashboards or PR comments.
## Inputs and sources
- Prefixed lines in test docstrings:
- `Title:` one-line title
- `Description:` free-form text until the next section
- `Requirements:` comma- or space-separated tokens such as `REQ-001`, `req_002`
- `Test Steps:` numbered list (1., 2., 3., ...)
- `Expected Result:` free-form text
- Pytest markers on tests: `@pytest.mark.req_001` etc. are normalized to `REQ-001`.
## Normalization logic
Requirement IDs are normalized to the canonical form `REQ-XYZ` using:
- `req_001``REQ-001`
- `REQ-1` / `REQ-001` / `REQ_001``REQ-001`
This ensures consistent keys in the coverage JSON and HTML.
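One possible implementation of this normalization is sketched below; the plugin's actual helper may differ in name and edge-case handling:
```python
import re
from typing import Optional

def normalize_req_id(token: str) -> Optional[str]:
    # req_001, REQ-1, REQ_001 (any case, optional leading zeros) -> REQ-001
    m = re.fullmatch(r"req[\s_-]*0*(\d+)", token.strip(), re.IGNORECASE)
    return f"REQ-{int(m.group(1)):03d}" if m else None

assert normalize_req_id("req_001") == "REQ-001"
assert normalize_req_id("REQ-1") == "REQ-001"
assert normalize_req_id("REQ_001") == "REQ-001"
```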
## Hook call sequence
Below is the high-level call sequence of relevant plugin hooks during a typical run:
```mermaid
sequenceDiagram
autonumber
participant Pytest
participant Plugin as conftest_plugin
participant FS as File System
Pytest->>Plugin: pytest_configure(config)
Note right of Plugin: Ensure ./reports exists
Pytest->>Plugin: pytest_collection_modifyitems(session, config, items)
Note right of Plugin: Track all collected nodeids for unmapped detection
loop For each test phase
Pytest->>Plugin: pytest_runtest_makereport(item, call)
Note right of Plugin: hookwrapper
Plugin-->>Pytest: yield to get report
Plugin->>Plugin: parse docstring & markers
Plugin->>Plugin: attach user_properties (Title, Requirements, ...)
Plugin->>Plugin: update _REQ_TO_TESTS, _MAPPED_TESTS
end
Pytest->>Plugin: pytest_terminal_summary(terminalreporter, exitstatus)
Plugin->>Plugin: compile stats, coverage map, unmapped tests
Plugin->>FS: write reports/requirements_coverage.json
Plugin->>FS: write reports/summary.md
```
## HTML report integration
- `pytest_html_results_table_header`: inserts Title and Requirements columns.
- `pytest_html_results_table_row`: fills in values from `report.user_properties`.
The HTML plugin reads `user_properties` to render the extra metadata per test row.
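In code, the two hooks look roughly like the sketch below; it assumes pytest-html 4.x, where cells are plain HTML strings (older versions use py.xml cell objects), and it assumes the properties are stored under the keys `title` and `requirements`:
```python
def pytest_html_results_table_header(cells):
    cells.insert(1, "<th>Title</th>")
    cells.insert(2, "<th>Requirements</th>")

def pytest_html_results_table_row(report, cells):
    props = dict(report.user_properties)  # list of (name, value) tuples
    cells.insert(1, f"<td>{props.get('title', '')}</td>")
    cells.insert(2, f"<td>{props.get('requirements', '')}</td>")
```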
## Artifacts
- `reports/requirements_coverage.json`
- `generated_at`: ISO timestamp
- `results`: counts of passed/failed/skipped/etc.
- `requirements`: map of `REQ-XXX` to an array of test nodeids
- `unmapped_tests`: tests with no requirement mapping
- `files`: relative locations of key artifacts
- `reports/summary.md`
- Human-readable summary with counts and quick artifact links
## Error handling
Artifact writes are wrapped in try/except to avoid failing the test run if the filesystem is read-only or unavailable. Any write failure is logged to the terminal.
## Extensibility ideas
- Add more normalized marker families (e.g., `capability_*`, `risk_*`).
- Emit CSV or Excel in addition to JSON/Markdown.
- Include per-test durations and flakiness stats in the summary.
- Support a `--requirement` CLI filter that selects tests by normalized req IDs.


@ -1,188 +1,220 @@
# Using the ECU Test Framework
This guide shows common ways to run the test framework: from fast local mock runs to full hardware loops, CI, and Raspberry Pi deployments. Commands use Windows PowerShell (as your default shell).
## Prerequisites
- Python 3.x and a virtual environment
- Dependencies installed (see `requirements.txt`)
- Optional: BabyLIN SDK files placed under `vendor/` as described in `vendor/README.md` when running hardware tests
## Configuring tests
- Configuration is loaded from YAML files and can be selected via the environment variable `ECU_TESTS_CONFIG`.
- See `docs/02_configuration_resolution.md` for details and examples.
Example PowerShell:
```powershell
# Use a mock-only config for fast local runs
$env:ECU_TESTS_CONFIG = ".\config\mock.yml"
# Use a hardware config with BabyLIN SDK wrapper
$env:ECU_TESTS_CONFIG = ".\config\hardware_babylin.yml"
```
Quick try with provided examples:
```powershell
# Point to the combined examples file
$env:ECU_TESTS_CONFIG = ".\config\examples.yaml"
# The 'active' section defaults to the mock profile; run non-hardware tests
pytest -m "not hardware" -v
# Edit 'active' to the babylin profile (or point to babylin.example.yaml) and run hardware tests
```
## Running locally (mock interface)
Use the mock interface to develop tests quickly without hardware:
```powershell
# Run all mock tests with HTML and JUnit outputs (see pytest.ini defaults)
pytest
# Run only smoke tests (mock) and show progress
pytest -m smoke -q
# Filter by test file or node id
pytest tests\test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo -q
```
What you get:
- Fast execution, deterministic results
- Reports in `reports/` (HTML, JUnit, coverage JSON, CI summary)
Open the HTML report on Windows:
```powershell
start .\reports\report.html
```
## Running on hardware (BabyLIN SDK wrapper)
1) Place SDK files per `vendor/README.md`.
2) Select a config that defines `interface.type: babylin`, `sdf_path`, and `schedule_nr`.
3) Markers allow restricting to hardware tests.
```powershell
# Example environment selection
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"
# Run only hardware tests
pytest -m "hardware and babylin"
# Run the schedule smoke only
pytest tests\test_babylin_hardware_schedule_smoke.py -q
```
Tips:
- If multiple devices are attached, update your config to select the desired port (future enhancement) or keep only one connected.
- On timeout, tests often accept None to avoid flakiness; increase timeouts if your bus is slow.
- Master request behavior: the adapter prefers `BLC_sendRawMasterRequest(channel, id, length)`; it falls back to the bytes variant or a header+receive strategy as needed. The mock covers both forms.
## Selecting tests with markers
Markers in use:
- `smoke`: quick confidence tests
- `hardware`: needs real device
- `babylin`: targets the BabyLIN SDK adapter
- `req_XXX`: requirement mapping (e.g., `@pytest.mark.req_001`)
Examples:
```powershell
# Only smoke tests (mock + hardware smoke)
pytest -m smoke
# Requirements-based selection (docstrings and markers are normalized)
pytest -k REQ-001
```
## Enriched reporting
- HTML report includes custom columns (Title, Requirements)
- JUnit XML written for CI
- `reports/requirements_coverage.json` maps requirement IDs to tests and lists unmapped tests
- `reports/summary.md` aggregates key counts (pass/fail/etc.)
See `docs/03_reporting_and_metadata.md` and `docs/11_conftest_plugin_overview.md`.
To verify the reporting pipeline end-to-end, run the plugin self-test:
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
To generate two separate HTML/JUnit reports (unit vs non-unit):
```powershell
./scripts/run_two_reports.ps1
```
## Writing well-documented tests
Use a docstring template so the plugin can extract metadata:
```python
"""
Title: <short title>
Description:
<what the test validates and why>
Requirements: REQ-001, REQ-002
Test Steps:
1. <step one>
2. <step two>
Expected Result:
<succinct expected outcome>
"""
```
Tip: For runtime properties in reports, prefer the shared `rp` fixture (wrapper around `record_property`) and use standardized keys from `docs/15_report_properties_cheatsheet.md`.
## Continuous Integration (CI)
- Run `pytest` with your preferred markers in your pipeline.
- Publish artifacts from `reports/` (HTML, JUnit, coverage JSON, summary.md).
- Optionally parse `requirements_coverage.json` to power dashboards and gates.
Example PowerShell (local CI mimic):
```powershell
# Run smoke tests and collect reports
pytest -m smoke --maxfail=1 -q
```
## Raspberry Pi / Headless usage
- Follow `docs/09_raspberry_pi_deployment.md` to set up a venv and systemd service
- For a golden image approach, see `docs/10_build_custom_image.md`
Running tests headless via systemd typically involves:
- A service that sets `ECU_TESTS_CONFIG` to a hardware YAML
- Running `pytest -m "hardware and babylin"` on boot or via timer
## Troubleshooting quick hits
- ImportError for `BabyLIN_library`: verify placement under `vendor/` and native library presence.
- No BabyLIN devices found: check USB connection, drivers, and permissions.
- Timeouts on receive: increase `timeout` or verify schedule activity and SDF correctness.
- Missing reports: ensure `pytest.ini` includes the HTML/JUnit plugins and the custom plugin is loaded.
## Power supply (Owon) hardware test
Enable `power_supply` in your config and set the serial port, then run the dedicated test or the quick demo script.
```powershell
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port in .\config\owon_psu.yaml or set values in config\test_config.yaml
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
python .\vendor\Owon\owon_psu_quick_demo.py
```
See also: `docs/14_power_supply.md` for details and troubleshooting.
# Using the ECU Test Framework
This guide shows common ways to run the test framework: from fast local mock runs to full hardware loops, CI, and Raspberry Pi deployments. Commands use Windows PowerShell (as your default shell).
## Prerequisites
- Python 3.x and a virtual environment
- Dependencies installed (see `requirements.txt`)
- For MUM hardware: Melexis `pylin` and `pymumclient` Python packages on `PYTHONPATH` (see `vendor/automated_lin_test/install_packages.sh`) plus a reachable MUM (default IP `192.168.7.2`)
- For BabyLIN (legacy) hardware: SDK files placed under `vendor/` as described in `vendor/README.md`
## Configuring tests
- Configuration is loaded from YAML files and can be selected via the environment variable `ECU_TESTS_CONFIG`.
- See `docs/02_configuration_resolution.md` for details and examples.
Example PowerShell:
```powershell
# Use a mock-only config for fast local runs
$env:ECU_TESTS_CONFIG = ".\config\mock.yml"
# Use a hardware config with the MUM (current default)
$env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"
# Use a hardware config with the BabyLIN SDK wrapper (legacy)
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"
```
Quick try with provided examples:
```powershell
# Point to the combined examples file
$env:ECU_TESTS_CONFIG = ".\config\examples.yaml"
# The 'active' section defaults to the mock profile; run non-hardware tests
pytest -m "not hardware" -v
# Edit 'active' to the mum or babylin profile (or point to mum.example.yaml /
# babylin.example.yaml) and run hardware tests
```
## Running locally (mock interface)
Use the mock interface to develop tests quickly without hardware:
```powershell
# Run all mock tests with HTML and JUnit outputs (see pytest.ini defaults)
pytest
# Run only smoke tests (mock) and show progress
pytest -m smoke -q
# Filter by test file or node id
pytest tests\test_smoke_mock.py::TestMockLinInterface::test_mock_send_receive_echo -q
```
What you get:
- Fast execution, deterministic results
- Reports in `reports/` (HTML, JUnit, coverage JSON, CI summary)
Open the HTML report on Windows:
```powershell
start .\reports\report.html
```
## Running on hardware (MUM — current default)
1) Install Melexis `pylin` and `pymumclient` (see `vendor/automated_lin_test/install_packages.sh` — on Windows, point `pip` at a wheel or extend `PYTHONPATH` to the Melexis IDE site-packages).
2) Make sure the MUM is reachable: `ping 192.168.7.2`.
3) Select a config that defines `interface.type: mum` plus `host`/`lin_device`/`power_device`.
```powershell
$env:ECU_TESTS_CONFIG = ".\config\mum.example.yaml"
# Run only the MUM-marked hardware tests
pytest -m "hardware and mum" -v
# Run a single MUM test by file
pytest tests\hardware\test_e2e_mum_led_activate.py -q
```
Tips:
- The MUM owns ECU power on `power_out0`; it powers up automatically in `connect()` and powers down on `disconnect()`. The Owon PSU is independent and can be left disabled (`power_supply.enabled: false`).
- The MUM is master-driven: `lin.receive(id)` requires a frame ID. The default `frame_lengths` covers ALM_Status (4 B) and ALM_Req_A (8 B); add others in YAML when you need slave-published frames at non-standard lengths.
- For BSM-SNPD diagnostic frames (service ID 0xB5), use `lin.send_raw(bytes)` — it routes through the transport layer's `ld_put_raw`, which uses LIN 1.x **Classic** checksum. `send()` uses Enhanced and the firmware will reject these frames.
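For example, a sketch of pushing a raw diagnostic payload through the transport layer (the payload bytes beyond the service ID are placeholders, not a real BSM-SNPD request; `lin` is the connected MUM adapter from the fixture):
```python
# Diagnostic frames need the LIN 1.x Classic checksum, so use send_raw(), not send()
payload = bytes([0xB5, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00])  # 0xB5 service ID + placeholder bytes
lin.send_raw(payload)  # routed through ld_put_raw on the MUM transport layer
```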
## Running on hardware (BabyLIN SDK wrapper — legacy)
1) Place SDK files per `vendor/README.md`.
2) Select a config that defines `interface.type: babylin`, `sdf_path`, and `schedule_nr`.
3) Markers allow restricting to hardware tests.
```powershell
$env:ECU_TESTS_CONFIG = ".\config\babylin.example.yaml"
# Run only hardware tests
pytest -m "hardware and babylin"
# Run the schedule smoke only
pytest tests\test_babylin_hardware_schedule_smoke.py -q
```
Tips:
- If multiple devices are attached, update your config to select the desired port (future enhancement) or keep only one connected.
- On timeout, tests often accept None to avoid flakiness; increase timeouts if your bus is slow.
- Master request behavior: the adapter prefers `BLC_sendRawMasterRequest(channel, id, length)`; it falls back to the bytes variant or a header+receive strategy as needed. The mock covers both forms.
- `interface.schedule_nr: -1` defers schedule start to the test code (useful when the test wants to pick a specific schedule by name via `lin.start_schedule("CCO")`).
## Selecting tests with markers
Markers in use:
- `smoke`: quick confidence tests
- `hardware`: needs real device (any LIN master)
- `mum`: targets the Melexis Universal Master adapter (current default)
- `babylin`: targets the legacy BabyLIN SDK adapter
- `unit`: pure unit tests (no hardware, no external I/O)
- `req_XXX`: requirement mapping (e.g., `@pytest.mark.req_001`)
Examples:
```powershell
# Only smoke tests (mock + hardware smoke)
pytest -m smoke
# Requirements-based selection (docstrings and markers are normalized)
pytest -k REQ-001
```
## Enriched reporting
- HTML report includes custom columns (Title, Requirements)
- JUnit XML written for CI
- `reports/requirements_coverage.json` maps requirement IDs to tests and lists unmapped tests
- `reports/summary.md` aggregates key counts (pass/fail/etc.)
See `docs/03_reporting_and_metadata.md` and `docs/11_conftest_plugin_overview.md`.
To verify the reporting pipeline end-to-end, run the plugin self-test:
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
To generate two separate HTML/JUnit reports (unit vs non-unit):
```powershell
./scripts/run_two_reports.ps1
```
## Writing well-documented tests
Use a docstring template so the plugin can extract metadata:
```python
"""
Title: <short title>
Description:
<what the test validates and why>
Requirements: REQ-001, REQ-002
Test Steps:
1. <step one>
2. <step two>
Expected Result:
<succinct expected outcome>
"""
```
Tip: For runtime properties in reports, prefer the shared `rp` fixture (wrapper around `record_property`) and use standardized keys from `docs/15_report_properties_cheatsheet.md`.
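For example, a minimal sketch using pytest's built-in `record_property` (the shared `rp` fixture is assumed to take the same `(key, value)` call shape; the key name here is a placeholder, see the cheat sheet for the standardized ones):
```python
def test_bus_voltage_is_reported(lin, record_property):
    record_property("measured_voltage_v", 12.1)  # runtime value, ends up in the report's user_properties
```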
## Continuous Integration (CI)
- Run `pytest` with your preferred markers in your pipeline.
- Publish artifacts from `reports/` (HTML, JUnit, coverage JSON, summary.md).
- Optionally parse `requirements_coverage.json` to power dashboards and gates.
Example PowerShell (local CI mimic):
```powershell
# Run smoke tests and collect reports
pytest -m smoke --maxfail=1 -q
```
## Raspberry Pi / Headless usage
- Follow `docs/09_raspberry_pi_deployment.md` to set up a venv and systemd service
- For a golden image approach, see `docs/10_build_custom_image.md`
Running tests headless via systemd typically involves:
- A service that sets `ECU_TESTS_CONFIG` to a hardware YAML
- Running `pytest -m "hardware and mum"` (or `"hardware and babylin"`) on boot or via timer
## Troubleshooting quick hits
- ImportError for `pylin` / `pymumclient`: install Melexis packages (`vendor/automated_lin_test/install_packages.sh`); the MUM adapter raises a clear error pointing at this script.
- "interface.host is required when interface.type == 'mum'": set `interface.host` in YAML.
- MUM unreachable: `ping 192.168.7.2`; check the USB-RNDIS link.
- ImportError for `BabyLIN_library`: verify placement under `vendor/` and native library presence.
- No BabyLIN devices found: check USB connection, drivers, and permissions.
- Timeouts on receive: increase `timeout` or verify schedule activity and SDF correctness.
- Missing reports: ensure `pytest.ini` includes the HTML/JUnit plugins and the custom plugin is loaded.
## Power supply (Owon) hardware test
Enable `power_supply` in your config and set the serial port, then run the dedicated test or the quick demo script.
```powershell
copy .\config\owon_psu.example.yaml .\config\owon_psu.yaml
# edit COM port in .\config\owon_psu.yaml or set values in config\test_config.yaml
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
python .\vendor\Owon\owon_psu_quick_demo.py
```
See also: `docs/14_power_supply.md` for details and troubleshooting.


@ -1,125 +1,140 @@
# Unit Testing Guide
This guide explains how the project's unit tests are organized, how to run them (with and without markers), how coverage is generated, and tips for writing effective tests.
## Why unit tests?
- Fast feedback without hardware
- Validate contracts (config loader, frames, adapters, flashing scaffold)
- Keep behavior stable as the framework evolves
## Test layout
- `tests/unit/` — pure unit tests (no hardware, no external I/O)
- `test_config_loader.py` — config precedence and defaults
- `test_linframe.py``LinFrame` validation
- `test_babylin_adapter_mocked.py` — BabyLIN adapter error paths with a mocked SDK wrapper
- `test_hex_flasher.py` — flashing scaffold against a stub LIN interface
- `tests/plugin/` — plugin self-tests using `pytester`
- `test_conftest_plugin_artifacts.py` — verifies JSON coverage and summary artifacts
- `tests/` — existing smoke/mock/hardware tests
## Markers and selection
A `unit` marker is provided for easy selection:
- By marker (recommended):
```powershell
pytest -m unit -q
```
- By path:
```powershell
pytest tests\unit -q
```
- Exclude hardware:
```powershell
pytest -m "not hardware" -v
```
## Coverage
Coverage is enabled by default via `pytest.ini` addopts:
- `--cov=ecu_framework --cov-report=term-missing`
You'll see a summary with missing lines directly in the terminal. To disable coverage locally, override addopts on the command line:
```powershell
pytest -q -o addopts=""
```
(Optional) To produce an HTML coverage report, you can add `--cov-report=html` and open `htmlcov/index.html`.
## Writing unit tests
- Prefer small, focused tests
- For BabyLIN adapter logic, inject `wrapper_module` with the mock:
```python
from ecu_framework.lin.babylin import BabyLinInterface
from vendor import mock_babylin_wrapper as mock_bl
lin = BabyLinInterface(wrapper_module=mock_bl)
lin.connect()
# exercise send/receive/request
```
- To simulate specific SDK signatures, use a thin shim (see `_MockBytesOnly` in `tests/test_babylin_wrapper_mock.py`).
- Include a docstring with Title/Description/Requirements/Steps/Expected Result so the reporting plugin can extract metadata (this also helps the HTML report).
- When testing the plugin itself, use the `pytester` fixture to generate a temporary test run and validate artifacts exist and contain expected entries.
## Typical commands (Windows PowerShell)
- Run unit tests with coverage:
```powershell
pytest -m unit -q
```
- Run only plugin self-tests:
```powershell
pytest tests\plugin -q
```
- Run the specific plugin artifact test (verifies HTML/JUnit, summary, and coverage JSON under `reports/`):
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
- Run all non-hardware tests with verbose output:
```powershell
pytest -m "not hardware" -v
```
- Open the HTML report:
```powershell
start .\reports\report.html
```
- Generate two separate reports (unit vs non-unit):
```powershell
./scripts/run_two_reports.ps1
```
## CI suggestions
- Run `-m unit` and `tests/plugin` on every PR
- Optionally run mock integration/smoke on PR
- Run hardware test matrix on a nightly or on-demand basis (`-m "hardware and babylin"`)
- Publish artifacts from `reports/`: HTML/JUnit/coverage JSON/summary MD
## Troubleshooting
- Coverage not showing: ensure `pytest-cov` is installed (see `requirements.txt`) and `pytest.ini` addopts include `--cov`.
- Import errors: activate the venv and reinstall requirements.
- Plugin artifacts missing under `pytester`: verify tests write to `reports/` (our plugin creates the folder automatically in `pytest_configure`).
# Unit Testing Guide
This guide explains how the project's unit tests are organized, how to run them (with and without markers), how coverage is generated, and tips for writing effective tests.
## Why unit tests?
- Fast feedback without hardware
- Validate contracts (config loader, frames, adapters, flashing scaffold)
- Keep behavior stable as the framework evolves
## Test layout
- `tests/unit/` — pure unit tests (no hardware, no external I/O)
- `test_config_loader.py` — config precedence and defaults
- `test_linframe.py` — `LinFrame` validation
- `test_babylin_adapter_mocked.py` — BabyLIN adapter error paths with a mocked SDK wrapper
- `test_mum_adapter_mocked.py` — MUM adapter (`MumLinInterface`) plumbing exercised through fake `pylin` / `pymumclient` modules
- `test_hex_flasher.py` — flashing scaffold against a stub LIN interface
- `tests/plugin/` — plugin self-tests using `pytester`
- `test_conftest_plugin_artifacts.py` — verifies JSON coverage and summary artifacts
- `tests/` — existing smoke/mock/hardware tests
## Markers and selection
A `unit` marker is provided for easy selection:
- By marker (recommended):
```powershell
pytest -m unit -q
```
- By path:
```powershell
pytest tests\unit -q
```
- Exclude hardware:
```powershell
pytest -m "not hardware" -v
```
## Coverage
Coverage is enabled by default via `pytest.ini` addopts:
- `--cov=ecu_framework --cov-report=term-missing`
You'll see a summary with missing lines directly in the terminal. To disable coverage locally, override addopts on the command line:
```powershell
pytest -q -o addopts=""
```
(Optional) To produce an HTML coverage report, you can add `--cov-report=html` and open `htmlcov/index.html`.
## Writing unit tests
- Prefer small, focused tests
- For BabyLIN adapter logic, inject `wrapper_module` with the mock:
```python
from ecu_framework.lin.babylin import BabyLinInterface
from vendor import mock_babylin_wrapper as mock_bl
lin = BabyLinInterface(wrapper_module=mock_bl)
lin.connect()
# exercise send/receive/request
```
- For MUM adapter logic, inject `mum_module` and `pylin_module` with fakes
(see `tests/unit/test_mum_adapter_mocked.py` for a full example):
```python
from ecu_framework.lin.mum import MumLinInterface
# fake_mum exposes MelexisUniversalMaster() returning an object with
# open_all(host) and get_device(name)
# fake_pylin exposes LinBusManager(linmaster) and LinDevice22(lin_bus)
lin = MumLinInterface(host="10.0.0.1", mum_module=fake_mum, pylin_module=fake_pylin)
lin.connect()
# exercise send / receive / send_raw / power_*
```
- To simulate specific SDK signatures, use a thin shim (see `_MockBytesOnly` in `tests/test_babylin_wrapper_mock.py`).
- Include a docstring with Title/Description/Requirements/Steps/Expected Result so the reporting plugin can extract metadata (this also helps the HTML report); a minimal example follows this list.
- When testing the plugin itself, use the `pytester` fixture to generate a temporary test run and validate artifacts exist and contain expected entries.
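The docstring convention mentioned above might look like this in practice (a minimal sketch; the requirement ID is illustrative, and the assertions rely only on the loader's built-in defaults):
```python
import pytest
from ecu_framework.config import load_config

@pytest.mark.unit
def test_config_defaults_to_mock_interface():
    """
    Title: Config loader falls back to built-in defaults
    Description: With no YAML file and no overrides, load_config() returns the
        built-in defaults (mock interface, flashing disabled).
    Requirements: REQ-CFG-001 (illustrative ID)
    Steps: Call load_config() without a workspace root or overrides.
    Expected Result: interface.type == "mock" and flash.enabled is False.
    """
    cfg = load_config()
    assert cfg.interface.type == "mock"
    assert cfg.flash.enabled is False
```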
## Typical commands (Windows PowerShell)
- Run unit tests with coverage:
```powershell
pytest -m unit -q
```
- Run only plugin self-tests:
```powershell
pytest tests\plugin -q
```
- Run the specific plugin artifact test (verifies HTML/JUnit, summary, and coverage JSON under `reports/`):
```powershell
python -m pytest tests\plugin\test_conftest_plugin_artifacts.py -q
```
- Run all non-hardware tests with verbose output:
```powershell
pytest -m "not hardware" -v
```
- Open the HTML report:
```powershell
start .\reports\report.html
```
- Generate two separate reports (unit vs non-unit):
```powershell
./scripts/run_two_reports.ps1
```
## CI suggestions
- Run `-m unit` and `tests/plugin` on every PR
- Optionally run mock integration/smoke on PR
- Run hardware test matrix on a nightly or on-demand basis (`-m "hardware and mum"` or `-m "hardware and babylin"`)
- Publish artifacts from `reports/`: HTML/JUnit/coverage JSON/summary MD
## Troubleshooting
- Coverage not showing: ensure `pytest-cov` is installed (see `requirements.txt`) and `pytest.ini` addopts include `--cov`.
- Import errors: activate the venv and reinstall requirements.
- Plugin artifacts missing under `pytester`: verify tests write to `reports/` (our plugin creates the folder automatically in `pytest_configure`).

View File

@ -1,103 +1,110 @@
# Power Supply (Owon) — control, configuration, tests, and quick demo
This guide covers using the Owon bench power supply via SCPI over serial with the framework.
- Library: `ecu_framework/power/owon_psu.py`
- Hardware test: `tests/hardware/test_owon_psu.py`
- Quick demo script: `vendor/Owon/owon_psu_quick_demo.py`
- Configuration: `config/test_config.yaml` (`power_supply`), optionally merged from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG`
## Install dependencies
```powershell
pip install -r .\requirements.txt
```
## Configure
You can keep PSU settings centrally or in a machine-specific YAML.
- Central: the `power_supply` section in `config/test_config.yaml`
- Separate: `config/owon_psu.yaml` (or `OWON_PSU_CONFIG` env var)
Supported keys:
```yaml
power_supply:
enabled: true
port: COM4 # e.g., COM4 (Windows) or /dev/ttyUSB0 (Linux)
baudrate: 115200
timeout: 1.0
eol: "\n" # or "\r\n" if required
parity: N # N|E|O
stopbits: 1 # 1|2
xonxoff: false
rtscts: false
dsrdtr: false
idn_substr: OWON
do_set: false
set_voltage: 5.0
set_current: 0.1
```
The central config loader automatically merges `config/owon_psu.yaml` (or the path in `OWON_PSU_CONFIG`) into `power_supply`.
## Run the hardware test
Skips unless `power_supply.enabled` is true and `port` is set.
```powershell
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
```
What it does:
- Opens serial with your configured line params
- Queries `*IDN?` (checks `idn_substr` if provided)
- If `do_set` is true, sets voltage/current, enables output briefly, then disables
## Use the library programmatically
```python
from ecu_framework.power import OwonPSU, SerialParams
params = SerialParams(baudrate=115200, timeout=1.0)
with OwonPSU("COM4", params, eol="\n") as psu:
print(psu.idn())
psu.set_voltage(1, 5.0)
psu.set_current(1, 0.1)
psu.set_output(True)
# ... measure, etc.
psu.set_output(False)
```
Notes:
- Commands use newline-terminated writes; reads use `readline()`
- SCPI forms: `SOUR:VOLT`, `SOUR:CURR`, `MEAS:VOLT?`, `MEAS:CURR?`, `output 0/1`, `output?`
## Quick demo script
The quick demo reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml` and performs a small sequence.
```powershell
python .\vendor\Owon\owon_psu_quick_demo.py
```
It also scans ports with `*IDN?` using `scan_ports()`.
## Troubleshooting
- Empty `*IDN?` or timeouts:
- Verify COM port and exclusivity (no other program holding it)
- Try `eol: "\r\n"`
- Adjust `parity` and `stopbits` per your device manual
- Windows COM > 9:
- Most Python code accepts `COM10` directly; if needed in other tools, use `\\.\COM10`
- Flow control:
- Keep `xonxoff`, `rtscts`, `dsrdtr` false unless required
## Related files
- `ecu_framework/power/owon_psu.py` — PSU controller (pyserial)
- `tests/hardware/test_owon_psu.py` — Hardware test using central config
- `vendor/Owon/owon_psu_quick_demo.py` — Quick demo runner
- `config/owon_psu.example.yaml` — Example machine-specific YAML
# Power Supply (Owon) — control, configuration, tests, and quick demo
This guide covers using the Owon bench power supply via SCPI over serial with the framework.
> **MUM users**: the Melexis Universal Master has its own power output on
> `power_out0` and the MUM adapter calls `power_up()` / `power_down()` in
> `connect()` / `disconnect()` automatically. The Owon PSU is **not required**
> for the standard MUM flow — leave `power_supply.enabled: false`. The Owon
> remains useful for over/under-voltage scenarios, separate-rail tests, or
> when running with the legacy BabyLIN adapter (which has no built-in power).
- Library: `ecu_framework/power/owon_psu.py`
- Hardware test: `tests/hardware/test_owon_psu.py`
- Quick demo script: `vendor/Owon/owon_psu_quick_demo.py`
- Configuration: `config/test_config.yaml` (`power_supply`), optionally merged from `config/owon_psu.yaml` or env `OWON_PSU_CONFIG`
## Install dependencies
```powershell
pip install -r .\requirements.txt
```
## Configure
You can keep PSU settings centrally or in a machine-specific YAML.
- Central: the `power_supply` section in `config/test_config.yaml`
- Separate: `config/owon_psu.yaml` (or `OWON_PSU_CONFIG` env var)
Supported keys:
```yaml
power_supply:
enabled: true
port: COM4 # e.g., COM4 (Windows) or /dev/ttyUSB0 (Linux)
baudrate: 115200
timeout: 1.0
eol: "\n" # or "\r\n" if required
parity: N # N|E|O
stopbits: 1 # 1|2
xonxoff: false
rtscts: false
dsrdtr: false
idn_substr: OWON
do_set: false
set_voltage: 5.0
set_current: 0.1
```
The central config loader automatically merges `config/owon_psu.yaml` (or the path in `OWON_PSU_CONFIG`) into `power_supply`.
## Run the hardware test
Skips unless `power_supply.enabled` is true and `port` is set.
```powershell
pytest -k test_owon_psu_idn_and_optional_set -m hardware -q
```
What it does:
- Opens serial with your configured line params
- Queries `*IDN?` (checks `idn_substr` if provided)
- If `do_set` is true, sets voltage/current, enables output briefly, then disables
## Use the library programmatically
```python
from ecu_framework.power import OwonPSU, SerialParams
params = SerialParams(baudrate=115200, timeout=1.0)
with OwonPSU("COM4", params, eol="\n") as psu:
print(psu.idn())
psu.set_voltage(1, 5.0)
psu.set_current(1, 0.1)
psu.set_output(True)
# ... measure, etc.
psu.set_output(False)
```
Notes:
- Commands use newline-terminated writes; reads use `readline()`
- SCPI forms: `SOUR:VOLT`, `SOUR:CURR`, `MEAS:VOLT?`, `MEAS:CURR?`, `output 0/1`, `output?`
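For reference, these commands boil down to plain newline-terminated writes over the serial port. A rough pyserial sketch of the query side only (port and line settings are illustrative; the `OwonPSU` class handles argument formatting for the set-commands, so prefer it for anything beyond queries):
```python
import serial  # pyserial

with serial.Serial("COM4", baudrate=115200, timeout=1.0) as ser:
    ser.write(b"*IDN?\n")                                   # identification query
    print("IDN:", ser.readline().decode(errors="ignore").strip())
    ser.write(b"MEAS:VOLT?\n")                              # measured output voltage
    print("V:", ser.readline().decode(errors="ignore").strip())
    ser.write(b"output?\n")                                 # output enable state
    print("Output:", ser.readline().decode(errors="ignore").strip())
```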
## Quick demo script
The quick demo reads `OWON_PSU_CONFIG` or `config/owon_psu.yaml` and performs a small sequence.
```powershell
python .\vendor\Owon\owon_psu_quick_demo.py
```
It also scans ports with `*IDN?` using `scan_ports()`.
## Troubleshooting
- Empty `*IDN?` or timeouts:
- Verify COM port and exclusivity (no other program holding it)
- Try `eol: "\r\n"`
- Adjust `parity` and `stopbits` per your device manual
- Windows COM > 9:
- Most Python code accepts `COM10` directly; if needed in other tools, use `\\.\COM10`
- Flow control:
- Keep `xonxoff`, `rtscts`, `dsrdtr` false unless required
## Related files
- `ecu_framework/power/owon_psu.py` — PSU controller (pyserial)
- `tests/hardware/test_owon_psu.py` — Hardware test using central config
- `vendor/Owon/owon_psu_quick_demo.py` — Quick demo runner
- `config/owon_psu.example.yaml` — Example machine-specific YAML

View File

@ -1,53 +1,53 @@
# Report properties cheatsheet (record_property / rp)
Use these standardized keys when calling `record_property("key", value)` or the `rp("key", value)` helper.
This keeps reports consistent and easy to scan across suites.
## General
- test_phase: setup | call | teardown (if you want to distinguish)
- environment: local | ci | lab
- config_source: defaults | file | env | env+overrides (already used in unit tests)
## LIN (common)
- lin_type: mock | babylin
- tx_id: hex string or int (e.g., "0x12")
- tx_data: list of ints (bytes)
- rx_present: bool
- rx_id: hex string or int
- rx_data: list of ints
- timeout_s: float seconds
## BabyLIN specifics
- sdf_path: string
- schedule_nr: int
- receive_result: frame | timeout
- wrapper: mock_bl | _MockBytesOnly | real (for future)
## Mock-specific
- expected_data: list of ints
## Power supply (PSU)
- psu_idn: string from `*IDN?`
- output_status_before: bool
- output_status_after: bool
- set_voltage: float (V)
- set_current: float (A)
- measured_voltage: float (V)
- measured_current: float (A)
- psu_port: e.g., COM4 or /dev/ttyUSB0 (if helpful)
## Flashing
- hex_path: string
- sent_count: int (frames sent by stub/mock)
- flash_result: ok | fail (for future real flashing)
## Configuration highlights
- interface_type: mock | babylin
- interface_channel: int
- flash_enabled: bool
## Tips
- Prefer simple, lowercase snake_case keys
- Use lists for byte arrays so they render clearly in JSON and HTML
- Log both expected and actual when asserting patterns (e.g., deterministic responses)
- Keep units in the key name when helpful (voltage/current include V/A in the name)
# Report properties cheatsheet (record_property / rp)
Use these standardized keys when calling `record_property("key", value)` or the `rp("key", value)` helper.
This keeps reports consistent and easy to scan across suites.
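A minimal sketch of a test recording a handful of these keys (the `lin` fixture is the project's LIN interface fixture; the frame ID and payload are illustrative):
```python
def test_status_frame_roundtrip(record_property, lin):
    record_property("lin_type", "mock")
    record_property("tx_id", "0x12")
    record_property("tx_data", [0x01, 0x02, 0x03, 0x04])
    record_property("timeout_s", 1.0)
    rx = lin.receive(id=0x12, timeout=1.0)
    record_property("rx_present", rx is not None)
    if rx is not None:
        record_property("rx_id", hex(rx.id))
        record_property("rx_data", list(rx.data))
```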
## General
- test_phase: setup | call | teardown (if you want to distinguish)
- environment: local | ci | lab
- config_source: defaults | file | env | env+overrides (already used in unit tests)
## LIN (common)
- lin_type: mock | babylin
- tx_id: hex string or int (e.g., "0x12")
- tx_data: list of ints (bytes)
- rx_present: bool
- rx_id: hex string or int
- rx_data: list of ints
- timeout_s: float seconds
## BabyLIN specifics
- sdf_path: string
- schedule_nr: int
- receive_result: frame | timeout
- wrapper: mock_bl | _MockBytesOnly | real (for future)
## Mock-specific
- expected_data: list of ints
## Power supply (PSU)
- psu_idn: string from `*IDN?`
- output_status_before: bool
- output_status_after: bool
- set_voltage: float (V)
- set_current: float (A)
- measured_voltage: float (V)
- measured_current: float (A)
- psu_port: e.g., COM4 or /dev/ttyUSB0 (if helpful)
## Flashing
- hex_path: string
- sent_count: int (frames sent by stub/mock)
- flash_result: ok | fail (for future real flashing)
## Configuration highlights
- interface_type: mock | babylin
- interface_channel: int
- flash_enabled: bool
## Tips
- Prefer simple, lowercase snake_case keys
- Use lists for byte arrays so they render clearly in JSON and HTML
- Log both expected and actual when asserting patterns (e.g., deterministic responses)
- Keep units in the key name when helpful (voltage/current include V/A in the name)

167
docs/16_mum_internals.md Normal file
View File

@ -0,0 +1,167 @@
# MUM Adapter Internals (Melexis Universal Master)
This document describes how the `MumLinInterface` adapter wraps the Melexis
`pymumclient` and `pylin` packages, how frames flow across the LIN bus, and
which MUM-specific behaviors callers need to understand.
## Overview
- Location: `ecu_framework/lin/mum.py`
- Vendor reference scripts: `vendor/automated_lin_test/` (`test_led_control.py`, `test_auto_addressing.py`, `power_cycle.py`)
- Default MUM endpoint: `192.168.7.2` over USB-RNDIS
- LIN device name on MUM: `lin0`
- Power-control device on MUM: `power_out0`
- Required Python packages: `pylin`, `pymumclient` (Melexis-supplied; not on PyPI). See `vendor/automated_lin_test/install_packages.sh`.
## What the MUM gives you that BabyLIN doesn't
- **Built-in power control** on `power_out0` — the adapter calls `power_up()` in `connect()` and `power_down()` in `disconnect()`. No external Owon PSU needed for the standard flow.
- **Network access**: the MUM is IP-reachable, so the host machine (Windows, Linux, Pi) does not need vendor native libraries — only the two Python packages.
- **Direct transport-layer access** for sending raw frames with LIN 1.x **Classic** checksum (required for BSM-SNPD diagnostic frames).
## What it doesn't give you
- **No passive listen.** The MUM is master-driven. To "receive" a slave-published frame, the master sends a header on that frame ID and the slave must respond. `MumLinInterface.receive(id=None)` raises `NotImplementedError` for that reason.
- **No SDF / schedule manager.** The adapter does not run a schedule; tests publish frames explicitly (or pull slave frames explicitly) on each call.
## Mermaid: connect / receive / send
```mermaid
sequenceDiagram
autonumber
participant T as Test/Fixture
participant A as MumLinInterface
participant MM as pymumclient (MelexisUniversalMaster)
participant PL as pylin (LinDevice22 / TransportLayer)
participant E as ECU
T->>A: connect()
A->>MM: MelexisUniversalMaster()
A->>MM: open_all(host)
A->>MM: get_device('power_out0')
A->>MM: get_device('lin0')
A->>MM: linmaster.setup()
A->>PL: LinBusManager(linmaster)
A->>PL: LinDevice22(lin_bus); set baudrate
A->>PL: get_device('bus/transport_layer')
A->>MM: power_control.power_up()
Note over A: sleep(boot_settle_seconds)
A-->>T: connected
T->>A: receive(id=0x11)
A->>PL: send_message(master_to_slave=False, frame_id=0x11, data_length=4)
PL->>E: header for 0x11
E-->>PL: response bytes
PL-->>A: bytes
A-->>T: LinFrame(id=0x11, data=...)
T->>A: send(LinFrame(0x0A, payload))
A->>PL: send_message(master_to_slave=True, frame_id=0x0A, data_length=8, data=payload)
PL->>E: header + payload (Enhanced checksum)
T->>A: send_raw(b"\x7F\x06\xB5...")
A->>PL: transport_layer.ld_put_raw(data, baudrate)
Note over PL,E: LIN 1.x Classic checksum (required for BSM-SNPD)
T->>A: disconnect()
A->>MM: power_control.power_down()
A->>MM: linmaster.teardown()
```
## Public API
`MumLinInterface(host, lin_device='lin0', power_device='power_out0', baudrate=19200, frame_lengths=None, default_data_length=8, boot_settle_seconds=0.5)`
LinInterface contract (matches Mock and BabyLIN adapters):
- `connect()` — opens MUM, sets up LIN, **and powers up the ECU**
- `disconnect()` — powers down and tears down (best-effort)
- `send(frame: LinFrame)` — publishes a master-to-slave frame using Enhanced checksum
- `receive(id: int, timeout: float = 1.0) -> LinFrame | None` — triggers a slave read for `id`. The `timeout` argument is informational; the underlying `pylin` call is synchronous. Any pylin exception is treated as "no data" and returns `None`. Passing `id=None` raises `NotImplementedError`.
MUM-only extras:
- `send_raw(bytes)` — sends a raw LIN frame using **Classic** checksum via the transport layer's `ld_put_raw`. Use this for BSM-SNPD diagnostic frames; the firmware will reject them if Enhanced is used.
- `power_up()` / `power_down()` — direct control over `power_out0`
- `power_cycle(wait=2.0)` — convenience: `power_down()`, sleep, `power_up()`, then `boot_settle_seconds` sleep
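A minimal hardware-side usage sketch of this API (host address, frame IDs, and payload are illustrative):
```python
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mum import MumLinInterface

lin = MumLinInterface(host="192.168.7.2", frame_lengths={0x11: 4})
lin.connect()                                   # opens the MUM, sets up LIN, powers up the ECU
try:
    lin.send(LinFrame(id=0x0A, data=bytes([0x01] * 8)))    # master-to-slave, Enhanced checksum
    status = lin.receive(id=0x11, timeout=1.0)             # pull a slave-published frame
    if status is not None:
        print("status bytes:", list(status.data))
    lin.power_cycle(wait=2.0)                   # power_down -> sleep -> power_up -> settle
finally:
    lin.disconnect()                            # powers down and tears down
```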
## Frame-length resolution
Because the MUM is master-driven, every receive needs to know how many bytes
to ask for. The adapter resolves this from `frame_lengths`:
1. Built-in defaults for the 4SEVEN library (ALM_Status=4, ALM_Req_A=8, ConfigFrame=3, PWM_Frame=8, VF_Frame=8, Tj_Frame=8, PWM_wo_Comp=8, NVM_Debug=8).
2. Anything in the constructor's `frame_lengths` argument **overrides** the defaults.
3. If a frame ID isn't in the map, `default_data_length` (default 8) is used.
In YAML, hex keys work:
```yaml
interface:
type: mum
frame_lengths:
0x0A: 8
0x11: 4
```
The config loader coerces hex strings (`"0x0A"`) and integers alike.
## Diagnostic frames (BSM-SNPD)
The vendor's `test_auto_addressing.py` flow runs LIN 2.1 BSM-SNPD via raw
frames on `0x3C` (MasterReq). The framework supports the same flow:
```python
# inside a test that already has the MUM 'lin' fixture
data = bytearray([
0x7F, # NAD broadcast
0x06, # PCI: 6 data bytes
0xB5, # SID: BSM-SNPD
0xFF, # Supplier ID LSB
0x7F, # Supplier ID MSB
0x01, # subfunction (INIT)
0x02, # param 1
0xFF, # param 2
])
lin.send_raw(bytes(data))
```
`send_raw()` calls `transport_layer.ld_put_raw(data=..., baudrate=...)`
which uses LIN 1.x Classic checksum. Using `lin.send()` for these frames
would compute Enhanced checksum and the firmware would discard the frame.
## Error surfaces
- **`pymumclient is not installed`** / **`pylin is not installed`** — raised on `connect()` if the Melexis packages aren't importable. The error message points at `vendor/automated_lin_test/install_packages.sh`.
- **`MUM not connected`** — calling `send` / `receive` / `send_raw` before `connect()` (or after `disconnect()`).
- **`MUM transport layer not available`** — raised by `send_raw` when the LIN device didn't expose `bus/transport_layer`. Practically always available on MUM firmware that supports diagnostic frames.
- **pylin exceptions during `receive`** — converted to `None` (treated as a timeout / no-data). Use this to drive timeout-tolerant tests without try/except in the test body.
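A minimal sketch of a timeout-tolerant poll built on that behavior (frame ID, attempt count, and delay are illustrative):
```python
import time

def wait_for_frame(lin, frame_id, attempts=10, delay=0.1):
    """Poll the MUM until the slave answers on frame_id, or give up (returns None)."""
    for _ in range(attempts):
        frame = lin.receive(id=frame_id, timeout=1.0)  # None means no data / pylin error
        if frame is not None:
            return frame
        time.sleep(delay)
    return None
```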
## Unit testing without hardware
The adapter accepts `mum_module=` and `pylin_module=` constructor arguments
that bypass the real package imports. Tests in
`tests/unit/test_mum_adapter_mocked.py` use simple in-memory fakes to drive
the connect / send / receive / send_raw / power-cycle paths end to end. See
that file for a complete shim implementation.
```python
from ecu_framework.lin.mum import MumLinInterface
iface = MumLinInterface(
host="10.0.0.1",
boot_settle_seconds=0.0,
mum_module=fake_mum,
pylin_module=fake_pylin,
)
iface.connect()
# ... assertions ...
iface.disconnect()
```
## Notes and pitfalls
- **Boot settling**: After `power_up()` the adapter sleeps `boot_settle_seconds` (default 0.5 s) so the ECU has time to come up before the first frame. Increase if your ECU boots slowly.
- **Owon PSU coexistence**: the MUM provides power on `power_out0` independently of `ecu_framework/power/`. Leave `power_supply.enabled: false` for the standard MUM flow; enable it only for over/under-voltage scenarios that need a separate, programmable rail.
- **Networking**: USB-RNDIS bring-up can take a few seconds after plugging in the MUM. If `connect()` fails with a connection-refused error, check reachability with `ping 192.168.7.2` first.
- **Multiple MUMs**: only one MUM is supported per `MumLinInterface` instance. Different `host` addresses can run different fixture sessions side-by-side.

View File

@ -1,71 +1,71 @@
# Developer Commit Guide
This guide explains exactly what to commit to source control for this repository, and what to keep out. It also includes a suggested commit message and safe commands to stage changes.
## Commit these files
### Core framework (source)
- `ecu_framework/config.py`
- `ecu_framework/lin/base.py`
- `ecu_framework/lin/mock.py`
- `ecu_framework/lin/babylin.py`
- `ecu_framework/flashing/hex_flasher.py`
### Pytest plugin and config
- `conftest_plugin.py`
Generates HTML columns, requirements coverage JSON, and CI summary
- `pytest.ini`
- `requirements.txt`
### Tests and fixtures
- `tests/conftest.py`
- `tests/test_smoke_mock.py`
- `tests/test_babylin_hardware_smoke.py` (if present)
- `tests/test_hardware_placeholder.py` (if present)
### Documentation
- `README.md`
- `TESTING_FRAMEWORK_GUIDE.md`
- `docs/README.md`
- `docs/01_run_sequence.md`
- `docs/02_configuration_resolution.md`
- `docs/03_reporting_and_metadata.md`
- `docs/04_lin_interface_call_flow.md`
- `docs/05_architecture_overview.md`
- `docs/06_requirement_traceability.md`
- `docs/07_flash_sequence.md`
- `docs/08_babylin_internals.md`
### Vendor guidance (no binaries)
- `vendor/README.md`
- Any headers in `vendor/` (if added per SDK)
### Housekeeping
- `.gitignore`
Ignores reports and vendor binaries
- `reports/.gitkeep`
Retains folder structure without committing artifacts
## Do NOT commit (ignored or should be excluded)
- Virtual environments: `.venv/`, `venv/`, etc.
- Generated test artifacts:
`reports/report.html`, `reports/junit.xml`, `reports/summary.md`, `reports/requirements_coverage.json`
<!-- - Vendor binaries: anything under `vendor/**` with `.dll`, `.lib`, `.pdb` keep them for now -->
- Python caches: `__pycache__/`, `.pytest_cache/`
- Local env files: `.env`
## Safe commit commands (PowerShell)
```powershell
# Stage everything except what .gitignore already excludes
git add -A
# Commit with a helpful message
git commit -m "ECU framework: docs, reporting plugin (HTML metadata + requirements JSON + CI summary), .gitignore updates"
```
## Notes
<!-- - Do not commit BabyLin DLLs or proprietary binaries. Keep only the placement/readme and headers. Keep them for now -->
- The plugin writes CI-friendly artifacts into `reports/`; they're ignored by default but published in CI.
# Developer Commit Guide
This guide explains exactly what to commit to source control for this repository, and what to keep out. It also includes a suggested commit message and safe commands to stage changes.
## Commit these files
### Core framework (source)
- `ecu_framework/config.py`
- `ecu_framework/lin/base.py`
- `ecu_framework/lin/mock.py`
- `ecu_framework/lin/babylin.py`
- `ecu_framework/flashing/hex_flasher.py`
### Pytest plugin and config
- `conftest_plugin.py`
Generates HTML columns, requirements coverage JSON, and CI summary
- `pytest.ini`
- `requirements.txt`
### Tests and fixtures
- `tests/conftest.py`
- `tests/test_smoke_mock.py`
- `tests/test_babylin_hardware_smoke.py` (if present)
- `tests/test_hardware_placeholder.py` (if present)
### Documentation
- `README.md`
- `TESTING_FRAMEWORK_GUIDE.md`
- `docs/README.md`
- `docs/01_run_sequence.md`
- `docs/02_configuration_resolution.md`
- `docs/03_reporting_and_metadata.md`
- `docs/04_lin_interface_call_flow.md`
- `docs/05_architecture_overview.md`
- `docs/06_requirement_traceability.md`
- `docs/07_flash_sequence.md`
- `docs/08_babylin_internals.md`
### Vendor guidance (no binaries)
- `vendor/README.md`
- Any headers in `vendor/` (if added per SDK)
### Housekeeping
- `.gitignore`
Ignores reports and vendor binaries
- `reports/.gitkeep`
Retains folder structure without committing artifacts
## Do NOT commit (ignored or should be excluded)
- Virtual environments: `.venv/`, `venv/`, etc.
- Generated test artifacts:
`reports/report.html`, `reports/junit.xml`, `reports/summary.md`, `reports/requirements_coverage.json`
<!-- - Vendor binaries: anything under `vendor/**` with `.dll`, `.lib`, `.pdb` keep them for now -->
- Python caches: `__pycache__/`, `.pytest_cache/`
- Local env files: `.env`
## Safe commit commands (PowerShell)
```powershell
# Stage everything except what .gitignore already excludes
git add -A
# Commit with a helpful message
git commit -m "ECU framework: docs, reporting plugin (HTML metadata + requirements JSON + CI summary), .gitignore updates"
```
## Notes
<!-- - Do not commit BabyLin DLLs or proprietary binaries. Keep only the placement/readme and headers. Keep them for now -->
- The plugin writes CI-friendly artifacts into `reports/`; they're ignored by default but published in CI.

View File

@ -1,26 +1,29 @@
# Documentation Index
A guided tour of the ECU testing framework. Start here:
1. `01_run_sequence.md` — End-to-end run sequence and call flow
2. `02_configuration_resolution.md` — How configuration is loaded and merged
3. `03_reporting_and_metadata.md` — How test documentation becomes report metadata
4. `11_conftest_plugin_overview.md` — Custom pytest plugin: hooks, call sequence, and artifacts
5. `04_lin_interface_call_flow.md` — LIN abstraction and adapter behavior (Mock vs BabyLIN SDK wrapper)
6. `05_architecture_overview.md` — High-level architecture and components
7. `06_requirement_traceability.md` — Requirement markers and coverage visuals
8. `07_flash_sequence.md` — ECU flashing workflow and sequence diagram
9. `08_babylin_internals.md` — BabyLIN SDK wrapper internals and call flow
9. `DEVELOPER_COMMIT_GUIDE.md` — What to commit vs ignore, commands
10. `09_raspberry_pi_deployment.md` — Run on Raspberry Pi (venv, service, hardware notes)
11. `10_build_custom_image.md` — Build a custom Raspberry Pi OS image with the framework baked in
12. `12_using_the_framework.md` — Practical usage: local, hardware, CI, and Pi
13. `13_unit_testing_guide.md` — Unit tests layout, markers, coverage, and tips
14. `14_power_supply.md` — Owon PSU control, configuration, tests, and quick demo script
15. `15_report_properties_cheatsheet.md` — Standardized keys for record_property/rp across suites
Related references:
- Root project guide: `../README.md`
- Full framework guide: `../TESTING_FRAMEWORK_GUIDE.md`
- BabyLIN placement and integration: `../vendor/README.md`
- PSU quick demo and scripts: `../vendor/Owon/`
# Documentation Index
A guided tour of the ECU testing framework. Start here:
1. `01_run_sequence.md` — End-to-end run sequence and call flow
2. `02_configuration_resolution.md` — How configuration is loaded and merged
3. `03_reporting_and_metadata.md` — How test documentation becomes report metadata
4. `11_conftest_plugin_overview.md` — Custom pytest plugin: hooks, call sequence, and artifacts
5. `04_lin_interface_call_flow.md` — LIN abstraction and adapter behavior (Mock, MUM, legacy BabyLIN)
6. `05_architecture_overview.md` — High-level architecture and components
7. `06_requirement_traceability.md` — Requirement markers and coverage visuals
8. `07_flash_sequence.md` — ECU flashing workflow and sequence diagram
9. `08_babylin_internals.md` — BabyLIN SDK wrapper internals and call flow (legacy)
10. `16_mum_internals.md` — MUM (Melexis Universal Master) adapter internals and call flow
11. `DEVELOPER_COMMIT_GUIDE.md` — What to commit vs ignore, commands
12. `09_raspberry_pi_deployment.md` — Run on Raspberry Pi (venv, service, hardware notes)
13. `10_build_custom_image.md` — Build a custom Raspberry Pi OS image with the framework baked in
14. `12_using_the_framework.md` — Practical usage: local, hardware (MUM/BabyLIN), CI, and Pi
15. `13_unit_testing_guide.md` — Unit tests layout, markers, coverage, and tips
16. `14_power_supply.md` — Owon PSU control, configuration, tests, and quick demo script
17. `15_report_properties_cheatsheet.md` — Standardized keys for record_property/rp across suites
Related references:
- Root project guide: `../README.md`
- Full framework guide: `../TESTING_FRAMEWORK_GUIDE.md`
- BabyLIN placement and integration: `../vendor/README.md`
- MUM source scripts and protocol details: `../vendor/automated_lin_test/README.md`
- PSU quick demo and scripts: `../vendor/Owon/`

View File

@ -1,15 +1,15 @@
"""
ECU Tests framework package.
Provides:
- config: YAML configuration loader and types
- lin: LIN interface abstraction and adapters (mock and BabyLIN)
Package version is exposed as __version__.
"""
__all__ = [
"config",
"lin",
]
__version__ = "0.1.0"
"""
ECU Tests framework package.
Provides:
- config: YAML configuration loader and types
- lin: LIN interface abstraction and adapters (mock and BabyLIN)
Package version is exposed as __version__.
"""
__all__ = [
"config",
"lin",
]
__version__ = "0.1.0"

View File

@ -1,236 +1,266 @@
from __future__ import annotations # Postponed annotations for forward references and speed
import os # For environment variables and filesystem checks
import pathlib # Path handling across platforms
from dataclasses import dataclass, field # Lightweight typed containers
from typing import Any, Dict, Optional # Type hints for clarity
import yaml # Safe YAML parsing for configuration files
@dataclass
class FlashConfig:
"""Flashing-related configuration.
enabled: Whether to trigger flashing at session start.
hex_path: Path to the firmware HEX file (if any).
"""
enabled: bool = False # Off by default
hex_path: Optional[str] = None # No default file path
@dataclass
class InterfaceConfig:
"""LIN interface configuration.
type: Adapter type name: "mock" for the simulated adapter, "babylin" for real hardware via SDK.
channel: Channel index to use (0-based in most SDKs); default chosen by project convention.
bitrate: Informational; typically SDF/schedule defines effective bitrate for BabyLIN.
dll_path: Legacy/optional pointer to vendor DLLs when using ctypes (not used by SDK wrapper).
node_name: Optional friendly name for display/logging.
func_names: Legacy mapping for ctypes function names; ignored by SDK wrapper.
sdf_path: Path to the SDF to load on connect (BabyLIN only).
schedule_nr: Schedule index to start after connect (BabyLIN only).
"""
type: str = "mock" # "mock" or "babylin"
channel: int = 1 # Default channel index (project-specific default)
bitrate: int = 19200 # Typical LIN bitrate; SDF may override
dll_path: Optional[str] = None # Legacy ctypes option; not used with SDK wrapper
node_name: Optional[str] = None # Optional label for node/adapter
func_names: Dict[str, str] = field(default_factory=dict) # Legacy ctypes mapping; safe to leave empty
# SDK wrapper options
sdf_path: Optional[str] = None # Path to SDF file to load (BabyLIN)
schedule_nr: int = 0 # Schedule number to start after connect (BabyLIN)
@dataclass
class EcuTestConfig:
"""Top-level, fully-typed configuration for the framework.
interface: Settings for LIN communication (mock or BabyLIN).
flash: Optional flashing behavior configuration.
"""
interface: InterfaceConfig = field(default_factory=InterfaceConfig)
flash: FlashConfig = field(default_factory=FlashConfig)
# Serial power supply (e.g., Owon) configuration
# Test code can rely on these values to interact with PSU if enabled
power_supply: "PowerSupplyConfig" = field(default_factory=lambda: PowerSupplyConfig())
@dataclass
class PowerSupplyConfig:
"""Serial power supply configuration (e.g., Owon PSU).
enabled: Whether PSU tests/features should be active.
port: Serial device (e.g., COM4 on Windows, /dev/ttyUSB0 on Linux).
baudrate/timeout/eol: Basic line settings; eol often "\n" or "\r\n".
parity: One of "N", "E", "O".
stopbits: 1 or 2.
xonxoff/rtscts/dsrdtr: Flow control flags.
idn_substr: Optional substring to assert in *IDN? responses.
do_set/set_voltage/set_current: Optional demo/test actions.
"""
enabled: bool = False
port: Optional[str] = None
baudrate: int = 115200
timeout: float = 1.0
eol: str = "\n"
parity: str = "N"
stopbits: float = 1.0
xonxoff: bool = False
rtscts: bool = False
dsrdtr: bool = False
idn_substr: Optional[str] = None
do_set: bool = False
set_voltage: float = 1.0
set_current: float = 0.1
DEFAULT_CONFIG_RELATIVE = pathlib.Path("config") / "test_config.yaml" # Default config path relative to repo root
ENV_CONFIG_PATH = "ECU_TESTS_CONFIG" # Env var to override config file location
def _deep_update(base: Dict[str, Any], updates: Dict[str, Any]) -> Dict[str, Any]:
"""Recursively merge dict 'updates' into dict 'base'.
- Nested dicts are merged by key
- Scalars/collections at any level are replaced entirely
- Mutation occurs in-place on 'base' and the same object is returned
"""
for k, v in updates.items(): # Iterate all update keys
if isinstance(v, dict) and isinstance(base.get(k), dict): # Both sides dict → recurse
base[k] = _deep_update(base[k], v)
else: # Otherwise replace
base[k] = v
return base # Return the mutated base for chaining
def _to_dataclass(cfg: Dict[str, Any]) -> EcuTestConfig:
"""Convert a merged plain dict config into strongly-typed dataclasses.
Defensive casting is used to ensure correct types even if YAML contains strings.
"""
iface = cfg.get("interface", {}) # Sub-config for interface
flash = cfg.get("flash", {}) # Sub-config for flashing
psu = cfg.get("power_supply", {}) # Sub-config for power supply
return EcuTestConfig(
interface=InterfaceConfig(
type=str(iface.get("type", "mock")).lower(), # Normalize to lowercase
channel=int(iface.get("channel", 1)), # Coerce to int
bitrate=int(iface.get("bitrate", 19200)), # Coerce to int
dll_path=iface.get("dll_path"), # Optional legacy field
node_name=iface.get("node_name"), # Optional friendly name
func_names=dict(iface.get("func_names", {}) or {}), # Ensure a dict
sdf_path=iface.get("sdf_path"), # Optional SDF path
schedule_nr=int(iface.get("schedule_nr", 0)), # Coerce to int
),
flash=FlashConfig(
enabled=bool(flash.get("enabled", False)), # Coerce to bool
hex_path=flash.get("hex_path"), # Optional hex path
),
power_supply=PowerSupplyConfig(
enabled=bool(psu.get("enabled", False)),
port=psu.get("port"),
baudrate=int(psu.get("baudrate", 115200)),
timeout=float(psu.get("timeout", 1.0)),
eol=str(psu.get("eol", "\n")),
parity=str(psu.get("parity", "N")),
stopbits=float(psu.get("stopbits", 1.0)),
xonxoff=bool(psu.get("xonxoff", False)),
rtscts=bool(psu.get("rtscts", False)),
dsrdtr=bool(psu.get("dsrdtr", False)),
idn_substr=psu.get("idn_substr"),
do_set=bool(psu.get("do_set", False)),
set_voltage=float(psu.get("set_voltage", 1.0)),
set_current=float(psu.get("set_current", 0.1)),
),
)
def load_config(workspace_root: Optional[str] = None, overrides: Optional[Dict[str, Any]] = None) -> EcuTestConfig:
"""Load configuration from YAML file, environment, overrides, or defaults.
Precedence (highest to lowest):
1. in-memory 'overrides' dict
2. YAML file specified by env var ECU_TESTS_CONFIG
3. YAML at ./config/test_config.yaml (relative to workspace_root)
4. built-in defaults in this function
"""
# Start with built-in defaults; minimal, safe baseline
base: Dict[str, Any] = {
"interface": {
"type": "mock", # mock by default for developer friendliness
"channel": 1,
"bitrate": 19200,
},
"flash": {
"enabled": False,
"hex_path": None,
},
"power_supply": {
"enabled": False,
"port": None,
"baudrate": 115200,
"timeout": 1.0,
"eol": "\n",
"parity": "N",
"stopbits": 1.0,
"xonxoff": False,
"rtscts": False,
"dsrdtr": False,
"idn_substr": None,
"do_set": False,
"set_voltage": 1.0,
"set_current": 0.1,
},
}
cfg_path: Optional[pathlib.Path] = None # Resolved configuration file path
# 2) Environment variable can point to any YAML file
env_path = os.getenv(ENV_CONFIG_PATH)
if env_path:
candidate = pathlib.Path(env_path)
if candidate.is_file(): # Only accept existing files
cfg_path = candidate
# 3) Fallback to default path under the provided workspace root
if cfg_path is None and workspace_root:
candidate = pathlib.Path(workspace_root) / DEFAULT_CONFIG_RELATIVE
if candidate.is_file():
cfg_path = candidate
# Load YAML file if we have one
if cfg_path and cfg_path.is_file():
with open(cfg_path, "r", encoding="utf-8") as f:
file_cfg = yaml.safe_load(f) or {} # Parse YAML safely; empty → {}
if isinstance(file_cfg, dict): # Only merge dicts
_deep_update(base, file_cfg)
# Optionally merge a dedicated PSU YAML if present (or env var path)
# This allows users to keep sensitive or machine-specific serial settings separate
psu_env = os.getenv("OWON_PSU_CONFIG")
psu_default = None
if workspace_root:
candidate = pathlib.Path(workspace_root) / "config" / "owon_psu.yaml"
if candidate.is_file():
psu_default = candidate
psu_path: Optional[pathlib.Path] = pathlib.Path(psu_env) if psu_env else psu_default
if psu_path and psu_path.is_file():
with open(psu_path, "r", encoding="utf-8") as f:
psu_cfg = yaml.safe_load(f) or {}
if isinstance(psu_cfg, dict):
base.setdefault("power_supply", {})
# Merge PSU YAML into power_supply section
base["power_supply"] = _deep_update(base["power_supply"], psu_cfg)
# 1) In-memory overrides always win
if overrides:
_deep_update(base, overrides)
# Convert to typed dataclasses for ergonomic downstream usage
return _to_dataclass(base)
from __future__ import annotations # Postponed annotations for forward references and speed
import os # For environment variables and filesystem checks
import pathlib # Path handling across platforms
from dataclasses import dataclass, field # Lightweight typed containers
from typing import Any, Dict, Optional # Type hints for clarity
import yaml # Safe YAML parsing for configuration files
@dataclass
class FlashConfig:
"""Flashing-related configuration.
enabled: Whether to trigger flashing at session start.
hex_path: Path to the firmware HEX file (if any).
"""
enabled: bool = False # Off by default
hex_path: Optional[str] = None # No default file path
@dataclass
class InterfaceConfig:
"""LIN interface configuration.
type: Adapter type "mock" (simulated), "babylin" (legacy BabyLIN SDK), or "mum"
(Melexis Universal Master).
channel: Channel index to use (0-based in most SDKs); BabyLIN-specific.
bitrate: Effective LIN bitrate; the MUM uses this directly, the BabyLIN SDF may override.
dll_path: Legacy/optional pointer to vendor DLLs when using ctypes (not used by SDK wrapper).
node_name: Optional friendly name for display/logging.
func_names: Legacy mapping for ctypes function names; ignored by SDK wrapper.
sdf_path: Path to the SDF to load on connect (BabyLIN only).
schedule_nr: Schedule index to start after connect (BabyLIN only). -1 = skip.
host: MUM IP address (MUM only).
lin_device: MUM LIN device name (MUM only, default 'lin0').
power_device: MUM power-control device name (MUM only, default 'power_out0').
boot_settle_seconds: Delay after MUM power-up before sending the first frame.
frame_lengths: Optional map of frame_id (int) -> data length (int) used by the
MUM adapter when receiving slave-published frames.
"""
type: str = "mock" # "mock", "babylin", or "mum"
channel: int = 1
bitrate: int = 19200
dll_path: Optional[str] = None
node_name: Optional[str] = None
func_names: Dict[str, str] = field(default_factory=dict)
# BabyLIN-specific
sdf_path: Optional[str] = None
schedule_nr: int = 0
# MUM-specific
host: Optional[str] = None
lin_device: str = "lin0"
power_device: str = "power_out0"
boot_settle_seconds: float = 0.5
frame_lengths: Dict[int, int] = field(default_factory=dict)
@dataclass
class EcuTestConfig:
"""Top-level, fully-typed configuration for the framework.
interface: Settings for LIN communication (mock, MUM, or BabyLIN).
flash: Optional flashing behavior configuration.
"""
interface: InterfaceConfig = field(default_factory=InterfaceConfig)
flash: FlashConfig = field(default_factory=FlashConfig)
# Serial power supply (e.g., Owon) configuration
# Test code can rely on these values to interact with PSU if enabled
power_supply: "PowerSupplyConfig" = field(default_factory=lambda: PowerSupplyConfig())
@dataclass
class PowerSupplyConfig:
"""Serial power supply configuration (e.g., Owon PSU).
enabled: Whether PSU tests/features should be active.
port: Serial device (e.g., COM4 on Windows, /dev/ttyUSB0 on Linux).
baudrate/timeout/eol: Basic line settings; eol often "\n" or "\r\n".
parity: One of "N", "E", "O".
stopbits: 1 or 2.
xonxoff/rtscts/dsrdtr: Flow control flags.
idn_substr: Optional substring to assert in *IDN? responses.
do_set/set_voltage/set_current: Optional demo/test actions.
"""
enabled: bool = False
port: Optional[str] = None
baudrate: int = 115200
timeout: float = 1.0
eol: str = "\n"
parity: str = "N"
stopbits: float = 1.0
xonxoff: bool = False
rtscts: bool = False
dsrdtr: bool = False
idn_substr: Optional[str] = None
do_set: bool = False
set_voltage: float = 1.0
set_current: float = 0.1
DEFAULT_CONFIG_RELATIVE = pathlib.Path("config") / "test_config.yaml" # Default config path relative to repo root
ENV_CONFIG_PATH = "ECU_TESTS_CONFIG" # Env var to override config file location
def _deep_update(base: Dict[str, Any], updates: Dict[str, Any]) -> Dict[str, Any]:
"""Recursively merge dict 'updates' into dict 'base'.
- Nested dicts are merged by key
- Scalars/collections at any level are replaced entirely
- Mutation occurs in-place on 'base' and the same object is returned
"""
for k, v in updates.items(): # Iterate all update keys
if isinstance(v, dict) and isinstance(base.get(k), dict): # Both sides dict → recurse
base[k] = _deep_update(base[k], v)
else: # Otherwise replace
base[k] = v
return base # Return the mutated base for chaining
def _to_dataclass(cfg: Dict[str, Any]) -> EcuTestConfig:
"""Convert a merged plain dict config into strongly-typed dataclasses.
Defensive casting is used to ensure correct types even if YAML contains strings.
"""
iface = cfg.get("interface", {}) # Sub-config for interface
flash = cfg.get("flash", {}) # Sub-config for flashing
psu = cfg.get("power_supply", {}) # Sub-config for power supply
# Coerce frame_lengths keys to int (YAML may parse numeric keys as int already,
# but accept hex strings like "0x0A: 8" too).
raw_fl = iface.get("frame_lengths", {}) or {}
frame_lengths: Dict[int, int] = {}
if isinstance(raw_fl, dict):
for k, v in raw_fl.items():
try:
key = int(k, 0) if isinstance(k, str) else int(k)
frame_lengths[key] = int(v)
except (TypeError, ValueError):
continue
return EcuTestConfig(
interface=InterfaceConfig(
type=str(iface.get("type", "mock")).lower(),
channel=int(iface.get("channel", 1)),
bitrate=int(iface.get("bitrate", 19200)),
dll_path=iface.get("dll_path"),
node_name=iface.get("node_name"),
func_names=dict(iface.get("func_names", {}) or {}),
sdf_path=iface.get("sdf_path"),
schedule_nr=int(iface.get("schedule_nr", 0)),
host=iface.get("host"),
lin_device=str(iface.get("lin_device", "lin0")),
power_device=str(iface.get("power_device", "power_out0")),
boot_settle_seconds=float(iface.get("boot_settle_seconds", 0.5)),
frame_lengths=frame_lengths,
),
flash=FlashConfig(
enabled=bool(flash.get("enabled", False)), # Coerce to bool
hex_path=flash.get("hex_path"), # Optional hex path
),
power_supply=PowerSupplyConfig(
enabled=bool(psu.get("enabled", False)),
port=psu.get("port"),
baudrate=int(psu.get("baudrate", 115200)),
timeout=float(psu.get("timeout", 1.0)),
eol=str(psu.get("eol", "\n")),
parity=str(psu.get("parity", "N")),
stopbits=float(psu.get("stopbits", 1.0)),
xonxoff=bool(psu.get("xonxoff", False)),
rtscts=bool(psu.get("rtscts", False)),
dsrdtr=bool(psu.get("dsrdtr", False)),
idn_substr=psu.get("idn_substr"),
do_set=bool(psu.get("do_set", False)),
set_voltage=float(psu.get("set_voltage", 1.0)),
set_current=float(psu.get("set_current", 0.1)),
),
)
def load_config(workspace_root: Optional[str] = None, overrides: Optional[Dict[str, Any]] = None) -> EcuTestConfig:
"""Load configuration from YAML file, environment, overrides, or defaults.
Precedence (highest to lowest):
1. in-memory 'overrides' dict
2. YAML file specified by env var ECU_TESTS_CONFIG
3. YAML at ./config/test_config.yaml (relative to workspace_root)
4. built-in defaults in this function
"""
# Start with built-in defaults; minimal, safe baseline
base: Dict[str, Any] = {
"interface": {
"type": "mock", # mock by default for developer friendliness
"channel": 1,
"bitrate": 19200,
},
"flash": {
"enabled": False,
"hex_path": None,
},
"power_supply": {
"enabled": False,
"port": None,
"baudrate": 115200,
"timeout": 1.0,
"eol": "\n",
"parity": "N",
"stopbits": 1.0,
"xonxoff": False,
"rtscts": False,
"dsrdtr": False,
"idn_substr": None,
"do_set": False,
"set_voltage": 1.0,
"set_current": 0.1,
},
}
cfg_path: Optional[pathlib.Path] = None # Resolved configuration file path
# 2) Environment variable can point to any YAML file
env_path = os.getenv(ENV_CONFIG_PATH)
if env_path:
candidate = pathlib.Path(env_path)
if candidate.is_file(): # Only accept existing files
cfg_path = candidate
# 3) Fallback to default path under the provided workspace root
if cfg_path is None and workspace_root:
candidate = pathlib.Path(workspace_root) / DEFAULT_CONFIG_RELATIVE
if candidate.is_file():
cfg_path = candidate
# Load YAML file if we have one
if cfg_path and cfg_path.is_file():
with open(cfg_path, "r", encoding="utf-8") as f:
file_cfg = yaml.safe_load(f) or {} # Parse YAML safely; empty → {}
if isinstance(file_cfg, dict): # Only merge dicts
_deep_update(base, file_cfg)
# Optionally merge a dedicated PSU YAML if present (or env var path)
# This allows users to keep sensitive or machine-specific serial settings separate
psu_env = os.getenv("OWON_PSU_CONFIG")
psu_default = None
if workspace_root:
candidate = pathlib.Path(workspace_root) / "config" / "owon_psu.yaml"
if candidate.is_file():
psu_default = candidate
psu_path: Optional[pathlib.Path] = pathlib.Path(psu_env) if psu_env else psu_default
if psu_path and psu_path.is_file():
with open(psu_path, "r", encoding="utf-8") as f:
psu_cfg = yaml.safe_load(f) or {}
if isinstance(psu_cfg, dict):
base.setdefault("power_supply", {})
# Merge PSU YAML into power_supply section
base["power_supply"] = _deep_update(base["power_supply"], psu_cfg)
# 1) In-memory overrides always win
if overrides:
_deep_update(base, overrides)
# Convert to typed dataclasses for ergonomic downstream usage
return _to_dataclass(base)

View File

@ -1,9 +1,9 @@
"""
Flashing package.
Exports:
- HexFlasher: scaffold class to wire up UDS-based ECU programming over LIN.
"""
from .hex_flasher import HexFlasher
__all__ = ["HexFlasher"]
"""
Flashing package.
Exports:
- HexFlasher: scaffold class to wire up UDS-based ECU programming over LIN.
"""
from .hex_flasher import HexFlasher
__all__ = ["HexFlasher"]

View File

@ -1,25 +1,25 @@
from __future__ import annotations
import pathlib
from typing import Optional
from ..lin.base import LinInterface
class HexFlasher:
"""Stubbed ECU flasher over LIN.
Replace with your actual UDS flashing sequence. For now, just validates the file exists
and pretends to flash successfully.
"""
def __init__(self, lin: LinInterface) -> None:
self.lin = lin
def flash_hex(self, hex_path: str, *, erase: bool = True, verify: bool = True, timeout_s: float = 120.0) -> bool:
path = pathlib.Path(hex_path)
if not path.is_file():
raise FileNotFoundError(f"HEX file not found: {hex_path}")
# TODO: Implement real flashing over LIN (UDS). This is a placeholder.
# You might send specific frames or use a higher-level protocol library.
return True
from __future__ import annotations
import pathlib
from typing import Optional
from ..lin.base import LinInterface
class HexFlasher:
"""Stubbed ECU flasher over LIN.
Replace with your actual UDS flashing sequence. For now, just validates the file exists
and pretends to flash successfully.
"""
def __init__(self, lin: LinInterface) -> None:
self.lin = lin
def flash_hex(self, hex_path: str, *, erase: bool = True, verify: bool = True, timeout_s: float = 120.0) -> bool:
path = pathlib.Path(hex_path)
if not path.is_file():
raise FileNotFoundError(f"HEX file not found: {hex_path}")
# TODO: Implement real flashing over LIN (UDS). This is a placeholder.
# You might send specific frames or use a higher-level protocol library.
return True

View File

@ -1,17 +1,20 @@
"""
LIN interface package.
Exports:
- LinInterface, LinFrame: core abstraction and frame type
- MockBabyLinInterface: mock implementation for fast, hardware-free tests
Real hardware adapter (BabyLIN) is available in babylin.py.
"""
from .base import LinInterface, LinFrame
from .mock import MockBabyLinInterface
__all__ = [
"LinInterface",
"LinFrame",
"MockBabyLinInterface",
]
"""
LIN interface package.
Exports:
- LinInterface, LinFrame: core abstraction and frame type
- MockBabyLinInterface: mock implementation for fast, hardware-free tests
Real hardware adapters live in their own modules and are imported by the
fixture only when selected by config:
- babylin.BabyLinInterface (legacy; needs the BabyLIN SDK + native libs)
- mum.MumLinInterface (current; needs Melexis pylin + pymumclient)
"""
from .base import LinInterface, LinFrame
from .mock import MockBabyLinInterface
__all__ = [
"LinInterface",
"LinFrame",
"MockBabyLinInterface",
]

View File

@ -1,220 +1,393 @@
from __future__ import annotations # Enable postponed evaluation of annotations (PEP 563/649 style)
from typing import Optional # For optional type hints
from .base import LinInterface, LinFrame # Base abstraction and frame dataclass used by all LIN adapters
class BabyLinInterface(LinInterface):
"""LIN adapter that uses the vendor's BabyLIN Python SDK wrapper.
- Avoids manual ctypes; relies on BabyLIN_library.py BLC_* functions.
- Keeps the same LinInterface contract for send/receive/request/flush.
"""
def __init__(
self,
dll_path: Optional[str] = None, # Not used by SDK wrapper (auto-selects platform libs)
bitrate: int = 19200, # Informational; typically defined by SDF/schedule
channel: int = 0, # Channel index used with BLC_getChannelHandle (0-based)
node_name: Optional[str] = None, # Optional friendly name (not used by SDK calls)
func_names: Optional[dict] = None, # Legacy (ctypes) compatibility; unused here
sdf_path: Optional[str] = None, # Optional SDF file to load after open
schedule_nr: int = 0, # Schedule number to start after connect
wrapper_module: Optional[object] = None, # Inject a wrapper (e.g., mock) for tests
) -> None:
self.bitrate = bitrate # Store configured (informational) bitrate
self.channel_index = channel # Desired channel index
self.node_name = node_name or "ECU_TEST_NODE" # Default node name if not provided
self.sdf_path = sdf_path # SDF to load (if provided)
self.schedule_nr = schedule_nr # Schedule to start on connect
# Choose the BabyLIN wrapper module to use:
# - If wrapper_module provided (unit tests with mock), use it
# - Else dynamically import the real SDK wrapper (BabyLIN_library.py)
if wrapper_module is not None:
_bl = wrapper_module
else:
import importlib, sys, os # Local import to avoid global dependency during unit tests
_bl = None # Placeholder for resolved module
import_errors = [] # Accumulate import errors for diagnostics
for modname in ("BabyLIN_library", "vendor.BabyLIN_library"):
try:
_bl = importlib.import_module(modname)
break
except Exception as e: # pragma: no cover
import_errors.append((modname, str(e)))
if _bl is None:
# Try adding the common 'vendor' folder to sys.path then retry import
repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
vendor_dir = os.path.join(repo_root, "vendor")
if os.path.isdir(vendor_dir) and vendor_dir not in sys.path:
sys.path.insert(0, vendor_dir)
try:
_bl = importlib.import_module("BabyLIN_library")
except Exception as e: # pragma: no cover
import_errors.append(("BabyLIN_library", str(e)))
if _bl is None:
# Raise a helpful error with all attempted import paths
details = "; ".join([f"{m}: {err}" for m, err in import_errors]) or "not found"
raise RuntimeError(
"Failed to import BabyLIN_library. Ensure the SDK's BabyLIN_library.py is present in the project (e.g., vendor/BabyLIN_library.py). Details: "
+ details
)
# Create the BabyLIN SDK instance (module exposes create_BabyLIN())
self._BabyLIN = _bl.create_BabyLIN()
# Small helper to call BLC_* functions by name (keeps call sites concise)
self._bl_call = lambda name, *args, **kwargs: getattr(self._BabyLIN, name)(*args, **kwargs)
self._handle = None # Device handle returned by BLC_openPort
self._channel_handle = None # Per-channel handle returned by BLC_getChannelHandle
self._connected = False # Internal connection state flag
def _err(self, rc: int) -> None:
"""Raise a RuntimeError with a readable SDK error message for rc != BL_OK."""
if rc == self._BabyLIN.BL_OK:
return
# Prefer a human-friendly error string if the SDK provides it
try:
get_str = getattr(self._BabyLIN, 'BLC_getDetailedErrorString', None)
msg = get_str(rc) if get_str else f"rc={rc}"
if not isinstance(msg, str):
msg = str(msg)
except Exception:
msg = f"rc={rc}"
raise RuntimeError(f"BabyLIN error: {msg}")
def connect(self) -> None:
"""Open device, optionally load SDF, select channel, and start schedule."""
# Discover BabyLIN devices (returns a list of port identifiers)
ports = self._bl_call('BLC_getBabyLinPorts', 100)
if not ports:
raise RuntimeError("No BabyLIN devices found")
# Open the first available device port (you could extend to select by config)
self._handle = self._bl_call('BLC_openPort', ports[0])
if not self._handle:
raise RuntimeError("Failed to open BabyLIN port")
# Load SDF onto the device, if configured (3rd arg '1' often means 'download')
if self.sdf_path:
rc = self._bl_call('BLC_loadSDF', self._handle, self.sdf_path, 1)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
# Get channel count and pick the configured channel index (default 0)
ch_count = self._bl_call('BLC_getChannelCount', self._handle)
if ch_count <= 0:
raise RuntimeError("No channels reported by device")
ch_idx = int(self.channel_index)
if ch_idx < 0 or ch_idx >= ch_count:
ch_idx = 0
# Resolve a channel handle used for all subsequent Tx/Rx commands
self._channel_handle = self._bl_call('BLC_getChannelHandle', self._handle, ch_idx)
# Start a schedule if configured (common requirement for regular polling/masters)
if self.schedule_nr is not None:
cmd = f"start schedule {int(self.schedule_nr)};"
rc = self._bl_call('BLC_sendCommand', self._channel_handle, cmd)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
self._connected = True # Mark interface as connected
def disconnect(self) -> None:
"""Close device handles and reset internal state (best-effort)."""
try:
self._bl_call('BLC_closeAll') # Close all device connections via SDK
except Exception:
pass # Ignore SDK exceptions during shutdown
self._connected = False
self._handle = None
self._channel_handle = None
def send(self, frame: LinFrame) -> None:
"""Transmit a LIN frame using BLC_mon_set_xmit."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
# slotTime=0 means use default timing configured by schedule/SDF
rc = self._bl_call('BLC_mon_set_xmit', self._channel_handle, int(frame.id), bytes(frame.data), 0)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
def receive(self, id: Optional[int] = None, timeout: float = 1.0):
"""Receive a LIN frame with optional ID filter and timeout (seconds)."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
ms = max(0, int(timeout * 1000)) # SDK expects milliseconds
try:
frame = self._bl_call('BLC_getNextFrameTimeout', self._channel_handle, ms)
except Exception:
# Many wrappers raise on timeout; unify as 'no data'
return None
if not frame:
return None
# Convert SDK frame to our LinFrame (mask to classic 6-bit LIN ID range)
fid = int(frame.frameId & 0x3F)
data = bytes(list(frame.frameData)[: int(frame.lenOfData)])
lin_frame = LinFrame(id=fid, data=data)
if id is None or fid == id:
return lin_frame
# If a different ID was received and caller requested a filter, return None
return None
def flush(self) -> None:
"""Flush RX buffers if the SDK exposes such a function (optional)."""
if not self._connected or not self._channel_handle:
return
try:
# Some SDKs may not expose flush; no-op if missing
flush = getattr(self._BabyLIN, 'BLC_flush', None)
if flush:
flush(self._channel_handle)
except Exception:
pass
def request(self, id: int, length: int, timeout: float = 1.0):
"""Perform a LIN master request and wait for response.
Strategy:
- Prefer SDK method `BLC_sendRawMasterRequest` if present (bytes or length variants).
- Fallback: transmit a header with zeroed payload; then wait for response.
- Always attempt to receive a frame with matching ID within 'timeout'.
"""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
sent = False # Track whether a request command was successfully issued
# Attempt to use raw master request if provided by SDK
# Preference: try (channel, frameId, length) first because our mock wrapper
# synthesizes a deterministic payload for this form (see vendor/mock_babylin_wrapper.py),
# then fall back to (channel, frameId, dataBytes) if the SDK only supports that.
raw_req = getattr(self._BabyLIN, 'BLC_sendRawMasterRequest', None)
if raw_req:
# Prefer the (channel, frameId, length) variant first if supported
try:
rc = raw_req(self._channel_handle, int(id), int(length))
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except TypeError:
# Fallback to (channel, frameId, dataBytes)
try:
payload = bytes([0] * max(0, min(8, int(length))))
rc = raw_req(self._channel_handle, int(id), payload)
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except Exception:
sent = False
except Exception:
sent = False
if not sent:
# Fallback: issue a transmit; many stacks will respond on the bus
self.send(LinFrame(id=id, data=bytes([0] * max(0, min(8, int(length))))))
# Wait for the response frame with matching ID (or None on timeout)
return self.receive(id=id, timeout=timeout)
from __future__ import annotations # Enable postponed evaluation of annotations (PEP 563/649 style)
from typing import Optional # For optional type hints
from .base import LinInterface, LinFrame # Base abstraction and frame dataclass used by all LIN adapters
class BabyLinInterface(LinInterface):
"""LIN adapter that uses the vendor's BabyLIN Python SDK wrapper.
- Avoids manual ctypes; relies on BabyLIN_library.py BLC_* functions.
- Keeps the same LinInterface contract for send/receive/request/flush.
"""
def __init__(
self,
dll_path: Optional[str] = None, # Not used by SDK wrapper (auto-selects platform libs)
bitrate: int = 19200, # Informational; typically defined by SDF/schedule
channel: int = 0, # Channel index used with BLC_getChannelHandle (0-based)
node_name: Optional[str] = None, # Optional friendly name (not used by SDK calls)
func_names: Optional[dict] = None, # Legacy (ctypes) compatibility; unused here
sdf_path: Optional[str] = None, # Optional SDF file to load after open
schedule_nr: int = 0, # Schedule number to start after connect
wrapper_module: Optional[object] = None, # Inject a wrapper (e.g., mock) for tests
) -> None:
self.bitrate = bitrate # Store configured (informational) bitrate
self.channel_index = channel # Desired channel index
self.node_name = node_name or "ECU_TEST_NODE" # Default node name if not provided
self.sdf_path = sdf_path # SDF to load (if provided)
self.schedule_nr = schedule_nr # Schedule to start on connect
# Choose the BabyLIN wrapper module to use:
# - If wrapper_module provided (unit tests with mock), use it
# - Else dynamically import the real SDK wrapper (BabyLIN_library.py)
if wrapper_module is not None:
_bl = wrapper_module
else:
import importlib, sys, os # Local import to avoid global dependency during unit tests
_bl = None # Placeholder for resolved module
import_errors = [] # Accumulate import errors for diagnostics
for modname in ("BabyLIN_library", "vendor.BabyLIN_library"):
try:
_bl = importlib.import_module(modname)
break
except Exception as e: # pragma: no cover
import_errors.append((modname, str(e)))
if _bl is None:
# Try adding the common 'vendor' folder to sys.path then retry import
repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
vendor_dir = os.path.join(repo_root, "vendor")
if os.path.isdir(vendor_dir) and vendor_dir not in sys.path:
sys.path.insert(0, vendor_dir)
try:
_bl = importlib.import_module("BabyLIN_library")
except Exception as e: # pragma: no cover
import_errors.append(("BabyLIN_library", str(e)))
if _bl is None:
# Raise a helpful error with all attempted import paths
details = "; ".join([f"{m}: {err}" for m, err in import_errors]) or "not found"
raise RuntimeError(
"Failed to import BabyLIN_library. Ensure the SDK's BabyLIN_library.py is present in the project (e.g., vendor/BabyLIN_library.py). Details: "
+ details
)
# Create the BabyLIN SDK instance (module exposes create_BabyLIN())
self._BabyLIN = _bl.create_BabyLIN()
# Small helper to call BLC_* functions by name (keeps call sites concise)
self._bl_call = lambda name, *args, **kwargs: getattr(self._BabyLIN, name)(*args, **kwargs)
self._handle = None # Device handle returned by BLC_openPort
self._channel_handle = None # Per-channel handle returned by BLC_getChannelHandle
self._connected = False # Internal connection state flag
def _detail_for(self, rc) -> str:
"""Look up a human-readable SDK error message; never raises.
Tries (in order):
1. BLC_getLastError(handle): device-side last error (best detail)
2. BLC_getErrorString(rc): simple rc lookup
3. BLC_getDetailedErrorString(rc, 0): detailed lookup (rc + report_param)
Returns the first non-empty message, or "".
"""
parts = []
# 1. Device-side last error — usually the most informative.
# BLC_getLastError takes the device connection handle; fall back to the
# channel handle if the device handle isn't set yet.
for h in (self._handle, self._channel_handle):
if h is None:
continue
try:
fn = getattr(self._BabyLIN, 'BLC_getLastError', None)
if fn is not None:
s = fn(h)
if isinstance(s, bytes):
s = s.decode('utf-8', errors='ignore')
if s:
parts.append(str(s))
break
except Exception:
continue
if rc is None:
return " | ".join(parts)
# 2. Simple error string by rc
try:
fn = getattr(self._BabyLIN, 'BLC_getErrorString', None)
if fn is not None:
s = fn(int(rc))
if isinstance(s, bytes):
s = s.decode('utf-8', errors='ignore')
if s:
parts.append(str(s))
except Exception:
pass
# 3. Detailed string (rc + report_parameter)
try:
fn = getattr(self._BabyLIN, 'BLC_getDetailedErrorString', None)
if fn is not None:
s = fn(int(rc), 0)
if isinstance(s, bytes):
s = s.decode('utf-8', errors='ignore')
if s:
parts.append(str(s))
except Exception:
pass
return " | ".join(parts)
def _err(self, rc: int, context: str = "") -> None:
"""Raise a RuntimeError with a readable SDK error message for rc != BL_OK."""
if rc == self._BabyLIN.BL_OK:
return
msg = self._detail_for(rc) or f"rc={rc}"
prefix = f"BabyLIN error{(' (' + context + ')') if context else ''}"
raise RuntimeError(f"{prefix}: {msg} (rc={rc})")
def _exec_command(self, cmd: str) -> None:
"""Run a BLC_sendCommand on the channel handle, surfacing detailed errors.
The SDK's wrapper raises BabyLINException for any non-zero rc. We catch
that and re-raise a RuntimeError that includes BLC_getDetailedErrorString,
so callers see e.g. "schedule index out of range" instead of opaque "303".
"""
if self._channel_handle is None:
raise RuntimeError("BabyLIN not connected")
try:
rc = self._bl_call('BLC_sendCommand', self._channel_handle, cmd)
except Exception as e:
rc = getattr(e, 'errorCode', None)
if rc is None:
# Try common alternate attributes used by SDK exception types
for attr in ('rc', 'returncode', 'code'):
rc = getattr(e, attr, None)
if rc is not None:
break
detail = self._detail_for(rc) if rc is not None else ""
rc_part = f"rc={rc}" if rc is not None else "rc=?"
extra = f": {detail}" if detail else ""
raise RuntimeError(
f"BabyLIN command failed: {cmd!r} ({rc_part}){extra}"
) from e
if rc != self._BabyLIN.BL_OK:
self._err(rc, context=f"command {cmd!r}")
def connect(self) -> None:
"""Open device, optionally load SDF, select channel, and start schedule."""
# Discover BabyLIN devices (returns a list of port identifiers)
ports = self._bl_call('BLC_getBabyLinPorts', 100)
if not ports:
raise RuntimeError("No BabyLIN devices found")
# Open the first available device port (you could extend to select by config)
self._handle = self._bl_call('BLC_openPort', ports[0])
if not self._handle:
raise RuntimeError("Failed to open BabyLIN port")
# Load SDF onto the device, if configured (3rd arg '1' often means 'download')
if self.sdf_path:
rc = self._bl_call('BLC_loadSDF', self._handle, self.sdf_path, 1)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
# Get channel count and resolve the channel handle.
# A BabyLIN device may expose multiple channel types (LIN/CAN/...).
# When the SDK supports BLC_getChannelInfo, we filter by info.type==0
# to find LIN channels (mirrors vendor/BLCInterfaceExample.py).
# Without it (older SDKs, mock wrappers), we fall back to honoring
# the configured index and validating the handle.
ch_count = self._bl_call('BLC_getChannelCount', self._handle)
if ch_count <= 0:
raise RuntimeError("No channels reported by device")
configured_idx = int(self.channel_index)
get_info = getattr(self._BabyLIN, 'BLC_getChannelInfo', None)
if get_info is not None:
lin_channels = [] # [(idx, handle, info)] for type==0 channels
seen = [] # diagnostics if no LIN channel is found
for idx in range(int(ch_count)):
h = self._bl_call('BLC_getChannelHandle', self._handle, idx)
if not h:
seen.append((idx, None, None))
continue
try:
info = get_info(h)
except Exception:
info = None
seen.append((idx, h, info))
if info is not None and getattr(info, 'type', None) == 0:
lin_channels.append((idx, h, info))
if not lin_channels:
details = ", ".join(
f"idx={i} handle={'ok' if h else 'None'} "
f"type={getattr(info, 'type', '?') if info is not None else '?'} "
f"name={getattr(info, 'name', b'').decode('utf-8', errors='ignore') if info is not None else ''}"
for i, h, info in seen
)
raise RuntimeError(
f"No LIN channel (type==0) found on device. Channels seen: [{details}]"
)
# Prefer the configured index if it is a LIN channel; otherwise the first LIN channel.
chosen = next((t for t in lin_channels if t[0] == configured_idx), lin_channels[0])
ch_idx, self._channel_handle, _ = chosen
else:
ch_idx = configured_idx if 0 <= configured_idx < int(ch_count) else 0
self._channel_handle = self._bl_call('BLC_getChannelHandle', self._handle, ch_idx)
if not self._channel_handle:
raise RuntimeError(f"BLC_getChannelHandle returned invalid handle for channel {ch_idx}")
# Mark connected before any sendCommand so send_command()/_exec_command()
# accept the call. Auto-start a schedule only if a non-negative index is set;
# use -1 (or None) in config to defer starting to the test/caller.
self._connected = True
if self.schedule_nr is not None and int(self.schedule_nr) >= 0:
self._exec_command(f"start schedule {int(self.schedule_nr)};")
def send_command(self, cmd: str) -> None:
"""Send a raw BabyLIN SDK command via BLC_sendCommand on the channel handle.
Useful for actions that don't fit the abstract LinInterface, e.g.:
send_command("stop;")
send_command("setsig 0 255;")
Note: BabyLIN firmware accepts 'start schedule <index>;' but not the
schedule name. Use start_schedule() for name-or-index lookup.
"""
if not self._connected:
raise RuntimeError("BabyLIN not connected")
self._exec_command(cmd)
def schedule_nr_for_name(self, name: str) -> int:
"""Return the schedule index matching `name` from the loaded SDF.
Tries BLC_SDF_getScheduleNr first; falls back to enumerating with
BLC_SDF_getNumSchedules + BLC_SDF_getScheduleName for older SDKs.
Raises RuntimeError if the schedule isn't found.
"""
if self._channel_handle is None:
raise RuntimeError("BabyLIN not connected")
get_nr = getattr(self._BabyLIN, 'BLC_SDF_getScheduleNr', None)
if get_nr is not None:
try:
return int(get_nr(self._channel_handle, name))
except Exception:
pass # fall through to enumeration
get_count = getattr(self._BabyLIN, 'BLC_SDF_getNumSchedules', None)
get_name = getattr(self._BabyLIN, 'BLC_SDF_getScheduleName', None)
if get_count is None or get_name is None:
raise RuntimeError(
f"SDK does not expose schedule lookup; cannot resolve schedule {name!r}"
)
count = int(get_count(self._channel_handle))
names = []
for i in range(count):
try:
n = get_name(self._channel_handle, i)
except Exception:
n = ""
names.append(n)
if n == name:
return i
raise RuntimeError(
f"Schedule {name!r} not found in SDF. Available: {names}"
)
def start_schedule(self, name_or_nr) -> int:
"""Start a schedule by name (str) or index (int). Returns the index used."""
nr = name_or_nr if isinstance(name_or_nr, int) else self.schedule_nr_for_name(str(name_or_nr))
self.send_command(f"start schedule {int(nr)};")
return int(nr)
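# Usage sketch (hedged; "NormalSchedule" is a hypothetical schedule name from the SDF):
#   iface = BabyLinInterface(sdf_path="vendor/example.sdf", schedule_nr=-1)
#   iface.connect()                              # schedule_nr=-1 defers the auto-start
#   nr = iface.start_schedule("NormalSchedule")  # name -> index lookup, then start
#   iface.send_command("stop;")                  # raw SDK command to stop it again
#   iface.start_schedule(nr)                     # an int index skips the lookup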
def disconnect(self) -> None:
"""Close device handles and reset internal state (best-effort)."""
try:
self._bl_call('BLC_closeAll') # Close all device connections via SDK
except Exception:
pass # Ignore SDK exceptions during shutdown
self._connected = False
self._handle = None
self._channel_handle = None
def send(self, frame: LinFrame) -> None:
"""Transmit a LIN frame using BLC_mon_set_xmit."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
# slotTime=0 means use default timing configured by schedule/SDF
rc = self._bl_call('BLC_mon_set_xmit', self._channel_handle, int(frame.id), bytes(frame.data), 0)
if rc != self._BabyLIN.BL_OK:
self._err(rc)
def receive(self, id: Optional[int] = None, timeout: float = 1.0):
"""Receive a LIN frame with optional ID filter and timeout (seconds)."""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
ms = max(0, int(timeout * 1000)) # SDK expects milliseconds
try:
frame = self._bl_call('BLC_getNextFrameTimeout', self._channel_handle, ms)
except Exception:
# Many wrappers raise on timeout; unify as 'no data'
return None
if not frame:
return None
# Convert SDK frame to our LinFrame (mask to classic 6-bit LIN ID range)
fid = int(frame.frameId & 0x3F)
data = bytes(list(frame.frameData)[: int(frame.lenOfData)])
lin_frame = LinFrame(id=fid, data=data)
if id is None or fid == id:
return lin_frame
# If a different ID was received and caller requested a filter, return None
return None
def flush(self) -> None:
"""Flush RX buffers if the SDK exposes such a function (optional)."""
if not self._connected or not self._channel_handle:
return
try:
# Some SDKs may not expose flush; no-op if missing
flush = getattr(self._BabyLIN, 'BLC_flush', None)
if flush:
flush(self._channel_handle)
except Exception:
pass
def request(self, id: int, length: int, timeout: float = 1.0):
"""Perform a LIN master request and wait for response.
Strategy:
- Prefer SDK method `BLC_sendRawMasterRequest` if present (bytes or length variants).
- Fallback: transmit a header with zeroed payload; then wait for response.
- Always attempt to receive a frame with matching ID within 'timeout'.
"""
if not self._connected or not self._channel_handle:
raise RuntimeError("BabyLIN not connected")
sent = False # Track whether a request command was successfully issued
# Attempt to use raw master request if provided by SDK
# Preference: try (channel, frameId, length) first because our mock wrapper
# synthesizes a deterministic payload for this form (see vendor/mock_babylin_wrapper.py),
# then fall back to (channel, frameId, dataBytes) if the SDK only supports that.
raw_req = getattr(self._BabyLIN, 'BLC_sendRawMasterRequest', None)
if raw_req:
# Prefer the (channel, frameId, length) variant first if supported
try:
rc = raw_req(self._channel_handle, int(id), int(length))
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except TypeError:
# Fallback to (channel, frameId, dataBytes)
try:
payload = bytes([0] * max(0, min(8, int(length))))
rc = raw_req(self._channel_handle, int(id), payload)
if rc == self._BabyLIN.BL_OK:
sent = True
else:
self._err(rc)
except Exception:
sent = False
except Exception:
sent = False
if not sent:
# Fallback: issue a transmit; many stacks will respond on the bus
self.send(LinFrame(id=id, data=bytes([0] * max(0, min(8, int(length))))))
# Wait for the response frame with matching ID (or None on timeout)
return self.receive(id=id, timeout=timeout)
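As a minimal sketch of how this adapter is exercised without hardware, the mock wrapper can be injected via `wrapper_module` (assumptions: vendor/mock_babylin_wrapper.py mirrors the SDK wrapper's surface, i.e. exposes create_BabyLIN() and the BLC_* methods used above, and the vendor folder is importable):
# Hedged unit-test sketch; the import path is hypothetical (add vendor/ to sys.path first).
import mock_babylin_wrapper as mock_bl
iface = BabyLinInterface(wrapper_module=mock_bl, schedule_nr=-1)
iface.connect()
resp = iface.request(id=0x11, length=4, timeout=0.5)  # master request, wait for reply
if resp is not None:
    print(f"ID=0x{resp.id:02X} data={resp.data.hex()}")
iface.disconnect()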

View File

@ -1,60 +1,60 @@
from __future__ import annotations
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional
@dataclass
class LinFrame:
"""Represents a LIN frame.
id: Frame identifier (0x00 - 0x3F typical for classic LIN IDs)
data: Up to 8 bytes payload.
"""
id: int
data: bytes
def __post_init__(self) -> None:
if not (0 <= self.id <= 0x3F):
raise ValueError(f"LIN ID out of range: {self.id}")
if not isinstance(self.data, (bytes, bytearray)):
# allow list of ints
try:
self.data = bytes(self.data) # type: ignore[arg-type]
except Exception as e: # pragma: no cover - defensive
raise TypeError("data must be bytes-like") from e
if len(self.data) > 8:
raise ValueError("LIN data length must be <= 8")
class LinInterface(ABC):
"""Abstract interface for LIN communication."""
@abstractmethod
def connect(self) -> None:
"""Open the interface connection."""
@abstractmethod
def disconnect(self) -> None:
"""Close the interface connection."""
@abstractmethod
def send(self, frame: LinFrame) -> None:
"""Send a LIN frame."""
@abstractmethod
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
"""Receive a LIN frame, optionally filtered by ID. Returns None on timeout."""
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
"""Default request implementation: send header then wait a frame.
Override in concrete implementation if different behavior is needed.
"""
# By default, just wait for any frame with this ID
return self.receive(id=id, timeout=timeout)
def flush(self) -> None:
"""Optional: flush RX buffers."""
pass
from __future__ import annotations
from abc import ABC, abstractmethod
from dataclasses import dataclass
from typing import Optional
@dataclass
class LinFrame:
"""Represents a LIN frame.
id: Frame identifier (0x00 - 0x3F typical for classic LIN IDs)
data: Up to 8 bytes payload.
"""
id: int
data: bytes
def __post_init__(self) -> None:
if not (0 <= self.id <= 0x3F):
raise ValueError(f"LIN ID out of range: {self.id}")
if not isinstance(self.data, (bytes, bytearray)):
# allow list of ints
try:
self.data = bytes(self.data) # type: ignore[arg-type]
except Exception as e: # pragma: no cover - defensive
raise TypeError("data must be bytes-like") from e
if len(self.data) > 8:
raise ValueError("LIN data length must be <= 8")
class LinInterface(ABC):
"""Abstract interface for LIN communication."""
@abstractmethod
def connect(self) -> None:
"""Open the interface connection."""
@abstractmethod
def disconnect(self) -> None:
"""Close the interface connection."""
@abstractmethod
def send(self, frame: LinFrame) -> None:
"""Send a LIN frame."""
@abstractmethod
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
"""Receive a LIN frame, optionally filtered by ID. Returns None on timeout."""
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
"""Default request implementation: send header then wait a frame.
Override in concrete implementation if different behavior is needed.
"""
# By default, just wait for any frame with this ID
return self.receive(id=id, timeout=timeout)
def flush(self) -> None:
"""Optional: flush RX buffers."""
pass
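To make the LinFrame validation rules concrete, a small sketch (IDs and payloads are arbitrary):
from ecu_framework.lin.base import LinFrame  # package path as used by conftest.py
frame = LinFrame(id=0x11, data=[0x01, 0x02, 0x03, 0x04])  # a list of ints is coerced to bytes
assert frame.data == b"\x01\x02\x03\x04"
try:
    LinFrame(id=0x40, data=b"")  # IDs above 0x3F are rejected
except ValueError as exc:
    print(exc)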

View File

@ -1,73 +1,73 @@
from __future__ import annotations
import queue
import threading
import time
from typing import Optional
from .base import LinInterface, LinFrame
class MockBabyLinInterface(LinInterface):
"""A mock LIN interface that echoes frames and synthesizes responses.
Useful for local development without hardware. Thread-safe.
"""
def __init__(self, bitrate: int = 19200, channel: int = 1) -> None:
self.bitrate = bitrate
self.channel = channel
self._rx: "queue.Queue[LinFrame]" = queue.Queue()
self._lock = threading.RLock()
self._connected = False
def connect(self) -> None:
with self._lock:
self._connected = True
def disconnect(self) -> None:
with self._lock:
self._connected = False
# drain queue
try:
while True:
self._rx.get_nowait()
except queue.Empty:
pass
def send(self, frame: LinFrame) -> None:
if not self._connected:
raise RuntimeError("Mock interface not connected")
# echo back the frame as a received event
self._rx.put(frame)
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected:
raise RuntimeError("Mock interface not connected")
deadline = time.time() + max(0.0, timeout)
while time.time() < deadline:
try:
frm = self._rx.get(timeout=max(0.0, deadline - time.time()))
if id is None or frm.id == id:
return frm
# not matching, requeue tail-safe
self._rx.put(frm)
except queue.Empty:
break
return None
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected:
raise RuntimeError("Mock interface not connected")
# synthesize a deterministic response payload of requested length
payload = bytes((id + i) & 0xFF for i in range(max(0, min(8, length))))
frm = LinFrame(id=id, data=payload)
self._rx.put(frm)
return self.receive(id=id, timeout=timeout)
def flush(self) -> None:
while not self._rx.empty():
try:
self._rx.get_nowait()
except queue.Empty: # pragma: no cover - race guard
break
from __future__ import annotations
import queue
import threading
import time
from typing import Optional
from .base import LinInterface, LinFrame
class MockBabyLinInterface(LinInterface):
"""A mock LIN interface that echoes frames and synthesizes responses.
Useful for local development without hardware. Thread-safe.
"""
def __init__(self, bitrate: int = 19200, channel: int = 1) -> None:
self.bitrate = bitrate
self.channel = channel
self._rx: "queue.Queue[LinFrame]" = queue.Queue()
self._lock = threading.RLock()
self._connected = False
def connect(self) -> None:
with self._lock:
self._connected = True
def disconnect(self) -> None:
with self._lock:
self._connected = False
# drain queue
try:
while True:
self._rx.get_nowait()
except queue.Empty:
pass
def send(self, frame: LinFrame) -> None:
if not self._connected:
raise RuntimeError("Mock interface not connected")
# echo back the frame as a received event
self._rx.put(frame)
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected:
raise RuntimeError("Mock interface not connected")
deadline = time.time() + max(0.0, timeout)
while time.time() < deadline:
try:
frm = self._rx.get(timeout=max(0.0, deadline - time.time()))
if id is None or frm.id == id:
return frm
# not matching, requeue tail-safe
self._rx.put(frm)
except queue.Empty:
break
return None
def request(self, id: int, length: int, timeout: float = 1.0) -> Optional[LinFrame]:
if not self._connected:
raise RuntimeError("Mock interface not connected")
# synthesize a deterministic response payload of requested length
payload = bytes((id + i) & 0xFF for i in range(max(0, min(8, length))))
frm = LinFrame(id=id, data=payload)
self._rx.put(frm)
return self.receive(id=id, timeout=timeout)
def flush(self) -> None:
while not self._rx.empty():
try:
self._rx.get_nowait()
except queue.Empty: # pragma: no cover - race guard
break
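A quick local sketch of the echo and request behaviour (no hardware involved):
from ecu_framework.lin.mock import MockBabyLinInterface
from ecu_framework.lin.base import LinFrame
mock = MockBabyLinInterface()
mock.connect()
mock.send(LinFrame(id=0x0A, data=b"\x01\x02"))
echoed = mock.receive(id=0x0A, timeout=0.1)           # the sent frame is echoed back
resp = mock.request(id=0x11, length=4, timeout=0.1)   # deterministic payload 11 12 13 14
assert resp is not None and resp.data == bytes([0x11, 0x12, 0x13, 0x14])
mock.disconnect()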

220
ecu_framework/lin/mum.py Normal file
View File

@ -0,0 +1,220 @@
"""LIN adapter that uses the Melexis Universal Master (MUM) over the network.
Wraps the vendor's `pylin` + `pymumclient` packages so test code can talk to
the MUM through the same `LinInterface` abstraction used by the BabyLIN and
mock adapters. The MUM is a BeagleBone-based LIN master reachable over IP
(default 192.168.7.2) with built-in power control on `power_out0`.
The MUM is master-driven: a slave frame is fetched by issuing a request via
`send_message(master_to_slave=False, frame_id, data_length)`, so `receive()`
requires a frame ID. Per-frame `data_length` is taken from the constructor's
`frame_lengths` map; ALM_Status (0x11, 4 bytes) and ALM_Req_A (0x0A, 8 bytes)
have built-in defaults so the common cases work out of the box.
Diagnostic frames (BSM-SNPD) need the LIN 1.x **Classic** checksum, which
`send_message` does not produce. Use `send_raw()` (which calls the transport
layer's `ld_put_raw`) for those frames.
"""
from __future__ import annotations
import time
from typing import Dict, Optional
from .base import LinInterface, LinFrame
# Sensible defaults for the 4SEVEN_color_lib_test ECU. Callers can extend or
# override these via the `frame_lengths` constructor argument.
_DEFAULT_FRAME_LENGTHS: Dict[int, int] = {
0x0A: 8, # ALM_Req_A (master-published, RGB control)
0x11: 4, # ALM_Status (slave-published)
0x06: 3, # ConfigFrame (master-published)
0x12: 8, # PWM_Frame (slave-published)
0x13: 8, # VF_Frame (slave-published)
0x14: 8, # Tj_Frame (slave-published)
0x15: 8, # PWM_wo_Comp (slave-published)
0x16: 8, # NVM_Debug (slave-published)
}
class MumLinInterface(LinInterface):
"""LIN adapter for the Melexis Universal Master."""
def __init__(
self,
host: str = "192.168.7.2",
lin_device: str = "lin0",
power_device: str = "power_out0",
baudrate: int = 19200,
frame_lengths: Optional[Dict[int, int]] = None,
default_data_length: int = 8,
boot_settle_seconds: float = 0.5,
# Test seam: inject pre-built modules to bypass real hardware.
mum_module: object = None,
pylin_module: object = None,
) -> None:
self.host = host
self.lin_device = lin_device
self.power_device = power_device
self.baudrate = int(baudrate)
self.boot_settle_seconds = float(boot_settle_seconds)
self.default_data_length = int(default_data_length)
self.frame_lengths = dict(_DEFAULT_FRAME_LENGTHS)
if frame_lengths:
self.frame_lengths.update({int(k): int(v) for k, v in frame_lengths.items()})
self._mum_module = mum_module
self._pylin_module = pylin_module
self._mum = None
self._linmaster = None
self._power_control = None
self._lin_dev = None
self._transport_layer = None
self._connected = False
# -----------------------------
# Lifecycle
# -----------------------------
def _resolve_modules(self):
"""Lazy-import MUM stack so the framework still loads without it."""
if self._mum_module is None:
try:
import pymumclient # type: ignore
except Exception as e:
raise RuntimeError(
"pymumclient is not installed. The MUM adapter requires Melexis "
"packages 'pymumclient' and 'pylin'. See "
"vendor/automated_lin_test/install_packages.sh."
) from e
self._mum_module = pymumclient
if self._pylin_module is None:
try:
import pylin # type: ignore
except Exception as e:
raise RuntimeError(
"pylin is not installed. The MUM adapter requires Melexis "
"packages 'pymumclient' and 'pylin'. See "
"vendor/automated_lin_test/install_packages.sh."
) from e
self._pylin_module = pylin
return self._mum_module, self._pylin_module
def connect(self) -> None:
"""Open MUM, set up LIN master, attach LIN bus, and power up the ECU."""
pymumclient, pylin = self._resolve_modules()
self._mum = pymumclient.MelexisUniversalMaster()
self._mum.open_all(self.host)
self._power_control = self._mum.get_device(self.power_device)
self._linmaster = self._mum.get_device(self.lin_device)
self._linmaster.setup()
lin_bus = pylin.LinBusManager(self._linmaster)
self._lin_dev = pylin.LinDevice22(lin_bus)
self._lin_dev.baudrate = self.baudrate
# Transport layer is needed for Classic-checksum diagnostic frames.
try:
self._transport_layer = self._lin_dev.get_device("bus/transport_layer")
except Exception:
self._transport_layer = None
# Power up and let the ECU boot before the first frame.
self._power_control.power_up()
if self.boot_settle_seconds > 0:
time.sleep(self.boot_settle_seconds)
self._connected = True
def disconnect(self) -> None:
"""Power down the ECU and tear down the MUM connection (best-effort)."""
if self._power_control is not None:
try:
self._power_control.power_down()
except Exception:
pass
if self._linmaster is not None:
try:
self._linmaster.teardown()
except Exception:
pass
self._connected = False
self._mum = None
self._linmaster = None
self._power_control = None
self._lin_dev = None
self._transport_layer = None
# -----------------------------
# LinInterface contract
# -----------------------------
def send(self, frame: LinFrame) -> None:
"""Publish a master-to-slave frame using Enhanced checksum."""
if not self._connected or self._lin_dev is None:
raise RuntimeError("MUM not connected")
self._lin_dev.send_message(
master_to_slave=True,
frame_id=int(frame.id),
data_length=len(frame.data),
data=list(frame.data),
)
def receive(self, id: Optional[int] = None, timeout: float = 1.0) -> Optional[LinFrame]:
"""Trigger a slave-to-master read for `id` and return the response.
The MUM is master-driven, so a frame ID is required; passing None
raises NotImplementedError. `timeout` is informational only; the
underlying pylin call is synchronous and uses its own timing.
"""
if not self._connected or self._lin_dev is None:
raise RuntimeError("MUM not connected")
if id is None:
raise NotImplementedError(
"MUM receive requires a frame ID; passive listen is not supported"
)
length = self.frame_lengths.get(int(id), self.default_data_length)
try:
response = self._lin_dev.send_message(
master_to_slave=False,
frame_id=int(id),
data_length=int(length),
data=None,
)
except Exception:
return None # treat any pylin exception as a timeout / no-data
if not response:
return None
return LinFrame(id=int(id) & 0x3F, data=bytes(response[: int(length)]))
# -----------------------------
# MUM-specific extras
# -----------------------------
def send_raw(self, data: bytes) -> None:
"""Send a raw LIN frame using LIN 1.x **Classic** checksum.
Required for BSM-SNPD diagnostic frames (service ID 0xB5); the
firmware rejects these if the Enhanced checksum is used.
"""
if not self._connected or self._transport_layer is None:
raise RuntimeError("MUM transport layer not available")
self._transport_layer.ld_put_raw(data=bytearray(data), baudrate=self.baudrate)
def power_up(self) -> None:
if self._power_control is None:
raise RuntimeError("MUM not connected")
self._power_control.power_up()
def power_down(self) -> None:
if self._power_control is None:
raise RuntimeError("MUM not connected")
self._power_control.power_down()
def power_cycle(self, wait: float = 2.0) -> None:
"""Power the ECU down, wait `wait` seconds, then back up."""
self.power_down()
time.sleep(wait)
self.power_up()
if self.boot_settle_seconds > 0:
time.sleep(self.boot_settle_seconds)
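A hedged end-to-end sketch against real hardware (assumes a MUM reachable at the default address and an ECU publishing ALM_Status on 0x11):
from ecu_framework.lin.mum import MumLinInterface
from ecu_framework.lin.base import LinFrame
mum = MumLinInterface(host="192.168.7.2", frame_lengths={0x11: 4})
mum.connect()                                # opens the MUM, powers the ECU, waits for boot
status = mum.receive(id=0x11, timeout=1.0)   # master-driven read of ALM_Status
if status is not None:
    print("NAD:", hex(status.data[0]))
mum.send(LinFrame(id=0x0A, data=bytes(8)))   # publish an 8-byte master frame
mum.power_cycle(wait=2.0)                    # drop and restore the ECU supply
mum.disconnect()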

View File

@ -1,13 +1,13 @@
"""Power control helpers for ECU tests.
Currently includes Owon PSU serial SCPI controller.
"""
from .owon_psu import SerialParams, OwonPSU, scan_ports, auto_detect
__all__ = [
"SerialParams",
"OwonPSU",
"scan_ports",
"auto_detect",
]
"""Power control helpers for ECU tests.
Currently includes Owon PSU serial SCPI controller.
"""
from .owon_psu import SerialParams, OwonPSU, scan_ports, auto_detect
__all__ = [
"SerialParams",
"OwonPSU",
"scan_ports",
"auto_detect",
]

View File

@ -1,193 +1,193 @@
"""Owon PSU SCPI control over raw serial (pyserial).
This module provides a small, programmatic API suitable for tests:
- OwonPSU: context-manageable controller class
- scan_ports(): find devices responding to *IDN?
- auto_detect(): select the first matching device by IDN substring
Behavior follows the working quick demo example (serial):
- Both commands and queries are terminated with a newline ("\n" by default).
- Queries use readline() to fetch a single-line response.
- Command set uses: 'output 0/1', 'output?', 'SOUR:VOLT <V>', 'SOUR:CURR <A>', 'MEAS:VOLT?', 'MEAS:CURR?', '*IDN?'
"""
from __future__ import annotations
from dataclasses import dataclass
from time import sleep
from typing import Iterable, Optional
import serial
from serial import Serial
from serial.tools import list_ports
@dataclass
class SerialParams:
baudrate: int = 115200
timeout: float = 1.0 # seconds
bytesize: int = serial.EIGHTBITS
parity: str = serial.PARITY_NONE
stopbits: float = serial.STOPBITS_ONE
xonxoff: bool = False
rtscts: bool = False
dsrdtr: bool = False
write_timeout: float = 1.0 # seconds
class OwonPSU:
def __init__(self, port: str, params: SerialParams | None = None, eol: str = "\n") -> None:
self.port = port
self.params = params or SerialParams()
self.eol = eol
self._ser: Optional[Serial] = None
def open(self) -> None:
if self._ser and self._ser.is_open:
return
ser = Serial()
ser.port = self.port
ser.baudrate = self.params.baudrate
ser.bytesize = self.params.bytesize
ser.parity = self.params.parity
ser.stopbits = self.params.stopbits
ser.xonxoff = self.params.xonxoff
ser.rtscts = self.params.rtscts
ser.dsrdtr = self.params.dsrdtr
ser.timeout = self.params.timeout
ser.write_timeout = self.params.write_timeout
ser.open()
self._ser = ser
def close(self) -> None:
if self._ser and self._ser.is_open:
try:
self._ser.close()
finally:
self._ser = None
def __enter__(self) -> "OwonPSU":
self.open()
return self
def __exit__(self, exc_type, exc, tb) -> None:
self.close()
@property
def is_open(self) -> bool:
return bool(self._ser and self._ser.is_open)
# ---- low-level ops ----
def write(self, cmd: str) -> None:
"""Write a SCPI command (append eol)."""
if not self._ser:
raise RuntimeError("Port is not open")
data = (cmd + self.eol).encode("ascii", errors="ignore")
self._ser.write(data)
self._ser.flush()
def query(self, q: str) -> str:
"""Send a query with terminator and return a single-line response using readline()."""
if not self._ser:
raise RuntimeError("Port is not open")
# clear buffers to avoid stale data
try:
self._ser.reset_input_buffer()
self._ser.reset_output_buffer()
except Exception:
pass
self._ser.write((q + self.eol).encode("ascii", errors="ignore"))
self._ser.flush()
line = self._ser.readline().strip()
return line.decode("ascii", errors="ignore")
# ---- high-level ops ----
def idn(self) -> str:
return self.query("*IDN?")
def set_voltage(self, channel: int, volts: float) -> None:
# Using SOUR:VOLT <V> per working example
self.write(f"SOUR:VOLT {volts:.3f}")
def set_current(self, channel: int, amps: float) -> None:
# Using SOUR:CURR <A> per working example
self.write(f"SOUR:CURR {amps:.3f}")
def set_output(self, on: bool) -> None:
# Using 'output 1/0' per working example
self.write("output 1" if on else "output 0")
def output_status(self) -> str:
return self.query("output?")
def measure_voltage(self) -> str:
return self.query("MEAS:VOLT?")
def measure_current(self) -> str:
return self.query("MEAS:CURR?")
# ------- discovery helpers -------
def try_idn_on_port(port: str, params: SerialParams) -> str:
dev: Optional[Serial] = None
try:
dev = Serial()
dev.port = port
dev.baudrate = params.baudrate
dev.bytesize = params.bytesize
dev.parity = params.parity
dev.stopbits = params.stopbits
dev.xonxoff = params.xonxoff
dev.rtscts = params.rtscts
dev.dsrdtr = params.dsrdtr
dev.timeout = params.timeout
dev.write_timeout = params.write_timeout
dev.open()
# Query with newline terminator and read a single line
dev.reset_input_buffer(); dev.reset_output_buffer()
dev.write(b"*IDN?\n"); dev.flush()
line = dev.readline().strip()
return line.decode("ascii", errors="ignore")
except Exception:
return ""
finally:
if dev and dev.is_open:
try:
dev.close()
except Exception:
pass
def scan_ports(params: SerialParams | None = None) -> list[tuple[str, str]]:
"""Return [(port, idn_response), ...] for ports that responded."""
params = params or SerialParams()
results: list[tuple[str, str]] = []
for p in list_ports.comports():
dev = p.device
resp = try_idn_on_port(dev, params)
if resp:
results.append((dev, resp))
return results
def auto_detect(params: SerialParams | None = None, idn_substr: str | None = None) -> Optional[str]:
"""Return the first port whose *IDN? contains idn_substr (case-insensitive), else first responder."""
params = params or SerialParams()
matches = scan_ports(params)
if not matches:
return None
if idn_substr:
isub = idn_substr.lower()
for port, idn in matches:
if isub in idn.lower():
return port
return matches[0][0]
__all__ = [
"SerialParams",
"OwonPSU",
"scan_ports",
"auto_detect",
]
"""Owon PSU SCPI control over raw serial (pyserial).
This module provides a small, programmatic API suitable for tests:
- OwonPSU: context-manageable controller class
- scan_ports(): find devices responding to *IDN?
- auto_detect(): select the first matching device by IDN substring
Behavior follows the working quick demo example (serial):
- Both commands and queries are terminated with a newline ("\n" by default).
- Queries use readline() to fetch a single-line response.
- Command set uses: 'output 0/1', 'output?', 'SOUR:VOLT <V>', 'SOUR:CURR <A>', 'MEAS:VOLT?', 'MEAS:CURR?', '*IDN?'
"""
from __future__ import annotations
from dataclasses import dataclass
from time import sleep
from typing import Iterable, Optional
import serial
from serial import Serial
from serial.tools import list_ports
@dataclass
class SerialParams:
baudrate: int = 115200
timeout: float = 1.0 # seconds
bytesize: int = serial.EIGHTBITS
parity: str = serial.PARITY_NONE
stopbits: float = serial.STOPBITS_ONE
xonxoff: bool = False
rtscts: bool = False
dsrdtr: bool = False
write_timeout: float = 1.0 # seconds
class OwonPSU:
def __init__(self, port: str, params: SerialParams | None = None, eol: str = "\n") -> None:
self.port = port
self.params = params or SerialParams()
self.eol = eol
self._ser: Optional[Serial] = None
def open(self) -> None:
if self._ser and self._ser.is_open:
return
ser = Serial()
ser.port = self.port
ser.baudrate = self.params.baudrate
ser.bytesize = self.params.bytesize
ser.parity = self.params.parity
ser.stopbits = self.params.stopbits
ser.xonxoff = self.params.xonxoff
ser.rtscts = self.params.rtscts
ser.dsrdtr = self.params.dsrdtr
ser.timeout = self.params.timeout
ser.write_timeout = self.params.write_timeout
ser.open()
self._ser = ser
def close(self) -> None:
if self._ser and self._ser.is_open:
try:
self._ser.close()
finally:
self._ser = None
def __enter__(self) -> "OwonPSU":
self.open()
return self
def __exit__(self, exc_type, exc, tb) -> None:
self.close()
@property
def is_open(self) -> bool:
return bool(self._ser and self._ser.is_open)
# ---- low-level ops ----
def write(self, cmd: str) -> None:
"""Write a SCPI command (append eol)."""
if not self._ser:
raise RuntimeError("Port is not open")
data = (cmd + self.eol).encode("ascii", errors="ignore")
self._ser.write(data)
self._ser.flush()
def query(self, q: str) -> str:
"""Send a query with terminator and return a single-line response using readline()."""
if not self._ser:
raise RuntimeError("Port is not open")
# clear buffers to avoid stale data
try:
self._ser.reset_input_buffer()
self._ser.reset_output_buffer()
except Exception:
pass
self._ser.write((q + self.eol).encode("ascii", errors="ignore"))
self._ser.flush()
line = self._ser.readline().strip()
return line.decode("ascii", errors="ignore")
# ---- high-level ops ----
def idn(self) -> str:
return self.query("*IDN?")
def set_voltage(self, channel: int, volts: float) -> None:
# Using SOUR:VOLT <V> per working example
self.write(f"SOUR:VOLT {volts:.3f}")
def set_current(self, channel: int, amps: float) -> None:
# Using SOUR:CURR <A> per working example
self.write(f"SOUR:CURR {amps:.3f}")
def set_output(self, on: bool) -> None:
# Using 'output 1/0' per working example
self.write("output 1" if on else "output 0")
def output_status(self) -> str:
return self.query("output?")
def measure_voltage(self) -> str:
return self.query("MEAS:VOLT?")
def measure_current(self) -> str:
return self.query("MEAS:CURR?")
# ------- discovery helpers -------
def try_idn_on_port(port: str, params: SerialParams) -> str:
dev: Optional[Serial] = None
try:
dev = Serial()
dev.port = port
dev.baudrate = params.baudrate
dev.bytesize = params.bytesize
dev.parity = params.parity
dev.stopbits = params.stopbits
dev.xonxoff = params.xonxoff
dev.rtscts = params.rtscts
dev.dsrdtr = params.dsrdtr
dev.timeout = params.timeout
dev.write_timeout = params.write_timeout
dev.open()
# Query with newline terminator and read a single line
dev.reset_input_buffer(); dev.reset_output_buffer()
dev.write(b"*IDN?\n"); dev.flush()
line = dev.readline().strip()
return line.decode("ascii", errors="ignore")
except Exception:
return ""
finally:
if dev and dev.is_open:
try:
dev.close()
except Exception:
pass
def scan_ports(params: SerialParams | None = None) -> list[tuple[str, str]]:
"""Return [(port, idn_response), ...] for ports that responded."""
params = params or SerialParams()
results: list[tuple[str, str]] = []
for p in list_ports.comports():
dev = p.device
resp = try_idn_on_port(dev, params)
if resp:
results.append((dev, resp))
return results
def auto_detect(params: SerialParams | None = None, idn_substr: str | None = None) -> Optional[str]:
"""Return the first port whose *IDN? contains idn_substr (case-insensitive), else first responder."""
params = params or SerialParams()
matches = scan_ports(params)
if not matches:
return None
if idn_substr:
isub = idn_substr.lower()
for port, idn in matches:
if isub in idn.lower():
return port
return matches[0][0]
__all__ = [
"SerialParams",
"OwonPSU",
"scan_ports",
"auto_detect",
]
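A small usage sketch (assumptions: the module lives at ecu_framework/power/owon_psu.py and the instrument's *IDN? reply contains "OWON"):
from ecu_framework.power.owon_psu import OwonPSU, SerialParams, auto_detect
port = auto_detect(idn_substr="OWON")        # first responder whose *IDN? matches
if port:
    with OwonPSU(port, params=SerialParams(timeout=0.5)) as psu:
        print(psu.idn())
        psu.set_voltage(1, 12.0)             # channel argument is informational here
        psu.set_current(1, 0.5)
        psu.set_output(True)
        print("V =", psu.measure_voltage(), "A =", psu.measure_current())
        psu.set_output(False)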

View File

@ -1,30 +1,31 @@
[pytest]
# addopts: Default CLI options applied to every pytest run.
# -ra → Show extra test summary info for skipped, xfailed, etc.
# --junitxml=... → Emit JUnit XML for CI systems (machines can parse it).
# --html=... → Generate a human-friendly HTML report after each run.
# --self-contained-html → Inline CSS/JS in the HTML report for easy sharing.
# --tb=short → Short tracebacks to keep logs readable.
# Plugin note: We no longer force-load via `-p conftest_plugin` to avoid ImportError
# on environments where the file might be missing. Instead, `conftest.py` will
# register the plugin if present. The plugin:
# - extracts Title/Description/Requirements/Steps from test docstrings
# - adds custom columns to the HTML report
# - writes requirements_coverage.json and summary.md in reports/
addopts = -ra --junitxml=reports/junit.xml --html=reports/report.html --self-contained-html --tb=short --cov=ecu_framework --cov-report=term-missing
# markers: Document all custom markers so pytest doesn't warn and so usage is clear.
# Use with: pytest -m "markername"
markers =
hardware: requires real hardware (BabyLIN device and ECU); excluded by default in mock runs
babylin: tests that use the BabyLIN interface (may require hardware)
unit: fast, isolated tests (no hardware, no external I/O)
req_001: REQ-001 - Mock interface shall echo transmitted frames for local testing
req_002: REQ-002 - Mock interface shall synthesize deterministic responses for request operations
req_003: REQ-003 - Mock interface shall support frame filtering by ID
req_004: REQ-004 - Mock interface shall handle timeout scenarios gracefully
smoke: Basic functionality validation tests
boundary: Boundary condition and edge case tests
# testpaths: Where pytest looks for tests by default.
testpaths = tests
[pytest]
# addopts: Default CLI options applied to every pytest run.
# -ra → Show extra test summary info for skipped, xfailed, etc.
# --junitxml=... → Emit JUnit XML for CI systems (machines can parse it).
# --html=... → Generate a human-friendly HTML report after each run.
# --self-contained-html → Inline CSS/JS in the HTML report for easy sharing.
# --tb=short → Short tracebacks to keep logs readable.
# Plugin note: We no longer force-load via `-p conftest_plugin` to avoid ImportError
# on environments where the file might be missing. Instead, `conftest.py` will
# register the plugin if present. The plugin:
# - extracts Title/Description/Requirements/Steps from test docstrings
# - adds custom columns to the HTML report
# - writes requirements_coverage.json and summary.md in reports/
addopts = -ra --junitxml=reports/junit.xml --html=reports/report.html --self-contained-html --tb=short --cov=ecu_framework --cov-report=term-missing
# markers: Document all custom markers so pytest doesn't warn and so usage is clear.
# Use with: pytest -m "markername"
markers =
hardware: requires real hardware (LIN master + ECU); excluded by default in mock runs
babylin: tests that use the legacy BabyLIN interface (may require hardware)
mum: tests that use the Melexis Universal Master (MUM) interface (requires hardware)
unit: fast, isolated tests (no hardware, no external I/O)
req_001: REQ-001 - Mock interface shall echo transmitted frames for local testing
req_002: REQ-002 - Mock interface shall synthesize deterministic responses for request operations
req_003: REQ-003 - Mock interface shall support frame filtering by ID
req_004: REQ-004 - Mock interface shall handle timeout scenarios gracefully
smoke: Basic functionality validation tests
boundary: Boundary condition and edge case tests
# testpaths: Where pytest looks for tests by default.
testpaths = tests
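As a minimal illustration of how a test module opts in to the new marker (this mirrors the MUM end-to-end test below):
import pytest
# Select with `pytest -m mum`, or exclude hardware runs with `pytest -m "not hardware"`.
pytestmark = [pytest.mark.hardware, pytest.mark.mum]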

View File

@ -1,17 +1,17 @@
# Core testing and utilities
pytest>=8,<9 # Test runner and framework (parametrize, fixtures, markers)
pyyaml>=6,<7 # Parse YAML config files under ./config/
pyserial>=3,<4 # Serial communication for Owon PSU and hardware tests
# BabyLIN SDK wrapper requires 'six' on some platforms
six>=1.16,<2
# Test productivity
pytest-xdist>=3.6,<4 # Parallel test execution (e.g., pytest -n auto)
pytest-html>=4,<5 # Generate HTML test reports for CI and sharing
pytest-cov>=5,<6 # Coverage reports for Python packages
# Logging and config extras
configparser>=6,<7 # Optional INI-based config support if you add .ini configs later
colorlog>=6,<7 # Colored logging output for readable test logs
typing-extensions>=4.12,<5 # Typing backports for older Python versions
# Core testing and utilities
pytest>=8,<9 # Test runner and framework (parametrize, fixtures, markers)
pyyaml>=6,<7 # Parse YAML config files under ./config/
pyserial>=3,<4 # Serial communication for Owon PSU and hardware tests
# BabyLIN SDK wrapper requires 'six' on some platforms
six>=1.16,<2
# Test productivity
pytest-xdist>=3.6,<4 # Parallel test execution (e.g., pytest -n auto)
pytest-html>=4,<5 # Generate HTML test reports for CI and sharing
pytest-cov>=5,<6 # Coverage reports for Python packages
# Logging and config extras
configparser>=6,<7 # Optional INI-based config support if you add .ini configs later
colorlog>=6,<7 # Colored logging output for readable test logs
typing-extensions>=4.12,<5 # Typing backports for older Python versions

View File

@ -1,5 +1,5 @@
# Example udev rules for BabyLin-like USB device
# Replace ATTRS{idVendor} and ATTRS{idProduct} with actual values
# Find values with: lsusb
SUBSYSTEM=="usb", ATTRS{idVendor}=="1234", ATTRS{idProduct}=="5678", MODE="0660", GROUP="plugdev", TAG+="uaccess"
# Example udev rules for BabyLin-like USB device
# Replace ATTRS{idVendor} and ATTRS{idProduct} with actual values
# Find values with: lsusb
SUBSYSTEM=="usb", ATTRS{idVendor}=="1234", ATTRS{idProduct}=="5678", MODE="0660", GROUP="plugdev", TAG+="uaccess"

View File

@ -1,17 +1,17 @@
[Unit]
Description=ECU Tests Runner
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
WorkingDirectory=/home/pi/ecu_tests
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
User=pi
Group=pi
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
StandardOutput=append:/home/pi/ecu_tests/reports/service.log
StandardError=append:/home/pi/ecu_tests/reports/service.err
[Install]
WantedBy=multi-user.target
[Unit]
Description=ECU Tests Runner
After=network-online.target
Wants=network-online.target
[Service]
Type=oneshot
WorkingDirectory=/home/pi/ecu_tests
ExecStart=/home/pi/ecu_tests/scripts/run_tests.sh
User=pi
Group=pi
Environment=ECU_TESTS_CONFIG=/home/pi/ecu_tests/config/test_config.yaml
StandardOutput=append:/home/pi/ecu_tests/reports/service.log
StandardError=append:/home/pi/ecu_tests/reports/service.err
[Install]
WantedBy=multi-user.target

View File

@ -1,10 +1,10 @@
[Unit]
Description=Schedule ECU Tests Runner
[Timer]
OnBootSec=2min
OnUnitActiveSec=24h
Persistent=true
[Install]
WantedBy=timers.target
[Unit]
Description=Schedule ECU Tests Runner
[Timer]
OnBootSec=2min
OnUnitActiveSec=24h
Persistent=true
[Install]
WantedBy=timers.target

View File

@ -1,29 +1,29 @@
# Runs two pytest invocations to generate separate HTML/JUnit reports
# - Unit tests → reports/report-unit.html, reports/junit-unit.xml
# - All non-unit tests → reports/report-tests.html, reports/junit-tests.xml
#
# Usage (from repo root, PowerShell):
# .\scripts\run_two_reports.ps1
#
# Notes:
# - We override pytest.ini addopts to avoid duplicate --html/--junitxml and explicitly
# load our custom plugin.
# - Adjust the second marker to exclude hardware if desired (see commented example).
# Ensure reports directory exists
if (-not (Test-Path -LiteralPath "reports")) { New-Item -ItemType Directory -Path "reports" | Out-Null }
# 1) Unit tests report
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing `
--html=reports/report-unit.html `
--junitxml=reports/junit-unit.xml `
-m unit
# 2) All non-unit tests (integration/smoke/hardware) report
# To exclude hardware here, change the marker expression to: -m "not unit and not hardware"
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing `
--html=reports/report-tests.html `
--junitxml=reports/junit-tests.xml `
-m "not unit"
# Runs two pytest invocations to generate separate HTML/JUnit reports
# - Unit tests → reports/report-unit.html, reports/junit-unit.xml
# - All non-unit tests → reports/report-tests.html, reports/junit-tests.xml
#
# Usage (from repo root, PowerShell):
# .\scripts\run_two_reports.ps1
#
# Notes:
# - We override pytest.ini addopts to avoid duplicate --html/--junitxml and explicitly
# load our custom plugin.
# - Adjust the second marker to exclude hardware if desired (see commented example).
# Ensure reports directory exists
if (-not (Test-Path -LiteralPath "reports")) { New-Item -ItemType Directory -Path "reports" | Out-Null }
# 1) Unit tests report
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing `
--html=reports/report-unit.html `
--junitxml=reports/junit-unit.xml `
-m unit
# 2) All non-unit tests (integration/smoke/hardware) report
# To exclude hardware here, change the marker expression to: -m "not unit and not hardware"
pytest -q -o addopts="" -p conftest_plugin -ra --tb=short --self-contained-html `
--cov=ecu_framework --cov-report=term-missing `
--html=reports/report-tests.html `
--junitxml=reports/junit-tests.xml `
-m "not unit"

View File

@ -1,83 +1,101 @@
import os
import pathlib
import typing as t
import pytest
from ecu_framework.config import load_config, EcuTestConfig
from ecu_framework.lin.base import LinInterface
from ecu_framework.lin.mock import MockBabyLinInterface
try:
from ecu_framework.lin.babylin import BabyLinInterface # type: ignore
except Exception:
BabyLinInterface = None # type: ignore
WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1]
@pytest.fixture(scope="session")
def config() -> EcuTestConfig:
cfg = load_config(str(WORKSPACE_ROOT))
return cfg
@pytest.fixture(scope="session")
def lin(config: EcuTestConfig) -> t.Iterator[LinInterface]:
iface_type = config.interface.type
if iface_type == "mock":
lin = MockBabyLinInterface(bitrate=config.interface.bitrate, channel=config.interface.channel)
elif iface_type == "babylin":
if BabyLinInterface is None:
pytest.skip("BabyLin interface not available in this environment")
lin = BabyLinInterface(
dll_path=config.interface.dll_path,
bitrate=config.interface.bitrate,
channel=config.interface.channel,
node_name=config.interface.node_name,
func_names=config.interface.func_names,
sdf_path=config.interface.sdf_path,
schedule_nr=config.interface.schedule_nr,
)
else:
raise RuntimeError(f"Unknown interface type: {iface_type}")
lin.connect()
yield lin
lin.disconnect()
@pytest.fixture(scope="session", autouse=False)
def flash_ecu(config: EcuTestConfig, lin: LinInterface) -> None:
if not config.flash.enabled:
pytest.skip("Flashing disabled in config")
# Lazy import to avoid dependency during mock-only runs
from ecu_framework.flashing import HexFlasher
if not config.flash.hex_path:
pytest.skip("No HEX path provided in config")
flasher = HexFlasher(lin)
ok = flasher.flash_hex(config.flash.hex_path)
if not ok:
pytest.fail("ECU flashing failed")
@pytest.fixture
def rp(record_property: "pytest.RecordProperty"):
"""Convenience reporter: attaches a key/value as a test property and echoes to captured output.
Usage in tests:
def test_something(rp):
rp("key", value)
"""
def _rp(key: str, value):
# Attach property (pytest-html will show in Properties table)
record_property(str(key), value)
# Echo to captured output for quick scanning in report details
try:
print(f"[prop] {key}={value}")
except Exception:
pass
return _rp
import os
import pathlib
import typing as t
import pytest
from ecu_framework.config import load_config, EcuTestConfig
from ecu_framework.lin.base import LinInterface
from ecu_framework.lin.mock import MockBabyLinInterface
try:
from ecu_framework.lin.babylin import BabyLinInterface # type: ignore
except Exception:
BabyLinInterface = None # type: ignore
try:
from ecu_framework.lin.mum import MumLinInterface # type: ignore
except Exception:
MumLinInterface = None # type: ignore
WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1]
@pytest.fixture(scope="session")
def config() -> EcuTestConfig:
cfg = load_config(str(WORKSPACE_ROOT))
return cfg
@pytest.fixture(scope="session")
def lin(config: EcuTestConfig) -> t.Iterator[LinInterface]:
iface_type = config.interface.type
if iface_type == "mock":
lin = MockBabyLinInterface(bitrate=config.interface.bitrate, channel=config.interface.channel)
elif iface_type == "babylin":
if BabyLinInterface is None:
pytest.skip("BabyLin interface not available in this environment")
lin = BabyLinInterface(
dll_path=config.interface.dll_path,
bitrate=config.interface.bitrate,
channel=config.interface.channel,
node_name=config.interface.node_name,
func_names=config.interface.func_names,
sdf_path=config.interface.sdf_path,
schedule_nr=config.interface.schedule_nr,
)
elif iface_type == "mum":
if MumLinInterface is None:
pytest.skip("MUM interface not available in this environment")
if not config.interface.host:
pytest.skip("interface.host is required when interface.type == 'mum'")
lin = MumLinInterface(
host=config.interface.host,
lin_device=config.interface.lin_device,
power_device=config.interface.power_device,
baudrate=config.interface.bitrate,
boot_settle_seconds=config.interface.boot_settle_seconds,
frame_lengths=config.interface.frame_lengths or None,
)
else:
raise RuntimeError(f"Unknown interface type: {iface_type}")
lin.connect()
yield lin
lin.disconnect()
@pytest.fixture(scope="session", autouse=False)
def flash_ecu(config: EcuTestConfig, lin: LinInterface) -> None:
if not config.flash.enabled:
pytest.skip("Flashing disabled in config")
# Lazy import to avoid dependency during mock-only runs
from ecu_framework.flashing import HexFlasher
if not config.flash.hex_path:
pytest.skip("No HEX path provided in config")
flasher = HexFlasher(lin)
ok = flasher.flash_hex(config.flash.hex_path)
if not ok:
pytest.fail("ECU flashing failed")
@pytest.fixture
def rp(record_property: "pytest.RecordProperty"):
"""Convenience reporter: attaches a key/value as a test property and echoes to captured output.
Usage in tests:
def test_something(rp):
rp("key", value)
"""
def _rp(key: str, value):
# Attach property (pytest-html will show in Properties table)
record_property(str(key), value)
# Echo to captured output for quick scanning in report details
try:
print(f"[prop] {key}={value}")
except Exception:
pass
return _rp

View File

@ -0,0 +1,118 @@
"""End-to-end hardware test on the MUM (Melexis Universal Master).
Power the ECU via MUM's built-in power output, then activate the RGB LED via
the master-published ALM_Req_A frame (ID 0x0A) and verify the slave responds
on ALM_Status (ID 0x11).
Frame layout (from vendor/4SEVEN_color_lib_test.ldf, ALM_Req_A @ 0x0A, 8B):
byte 0 AmbLightColourRed (0..255)
byte 1 AmbLightColourGreen (0..255)
byte 2 AmbLightColourBlue (0..255)
byte 3 AmbLightIntensity (0..255)
byte 4 AmbLightUpdate (bits 0-1) | AmbLightMode (bits 2-7)
byte 5 AmbLightDuration
byte 6 AmbLightLIDFrom
byte 7 AmbLightLIDTo
The ECU answers ALM_Req_A only when AmbLightLIDFrom <= ALMNadNo <= AmbLightLIDTo, so
we read the current NAD from ALM_Status first and target exactly that NAD.
"""
from __future__ import annotations
import pytest
from ecu_framework.config import EcuTestConfig
from ecu_framework.lin.base import LinFrame, LinInterface
pytestmark = [pytest.mark.hardware, pytest.mark.mum]
ALM_REQ_A_ID = 0x0A
ALM_STATUS_ID = 0x11
DEFAULT_RGB = (0xFF, 0xFF, 0xFF)
DEFAULT_INTENSITY = 0xFF
def _build_alm_req_a_payload(
r: int, g: int, b: int,
intensity: int = DEFAULT_INTENSITY,
update: int = 0,
mode: int = 0,
duration: int = 0,
lid_from: int = 0x01,
lid_to: int = 0xFF,
) -> bytes:
"""Pack RGB+mode signals into the 8-byte ALM_Req_A payload."""
byte4 = (update & 0x03) | ((mode & 0x3F) << 2)
return bytes([
r & 0xFF, g & 0xFF, b & 0xFF,
intensity & 0xFF,
byte4 & 0xFF,
duration & 0xFF,
lid_from & 0xFF,
lid_to & 0xFF,
])
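# Worked packing example (illustrative): update=1 and mode=2 pack byte 4 as
# (1 & 0x03) | ((2 & 0x3F) << 2) == 0x09, so with the other defaults
#   _build_alm_req_a_payload(0xFF, 0x00, 0x00, update=1, mode=2).hex() == "ff0000ff090001ff"
# (red only, full intensity, LIDFrom=0x01, LIDTo=0xFF).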
def test_mum_e2e_power_on_then_led_activate(config: EcuTestConfig, lin: LinInterface, rp):
"""
Title: MUM E2E - Power ECU, Read NAD, Activate RGB LED
Description:
Drives the full hardware path through the Melexis Universal Master:
the `lin` fixture has already powered the ECU via power_out0 and set
up the LIN bus. This test reads ALM_Status to discover the slave's
NAD, publishes ALM_Req_A targeting that NAD with full white at full
intensity, and re-reads ALM_Status to confirm the bus is alive.
Requirements: REQ-MUM-LED-ACTIVATE
Test Steps:
1. Skip unless interface.type == 'mum'
2. Read ALM_Status (0x11) and extract ALMNadNo from byte 0
3. Build ALM_Req_A payload with RGB=(0xFF,0xFF,0xFF), intensity=0xFF,
targeting LIDFrom=LIDTo=current_nad
4. Publish ALM_Req_A via lin.send()
5. Re-read ALM_Status and assert it still returns a valid frame
Expected Result:
- First ALM_Status read returns a 4-byte frame with a NAD in 0x01..0xFE
- Second ALM_Status read returns a frame (bus still alive after Tx)
"""
if config.interface.type != "mum":
pytest.skip("interface.type must be 'mum' for this test")
# Step 2: read current NAD from ALM_Status
status = lin.receive(id=ALM_STATUS_ID, timeout=1.0)
assert status is not None, "No ALM_Status received — check MUM/ECU wiring and power"
assert len(status.data) >= 1, f"ALM_Status too short: {status.data!r}"
current_nad = status.data[0]
rp("alm_status_data_hex", bytes(status.data).hex())
rp("current_nad", f"0x{current_nad:02X}")
assert 0x01 <= current_nad <= 0xFE, (
f"ALMNadNo {current_nad:#x} is out of valid range; ECU may be unconfigured"
)
# Step 3 + 4: target the discovered NAD with full white
payload = _build_alm_req_a_payload(
*DEFAULT_RGB,
intensity=DEFAULT_INTENSITY,
lid_from=current_nad,
lid_to=current_nad,
)
rp("tx_id", f"0x{ALM_REQ_A_ID:02X}")
rp("tx_data_hex", payload.hex())
rp("rgb", list(DEFAULT_RGB))
rp("intensity", DEFAULT_INTENSITY)
lin.send(LinFrame(id=ALM_REQ_A_ID, data=payload))
# Step 5: confirm bus liveness after the activation frame
status_after = lin.receive(id=ALM_STATUS_ID, timeout=1.0)
rp("post_status_present", status_after is not None)
if status_after is not None:
rp("post_status_data_hex", bytes(status_after.data).hex())
assert status_after is not None, (
"ALM_Status not received after publishing ALM_Req_A — ECU may have reset"
)

View File

@ -0,0 +1,235 @@
"""End-to-end hardware test: power the ECU on via Owon PSU, switch to the
'CCO' schedule, and publish an RGB activation frame on ALM_Req_A (ID 0x0A).
Frame layout (from vendor/4SEVEN_color_lib_test.ldf, ALM_Req_A @ ID 0x0A, 8B):
byte 0 AmbLightColourRed (0..255)
byte 1 AmbLightColourGreen (0..255)
byte 2 AmbLightColourBlue (0..255)
byte 3 AmbLightIntensity (0..255)
byte 4 AmbLightUpdate (bits 0-1) | AmbLightMode (bits 2-7)
byte 5 AmbLightDuration
byte 6 AmbLightLIDFrom
byte 7 AmbLightLIDTo
Schedule 'CCO' polls ALM_Req_A every 10 ms (LDF line 252-263). Updating the
master-published frame data via BLC_mon_set_xmit makes the next CCO slot
publish the new RGB values. The slave answers ALM_Status (ID 0x11) which we
use as evidence the bus is alive.
"""
from __future__ import annotations
import time
import pytest
import serial
from ecu_framework.config import EcuTestConfig
from ecu_framework.lin.base import LinFrame, LinInterface
from ecu_framework.power import OwonPSU, SerialParams
pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
# Frame IDs from the LDF
ALM_REQ_A_ID = 0x0A # master-published RGB control frame
ALM_STATUS_ID = 0x11 # slave-published status frame
# Default RGB activation: full white at full intensity, immediate setpoint.
DEFAULT_RGB = (0xFF, 0xFF, 0xFF)
DEFAULT_INTENSITY = 0xFF
_PARITY_MAP = {
"N": serial.PARITY_NONE,
"E": serial.PARITY_EVEN,
"O": serial.PARITY_ODD,
}
_STOPBITS_MAP = {
1: serial.STOPBITS_ONE,
2: serial.STOPBITS_TWO,
}
def _build_serial_params(psu_cfg) -> SerialParams:
return SerialParams(
baudrate=int(psu_cfg.baudrate),
timeout=float(psu_cfg.timeout),
parity=_PARITY_MAP.get(str(psu_cfg.parity or "N").upper(), serial.PARITY_NONE),
stopbits=_STOPBITS_MAP.get(int(float(psu_cfg.stopbits or 1)), serial.STOPBITS_ONE),
xonxoff=bool(psu_cfg.xonxoff),
rtscts=bool(psu_cfg.rtscts),
dsrdtr=bool(psu_cfg.dsrdtr),
)
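# Note: unrecognized parity/stopbits values fall back to PARITY_NONE / STOPBITS_ONE;
# e.g. parity "e" or "E" maps to serial.PARITY_EVEN, anything else to PARITY_NONE.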
def _build_alm_req_a_payload(
r: int, g: int, b: int,
intensity: int = DEFAULT_INTENSITY,
update: int = 0, # 0 = Immediate color update
mode: int = 0, # 0 = Immediate Setpoint
duration: int = 0,
lid_from: int = 0,
lid_to: int = 0,
) -> bytes:
"""Pack RGB-activation signals into the 8-byte ALM_Req_A payload."""
# byte 4 packs Update (2 bits, LSB) and Mode (6 bits) per the LDF offsets.
byte4 = (update & 0x03) | ((mode & 0x3F) << 2)
return bytes([
r & 0xFF, g & 0xFF, b & 0xFF,
intensity & 0xFF,
byte4 & 0xFF,
duration & 0xFF,
lid_from & 0xFF,
lid_to & 0xFF,
])
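# Worked example (illustrative): the default full-white activation used below packs as
#   _build_alm_req_a_payload(0xFF, 0xFF, 0xFF).hex() == "ffffffff00000000"
# i.e. RGB=FF FF FF, intensity=FF, immediate update/mode, no duration, LIDFrom=LIDTo=0.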
def test_e2e_power_on_then_cco_rgb_activate(config: EcuTestConfig, lin: LinInterface, rp):
"""
Title: E2E - Power ECU, Switch to CCO Schedule, Activate RGB
Description:
Powers the ECU via the Owon PSU, switches the BabyLIN master to the
'CCO' schedule (which polls ALM_Req_A every 10 ms per the LDF), and
publishes an RGB activation payload on ALM_Req_A (ID 0x0A). Captures
bus traffic for a short window to confirm activity (typically the
slave-published ALM_Status at ID 0x11 will appear).
Requirements: REQ-E2E-CCO-RGB
Test Steps:
1. Skip unless interface.type == 'babylin'
2. Skip unless power_supply is enabled and a port is configured
3. Open the PSU, IDN check, set V/I, enable output
4. Wait for ECU boot (boot_settle_seconds, default 1.5 s)
5. Stop any current schedule and start schedule 'CCO'
6. Build the ALM_Req_A payload from RGB+intensity+mode+update
7. Publish the payload via lin.send(LinFrame(0x0A, ...))
8. Drain RX briefly and collect frames seen during the activation window
9. Assert at least one frame was observed; report IDs/lengths
10. Disable PSU output (always)
Expected Result:
- PSU comes up, ECU boots, CCO starts without SDK errors
- At least one LIN frame is observed on the bus during the window
- PSU output is disabled at end of test
"""
# Step 1 / 2: gate on hardware availability
if config.interface.type != "babylin":
pytest.skip("interface.type must be 'babylin' for this E2E test")
psu_cfg = config.power_supply
if not psu_cfg.enabled:
pytest.skip("Power supply disabled in config.power_supply.enabled")
if not psu_cfg.port:
pytest.skip("No power supply 'port' configured (config.power_supply.port)")
set_v = float(psu_cfg.set_voltage)
set_i = float(psu_cfg.set_current)
eol = psu_cfg.eol or "\n"
port = str(psu_cfg.port).strip()
boot_settle_s = float(getattr(psu_cfg, "boot_settle_seconds", 1.5))
activation_window_s = float(getattr(psu_cfg, "activation_window", 1.0))
# The adapter is hardware-only here; the test is gated on interface.type=='babylin'.
send_command = getattr(lin, "send_command", None)
start_schedule = getattr(lin, "start_schedule", None)
if send_command is None or start_schedule is None:
pytest.skip("LIN adapter does not expose send_command/start_schedule (need BabyLinInterface)")
rgb = (DEFAULT_RGB[0], DEFAULT_RGB[1], DEFAULT_RGB[2])
rp("interface_type", config.interface.type)
rp("psu_port", port)
rp("set_voltage", set_v)
rp("set_current", set_i)
rp("schedule", "CCO")
rp("rgb", list(rgb))
rp("intensity", DEFAULT_INTENSITY)
sparams = _build_serial_params(psu_cfg)
with OwonPSU(port, sparams, eol=eol) as psu:
# Step 3: bring up PSU
idn = psu.idn()
rp("psu_idn", idn)
assert isinstance(idn, str) and idn != "", "PSU *IDN? returned empty"
if psu_cfg.idn_substr:
assert str(psu_cfg.idn_substr).lower() in idn.lower(), (
f"PSU IDN does not contain expected substring "
f"{psu_cfg.idn_substr!r}; got {idn!r}"
)
psu.set_voltage(1, set_v)
psu.set_current(1, set_i)
try:
psu.set_output(True)
# Step 4: let ECU boot
time.sleep(boot_settle_s)
try:
rp("measured_voltage", psu.measure_voltage())
rp("measured_current", psu.measure_current())
except Exception as meas_err:
rp("measure_error", repr(meas_err))
# Step 5: switch to schedule CCO. The BabyLIN firmware only accepts
# 'start schedule <index>;', so we resolve the name to its SDF index
# via BLC_SDF_getScheduleNr (handled inside start_schedule).
try:
send_command("stop;")
except Exception as e:
rp("stop_error", repr(e))
cco_idx = start_schedule("CCO")
rp("schedule_index", cco_idx)
# Step 6 + 7: build and publish the RGB activation frame.
payload = _build_alm_req_a_payload(*rgb, intensity=DEFAULT_INTENSITY)
rp("tx_id", f"0x{ALM_REQ_A_ID:02X}")
rp("tx_data_hex", payload.hex())
lin.send(LinFrame(id=ALM_REQ_A_ID, data=payload))
# Step 8: collect frames over the activation window. CCO publishes
# ALM_Req_A (0x0A) and ALM_Status (0x11) every ~10 ms each.
try:
lin.flush()
except Exception:
pass
seen = []
deadline = time.monotonic() + activation_window_s
while time.monotonic() < deadline:
rx = lin.receive(timeout=0.1)
if rx is None:
continue
seen.append((rx.id, bytes(rx.data)))
ids = sorted({fid for fid, _ in seen})
rp("rx_count", len(seen))
rp("rx_ids", [f"0x{i:02X}" for i in ids])
if seen:
last_id, last_data = seen[-1]
rp("rx_last_id", f"0x{last_id:02X}")
rp("rx_last_data_hex", last_data.hex())
# Step 9: minimal liveness assertion. We don't require ALM_Status
# specifically because absence-of-slave is a separate failure mode
# to diagnose; we just want to know the bus moved at all.
assert seen, (
f"No LIN frames observed during {activation_window_s:.2f}s on schedule CCO. "
f"Check wiring, SDF, and that 'CCO' exists in the loaded SDF."
)
if ALM_STATUS_ID in ids:
rp("alm_status_seen", True)
else:
# Not asserted, but logged so the report shows it clearly.
rp("alm_status_seen", False)
finally:
# Step 10: always cut power
try:
psu.set_output(False)
except Exception as off_err:
rp("set_output_off_error", repr(off_err))

View File

@ -1,102 +1,102 @@
import time
import pytest
import serial
from ecu_framework.power import OwonPSU, SerialParams
from ecu_framework.config import EcuTestConfig
pytestmark = [pytest.mark.hardware]
def test_owon_psu_idn_and_optional_set(config: EcuTestConfig, rp):
"""
Title: Owon PSU - IDN, Output Status, Set/Measure Verification
Description:
Validates serial SCPI control of an Owon PSU: IDN retrieval, output status query,
and optional set/measure cycle using values from central configuration.
Test Steps:
1. Load PSU config from EcuTestConfig.power_supply
2. Open serial connection and query *IDN?
3. Query output status (output?) and record initial state
4. If configured, set voltage/current, enable output briefly, measure V/I, then disable output
5. Record IDN, output status before/after, set values, and measured values in the report
Expected Result:
*IDN? returns a non-empty string (containing idn_substr if configured), serial operations succeed,
and, when enabled, the output toggles on then off with measurements returned.
"""
psu_cfg = config.power_supply
if not psu_cfg.enabled:
pytest.skip("Power supply tests disabled in config.power_supply.enabled")
if not psu_cfg.port:
pytest.skip("No power supply 'port' configured (config.power_supply.port)")
# Serial params (with sensible defaults via central config)
baud = int(psu_cfg.baudrate)
timeout = float(psu_cfg.timeout)
parity = psu_cfg.parity or "N"
stopbits = psu_cfg.stopbits or 1
xonxoff = bool(psu_cfg.xonxoff)
rtscts = bool(psu_cfg.rtscts)
dsrdtr = bool(psu_cfg.dsrdtr)
eol = psu_cfg.eol or "\n"
ps = SerialParams(
baudrate=baud,
timeout=timeout,
parity={"N": serial.PARITY_NONE, "E": serial.PARITY_EVEN, "O": serial.PARITY_ODD}.get(str(parity).upper(), serial.PARITY_NONE),
stopbits={1: serial.STOPBITS_ONE, 2: serial.STOPBITS_TWO}.get(int(float(stopbits)), serial.STOPBITS_ONE),
xonxoff=xonxoff,
rtscts=rtscts,
dsrdtr=dsrdtr,
)
want_substr = psu_cfg.idn_substr
do_set = bool(psu_cfg.do_set)
set_v = float(psu_cfg.set_voltage)
set_i = float(psu_cfg.set_current)
port = str(psu_cfg.port).strip()
with OwonPSU(port, ps, eol=eol) as psu:
# Step 2: IDN
idn = psu.idn()
rp("psu_idn", idn)
print(f"PSU IDN: {idn}")
assert isinstance(idn, str)
assert idn != "", "*IDN? returned empty response"
if want_substr:
assert str(want_substr).lower() in idn.lower(), f"IDN does not contain expected substring: {want_substr}. Got: {idn}"
# Step 3: Output status before
out_before = psu.output_status()
rp("output_status_before", str(out_before))
print(f"Output status (before): {out_before}")
if do_set:
# Step 4: Set and measure
rp("set_voltage", set_v)
rp("set_current", set_i)
print(f"Setting: voltage={set_v}V, current={set_i}A")
psu.set_voltage(1, set_v)
psu.set_current(1, set_i)
psu.set_output(True)
time.sleep(1.0) # allow settling
try:
mv = psu.measure_voltage()
mi = psu.measure_current()
rp("measured_voltage", mv)
rp("measured_current", mi)
print(f"Measured: voltage={mv}V, current={mi}A")
finally:
psu.set_output(False)
out_after = psu.output_status()
rp("output_status_after", str(out_after))
print(f"Output status (after): {out_after}")

View File

@ -1,61 +1,61 @@
import json
from pathlib import Path
import pytest
# Enable access to the built-in 'pytester' fixture
pytest_plugins = ("pytester",)
@pytest.mark.unit
def test_plugin_writes_artifacts(pytester):
# Make the project root importable so '-p conftest_plugin' works inside pytester
project_root = Path(__file__).resolve().parents[2]
pytester.syspathinsert(str(project_root))
# Create a minimal test file that includes a rich docstring
pytester.makepyfile(
test_sample='''
import pytest
@pytest.mark.req_001
def test_docstring_metadata():
"""
Title: Example Test
Description:
Small sample to exercise the reporting plugin.
Requirements: REQ-001
Test Steps:
1. do it
Expected Result:
- done
"""
assert True
'''
)
# Run pytest in the temporary test environment, loading our reporting plugin
result = pytester.runpytest(
"-q",
"-p",
"conftest_plugin",
"--html=reports/report.html",
"--self-contained-html",
"--junitxml=reports/junit.xml",
)
result.assert_outcomes(passed=1)
# Check for the JSON coverage artifact
cov = pytester.path / "reports" / "requirements_coverage.json"
assert cov.is_file()
data = json.loads(cov.read_text())
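# Illustrative shape implied by the assertions below (the per-requirement value
# format is an assumption, not guaranteed by the plugin):
#   {"requirements": {"REQ-001": ["test_sample.py::test_docstring_metadata"]},
#    "files": {"html": ".../report.html", "junit": ".../junit.xml"}}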
# Validate REQ mapping and presence of artifacts
assert "REQ-001" in data["requirements"]
assert data["files"]["html"].endswith("report.html")
assert data["files"]["junit"].endswith("junit.xml")
# Check that the CI summary exists
summary = pytester.path / "reports" / "summary.md"
assert summary.is_file()

View File

@ -1,48 +1,48 @@
import os
import pathlib
import pytest
# Hardware + babylin + smoke: this is the canonical end-to-end schedule flow
pytestmark = [pytest.mark.hardware, pytest.mark.babylin, pytest.mark.smoke]
WORKSPACE_ROOT = pathlib.Path(__file__).resolve().parents[1]
def test_babylin_sdk_example_flow(config, lin, rp):
"""
Title: BabyLIN SDK Example Flow - Open, Load SDF, Start Schedule, Rx Timeout
Description:
Mirrors the vendor example flow: discover/open, load SDF, start a
schedule, and attempt a receive. Validates that the adapter can perform
the essential control sequence without exceptions and that the receive
path is operational even if it times out.
Requirements: REQ-HW-OPEN, REQ-HW-SDF, REQ-HW-SCHEDULE
Preconditions:
- ECU_TESTS_CONFIG points to a hardware YAML with interface.sdf_path and schedule_nr
- BabyLIN_library.py and native libs placed per vendor/README.md
Test Steps:
1. Verify hardware config requests the BabyLIN SDK with SDF path
2. Connect via fixture (opens device, loads SDF, starts schedule)
3. Try to receive a frame with a short timeout
4. Assert no crash; accept None or a LinFrame (environment-dependent)
Expected Result:
- No exceptions during open/load/start
- Receive returns None (timeout) or a LinFrame
"""
# Step 1: Ensure config is set for hardware with SDK wrapper
assert config.interface.type == "babylin"
assert config.interface.sdf_path is not None
rp("sdf_path", str(config.interface.sdf_path))
rp("schedule_nr", int(config.interface.schedule_nr))
# Step 3: Attempt a short receive to validate RX path while schedule runs
rx = lin.receive(timeout=0.2)
rp("receive_result", "timeout" if rx is None else "frame")
# Step 4: Accept timeout or a valid frame object depending on bus activity
assert rx is None or hasattr(rx, "id")

View File

@ -1,34 +1,34 @@
import pytest
# Mark entire module as hardware + babylin so it's easy to select/deselect via -m
pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
def test_babylin_connect_receive_timeout(lin, rp):
"""
Title: BabyLIN Hardware Smoke - Connect and Timed Receive
Description:
Minimal hardware sanity check that relies on the configured fixtures to
connect to a BabyLIN device and perform a short receive call.
The test is intentionally permissive: it accepts either a valid LinFrame
or a None (timeout) as success, focusing on verifying that the adapter
is functional and not crashing.
Requirements: REQ-HW-SMOKE
Test Steps:
1. Use the 'lin' fixture to connect to the BabyLIN SDK adapter
2. Call receive() with a short timeout
3. Assert the outcome is either a LinFrame or None (timeout)
Expected Result:
- No exceptions are raised
- Return value is None (timeout) or an object with an 'id' attribute
"""
# Step 2: Perform a short receive to verify operability
rx = lin.receive(timeout=0.2)
rp("receive_result", "timeout" if rx is None else "frame")
# Step 3: Accept either a timeout (None) or a frame-like object
assert rx is None or hasattr(rx, "id")
import pytest
# Mark entire module as hardware + babylin so it's easy to select/deselect via -m
pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
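# Illustrative selection commands (marker names are real, no specific config path implied):
#   pytest -m "hardware and babylin"      # run only these hardware smoke tests
#   pytest -m "not hardware"              # exclude them in software-only CI jobs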
def test_babylin_connect_receive_timeout(lin, rp):
"""
Title: BabyLIN Hardware Smoke - Connect and Timed Receive
Description:
Minimal hardware sanity check that relies on the configured fixtures to
connect to a BabyLIN device and perform a short receive call.
The test is intentionally permissive: it accepts either a valid LinFrame
or a None (timeout) as success, focusing on verifying that the adapter
is functional and not crashing.
Requirements: REQ-HW-SMOKE
Test Steps:
1. Use the 'lin' fixture to connect to the BabyLIN SDK adapter
2. Call receive() with a short timeout
3. Assert the outcome is either a LinFrame or None (timeout)
Expected Result:
- No exceptions are raised
- Return value is None (timeout) or an object with an 'id' attribute
"""
# Step 2: Perform a short receive to verify operability
rx = lin.receive(timeout=1.0) # 1 second timeout
rp("receive_result", "timeout" if rx is None else "frame")
# Step 3: Accept either a timeout (None) or a frame-like object
assert rx is None or hasattr(rx, "id")

View File

@ -1,145 +1,145 @@
import pytest
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.babylin import BabyLinInterface
# Inject the pure-Python mock wrapper to run SDK adapter tests without hardware
from vendor import mock_babylin_wrapper as mock_bl
class _MockBytesOnly:
"""Shim exposing BLC_sendRawMasterRequest(bytes) only, to test bytes signature.
We wrap the existing mock but override BLC_sendRawMasterRequest to accept
only the bytes payload form. The response still uses the deterministic pattern
implied by the payload length (zeros are fine; we assert by length here).
"""
@staticmethod
def create_BabyLIN():
base = mock_bl.create_BabyLIN()
def bytes_only(channel, frame_id, payload):
# Delegate to the base mock's bytes variant by ensuring we pass bytes
if not isinstance(payload, (bytes, bytearray)):
raise TypeError("expected bytes payload")
return base.BLC_sendRawMasterRequest(channel, frame_id, bytes(payload))
# Monkey-patch the method to raise TypeError when a length is provided
def patched_raw_req(*args):
# Expected signature: (channel, frame_id, payload_bytes)
if len(args) != 3 or not isinstance(args[2], (bytes, bytearray)):
raise TypeError("bytes signature only")
return bytes_only(*args)
base.BLC_sendRawMasterRequest = patched_raw_req
return base
@pytest.mark.babylin
@pytest.mark.smoke
@pytest.mark.req_001
def test_babylin_sdk_adapter_with_mock_wrapper(rp):
"""
Title: SDK Adapter - Send/Receive with Mock Wrapper
Description:
Validate that the BabyLIN SDK-based adapter can send and receive using
a mocked wrapper exposing BLC_* APIs. The mock implements loopback by
echoing transmitted frames into the receive queue.
Requirements: REQ-001
Test Steps:
1. Construct BabyLinInterface with injected mock wrapper
2. Connect (discovers port, opens, loads SDF, starts schedule)
3. Send a frame via BLC_mon_set_xmit
4. Receive the same frame via BLC_getNextFrameTimeout
5. Disconnect
Expected Result:
- Received frame matches sent frame (ID and payload)
"""
# Step 1-2: Create adapter with wrapper injection and connect
lin = BabyLinInterface(sdf_path="./vendor/Example.sdf", schedule_nr=0, wrapper_module=mock_bl)
rp("wrapper", "mock_bl")
lin.connect()
try:
# Step 3: Transmit a known payload on a chosen ID
tx = LinFrame(id=0x12, data=bytes([0xAA, 0x55, 0x01]))
lin.send(tx)
# Step 4: Receive from the mock's RX queue (loopback)
rx = lin.receive(timeout=0.1)
rp("tx_id", f"0x{tx.id:02X}")
rp("tx_data", list(tx.data))
rp("rx_present", rx is not None)
# Step 5: Validate ID and payload integrity
assert rx is not None, "Expected a frame from mock loopback"
assert rx.id == tx.id
assert rx.data == tx.data
finally:
# Always disconnect to leave the mock in a clean state
lin.disconnect()
@pytest.mark.babylin
@pytest.mark.smoke
@pytest.mark.req_001
@pytest.mark.parametrize("wrapper,expect_pattern", [
(mock_bl, True), # length signature available: expect deterministic pattern
(_MockBytesOnly, False), # bytes-only signature: expect zeros of requested length
])
def test_babylin_master_request_with_mock_wrapper(wrapper, expect_pattern, rp):
"""
Title: SDK Adapter - Master Request using Mock Wrapper
Description:
Verify that request() prefers the SDK's BLC_sendRawMasterRequest when
available. The mock wrapper enqueues a deterministic response where
data[i] = (id + i) & 0xFF, allowing predictable assertions.
Requirements: REQ-001
Test Steps:
1. Construct BabyLinInterface with injected mock wrapper
2. Connect (mock open/initialize)
3. Issue a master request for a specific ID and length
4. Receive the response frame
5. Validate ID and deterministic payload pattern
Expected Result:
- Response frame ID matches request ID
- Response data length matches requested length
- Response data follows deterministic pattern
"""
# Step 1-2: Initialize mock-backed adapter
lin = BabyLinInterface(wrapper_module=wrapper)
rp("wrapper", getattr(wrapper, "__name__", str(wrapper)))
lin.connect()
try:
# Step 3: Request 4 bytes for ID 0x22
req_id = 0x22
length = 4
rp("req_id", f"0x{req_id:02X}")
rp("req_len", length)
rx = lin.request(id=req_id, length=length, timeout=0.1)
# Step 4-5: Validate response
assert rx is not None, "Expected a response from mock master request"
assert rx.id == req_id
if expect_pattern:
# length-signature mock returns deterministic pattern
expected = bytes(((req_id + i) & 0xFF) for i in range(length))
rp("expected_data", list(expected))
rp("rx_data", list(rx.data))
assert rx.data == expected
else:
# bytes-only mock returns exactly the bytes we sent (zeros of requested length)
expected = bytes([0] * length)
rp("expected_data", list(expected))
rp("rx_data", list(rx.data))
assert rx.data == expected
finally:
lin.disconnect()

View File

@ -1,19 +1,19 @@
import pytest
# This module is gated by 'hardware' and 'babylin' markers to only run in hardware jobs
pytestmark = [pytest.mark.hardware, pytest.mark.babylin]
def test_babylin_placeholder():
"""
Title: Hardware Test Placeholder
Description:
Minimal placeholder to verify hardware selection and CI plumbing. It
ensures that -m hardware pipelines and marker-based selection work as
expected even when no specific hardware assertions are needed.
Expected Result:
- Always passes.
"""
assert True

View File

@ -1,190 +1,202 @@
import pytest
from ecu_framework.lin.base import LinFrame
class TestMockLinInterface:
"""Test suite validating the pure-Python mock LIN interface behavior.
Coverage goals:
- REQ-001: Echo loopback for local testing (send -> receive same frame)
- REQ-002: Deterministic master request responses (no randomness)
- REQ-003: Frame ID filtering in receive()
- REQ-004: Graceful handling of timeout when no frame is available
Notes:
- These tests run entirely without hardware and should be fast and stable.
- The injected mock interface enqueues frames on transmit to emulate a bus.
- Deterministic responses allow exact byte-for-byte assertions.
"""
@pytest.mark.smoke
@pytest.mark.req_001
@pytest.mark.req_003
def test_mock_send_receive_echo(self, lin, rp):
"""
Title: Mock LIN Interface - Send/Receive Echo Test
Description:
Validates that the mock LIN interface correctly echoes frames sent on the bus,
enabling loopback testing without hardware dependencies.
Requirements: REQ-001, REQ-003
Test Steps:
1. Create a LIN frame with specific ID and data payload
2. Send the frame via the mock interface
3. Attempt to receive the echoed frame with ID filtering
4. Verify the received frame matches the transmitted frame exactly
Expected Result:
- Frame is successfully echoed by mock interface
- Received frame ID matches transmitted frame ID (0x12)
- Received frame data payload matches transmitted data [1, 2, 3]
"""
# Step 1: Create test frame with known ID and payload
test_frame = LinFrame(id=0x12, data=bytes([1, 2, 3]))
rp("lin_type", "mock")
rp("tx_id", f"0x{test_frame.id:02X}")
rp("tx_data", list(test_frame.data))
# Step 2: Transmit frame via mock interface (mock will enqueue to RX)
lin.send(test_frame)
# Step 3: Receive echoed frame with ID filtering and timeout
received_frame = lin.receive(id=0x12, timeout=0.5)
rp("rx_present", received_frame is not None)
if received_frame is not None:
rp("rx_id", f"0x{received_frame.id:02X}")
rp("rx_data", list(received_frame.data))
# Step 4: Validate echo functionality and payload integrity
assert received_frame is not None, "Mock interface should echo transmitted frames"
assert received_frame.id == test_frame.id, f"Expected ID {test_frame.id:#x}, got {received_frame.id:#x}"
assert received_frame.data == test_frame.data, f"Expected data {test_frame.data!r}, got {received_frame.data!r}"
@pytest.mark.smoke
@pytest.mark.req_002
def test_mock_request_synthesized_response(self, lin, rp):
"""
Title: Mock LIN Interface - Master Request Response Test
Description:
Validates that the mock interface synthesizes deterministic responses
for master request operations, simulating slave node behavior.
Requirements: REQ-002
Test Steps:
1. Issue a master request for specific frame ID and data length
2. Verify mock interface generates a response frame
3. Validate response frame ID matches request ID
4. Verify response data length matches requested length
5. Confirm response data is deterministic (not random)
Expected Result:
- Mock interface generates response within timeout period
- Response frame ID matches request ID (0x21)
- Response data length equals requested length (4 bytes)
- Response data follows deterministic pattern: [id+0, id+1, id+2, id+3]
"""
# Step 1: Issue master request with specific parameters
request_id = 0x21
requested_length = 4
# Step 2: Execute request operation; mock synthesizes deterministic bytes
rp("lin_type", "mock")
rp("req_id", f"0x{request_id:02X}")
rp("req_len", requested_length)
response_frame = lin.request(id=request_id, length=requested_length, timeout=0.5)
# Step 3: Validate response generation
assert response_frame is not None, "Mock interface should generate response for master requests"
# Step 4: Verify response frame properties (ID and length)
assert response_frame.id == request_id, f"Response ID {response_frame.id:#x} should match request ID {request_id:#x}"
assert len(response_frame.data) == requested_length, f"Response length {len(response_frame.data)} should match requested length {requested_length}"
# Step 5: Validate deterministic response pattern
expected_data = bytes((request_id + i) & 0xFF for i in range(requested_length))
rp("rx_data", list(response_frame.data) if response_frame else None)
rp("expected_data", list(expected_data))
assert response_frame.data == expected_data, f"Response data {response_frame.data!r} should follow deterministic pattern {expected_data!r}"
@pytest.mark.smoke
@pytest.mark.req_004
def test_mock_receive_timeout_behavior(self, lin, rp):
"""
Title: Mock LIN Interface - Receive Timeout Test
Description:
Validates that the mock interface properly handles timeout scenarios
when no matching frames are available for reception.
Requirements: REQ-004
Test Steps:
1. Attempt to receive a frame with non-existent ID
2. Use short timeout to avoid blocking test execution
3. Verify timeout behavior returns None rather than blocking indefinitely
Expected Result:
- Receive operation returns None when no matching frames available
- Operation completes within specified timeout period
- No exceptions or errors during timeout scenario
"""
# Step 1: Attempt to receive frame with ID that hasn't been transmitted
non_existent_id = 0xFF
short_timeout = 0.1 # 100ms timeout
# Step 2: Execute receive with timeout (should return None quickly)
rp("lin_type", "mock")
rp("rx_id", f"0x{non_existent_id:02X}")
rp("timeout_s", short_timeout)
result = lin.receive(id=non_existent_id, timeout=short_timeout)
rp("rx_present", result is not None)
# Step 3: Verify proper timeout behavior (no exceptions, returns None)
assert result is None, "Receive operation should return None when no matching frames available"
@pytest.mark.boundary
@pytest.mark.req_001
@pytest.mark.req_003
@pytest.mark.parametrize("frame_id,data_payload", [
(0x01, bytes([0x55])),
(0x3F, bytes([0xAA, 0x55])),
(0x20, bytes([0x01, 0x02, 0x03, 0x04, 0x05])),
(0x15, bytes([0xFF, 0x00, 0xCC, 0x33, 0xF0, 0x0F, 0xA5, 0x5A])),
])
def test_mock_frame_validation_boundaries(self, lin, rp, frame_id, data_payload):
"""
Title: Mock LIN Interface - Frame Validation Boundaries Test
Description:
Validates mock interface handling of various frame configurations
including boundary conditions for frame IDs and data lengths.
Requirements: REQ-001, REQ-003
Test Steps:
1. Test various valid frame ID values (0x01 to 0x3F)
2. Test different data payload lengths (1 to 8 bytes)
3. Verify proper echo behavior for all valid combinations
Expected Result:
- All valid frame configurations are properly echoed
- Frame ID and data integrity preserved across echo operation
"""
# Step 1: Create frame with parameterized values
test_frame = LinFrame(id=frame_id, data=data_payload)
rp("lin_type", "mock")
rp("tx_id", f"0x{frame_id:02X}")
rp("tx_len", len(data_payload))
# Step 2: Send and receive frame
lin.send(test_frame)
received_frame = lin.receive(id=frame_id, timeout=0.5)
# Step 3: Validate frame integrity across IDs and payload sizes
assert received_frame is not None, f"Frame with ID {frame_id:#x} should be echoed"
assert received_frame.id == frame_id, f"Frame ID should be preserved: expected {frame_id:#x}"
assert received_frame.data == data_payload, f"Frame data should be preserved for ID {frame_id:#x}"
import pytest
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mock import MockBabyLinInterface
@pytest.fixture(scope="module")
def lin():
"""Module-local override: these tests are explicitly mock-only and must
not depend on whatever real-hardware interface the central config selects."""
iface = MockBabyLinInterface(bitrate=19200, channel=0)
iface.connect()
yield iface
iface.disconnect()
class TestMockLinInterface:
"""Test suite validating the pure-Python mock LIN interface behavior.
Coverage goals:
- REQ-001: Echo loopback for local testing (send -> receive same frame)
- REQ-002: Deterministic master request responses (no randomness)
- REQ-003: Frame ID filtering in receive()
- REQ-004: Graceful handling of timeout when no frame is available
Notes:
- These tests run entirely without hardware and should be fast and stable.
- The injected mock interface enqueues frames on transmit to emulate a bus.
- Deterministic responses allow exact byte-for-byte assertions.
"""
@pytest.mark.smoke
@pytest.mark.req_001
@pytest.mark.req_003
def test_mock_send_receive_echo(self, lin, rp):
"""
Title: Mock LIN Interface - Send/Receive Echo Test
Description:
Validates that the mock LIN interface correctly echoes frames sent on the bus,
enabling loopback testing without hardware dependencies.
Requirements: REQ-001, REQ-003
Test Steps:
1. Create a LIN frame with specific ID and data payload
2. Send the frame via the mock interface
3. Attempt to receive the echoed frame with ID filtering
4. Verify the received frame matches the transmitted frame exactly
Expected Result:
- Frame is successfully echoed by mock interface
- Received frame ID matches transmitted frame ID (0x12)
- Received frame data payload matches transmitted data [1, 2, 3]
"""
# Step 1: Create test frame with known ID and payload
test_frame = LinFrame(id=0x12, data=bytes([1, 2, 3]))
rp("lin_type", "mock")
rp("tx_id", f"0x{test_frame.id:02X}")
rp("tx_data", list(test_frame.data))
# Step 2: Transmit frame via mock interface (mock will enqueue to RX)
lin.send(test_frame)
# Step 3: Receive echoed frame with ID filtering and timeout
received_frame = lin.receive(id=0x12, timeout=0.5)
rp("rx_present", received_frame is not None)
if received_frame is not None:
rp("rx_id", f"0x{received_frame.id:02X}")
rp("rx_data", list(received_frame.data))
# Step 4: Validate echo functionality and payload integrity
assert received_frame is not None, "Mock interface should echo transmitted frames"
assert received_frame.id == test_frame.id, f"Expected ID {test_frame.id:#x}, got {received_frame.id:#x}"
assert received_frame.data == test_frame.data, f"Expected data {test_frame.data!r}, got {received_frame.data!r}"
@pytest.mark.smoke
@pytest.mark.req_002
def test_mock_request_synthesized_response(self, lin, rp):
"""
Title: Mock LIN Interface - Master Request Response Test
Description:
Validates that the mock interface synthesizes deterministic responses
for master request operations, simulating slave node behavior.
Requirements: REQ-002
Test Steps:
1. Issue a master request for specific frame ID and data length
2. Verify mock interface generates a response frame
3. Validate response frame ID matches request ID
4. Verify response data length matches requested length
5. Confirm response data is deterministic (not random)
Expected Result:
- Mock interface generates response within timeout period
- Response frame ID matches request ID (0x21)
- Response data length equals requested length (4 bytes)
- Response data follows deterministic pattern: [id+0, id+1, id+2, id+3]
"""
# Step 1: Issue master request with specific parameters
request_id = 0x21
requested_length = 4
# Step 2: Execute request operation; mock synthesizes deterministic bytes
rp("lin_type", "mock")
rp("req_id", f"0x{request_id:02X}")
rp("req_len", requested_length)
response_frame = lin.request(id=request_id, length=requested_length, timeout=0.5)
# Step 3: Validate response generation
assert response_frame is not None, "Mock interface should generate response for master requests"
# Step 4: Verify response frame properties (ID and length)
assert response_frame.id == request_id, f"Response ID {response_frame.id:#x} should match request ID {request_id:#x}"
assert len(response_frame.data) == requested_length, f"Response length {len(response_frame.data)} should match requested length {requested_length}"
# Step 5: Validate deterministic response pattern
expected_data = bytes((request_id + i) & 0xFF for i in range(requested_length))
rp("rx_data", list(response_frame.data) if response_frame else None)
rp("expected_data", list(expected_data))
assert response_frame.data == expected_data, f"Response data {response_frame.data!r} should follow deterministic pattern {expected_data!r}"
@pytest.mark.smoke
@pytest.mark.req_004
def test_mock_receive_timeout_behavior(self, lin, rp):
"""
Title: Mock LIN Interface - Receive Timeout Test
Description:
Validates that the mock interface properly handles timeout scenarios
when no matching frames are available for reception.
Requirements: REQ-004
Test Steps:
1. Attempt to receive a frame with non-existent ID
2. Use short timeout to avoid blocking test execution
3. Verify timeout behavior returns None rather than blocking indefinitely
Expected Result:
- Receive operation returns None when no matching frames available
- Operation completes within specified timeout period
- No exceptions or errors during timeout scenario
"""
# Step 1: Attempt to receive frame with ID that hasn't been transmitted
non_existent_id = 0xFF
short_timeout = 0.1 # 100ms timeout
# Step 2: Execute receive with timeout (should return None quickly)
rp("lin_type", "mock")
rp("rx_id", f"0x{non_existent_id:02X}")
rp("timeout_s", short_timeout)
result = lin.receive(id=non_existent_id, timeout=short_timeout)
rp("rx_present", result is not None)
# Step 3: Verify proper timeout behavior (no exceptions, returns None)
assert result is None, "Receive operation should return None when no matching frames available"
@pytest.mark.boundary
@pytest.mark.req_001
@pytest.mark.req_003
@pytest.mark.parametrize("frame_id,data_payload", [
(0x01, bytes([0x55])),
(0x3F, bytes([0xAA, 0x55])),
(0x20, bytes([0x01, 0x02, 0x03, 0x04, 0x05])),
(0x15, bytes([0xFF, 0x00, 0xCC, 0x33, 0xF0, 0x0F, 0xA5, 0x5A])),
])
def test_mock_frame_validation_boundaries(self, lin, rp, frame_id, data_payload):
"""
Title: Mock LIN Interface - Frame Validation Boundaries Test
Description:
Validates mock interface handling of various frame configurations
including boundary conditions for frame IDs and data lengths.
Requirements: REQ-001, REQ-003
Test Steps:
1. Test various valid frame ID values (0x01 to 0x3F)
2. Test different data payload lengths (1 to 8 bytes)
3. Verify proper echo behavior for all valid combinations
Expected Result:
- All valid frame configurations are properly echoed
- Frame ID and data integrity preserved across echo operation
"""
# Step 1: Create frame with parameterized values
test_frame = LinFrame(id=frame_id, data=data_payload)
rp("lin_type", "mock")
rp("tx_id", f"0x{frame_id:02X}")
rp("tx_len", len(data_payload))
# Step 2: Send and receive frame
lin.send(test_frame)
received_frame = lin.receive(id=frame_id, timeout=0.5)
# Step 3: Validate frame integrity across IDs and payload sizes
assert received_frame is not None, f"Frame with ID {frame_id:#x} should be echoed"
assert received_frame.id == frame_id, f"Frame ID should be preserved: expected {frame_id:#x}"
assert received_frame.data == data_payload, f"Frame data should be preserved for ID {frame_id:#x}"

View File

@ -1,22 +1,22 @@
import pytest
from ecu_framework.lin.babylin import BabyLinInterface
from vendor import mock_babylin_wrapper as mock_bl
class _ErrMock:
@staticmethod
def create_BabyLIN():
bl = mock_bl.create_BabyLIN()
# Force loadSDF to return a non-OK code
def fail_load(*args, **kwargs):
return 1 # non BL_OK
bl.BLC_loadSDF = fail_load
return bl
@pytest.mark.unit
def test_connect_sdf_error_raises():
lin = BabyLinInterface(sdf_path="dummy.sdf", wrapper_module=_ErrMock)
with pytest.raises(RuntimeError):
lin.connect()

View File

@ -1,40 +1,40 @@
import os
import json
import pathlib
import pytest
from ecu_framework.config import load_config
@pytest.mark.unit
def test_config_precedence_env_overrides(monkeypatch, tmp_path, rp):
# Create a YAML file to use via env var
yaml_path = tmp_path / "cfg.yaml"
yaml_path.write_text("interface:\n type: babylin\n channel: 7\n")
# Point ECU_TESTS_CONFIG to env YAML
monkeypatch.setenv("ECU_TESTS_CONFIG", str(yaml_path))
# Apply overrides on top
cfg = load_config(workspace_root=str(tmp_path), overrides={"interface": {"channel": 9}})
rp("config_source", "env+overrides")
rp("interface_type", cfg.interface.type)
rp("interface_channel", cfg.interface.channel)
# Env file applied
assert cfg.interface.type == "babylin"
# Overrides win
assert cfg.interface.channel == 9
@pytest.mark.unit
def test_config_defaults_when_no_file(monkeypatch, rp):
# Ensure no env path
monkeypatch.delenv("ECU_TESTS_CONFIG", raising=False)
cfg = load_config(workspace_root=None)
rp("config_source", "defaults")
rp("interface_type", cfg.interface.type)
rp("flash_enabled", cfg.flash.enabled)
assert cfg.interface.type == "mock"
assert cfg.flash.enabled is False

View File

@ -1,32 +1,32 @@
import pytest
from ecu_framework.flashing.hex_flasher import HexFlasher
from ecu_framework.lin.base import LinFrame
class _StubLin:
def __init__(self):
self.sent = []
def connect(self):
pass
def disconnect(self):
pass
def send(self, frame: LinFrame):
self.sent.append(frame)
def receive(self, id=None, timeout=1.0):
return None
@pytest.mark.unit
def test_hex_flasher_sends_basic_sequence(tmp_path, rp):
# Minimal valid Intel HEX file (EOF record)
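# (':' + byte count 00 + address 0000 + record type 01 + checksum FF,
#  where FF is the two's complement of 00+00+00+01)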
hex_path = tmp_path / "fw.hex"
hex_path.write_text(":00000001FF\n")
lin = _StubLin()
flasher = HexFlasher(lin)
flasher.flash_hex(str(hex_path))
rp("hex_path", str(hex_path))
rp("sent_count", len(lin.sent))
# Placeholder assertion; refine as the flasher gains functionality
assert isinstance(lin.sent, list)

View File

@ -1,25 +1,25 @@

View File

@ -0,0 +1,242 @@
"""Unit tests for the MUM LIN adapter using fake pylin/pymumclient modules.
These tests don't talk to real hardware — they inject lightweight fakes via
the adapter's `mum_module` / `pylin_module` constructor args to validate the
adapter's plumbing (connect/disconnect, send, receive, send_raw, power_*).
"""
from __future__ import annotations
import pytest
from ecu_framework.lin.base import LinFrame
from ecu_framework.lin.mum import MumLinInterface
# ---- fakes ---------------------------------------------------------------
class _FakePower:
def __init__(self):
self.up_calls = 0
self.down_calls = 0
def power_up(self):
self.up_calls += 1
def power_down(self):
self.down_calls += 1
class _FakeTransport:
def __init__(self):
self.raw_frames = []
def ld_put_raw(self, data, baudrate):
self.raw_frames.append((bytes(data), int(baudrate)))
class _FakeLinDev:
def __init__(self, transport):
self.baudrate = 0
self.tx = []
self._transport = transport
# Pre-canned slave responses keyed by frame_id
self.slave_responses = {0x11: [0x07, 0x00, 0x00, 0x00]}
self.fail_on_recv_id = None
def get_device(self, name):
if name == "bus/transport_layer":
return self._transport
raise KeyError(name)
def send_message(self, master_to_slave, frame_id, data_length, data=None):
if master_to_slave:
self.tx.append((int(frame_id), int(data_length), list(data or [])))
return None
# slave-to-master
if self.fail_on_recv_id == int(frame_id):
raise RuntimeError("simulated rx timeout")
return self.slave_responses.get(int(frame_id))
class _FakeLinMaster:
def __init__(self):
self.setup_calls = 0
self.teardown_calls = 0
def setup(self):
self.setup_calls += 1
def teardown(self):
self.teardown_calls += 1
class _FakeMUM:
"""Stand-in for pymumclient.MelexisUniversalMaster()."""
def __init__(self):
self.opened_with = None
self._lin_master = _FakeLinMaster()
self._power = _FakePower()
self._transport = _FakeTransport()
self._lin_dev = _FakeLinDev(self._transport)
def open_all(self, host):
self.opened_with = host
def get_device(self, name):
if name == "lin0":
return self._lin_master
if name == "power_out0":
return self._power
raise KeyError(name)
class _FakeMumModule:
def __init__(self):
self.last = None
def MelexisUniversalMaster(self): # noqa: N802 - matches vendor API
self.last = _FakeMUM()
return self.last
class _FakePylinModule:
"""Stand-in for pylin: provides LinBusManager and LinDevice22."""
def __init__(self, lin_dev_factory):
# lin_dev_factory(lin_bus) returns an object with the .get_device,
# .send_message and .baudrate API used by MumLinInterface.
self._lin_dev_factory = lin_dev_factory
def LinBusManager(self, linmaster): # noqa: N802
return ("bus_for", linmaster)
def LinDevice22(self, lin_bus): # noqa: N802
return self._lin_dev_factory(lin_bus)
# ---- helpers -------------------------------------------------------------
def _build_iface(boot_settle=0.0):
"""Construct a MumLinInterface wired to fake modules; return (iface, fakes)."""
mum_mod = _FakeMumModule()
# Pylin's LinDevice22 should hand back the same FakeLinDev that's
# attached to the MUM instance for this test, so assertions can read tx.
captured = {}
def lin_dev_factory(lin_bus):
# The mum module's get_device('lin0') will be called from connect();
# but pylin.LinDevice22(lin_bus) just needs to expose the same API.
# We pull the FakeLinDev off the FakeMUM that was constructed.
captured["lin_dev"] = mum_mod.last._lin_dev
return mum_mod.last._lin_dev
pylin_mod = _FakePylinModule(lin_dev_factory)
iface = MumLinInterface(
host="10.0.0.1",
boot_settle_seconds=boot_settle,
mum_module=mum_mod,
pylin_module=pylin_mod,
)
return iface, mum_mod, captured
# ---- tests ---------------------------------------------------------------
@pytest.mark.unit
def test_connect_opens_mum_and_powers_up():
iface, mum_mod, _ = _build_iface()
iface.connect()
try:
assert mum_mod.last.opened_with == "10.0.0.1"
assert mum_mod.last._lin_master.setup_calls == 1
assert mum_mod.last._power.up_calls == 1
assert iface._lin_dev.baudrate == 19200
finally:
iface.disconnect()
@pytest.mark.unit
def test_disconnect_powers_down_and_tears_down():
iface, mum_mod, _ = _build_iface()
iface.connect()
iface.disconnect()
assert mum_mod.last._power.down_calls == 1
assert mum_mod.last._lin_master.teardown_calls == 1
@pytest.mark.unit
def test_send_publishes_master_frame():
iface, mum_mod, _ = _build_iface()
iface.connect()
try:
iface.send(LinFrame(id=0x0A, data=bytes([1, 2, 3, 4, 5, 6, 7, 8])))
tx = mum_mod.last._lin_dev.tx
assert tx == [(0x0A, 8, [1, 2, 3, 4, 5, 6, 7, 8])]
finally:
iface.disconnect()
@pytest.mark.unit
def test_receive_uses_frame_lengths_default():
iface, _, _ = _build_iface()
iface.connect()
try:
frame = iface.receive(id=0x11, timeout=0.1)
assert frame is not None
assert frame.id == 0x11
# Default frame_lengths maps 0x11 -> 4
assert len(frame.data) == 4
assert frame.data[0] == 0x07
finally:
iface.disconnect()
@pytest.mark.unit
def test_receive_returns_none_on_pylin_exception():
iface, mum_mod, _ = _build_iface()
iface.connect()
try:
mum_mod.last._lin_dev.fail_on_recv_id = 0x11
assert iface.receive(id=0x11, timeout=0.1) is None
finally:
iface.disconnect()
@pytest.mark.unit
def test_receive_without_id_raises():
iface, _, _ = _build_iface()
iface.connect()
try:
with pytest.raises(NotImplementedError):
iface.receive(id=None)
finally:
iface.disconnect()
@pytest.mark.unit
def test_send_raw_uses_classic_checksum_path():
iface, mum_mod, _ = _build_iface()
iface.connect()
try:
iface.send_raw(b"\x7f\x06\xb5\xff\x7f\x01\x02\xff")
raw = mum_mod.last._transport.raw_frames
assert len(raw) == 1
assert raw[0][0] == b"\x7f\x06\xb5\xff\x7f\x01\x02\xff"
assert raw[0][1] == 19200
finally:
iface.disconnect()
@pytest.mark.unit
def test_power_cycle_calls_down_then_up():
iface, mum_mod, _ = _build_iface()
iface.connect()
try:
iface.power_cycle(wait=0.0)
finally:
iface.disconnect()
assert mum_mod.last._power.up_calls >= 2 # initial connect + cycle
assert mum_mod.last._power.down_calls >= 1

405
vendor/4SEVEN_color_lib_test.ldf vendored Normal file
View File

@ -0,0 +1,405 @@
LIN_description_file;
LIN_protocol_version = "2.1";
LIN_language_version = "2.1";
LIN_speed = 19.2 kbps;
Nodes {
Master: Master_Node, 5 ms, 0.5 ms ;
Slaves: ALM_Node ;
}
Signals {
AmbLightColourRed:8,0x00,Master_Node,ALM_Node;
AmbLightColourGreen:8,0x00,Master_Node,ALM_Node;
AmbLightColourBlue:8,0x00,Master_Node,ALM_Node;
AmbLightIntensity:8,0x00,Master_Node,ALM_Node;
AmbLightUpdate:2,0x0,Master_Node,ALM_Node;
AmbLightMode:6,0x0,Master_Node,ALM_Node;
AmbLightDuration:8,0x00,Master_Node,ALM_Node;
AmbLightLIDFrom:8,0x00,Master_Node,ALM_Node;
AmbLightLIDTo:8,0x00,Master_Node,ALM_Node;
ALMNVMStatus:4,0x0,ALM_Node,Master_Node;
ALMThermalStatus:4,0x0,ALM_Node,Master_Node;
ALMNadNo:8,0x00,ALM_Node,Master_Node;
SigCommErr:1,0x0,ALM_Node,Master_Node;
ALMVoltageStatus:4,0x0,ALM_Node,Master_Node;
ALMLEDState:2,0x0,ALM_Node,Master_Node;
ColorConfigFrameRed_X: 16, 5665, Master_Node, ALM_Node ;
ColorConfigFrameRed_Y: 16, 2396, Master_Node, ALM_Node ;
ColorConfigFrameRed_Z: 16, 0, Master_Node, ALM_Node ;
ColorConfigFrameGreen_X: 16, 1094, Master_Node, ALM_Node ;
ColorConfigFrameGreen_Y: 16, 5534, Master_Node, ALM_Node ;
ColorConfigFrameGreen_Z: 16, 996, Master_Node, ALM_Node ;
ColorConfigFrameBlue_X: 16, 9618, Master_Node, ALM_Node ;
ColorConfigFrameBlue_Y: 16, 0, Master_Node, ALM_Node ;
ColorConfigFrameBlue_Z: 16, 51922, Master_Node, ALM_Node ;
PWM_Frame_Red: 16, 0, ALM_Node, Master_Node ;
PWM_Frame_Green: 16, 0, ALM_Node, Master_Node ;
PWM_Frame_Blue1: 16, 0, ALM_Node, Master_Node ;
ConfigFrame_Calibration: 1, 0, Master_Node, ALM_Node ;
PWM_Frame_Blue2: 16, 0, ALM_Node, Master_Node ;
ColorConfigFrameRed_Vf_Cal: 16, 2031, Master_Node, ALM_Node ;
ColorConfigFrameGreen_VfCal: 16, 2903, Master_Node, ALM_Node ;
ColorConfigFrameBlue_VfCal: 16, 2950, Master_Node, ALM_Node ;
VF_Frame_Red_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_Green_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_Blue1_VF: 16, 0, ALM_Node, Master_Node ;
VF_Frame_VLED: 16, 0, ALM_Node, Master_Node ;
VF_Frame_VS: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Red: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Green: 16, 0, ALM_Node, Master_Node ;
Tj_Frame_Blue: 16, 0, ALM_Node, Master_Node ;
ConfigFrame_MaxLM: 16, 3840, Master_Node, ALM_Node ;
Calibration_status: 1, 0, ALM_Node, Master_Node ;
Tj_Frame_NTC: 15, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Red: 16, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Green: 16, 0, ALM_Node, Master_Node ;
PWM_wo_Comp_Blue: 16, 0, ALM_Node, Master_Node ;
NVM_Static_Valid: 16, 0, ALM_Node, Master_Node ;
NVM_Static_Rev: 16, 0, ALM_Node, Master_Node ;
NVM_Calib_Version: 8, 0, ALM_Node, Master_Node ;
NVM_OADCCAL: 8, 0, ALM_Node, Master_Node ;
NVM_GainADCLowCal: 8, 0, ALM_Node, Master_Node ;
NVM_GainADCHighCal: 8, 0, ALM_Node, Master_Node ;
ConfigFrame_EnableDerating: 1, 1, Master_Node, ALM_Node ;
ConfigFrame_EnableCompensation: 1, 1, Master_Node, ALM_Node ;
}
Diagnostic_signals {
MasterReqB0: 8, 0 ;
MasterReqB1: 8, 0 ;
MasterReqB2: 8, 0 ;
MasterReqB3: 8, 0 ;
MasterReqB4: 8, 0 ;
MasterReqB5: 8, 0 ;
MasterReqB6: 8, 0 ;
MasterReqB7: 8, 0 ;
SlaveRespB0: 8, 0 ;
SlaveRespB1: 8, 0 ;
SlaveRespB2: 8, 0 ;
SlaveRespB3: 8, 0 ;
SlaveRespB4: 8, 0 ;
SlaveRespB5: 8, 0 ;
SlaveRespB6: 8, 0 ;
SlaveRespB7: 8, 0 ;
}
Frames {
ALM_Req_A:0x0A,Master_Node,8{
AmbLightColourRed,0;
AmbLightColourGreen,8;
AmbLightColourBlue,16;
AmbLightIntensity,24;
AmbLightUpdate,32;
AmbLightMode,34;
AmbLightDuration,40;
AmbLightLIDFrom,48;
AmbLightLIDTo,56;
}
ALM_Status:0x11,ALM_Node,4{
ALMNVMStatus,16;
SigCommErr,24;
ALMLEDState,20;
ALMVoltageStatus,8;
ALMNadNo,0;
ALMThermalStatus,12;
}
ColorConfigFrameRed: 3, Master_Node, 8 {
ColorConfigFrameRed_X, 0 ;
ColorConfigFrameRed_Y, 16 ;
ColorConfigFrameRed_Z, 32 ;
ColorConfigFrameRed_Vf_Cal, 48 ;
}
ColorConfigFrameGreen: 4, Master_Node, 8 {
ColorConfigFrameGreen_X, 0 ;
ColorConfigFrameGreen_Y, 16 ;
ColorConfigFrameGreen_Z, 32 ;
ColorConfigFrameGreen_VfCal, 48 ;
}
ColorConfigFrameBlue: 5, Master_Node, 8 {
ColorConfigFrameBlue_X, 0 ;
ColorConfigFrameBlue_Y, 16 ;
ColorConfigFrameBlue_Z, 32 ;
ColorConfigFrameBlue_VfCal, 48 ;
}
PWM_Frame: 18, ALM_Node, 8 {
PWM_Frame_Red, 0 ;
PWM_Frame_Green, 16 ;
PWM_Frame_Blue1, 32 ;
PWM_Frame_Blue2, 48 ;
}
ConfigFrame: 6, Master_Node, 3 {
ConfigFrame_Calibration, 0 ;
ConfigFrame_MaxLM, 3 ;
ConfigFrame_EnableDerating, 1 ;
ConfigFrame_EnableCompensation, 2 ;
}
VF_Frame: 19, ALM_Node, 8 {
VF_Frame_Red_VF, 0 ;
VF_Frame_Green_VF, 16 ;
VF_Frame_Blue1_VF, 32 ;
VF_Frame_VLED, 48 ;
}
Tj_Frame: 20, ALM_Node, 8 {
Tj_Frame_Red, 0 ;
Tj_Frame_Green, 16 ;
Tj_Frame_Blue, 32 ;
Calibration_status, 63 ;
Tj_Frame_NTC, 48 ;
}
PWM_wo_Comp: 21, ALM_Node, 8 {
PWM_wo_Comp_Red, 0 ;
PWM_wo_Comp_Green, 16 ;
PWM_wo_Comp_Blue, 32 ;
VF_Frame_VS, 48 ;
}
NVM_Debug: 22, ALM_Node, 8 {
NVM_Static_Valid, 0 ;
NVM_Static_Rev, 16 ;
NVM_Calib_Version, 32 ;
NVM_OADCCAL, 40 ;
NVM_GainADCLowCal, 48 ;
NVM_GainADCHighCal, 56 ;
}
}
Diagnostic_frames {
MasterReq: 0x3c {
MasterReqB0, 0 ;
MasterReqB1, 8 ;
MasterReqB2, 16 ;
MasterReqB3, 24 ;
MasterReqB4, 32 ;
MasterReqB5, 40 ;
MasterReqB6, 48 ;
MasterReqB7, 56 ;
}
SlaveResp: 0x3d {
SlaveRespB0, 0 ;
SlaveRespB1, 8 ;
SlaveRespB2, 16 ;
SlaveRespB3, 24 ;
SlaveRespB4, 32 ;
SlaveRespB5, 40 ;
SlaveRespB6, 48 ;
SlaveRespB7, 56 ;
}
}
Node_attributes {
ALM_Node {
LIN_protocol = 2.1 ;
configured_NAD = 0x01 ;
initial_NAD = 0x02 ;
product_id = 0x0013, 0x0003, 1 ;
response_error = SigCommErr ;
P2_min = 50.0000 ms ;
ST_min = 20.0000 ms ;
configurable_frames {
ALM_Req_A;
ALM_Status;
ColorConfigFrameRed ;
ColorConfigFrameGreen ;
ColorConfigFrameBlue ;
PWM_Frame ;
ConfigFrame ;
VF_Frame ;
Tj_Frame ;
PWM_wo_Comp ;
NVM_Debug ;
}
}
}
Schedule_tables {
LIN_AA {
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x1, 0x2, 0xFF } delay 50 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x1 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x2 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x3 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x4 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x5 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x6 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x7 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x8 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x9 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xA } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xB } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xC } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xD } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xE } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0xF } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x2, 0x2, 0x10 } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x3, 0x2, 0xFF } delay 20 ms ;
FreeFormat { 0x7F, 0x6, 0xB5, 0xFF, 0x7F, 0x4, 0x2, 0xFF } delay 20 ms ;
}
User_serv {
ALM_Req_A delay 10.0000 ms ;
}
Pub_serv {
ALM_Status delay 20.0000 ms ;
}
RequestResponse {
ALM_Req_A delay 10 ms ;
ALM_Status delay 10 ms ;
}
CCO {
ALM_Req_A delay 10 ms ;
ALM_Status delay 10 ms ;
ConfigFrame delay 10 ms ;
ColorConfigFrameRed delay 10 ms ;
ColorConfigFrameGreen delay 10 ms ;
ColorConfigFrameBlue delay 10 ms ;
VF_Frame delay 10 ms ;
PWM_Frame delay 10 ms ;
Tj_Frame delay 10 ms ;
PWM_wo_Comp delay 10 ms ;
}
calib {
NVM_Debug delay 10 ms ;
}
}
Signal_encoding_types {
Red {
physical_value,0,255,1.0000,0.0000,"Red" ;
}
Green {
physical_value,0,255,1.0000,0.0000,"Green" ;
}
Blue {
physical_value,0,255,1.0000,0.0000,"Blue" ;
}
Intensity {
physical_value,0,255,1.0000,0.0000,"Intensity" ;
}
Update {
logical_value,0x00,"Immediate color Update" ;
logical_value,0x01,"Color memorization" ;
logical_value,0x02,"Apply memorized color" ;
logical_value,0x03,"Discard memorized color" ;
}
Mode {
logical_value,0x00,"Immediate Setpoint" ;
logical_value,0x01,"Fading effect 1 (color and intensity fade)" ;
logical_value,0x02,"Fading effect 2 (intensity fade only; color changes immediately)" ;
logical_value,0x03,"TBD" ;
logical_value,0x04,"TBD" ;
physical_value,5,63,1.0000,0.0000,"Not Used" ;
}
Duration {
physical_value,0,255,0.2000,0.0000,"s" ;
}
ModuleID {
physical_value,0,255,1.0000,0.0000,"ModuleID" ;
}
NVMStatus {
logical_value,0x00,"NVM OK" ;
logical_value,0x01,"NVM NOK" ;
logical_value,0x02,"Reserved" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
VoltageStatus {
logical_value,0x00,"Normal Voltage" ;
logical_value,0x01,"Power UnderVoltage" ;
logical_value,0x02,"Power OverVoltage" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
ThermalStatus {
logical_value,0x00,"Normal Temperature" ;
logical_value,0x01,"Thermal derating" ;
logical_value,0x02,"Thermal shutdown" ;
logical_value,0x03,"Reserved" ;
logical_value,0x04,"Reserved" ;
logical_value,0x05,"Reserved" ;
logical_value,0x06,"Reserved" ;
logical_value,0x07,"Reserved" ;
logical_value,0x08,"Reserved" ;
logical_value,0x09,"Reserved" ;
logical_value,0x0A,"Reserved" ;
logical_value,0x0B,"Reserved" ;
logical_value,0x0C,"Reserved" ;
logical_value,0x0D,"Reserved" ;
logical_value,0x0E,"Reserved" ;
logical_value,0x0F,"Reserved" ;
}
LED_State {
logical_value,0x00,"LED OFF" ;
logical_value,0x01,"LED ANIMATING" ;
logical_value,0x02,"LED ON" ;
logical_value,0x03,"Reserved" ;
}
NVM_Static_Valid_Encoding {
logical_value, 0, "NVM Corrupted/Zero" ;
logical_value, 42331, "NVM Valid (0xA55B)" ;
logical_value, 65535, "NVM Empty/Erased" ;
}
NVM_Static_Rev_Encoding {
logical_value, 0, "Invalid Revision" ;
logical_value, 1, "Revision 1 (Current)" ;
logical_value, 65535, "Not Programmed" ;
}
NVM_Calib_Version_Encoding {
physical_value, 0, 255, 1, 0, "Factory Calib Version (>=1 valid)" ;
}
NVM_OADCCAL_Encoding {
physical_value, 0, 255, 1, 0, "ADC Offset Cal (signed 8-bit)" ;
}
NVM_GainADCLowCal_Encoding {
physical_value, 0, 255, 1, 0, "ADC Gain Low Temp (signed 8-bit)" ;
}
NVM_GainADCHighCal_Encoding {
physical_value, 0, 255, 1, 0, "ADC Gain High Temp (signed 8-bit)" ;
}
}
Signal_representation {
Red:AmbLightColourRed;
Green:AmbLightColourGreen;
Blue:AmbLightColourBlue;
Intensity:AmbLightIntensity;
Update:AmbLightUpdate;
Mode:AmbLightMode;
Duration:AmbLightDuration;
ModuleID:AmbLightLIDFrom,AmbLightLIDTo;
NVMStatus:ALMNVMStatus;
LED_State:ALMLEDState;
NVM_Calib_Version_Encoding: NVM_Calib_Version ;
NVM_GainADCHighCal_Encoding: NVM_GainADCHighCal ;
NVM_GainADCLowCal_Encoding: NVM_GainADCLowCal ;
NVM_OADCCAL_Encoding: NVM_OADCCAL ;
NVM_Static_Rev_Encoding: NVM_Static_Rev ;
NVM_Static_Valid_Encoding: NVM_Static_Valid ;
}

Binary file not shown.

BIN
vendor/4SEVEN_color_lib_test.sdf vendored Normal file

Binary file not shown.

Binary file not shown.

View File

@ -1,95 +1,95 @@
"""Owon PSU quick demo (optimized to use ecu_framework.power.owon_psu).
This script reads configuration from OWON_PSU_CONFIG (YAML) or ./config/owon_psu.yaml,
prints discovered ports responding to *IDN?, then connects to the configured port
and performs a small sequence (IDN, optional V/I set, measure V/I); the output-toggle calls are present but commented out.
No CLI flags; edit YAML to change behavior.
"""
from __future__ import annotations
import os
import time
from pathlib import Path
import yaml
try:
from ecu_framework.power import OwonPSU, SerialParams, scan_ports
except ModuleNotFoundError:
# Ensure repository root is on sys.path when running this file directly
import sys
repo_root = Path(__file__).resolve().parents[2]
if str(repo_root) not in sys.path:
sys.path.insert(0, str(repo_root))
from ecu_framework.power import OwonPSU, SerialParams, scan_ports
def _load_yaml_config() -> dict:
# Honor OWON_PSU_CONFIG if set (as the docstring and the error message in run_demo state), else fall back to ./config/owon_psu.yaml
cfg_path = os.environ.get("OWON_PSU_CONFIG") or str(Path("config") / "owon_psu.yaml")
p = Path(cfg_path).resolve()
print("Using config path:", str(p))
if not p.is_file():
return {}
with p.open("r", encoding="utf-8") as f:
data = yaml.safe_load(f) or {}
return data if isinstance(data, dict) else {}
def run_demo() -> int:
cfg = _load_yaml_config()
if not cfg or "port" not in cfg:
print("Config not found or missing 'port'. Set OWON_PSU_CONFIG or create ./config/owon_psu.yaml")
return 2
print("Scanning ports (responding to *IDN?):")
for dev, idn in scan_ports(SerialParams(baudrate=int(cfg.get("baudrate", 115200)), timeout=float(cfg.get("timeout", 1.0)))):
print(f" {dev} -> {idn}")
# Serial params
baud = int(cfg.get("baudrate", 115200))
timeout = float(cfg.get("timeout", 1.0))
eol = cfg.get("eol", "\n")
from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD, STOPBITS_ONE, STOPBITS_TWO
parity = {"N": PARITY_NONE, "E": PARITY_EVEN, "O": PARITY_ODD}.get(str(cfg.get("parity", "N")).upper(), PARITY_NONE)
stopbits = {1: STOPBITS_ONE, 2: STOPBITS_TWO}.get(int(float(cfg.get("stopbits", 1))), STOPBITS_ONE)
xonxoff = bool(cfg.get("xonxoff", False))
rtscts = bool(cfg.get("rtscts", False))
dsrdtr = bool(cfg.get("dsrdtr", False))
ps = SerialParams(
baudrate=baud,
timeout=timeout,
parity=parity,
stopbits=stopbits,
xonxoff=xonxoff,
rtscts=rtscts,
dsrdtr=dsrdtr,
)
port = str(cfg["port"]).strip()
do_set = bool(cfg.get("do_set", False))
set_v = float(cfg.get("set_voltage", 1.0))
set_i = float(cfg.get("set_current", 0.1))
with OwonPSU(port, ps, eol=eol) as psu:
idn = psu.idn()
print(f"IDN: {idn}")
print(f"Output status: {psu.output_status()}")
if do_set:
# psu.set_output(True)
time.sleep(0.8)
psu.set_voltage(1, set_v)
psu.set_current(1, set_i)
time.sleep(0.75)
print(f"Measured V: {psu.measure_voltage()} V")
print(f"Measured I: {psu.measure_current()} A")
time.sleep(0.5)
# psu.set_output(False)
return 0
if __name__ == "__main__":
raise SystemExit(run_demo())
"""Owon PSU quick demo (optimized to use ecu_framework.power.owon_psu).
This script reads configuration from OWON_PSU_CONFIG (YAML) or ./config/owon_psu.yaml,
prints discovered ports responding to *IDN?, then connects to the configured port
and performs a small sequence (IDN, optional V/I set, toggle output, measure V/I).
No CLI flags; edit YAML to change behavior.
"""
from __future__ import annotations
import os
import time
from pathlib import Path
import yaml
try:
from ecu_framework.power import OwonPSU, SerialParams, scan_ports
except ModuleNotFoundError:
# Ensure repository root is on sys.path when running this file directly
import sys
repo_root = Path(__file__).resolve().parents[2]
if str(repo_root) not in sys.path:
sys.path.insert(0, str(repo_root))
from ecu_framework.power import OwonPSU, SerialParams, scan_ports
def _load_yaml_config() -> dict:
cfg_path = str(Path("config") / "owon_psu.yaml")
p = Path(cfg_path).resolve()
print("Using config path:", str(p))
if not p.is_file():
return {}
with p.open("r", encoding="utf-8") as f:
data = yaml.safe_load(f) or {}
return data if isinstance(data, dict) else {}
def run_demo() -> int:
cfg = _load_yaml_config()
if not cfg or "port" not in cfg:
print("Config not found or missing 'port'. Set OWON_PSU_CONFIG or create ./config/owon_psu.yaml")
return 2
print("Scanning ports (responding to *IDN?):")
for dev, idn in scan_ports(SerialParams(baudrate=int(cfg.get("baudrate", 115200)), timeout=float(cfg.get("timeout", 1.0)))):
print(f" {dev} -> {idn}")
# Serial params
baud = int(cfg.get("baudrate", 115200))
timeout = float(cfg.get("timeout", 1.0))
eol = cfg.get("eol", "\n")
from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD, STOPBITS_ONE, STOPBITS_TWO
parity = {"N": PARITY_NONE, "E": PARITY_EVEN, "O": PARITY_ODD}.get(str(cfg.get("parity", "N")).upper(), PARITY_NONE)
stopbits = {1: STOPBITS_ONE, 2: STOPBITS_TWO}.get(int(float(cfg.get("stopbits", 1))), STOPBITS_ONE)
xonxoff = bool(cfg.get("xonxoff", False))
rtscts = bool(cfg.get("rtscts", False))
dsrdtr = bool(cfg.get("dsrdtr", False))
ps = SerialParams(
baudrate=baud,
timeout=timeout,
parity=parity,
stopbits=stopbits,
xonxoff=xonxoff,
rtscts=rtscts,
dsrdtr=dsrdtr,
)
port = str(cfg["port"]).strip()
do_set = bool(cfg.get("do_set", False))
set_v = float(cfg.get("set_voltage", 1.0))
set_i = float(cfg.get("set_current", 0.1))
with OwonPSU(port, ps, eol=eol) as psu:
idn = psu.idn()
print(f"IDN: {idn}")
print(f"Output status: {psu.output_status()}")
if do_set:
# psu.set_output(True)
time.sleep(0.8)
psu.set_voltage(1, set_v)
psu.set_current(1, set_i)
time.sleep(0.75)
print(f"Measured V: {psu.measure_voltage()} V")
print(f"Measured I: {psu.measure_current()} A")
time.sleep(0.5)
# psu.set_output(False)
return 0
if __name__ == "__main__":
raise SystemExit(run_demo())

118
vendor/README.md vendored
View File

@ -1,59 +1,59 @@
# BabyLIN SDK placement
Place the SDK's Python wrapper and platform-specific libraries here so the test framework can import and use them.
## Required files
- BabyLIN_library.py
- BabyLIN library/ (directory provided by the SDK containing platform-specific binaries)
- Windows: `BabyLIN library/Windows/x64/*.dll`
- Linux x86_64: `BabyLIN library/Linux/x86_64/*.so`
- Raspberry Pi (ARM): `BabyLIN library/Linux/armv7/*.so` (or as provided by your SDK)
- Optional: Example SDF file (e.g., `Example.sdf`)
Folder structure example:
```
vendor/
├─ BabyLIN_library.py
├─ Example.sdf
└─ BabyLIN library/
├─ Windows/
│ └─ x64/
│ ├─ BabyLIN.dll
│ ├─ BabyLIN_FTDI.dll
│ └─ ... (other DLLs from SDK)
├─ Linux/
│ ├─ x86_64/
│ │ └─ libBabyLIN.so
│ └─ armv7/
│ └─ libBabyLIN.so
└─ ...
```
Notes:
- Keep the directory names and casing exactly as the SDK expects (often referenced in `BabyLIN_library.py`).
- Ensure your Python environment architecture matches the binaries (e.g., 64-bit Python with 64-bit DLLs).
- On Linux/RPi, you may need to set `LD_LIBRARY_PATH` to include the directory with the shared libraries.
## Configuration
Point your config to the SDF and schedule:
```yaml
interface:
type: babylin
channel: 0
sdf_path: ./vendor/Example.sdf
schedule_nr: 0
```
## Troubleshooting
- ImportError: BabyLIN_library not found
- Ensure `vendor/BabyLIN_library.py` exists or add the vendor folder to `PYTHONPATH`.
- DLL/SO not found
- On Windows, ensure the DLLs are in PATH or next to `BabyLIN_library.py` per SDK instructions.
- On Linux/RPi, export `LD_LIBRARY_PATH` to the folder with the `.so` files.
- Device not found
- Check USB connection, drivers, and that no other tool holds the device open.

321
vendor/automated_lin_test/README.md vendored Normal file
View File

@ -0,0 +1,321 @@
# LIN Automated Test Scripts
Automated test scripts for LIN bus communication and auto-addressing functionality using the Melexis Universal Master (MUM) hardware.
## Purpose
This folder contains Python scripts to automate LIN bus testing without requiring manual tool switching between MUM and babylin. The scripts provide:
- **LIN Auto-Addressing Test**: Automated BSM-SNPD (Bus Shunt Method - Slave Node Position Detection) auto-addressing
- **LED Control Test**: Verify LIN communication by controlling the board LED
- **Power Cycle Utility**: Power cycle the ECU through MUM
- **Dependency Installation**: Automated setup of required Python packages
## Hardware Setup
### Required Hardware
1. **Melexis Universal Master (MUM)**
- BeagleBone-based LIN master device
- Default IP: 192.168.7.2
- LIN interface: lin0
- Power control: power_out0
2. **ALM Platform MLX81124 Board**
- Target ECU with LIN auto-addressing support
- RGB LED for visual feedback
### Hardware Connections
```
┌─────────────────┐ ┌──────────────────┐
│ MUM │ │ ALM Platform │
│ (192.168.7.2) │ │ MLX81124 │
├─────────────────┤ ├──────────────────┤
│ │ │ │
│ LIN (lin0) ├────────────────────┤ LIN │
│ │ │ │
│ Power ├────────────────────┤ VCC/GND │
│ (power_out0) │ │ │
│ │ │ RGB LED │
└─────────────────┘ └──────────────────┘
```
### Connection Details
1. **LIN Bus**: Connect MUM LIN0 to ALM Platform LIN pin
2. **Power**: Connect MUM power_out0 to ALM Platform power (controlled by scripts)
3. **Ground**: Common ground between MUM and ALM Platform
## Files
### Scripts
- **`test_auto_addressing.py`** - Main auto-addressing test
- **`test_led_control.py`** - LED control verification test
- **`power_cycle.py`** - ECU power cycle utility
- **`install_packages.sh`** - Dependency installer
### Configuration
- **`config.py`** - Hardware and protocol configuration
- MUM connection settings
- LIN bus parameters
- BSM-SNPD protocol constants
- Test defaults
## Dependencies
### Python Packages
The scripts require these Python packages:
- `pylin` - LIN bus communication library
- `pymumclient` - Melexis Universal Master client library
### Installation
Run the installer script to set up dependencies:
```bash
./install_packages.sh
```
Or manually install:
```bash
pip3 install pylin pymumclient
```
## Usage
### 1. Auto-Addressing Test
Tests LIN auto-addressing using the BSM-SNPD protocol. Automatically selects a target NAD different from the current NAD.
**Basic usage:**
```bash
python3 test_auto_addressing.py
```
**With options:**
```bash
python3 test_auto_addressing.py --iterations 1 --check-interval 1
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--iterations` - Number of auto-addressing iterations (default: 1)
- `--check-interval` - Check status every N iterations (0 = only at end)
**What it does:**
1. Connects to MUM
2. Reads current NAD from ECU
3. Selects target NAD (automatically different from current)
4. Sends BSM-SNPD sequence:
- INIT (0x01) - Initialize auto-addressing
- ASSIGN (0x02) - Assign NAD (16 frames)
- STORE (0x03) - Store to NVM
- FINALIZE (0x04) - Exit auto-addressing mode
5. Polls status frames between iterations
6. Verifies NAD change
**Expected output:**
```
Initial NAD: 0x07
Target NAD: 0x01
SUCCESS! NAD changed from 0x07 to 0x01
```
### 2. LED Control Test
Verifies LIN communication by controlling the RGB LED through color fades.
**Basic usage:**
```bash
python3 test_led_control.py
```
**With options:**
```bash
python3 test_led_control.py --nad 0x02 --cycles 3 --duration 3.0
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--nad` - Node address to control (default: 0x01)
- `--cycles` - Number of fade cycles (default: 3)
- `--duration` - Duration per color in seconds (default: 3.0)
**What it does:**
1. Connects to MUM
2. Reads current NAD from ECU
3. Fades LED through Red → Green → Blue
4. Each color fades in and out smoothly
**Expected output:**
```
Current NAD: 0x02
Fading Red...
Fading Green...
Fading Blue...
LED test complete
```
### 3. Power Cycle Utility
Power cycles the ECU through MUM power control.
**Basic usage:**
```bash
python3 power_cycle.py
```
**With options:**
```bash
python3 power_cycle.py --wait 3.0
```
**Parameters:**
- `--host` - MUM IP address (default: 192.168.7.2)
- `--wait` - Wait time after power down/up in seconds (default: 2.0)
**What it does:**
1. Powers down ECU
2. Waits specified duration
3. Powers up ECU
4. Waits for ECU to boot
## Configuration
All hardware-specific settings are centralized in [`config.py`](config.py). Edit this file to match your setup:
### Common Settings to Modify
```python
# MUM Configuration
MUM_HOST = '192.168.7.2' # Change if MUM has different IP
# LIN Bus Configuration
LIN_BAUDRATE = 19200 # Change if using different baudrate
# Test Parameters
AUTOADDRESSING_DEFAULT_ITERATIONS = 1 # Default test iterations
LED_DEFAULT_NAD = 0x01 # Default NAD for LED test
```
## Firmware Requirements
The firmware must have auto-addressing enabled with twist detection disabled for single-node MUM testing:
**File:** `02-Software/02-Source-Code/code/src/03-HAL/LAA/cfg/HAL_LAA_cfg.h`
```c
#define HAL_LAA_LINAATWISTDETECTDISABLE (1u)
```
This allows the `LASTSLAVE` flag to be set directly without requiring multi-node hardware setup.
## Troubleshooting
### MUM Connection Issues
**Problem:** Cannot connect to MUM
```
Error: Connection to 192.168.7.2 failed
```
**Solution:**
1. Check MUM is powered and connected via USB
2. Verify IP address with `ip addr show` or `ifconfig`
3. Ping MUM: `ping 192.168.7.2`
4. Check USB connection is recognized: `lsusb`
### No Response from ECU
**Problem:** ECU not responding to LIN frames
```
Error: S2M frame receiving failed with error code: 3 - Rx timeout error
```
**Solution:**
1. Check LIN bus connections
2. Verify ECU is powered (use power_cycle.py)
3. Check baudrate matches (19200)
4. Verify NAD is correct
### NAD Not Changing
**Problem:** Auto-addressing completes but NAD doesn't change
**Solution:**
1. Verify firmware has `HAL_LAA_LINAATWISTDETECTDISABLE = 1`
2. Rebuild and flash firmware
3. Check initial NAD is in valid range (0x01-0x10)
4. Run test with `--check-interval 1` to see intermediate status
### LED Not Changing
**Problem:** LED control test doesn't change LED color
**Solution:**
1. Verify NAD parameter matches ECU NAD
2. Check that the `ALM_Req_A` frame ID is 0x0A in the LDF
3. Run auto-addressing test first to verify communication
4. Check LED connections on hardware
## Integration with Build/Flash Pipeline
These tests integrate with the automated firmware development pipeline:
```bash
# 1. Modify firmware
vim 02-Software/02-Source-Code/code/src/...
# 2. Build
./00-Tools/migrate_mlx_tools_linux/build_linux.sh
# 3. Flash
./00-Tools/migrate_mlx_tools_linux/flash_linux.sh
# 4. Test auto-addressing
python3 00-Tools/automated_lin_test/test_auto_addressing.py
# 5. Verify LED control
python3 00-Tools/automated_lin_test/test_led_control.py
```
## Technical Details
### LIN Frame IDs
- `0x3C` - MasterReq (diagnostic frames)
- `0x11` - ALM_Status (4 bytes, contains NAD in byte 0; read back as sketched below)
- `0x0A` - ALM_Req_A (8 bytes, LED control)
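A minimal sketch of reading the NAD back over ALM_Status (assumes `lin_dev` is a connected pylin `LinDevice22`, wired to the MUM lin0 master as in the test scripts):
```python
# Poll ALM_Status (slave-to-master); byte 0 of the response is the current NAD.
response = lin_dev.send_message(master_to_slave=False, frame_id=0x11,
                                data_length=4, data=None)
if response:
    print(f"Current NAD: 0x{response[0]:02X}")
```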
### BSM-SNPD Protocol
Auto-addressing uses diagnostic service 0xB5 with subfunctions:
- `0x01` - INIT: Enable auto-addressing mode
- `0x02` - ASSIGN: Assign NAD to node
- `0x03` - STORE: Save NAD to NVM
- `0x04` - FINALIZE: Exit auto-addressing mode
Frame structure:
```
Byte 0: NAD = 0x7F (broadcast)
Byte 1: PCI = 0x06 (6 data bytes)
Byte 2: SID = 0xB5 (BSM-SNPD service)
Byte 3: Supplier ID LSB = 0xFF
Byte 4: Supplier ID MSB = 0x7F
Byte 5: Subfunction
Byte 6: Parameter 1
Byte 7: Parameter 2
```
### Checksum Requirements
**Critical:** BSM frames must use **LIN 1.x Classic checksum**. The scripts use `ld_put_raw()` to ensure Classic checksum. Using `send_message()` with Enhanced checksum will cause frames to be rejected by firmware.
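For illustration, a minimal sketch of sending the INIT frame over the Classic-checksum path. The transport handle and `ld_put_raw()` call mirror what these scripts and the framework's MUM adapter use, so treat it as a starting point rather than a drop-in snippet:
```python
import time
from config import (BSM_NAD_BROADCAST, BSM_PCI, BSM_SID, BSM_SUPPLIER_ID_LSB,
                    BSM_SUPPLIER_ID_MSB, BSM_SUBF_INIT, BSM_INIT_DELAY, LIN_BAUDRATE)

def send_bsm_init(lin_dev):
    # Byte layout follows the table above; 0x02/0xFF are the INIT parameters
    # used by the LIN_AA schedule in 4SEVEN_color_lib_test.ldf.
    frame = [BSM_NAD_BROADCAST, BSM_PCI, BSM_SID,
             BSM_SUPPLIER_ID_LSB, BSM_SUPPLIER_ID_MSB,
             BSM_SUBF_INIT, 0x02, 0xFF]
    transport = lin_dev.get_device('bus/transport_layer')  # raw path, Classic checksum
    transport.ld_put_raw(frame, LIN_BAUDRATE)
    time.sleep(BSM_INIT_DELAY)  # 50 ms settle after INIT
```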
## License
Part of the ALM Platform MLX81124 project.

Binary file not shown.

190
vendor/automated_lin_test/config.py vendored Normal file
View File

@ -0,0 +1,190 @@
#!/usr/bin/env python3
"""
Configuration file for LIN automated test scripts
This file contains all hardware-specific settings and tool dependencies.
Modify these values to match your test setup.
"""
# ============================================================================
# Hardware Configuration
# ============================================================================
# MUM (Melexis Universal Master) Configuration
MUM_HOST = '192.168.7.2' # Default MUM IP address on BeagleBone
MUM_LIN_DEVICE = 'lin0' # LIN interface name on MUM
MUM_POWER_DEVICE = 'power_out0' # Power control device name
# LIN Bus Configuration
LIN_BAUDRATE = 19200 # LIN bus baudrate in bps
# Valid NAD range for auto-addressing
VALID_NAD_RANGE = range(0x01, 0x11) # NADs 0x01 through 0x10
# ============================================================================
# External Tool Dependencies
# ============================================================================
# Python packages required (install with: pip3 install <package>)
REQUIRED_PACKAGES = [
'pylin', # LIN bus communication library
'pymumclient', # Melexis Universal Master client library
]
# ============================================================================
# Test Parameters
# ============================================================================
# Auto-addressing test defaults
AUTOADDRESSING_DEFAULT_ITERATIONS = 1 # Number of BSM iterations
AUTOADDRESSING_POLL_DURATION = 2.0 # Status polling duration between iterations (seconds)
AUTOADDRESSING_STATUS_POLL_INTERVAL = 0.020 # Status frame poll interval (20ms)
# LED control test defaults
LED_DEFAULT_NAD = 0x01 # Default NAD for LED control test
# Power cycle defaults
POWER_CYCLE_WAIT_TIME = 2.0 # Wait time after power down/up (seconds)
# ============================================================================
# Frame IDs (from 4SEVEN_color_lib_test.ldf)
# ============================================================================
LIN_FRAME_ID_MASTERREQ = 0x3C # Diagnostic master request frame
LIN_FRAME_ID_ALM_STATUS = 0x11 # ALM_Status (slave-to-master, 4 bytes)
LIN_FRAME_ID_ALM_REQ_A = 0x0A # ALM_Req_A (master-to-slave, 8 bytes, LED control)
LIN_FRAME_ID_CONFIG_FRAME = 0x06 # ConfigFrame (master-to-slave, 3 bytes)
LIN_FRAME_ID_VF_FRAME = 0x13 # VF_Frame (slave-to-master, 8 bytes, LED forward voltages + VLED)
LIN_FRAME_ID_PWM_WO_COMP = 0x15 # PWM_wo_Comp (slave-to-master, 8 bytes, PWM values + VS)
# ============================================================================
# Frame Definitions (from 4SEVEN_color_lib_test.ldf)
# ============================================================================
# Each entry mirrors the LDF Frames section. The signal tuple is:
# 'SignalName': (start_bit, width_in_bits)
# where start_bit comes from the LDF Frames block and width comes from
# the LDF Signals section. To update after an LDF change, copy the new
# Frames entry here and adjust widths from the Signals section.
#
# NAD selection for ALM_Req_A:
# node responds if AmbLightLIDFrom <= ALMNadNo <= AmbLightLIDTo
# single node -> set both to the target NAD
# broadcast -> AmbLightLIDFrom=0x01, AmbLightLIDTo=0xFF
# ALM_Req_A: 0x0A, Master_Node, 8
ALM_REQ_A_FRAME = {
'frame_id': LIN_FRAME_ID_ALM_REQ_A,
'length': 8,
'signals': {
'AmbLightColourRed': (0, 8), # AmbLightColourRed, 0;
'AmbLightColourGreen': (8, 8), # AmbLightColourGreen, 8;
'AmbLightColourBlue': (16, 8), # AmbLightColourBlue, 16;
'AmbLightIntensity': (24, 8), # AmbLightIntensity, 24;
'AmbLightUpdate': (32, 2), # AmbLightUpdate, 32;
'AmbLightMode': (34, 6), # AmbLightMode, 34;
'AmbLightDuration': (40, 8), # AmbLightDuration, 40;
'AmbLightLIDFrom': (48, 8), # AmbLightLIDFrom, 48;
'AmbLightLIDTo': (56, 8), # AmbLightLIDTo, 56;
},
}
# ALM_Status: 0x11, ALM_Node, 4
ALM_STATUS_FRAME = {
'frame_id': LIN_FRAME_ID_ALM_STATUS,
'length': 4,
'signals': {
'ALMNadNo': (0, 8), # ALMNadNo, 0;
'ALMVoltageStatus': (8, 4), # ALMVoltageStatus, 8;
'ALMThermalStatus': (12, 4), # ALMThermalStatus, 12;
'ALMNVMStatus': (16, 4), # ALMNVMStatus, 16;
'ALMLEDState': (20, 2), # ALMLEDState, 20;
'SigCommErr': (24, 1), # SigCommErr, 24;
},
}
# ConfigFrame: 6, Master_Node, 3
CONFIG_FRAME = {
'frame_id': LIN_FRAME_ID_CONFIG_FRAME,
'length': 3,
'signals': {
'ConfigFrame_Calibration': (0, 1), # ConfigFrame_Calibration, 0;
'ConfigFrame_EnableDerating': (1, 1), # ConfigFrame_EnableDerating, 1;
'ConfigFrame_EnableCompensation': (2, 1), # ConfigFrame_EnableCompensation, 2;
'ConfigFrame_MaxLM': (3, 16), # ConfigFrame_MaxLM, 3;
},
}
# VF_Frame: 19 (0x13), ALM_Node, 8
VF_FRAME = {
'frame_id': LIN_FRAME_ID_VF_FRAME,
'length': 8,
'signals': {
'VF_Frame_Red_VF': (0, 16), # VF_Frame_Red_VF, 0;
'VF_Frame_Green_VF': (16, 16), # VF_Frame_Green_VF, 16;
'VF_Frame_Blue1_VF': (32, 16), # VF_Frame_Blue1_VF, 32;
'VF_Frame_VLED': (48, 16), # VF_Frame_VLED, 48;
},
}
# PWM_wo_Comp: 21 (0x15), ALM_Node, 8
PWM_WO_COMP_FRAME = {
'frame_id': LIN_FRAME_ID_PWM_WO_COMP,
'length': 8,
'signals': {
'PWM_wo_Comp_Red': (0, 16), # PWM_wo_Comp_Red, 0;
'PWM_wo_Comp_Green': (16, 16), # PWM_wo_Comp_Green, 16;
'PWM_wo_Comp_Blue': (32, 16), # PWM_wo_Comp_Blue, 32;
'VF_Frame_VS': (48, 16), # VF_Frame_VS, 48;
},
}
def pack_frame(frame_def, **signals):
"""Pack signal values into a byte list using a frame definition.
Unlisted signals default to 0. Bit ordering follows the LDF/LIN
convention: bit 0 of the signal sits at start_bit in the frame,
packed little-endian within each byte.
"""
data = bytearray(frame_def['length'])
for name, value in signals.items():
start_bit, width = frame_def['signals'][name]
value = int(value) & ((1 << width) - 1)
for i in range(width):
bit_pos = start_bit + i
if value & (1 << i):
data[bit_pos // 8] |= 1 << (bit_pos % 8)
return list(data)
def unpack_frame(frame_def, data):
"""Unpack a received byte sequence into a dict of signal values."""
result = {}
for name, (start_bit, width) in frame_def['signals'].items():
value = 0
for i in range(width):
bit_pos = start_bit + i
if data[bit_pos // 8] & (1 << (bit_pos % 8)):
value |= 1 << i
result[name] = value
return result
# ============================================================================
# BSM-SNPD Protocol Constants
# ============================================================================
BSM_NAD_BROADCAST = 0x7F # Broadcast NAD for BSM frames
BSM_PCI = 0x06 # Protocol Control Information (6 data bytes)
BSM_SID = 0xB5 # Service ID for BSM-SNPD
BSM_SUPPLIER_ID_LSB = 0xFF # Supplier ID LSB (broadcast)
BSM_SUPPLIER_ID_MSB = 0x7F # Supplier ID MSB (broadcast)
# BSM Subfunctions
BSM_SUBF_INIT = 0x01 # Initialize auto-addressing
BSM_SUBF_ASSIGN = 0x02 # Assign NAD
BSM_SUBF_STORE = 0x03 # Store to NVM
BSM_SUBF_FINALIZE = 0x04 # Finalize auto-addressing
# Timing parameters (matching babylin behavior)
BSM_INIT_DELAY = 0.050 # Delay after INIT subfunction (50ms)
BSM_FRAME_DELAY = 0.020 # Delay between frames (20ms)
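# ============================================================================
# Usage demo (sketch) for pack_frame()/unpack_frame() defined above
# ============================================================================
# Run this module directly to see it; importing config.py is unaffected.
if __name__ == "__main__":
    # Solid red at full intensity for the node at NAD 0x01
    # (LIDFrom == LIDTo targets a single node, per the NAD selection note above).
    demo_tx = pack_frame(ALM_REQ_A_FRAME,
                         AmbLightColourRed=255, AmbLightIntensity=255,
                         AmbLightLIDFrom=0x01, AmbLightLIDTo=0x01)
    # Decode a hypothetical ALM_Status response whose byte 0 carries the NAD.
    demo_rx = unpack_frame(ALM_STATUS_FRAME, [0x01, 0x00, 0x00, 0x00])
    print("ALM_Req_A bytes:", [f"0x{b:02X}" for b in demo_tx])
    print("Decoded ALM_Status:", demo_rx)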

Binary file not shown.

View File

@ -0,0 +1,71 @@
#!/bin/bash
# Install Melexis Python packages to system Python
echo "Installing Melexis LIN packages to system Python..."
MELEXIS_SITE_PACKAGES="/mnt/WINDRV/InstalledPrograms/Melexis IDE/plugins/com.melexis.mlxide.python_1.2.0.202408130945/python/Lib/site-packages"
# Try to install from Melexis packages
if [ -d "$MELEXIS_SITE_PACKAGES" ]; then
echo "Found Melexis packages at: $MELEXIS_SITE_PACKAGES"
# Copy packages to system site-packages
SYSTEM_SITE_PACKAGES=$(python3 -c "import site; print(site.getsitepackages()[0])" 2>/dev/null)
if [ -z "$SYSTEM_SITE_PACKAGES" ]; then
echo "Error: Could not determine system site-packages directory"
exit 1
fi
echo "System site-packages: $SYSTEM_SITE_PACKAGES"
# Check if we have write permissions
if [ ! -w "$SYSTEM_SITE_PACKAGES" ]; then
echo "Note: You may need sudo to install packages system-wide"
SUDO="sudo"
else
SUDO=""
fi
# Copy packages
echo "Copying pylin..."
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylin" "$SYSTEM_SITE_PACKAGES/"
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylin-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
echo "Copying pylinframe..."
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylinframe" "$SYSTEM_SITE_PACKAGES/"
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pylinframe-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
echo "Copying pymumclient..."
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pymumclient" "$SYSTEM_SITE_PACKAGES/"
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/pymumclient-"*".dist-info" "$SYSTEM_SITE_PACKAGES/"
# Copy all dependencies
echo "Copying all Melexis dependencies..."
for pkg_dir in "$MELEXIS_SITE_PACKAGES"/*; do
pkg=$(basename "$pkg_dir")
# Skip dist-info directories and __pycache__
if [[ "$pkg" == *".dist-info" ]] || [[ "$pkg" == "__pycache__" ]]; then
continue
fi
# Only copy directories (packages)
if [ -d "$pkg_dir" ]; then
echo " - $pkg"
$SUDO cp -r "$pkg_dir" "$SYSTEM_SITE_PACKAGES/"
# Copy corresponding .dist-info if exists
$SUDO cp -r "$MELEXIS_SITE_PACKAGES/${pkg}-"*".dist-info" "$SYSTEM_SITE_PACKAGES/" 2>/dev/null || true
fi
done
echo ""
echo "Installation complete!"
echo ""
echo "Verifying installation..."
python3 -c "import pylin; import pymumclient; print('✓ Packages imported successfully')" && echo "Success!" || echo "Failed - some packages missing"
else
echo "Error: Melexis packages not found"
exit 1
fi

Binary file not shown.

View File

@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""
Power cycle the ECU via MUM
"""
import argparse
import time
from pymumclient import MelexisUniversalMaster
from config import MUM_HOST, MUM_POWER_DEVICE, POWER_CYCLE_WAIT_TIME
def main():
parser = argparse.ArgumentParser(description='Power cycle ECU via MUM')
parser.add_argument('--host', default=MUM_HOST,
help=f'MUM IP address (default: {MUM_HOST})')
parser.add_argument('--wait', type=float, default=POWER_CYCLE_WAIT_TIME,
help=f'Wait time in seconds (default: {POWER_CYCLE_WAIT_TIME})')
args = parser.parse_args()
print(f"Connecting to MUM at {args.host}...")
mum = MelexisUniversalMaster()
mum.open_all(args.host)
power_control = mum.get_device(MUM_POWER_DEVICE)
print("Powering down ECU...")
power_control.power_down()
print(f"Waiting {args.wait} seconds...")
time.sleep(args.wait)
print("Powering up ECU...")
power_control.power_up()
print(f"Waiting {args.wait} seconds for ECU to boot...")
time.sleep(args.wait)
print("Power cycle complete!")
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,493 @@
#!/usr/bin/env python3
"""
LIN ADC Measurement Verification Test
This test reads ADC measurement values from the ECU over LIN and verifies
they are within expected ranges across multiple LED states.
Test cases:
1. All LEDs off
2. Only Red on (color=255, intensity=255)
3. Only Green on (color=255, intensity=255)
4. Only Blue on (color=255, intensity=255)
5. All LEDs on (color=255, intensity=255)
Verified signals:
- VF_Frame_VS: Supply voltage (expected ~12V = ~12000 mV)
- VF_Frame_VLED: DC-DC converter output voltage feeding LEDs (expected ~5V = ~5000 mV)
- VF_Frame_Red_VF: Red LED forward voltage (0 when off, ~1500-3500 mV when on)
- VF_Frame_Green_VF: Green LED forward voltage (0 when off, ~1500-3500 mV when on)
- VF_Frame_Blue1_VF: Blue LED forward voltage (0 when off, ~1500-3500 mV when on)
Frame structures:
ALM_Req_A (ID=0x0A, master-to-slave, 8 bytes):
- Byte 0: AmbLightColourRed (0-255)
- Byte 1: AmbLightColourGreen (0-255)
- Byte 2: AmbLightColourBlue (0-255)
- Byte 3: AmbLightIntensity (0-255)
- Byte 4: AmbLightUpdate[1:0] | (AmbLightMode[5:0] << 2)
- Byte 5: AmbLightDuration (0-255)
- Byte 6: AmbLightLIDFrom (NAD range start set equal to LIDTo to target one node)
- Byte 7: AmbLightLIDTo (NAD range end)
PWM_wo_Comp (ID=0x15, slave-to-master, 8 bytes):
- Byte 0-1: PWM_wo_Comp_Red (16-bit, little-endian)
- Byte 2-3: PWM_wo_Comp_Green (16-bit, little-endian)
- Byte 4-5: PWM_wo_Comp_Blue (16-bit, little-endian)
- Byte 6-7: VF_Frame_VS (16-bit, little-endian, value in mV)
VF_Frame (ID=0x13, slave-to-master, 8 bytes):
- Byte 0-1: VF_Frame_Red_VF (16-bit, little-endian, value in mV)
- Byte 2-3: VF_Frame_Green_VF (16-bit, little-endian, value in mV)
- Byte 4-5: VF_Frame_Blue1_VF (16-bit, little-endian, value in mV)
- Byte 6-7: VF_Frame_VLED (16-bit, little-endian, value in mV)
"""
import argparse
import logging
import time
import sys
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
level=logging.INFO,
format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
# ADC measurement expected ranges (in mV)
VS_EXPECTED_MIN_MV = 10000 # 10.0V minimum
VS_EXPECTED_MAX_MV = 14000 # 14.0V maximum
VS_EXPECTED_NOMINAL_MV = 12000 # 12.0V nominal
VLED_EXPECTED_MIN_MV = 4000 # 4.0V minimum
VLED_EXPECTED_MAX_MV = 6000 # 6.0V maximum
VLED_EXPECTED_NOMINAL_MV = 5000 # 5.0V nominal
# LED forward voltage ranges when LEDs are off
LED_VF_OFF_MIN_MV = 0 # 0V minimum (off)
LED_VF_OFF_MAX_MV = 500 # 0.5V maximum (off, allowing some noise)
# LED forward voltage ranges when LEDs are on
LED_VF_ON_MIN_MV = 1500 # 1.5V minimum (on)
LED_VF_ON_MAX_MV = 3500 # 3.5V maximum (on)
# Settle time after changing LED state (seconds)
LED_SETTLE_TIME = 1.0
def read_alm_status(lin_dev):
"""Read ALM_Status frame and return (ALMNadNo, raw_bytes)."""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=ALM_STATUS_FRAME['frame_id'],
data_length=ALM_STATUS_FRAME['length'],
data=None,
)
if response and len(response) >= ALM_STATUS_FRAME['length']:
parsed = unpack_frame(ALM_STATUS_FRAME, response)
return parsed['ALMNadNo'], response
return None, None
except Exception as e:
logger.error(f"Failed to read ALM_Status: {e}")
return None, None
def send_config_frame(lin_dev, calibration=0, enable_derating=1,
enable_compensation=1, max_lm=3840):
"""Send ConfigFrame to configure calibration, derating and compensation."""
data = pack_frame(CONFIG_FRAME,
ConfigFrame_Calibration=calibration,
ConfigFrame_EnableDerating=enable_derating,
ConfigFrame_EnableCompensation=enable_compensation,
ConfigFrame_MaxLM=max_lm,
)
lin_dev.send_message(
master_to_slave=True,
frame_id=CONFIG_FRAME['frame_id'],
data_length=CONFIG_FRAME['length'],
data=data,
)
def set_led_color(lin_dev, nad, red, green, blue, intensity):
"""Set LED color and intensity via ALM_Req_A frame."""
data = pack_frame(ALM_REQ_A_FRAME,
AmbLightColourRed=red,
AmbLightColourGreen=green,
AmbLightColourBlue=blue,
AmbLightIntensity=intensity,
AmbLightLIDFrom=nad,
AmbLightLIDTo=nad,
)
lin_dev.send_message(
master_to_slave=True,
frame_id=ALM_REQ_A_FRAME['frame_id'],
data_length=ALM_REQ_A_FRAME['length'],
data=data,
)
def read_pwm_wo_comp_frame(lin_dev):
"""
Read PWM_wo_Comp frame from slave.
Returns:
tuple: (raw_bytes, parsed_dict) or (None, None) on failure.
parsed_dict keys: pwm_red, pwm_green, pwm_blue, vs_mv
"""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=PWM_WO_COMP_FRAME['frame_id'],
data_length=PWM_WO_COMP_FRAME['length'],
data=None,
)
if response and len(response) >= PWM_WO_COMP_FRAME['length']:
s = unpack_frame(PWM_WO_COMP_FRAME, response)
return response, {
'pwm_red': s['PWM_wo_Comp_Red'],
'pwm_green': s['PWM_wo_Comp_Green'],
'pwm_blue': s['PWM_wo_Comp_Blue'],
'vs_mv': s['VF_Frame_VS'],
}
return None, None
except Exception as e:
logger.error(f"Failed to read PWM_wo_Comp frame: {e}")
return None, None
def read_vf_frame(lin_dev):
"""
Read VF_Frame from slave.
Returns:
tuple: (raw_bytes, parsed_dict) or (None, None) on failure.
parsed_dict keys: red_vf_mv, green_vf_mv, blue_vf_mv, vled_mv
"""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=VF_FRAME['frame_id'],
data_length=VF_FRAME['length'],
data=None,
)
if response and len(response) >= VF_FRAME['length']:
s = unpack_frame(VF_FRAME, response)
return response, {
'red_vf_mv': s['VF_Frame_Red_VF'],
'green_vf_mv': s['VF_Frame_Green_VF'],
'blue_vf_mv': s['VF_Frame_Blue1_VF'],
'vled_mv': s['VF_Frame_VLED'],
}
return None, None
except Exception as e:
logger.error(f"Failed to read VF_Frame: {e}")
return None, None
def sample_signal(lin_dev, signal_name, read_func, signal_key,
expected_min, expected_max, num_samples=5, sample_interval=0.1):
"""
Read a signal multiple times and verify it is within expected range.
Args:
lin_dev: LinDevice22 instance
signal_name: Display name for the signal
read_func: Function to call to read the frame (returns raw, parsed)
signal_key: Key in parsed dict to extract the signal value
expected_min: Minimum expected value in mV
expected_max: Maximum expected value in mV
num_samples: Number of samples to read
sample_interval: Delay between samples in seconds
Returns:
tuple: (passed, avg_voltage_mv, samples)
"""
samples = []
passed = True
for i in range(num_samples):
raw, parsed = read_func(lin_dev)
if parsed is None:
logger.warning(f" Sample {i+1}/{num_samples}: No response")
continue
value_mv = parsed[signal_key]
samples.append(value_mv)
in_range = expected_min <= value_mv <= expected_max
status = "OK" if in_range else "FAIL"
logger.info(f" Sample {i+1}/{num_samples}: {signal_name} = {value_mv} mV ({value_mv/1000:.2f} V) [{status}]")
if not in_range:
passed = False
if i < num_samples - 1:
time.sleep(sample_interval)
if len(samples) == 0:
logger.error(f" No valid samples received for {signal_name}")
return False, 0, samples
avg_mv = sum(samples) / len(samples)
return passed, avg_mv, samples
def log_signal_summary(signal_name, passed, avg_mv, samples):
"""Log summary statistics for a verified signal."""
if len(samples) > 0:
s_min = min(samples)
s_max = max(samples)
logger.info(f" Average: {avg_mv:.0f} mV ({avg_mv/1000:.2f} V)")
logger.info(f" Min: {s_min} mV ({s_min/1000:.2f} V)")
logger.info(f" Max: {s_max} mV ({s_max/1000:.2f} V)")
logger.info(f" Result: {'PASS' if passed else 'FAIL'}")
else:
logger.error(f" Result: FAIL (no data)")
def verify_adc_signals(lin_dev, num_samples, sample_interval,
expected_red_vf, expected_green_vf, expected_blue_vf):
"""
Verify all ADC signals (VS, VLED, Red_VF, Green_VF, Blue_VF) for the current LED state.
Args:
lin_dev: LinDevice22 instance
num_samples: Number of samples per signal
sample_interval: Delay between samples in seconds
expected_red_vf: Tuple (min_mv, max_mv) for Red forward voltage
expected_green_vf: Tuple (min_mv, max_mv) for Green forward voltage
expected_blue_vf: Tuple (min_mv, max_mv) for Blue forward voltage
Returns:
bool: True if all signals pass, False otherwise
"""
all_passed = True
logger.info(f" --- VS (Supply Voltage) ---")
vs_passed, vs_avg, vs_samples = sample_signal(
lin_dev, "VS", read_pwm_wo_comp_frame, 'vs_mv',
VS_EXPECTED_MIN_MV, VS_EXPECTED_MAX_MV,
num_samples=num_samples, sample_interval=sample_interval
)
log_signal_summary("VS", vs_passed, vs_avg, vs_samples)
if not vs_passed:
all_passed = False
logger.info(f" --- VLED (DC-DC Voltage) ---")
vled_passed, vled_avg, vled_samples = sample_signal(
lin_dev, "VLED", read_vf_frame, 'vled_mv',
VLED_EXPECTED_MIN_MV, VLED_EXPECTED_MAX_MV,
num_samples=num_samples, sample_interval=sample_interval
)
log_signal_summary("VLED", vled_passed, vled_avg, vled_samples)
if not vled_passed:
all_passed = False
led_checks = [
("Red_VF", 'red_vf_mv', expected_red_vf),
("Green_VF", 'green_vf_mv', expected_green_vf),
("Blue_VF", 'blue_vf_mv', expected_blue_vf),
]
for signal_name, signal_key, (exp_min, exp_max) in led_checks:
logger.info(f" --- {signal_name} (expected {exp_min}-{exp_max} mV) ---")
led_passed, led_avg, led_samples = sample_signal(
lin_dev, signal_name, read_vf_frame, signal_key,
exp_min, exp_max,
num_samples=num_samples, sample_interval=sample_interval
)
log_signal_summary(signal_name, led_passed, led_avg, led_samples)
if not led_passed:
all_passed = False
return all_passed
def main():
parser = argparse.ArgumentParser(description='LIN ADC Measurement Verification Test')
parser.add_argument('--host', default=MUM_HOST,
help=f'MUM IP address (default: {MUM_HOST})')
parser.add_argument('--nad', type=lambda x: int(x, 0), default=LED_DEFAULT_NAD,
help=f'Node address (default: 0x{LED_DEFAULT_NAD:02X})')
parser.add_argument('--samples', type=int, default=10,
help='Number of samples to read per signal (default: 10)')
parser.add_argument('--interval', type=float, default=0.1,
help='Interval between samples in seconds (default: 0.1)')
parser.add_argument('--settle-time', type=float, default=LED_SETTLE_TIME,
help=f'Settle time after LED state change (default: {LED_SETTLE_TIME}s)')
args = parser.parse_args()
# Define test cases: (name, red, green, blue, intensity,
# expected_red_vf, expected_green_vf, expected_blue_vf)
test_cases = [
(
"All LEDs OFF",
0, 0, 0, 0,
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
),
(
"Red ON (255/255)",
255, 0, 0, 255,
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
),
(
"Green ON (255/255)",
0, 255, 0, 255,
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
),
(
"Blue ON (255/255)",
0, 0, 255, 255,
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_OFF_MIN_MV, LED_VF_OFF_MAX_MV),
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
),
(
"All LEDs ON (255/255)",
255, 255, 255, 255,
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
(LED_VF_ON_MIN_MV, LED_VF_ON_MAX_MV),
),
]
test_results = {}
nad = args.nad # may be updated below after reading ALM_Status
try:
logger.info(f"Connecting to MUM at {args.host}...")
mum = MelexisUniversalMaster()
mum.open_all(args.host)
power_control = mum.get_device(MUM_POWER_DEVICE)
linmaster = mum.get_device(MUM_LIN_DEVICE)
linmaster.setup()
lin_bus = LinBusManager(linmaster)
lin_dev = LinDevice22(lin_bus)
lin_dev.baudrate = LIN_BAUDRATE
lin_dev.nad = args.nad
power_control.power_up()
time.sleep(0.5)
logger.info("MUM connected and LIN bus ready")
logger.info("=" * 70)
logger.info("ADC MEASUREMENT VERIFICATION TEST")
logger.info(f"Samples: {args.samples}, Interval: {args.interval}s, "
f"Settle: {args.settle_time}s")
logger.info("=" * 70)
# Wait for ADC to settle after power-up
logger.info("Waiting for ADC to settle after power-up...")
time.sleep(1.0)
# Read the actual NAD from the node. Using args.nad directly risks
# a silent miss if the node was assigned a different NAD (e.g. via
auto-addressing), because the AmbLightLIDFrom/LIDTo range must include ALMNadNo.
logger.info("Reading node NAD from ALM_Status...")
detected_nad, status_data = read_alm_status(lin_dev)
if detected_nad is not None:
nad = detected_nad
data_hex = ' '.join(f'{b:02X}' for b in status_data)
logger.info(f"Detected NAD: 0x{nad:02X} (Status frame: {data_hex})")
else:
nad = args.nad
logger.warning(f"Could not read NAD, falling back to 0x{nad:02X}")
logger.info("=" * 70)
# Configure: disable derating and compensation so PWM output directly
# reflects the requested color/brightness.
logger.info("Sending ConfigFrame: Calibration=1, Derating=0, Compensation=0")
send_config_frame(lin_dev, calibration=1, enable_derating=0,
enable_compensation=0)
time.sleep(0.1)
# Ensure LEDs are off before starting
set_led_color(lin_dev, nad, 0, 0, 0, 0)
time.sleep(args.settle_time)
for idx, (name, red, green, blue, intensity,
exp_red, exp_green, exp_blue) in enumerate(test_cases, 1):
logger.info("")
logger.info("-" * 70)
logger.info(f"TEST {idx}/{len(test_cases)}: {name}")
logger.info(f" Command: R={red} G={green} B={blue} I={intensity}"
f" -> NAD 0x{nad:02X}")
logger.info("-" * 70)
# Set LED state
set_led_color(lin_dev, nad, red, green, blue, intensity)
logger.info(f" Waiting {args.settle_time}s for ADC to settle...")
time.sleep(args.settle_time)
# Verify all ADC signals
passed = verify_adc_signals(
lin_dev, args.samples, args.interval,
exp_red, exp_green, exp_blue
)
test_results[name] = passed
logger.info(f" >> TEST {idx} {'PASS' if passed else 'FAIL'}")
# Turn LEDs off at the end
set_led_color(lin_dev, nad, 0, 0, 0, 0)
# Summary
logger.info("")
logger.info("=" * 70)
logger.info("TEST SUMMARY")
logger.info("=" * 70)
all_passed = True
for name, passed in test_results.items():
status = "PASS" if passed else "FAIL"
logger.info(f" {status} - {name}")
if not passed:
all_passed = False
logger.info("-" * 70)
if all_passed:
logger.info("RESULT: ALL TESTS PASSED")
else:
logger.info("RESULT: SOME TESTS FAILED")
logger.info("=" * 70)
logger.info("Tearing down...")
linmaster.teardown()
logger.info("Done (ECU still powered)")
sys.exit(0 if all_passed else 1)
except KeyboardInterrupt:
logger.info("")
logger.info("Interrupted by user")
try:
set_led_color(lin_dev, nad, 0, 0, 0, 0)
linmaster.teardown()
except Exception:
pass
sys.exit(130)
except Exception as e:
logger.error(f"Error: {e}", exc_info=True)
sys.exit(1)
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,543 @@
#!/usr/bin/env python3
"""
Interactive BABYLIN animation validation for ALM_Req_A.
This script executes the requirement-oriented checks step-by-step and pauses
after each action so the tester can verify physical LED behavior.
Covered checks:
1) AmbLightMode behavior (0 immediate, 1 fade RGBI, 2 immediate color + fade I)
2) AmbLightUpdate save/apply/discard
3) AmbLightDuration scaling (0.2 s/LSB)
4) LID range selection (single-node, broadcast, invalid From>To)
"""
import argparse
import logging
import time
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
level=logging.INFO,
format="%(asctime)-15s %(levelname)-8s %(message)s",
)
logger = logging.getLogger(__name__)
SEPARATOR = "=" * 78
SUB = "-" * 78
# ALM_Status.ALMLedState values
LED_STATE_OFF = 0
LED_STATE_ANIMATING = 1
LED_STATE_ON = 2
LED_STATE_NAMES = {
LED_STATE_OFF: "OFF",
LED_STATE_ANIMATING: "ANIMATING",
LED_STATE_ON: "ON",
}
def pause(msg):
print()
input(f">>> {msg}")
print()
def banner(title):
logger.info(SEPARATOR)
logger.info(title)
logger.info(SEPARATOR)
def section(title):
logger.info(SUB)
logger.info(title)
logger.info(SUB)
def read_alm_status(lin_dev):
"""Return (parsed_dict, raw_bytes) or (None, None)."""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=ALM_STATUS_FRAME["frame_id"],
data_length=ALM_STATUS_FRAME["length"],
data=None,
)
if response and len(response) >= ALM_STATUS_FRAME["length"]:
return unpack_frame(ALM_STATUS_FRAME, response), response
return None, None
except Exception as exc:
logger.error("Failed reading ALM_Status: %s", exc)
return None, None
def read_led_state(lin_dev):
parsed, _ = read_alm_status(lin_dev)
if parsed is None:
return -1
return parsed.get("ALMLEDState", -1)
def read_nad(lin_dev, fallback):
parsed, raw = read_alm_status(lin_dev)
if parsed is None:
logger.warning("Could not read ALM_Status, fallback NAD=0x%02X", fallback)
return fallback
nad = parsed.get("ALMNadNo", fallback)
logger.info("Detected ALMNadNo=0x%02X (raw: %s)", nad, " ".join(f"{b:02X}" for b in raw))
return nad
def send_req(
lin_dev,
*,
red,
green,
blue,
intensity,
update,
mode,
duration,
lid_from,
lid_to,
):
data = pack_frame(
ALM_REQ_A_FRAME,
AmbLightColourRed=red,
AmbLightColourGreen=green,
AmbLightColourBlue=blue,
AmbLightIntensity=intensity,
AmbLightUpdate=update,
AmbLightMode=mode,
AmbLightDuration=duration,
AmbLightLIDFrom=lid_from,
AmbLightLIDTo=lid_to,
)
lin_dev.send_message(
master_to_slave=True,
frame_id=ALM_REQ_A_FRAME["frame_id"],
data_length=ALM_REQ_A_FRAME["length"],
data=data,
)
def observe_state(lin_dev, seconds):
"""Poll status slowly and print changes."""
logger.info("Observing for %.1f s...", seconds)
end_t = time.time() + seconds
last = None
while time.time() < end_t:
st = read_led_state(lin_dev)
if st != last:
name = LED_STATE_NAMES.get(st, f"UNKNOWN({st})")
logger.info(" ALMLEDState -> %s", name)
last = st
time.sleep(0.25)
def guided_step(lin_dev, title, expectation_lines, command_kwargs, observe_s):
section(title)
logger.info("What you should see:")
for line in expectation_lines:
logger.info(" - %s", line)
pause("Press Enter to send this command...")
send_req(lin_dev, **command_kwargs)
observe_state(lin_dev, observe_s)
pause("Verify visually, then press Enter for the next step...")
def main():
parser = argparse.ArgumentParser(description="Interactive ALM animation checks for BABYLIN")
parser.add_argument("--host", default=MUM_HOST, help=f"MUM IP (default: {MUM_HOST})")
parser.add_argument(
"--nad",
type=lambda x: int(x, 0),
default=LED_DEFAULT_NAD,
help=f"Fallback NAD if ALM_Status read fails (default: 0x{LED_DEFAULT_NAD:02X})",
)
parser.add_argument(
"--slow-factor",
type=float,
default=1.0,
help="Multiply wait/observe durations (default: 1.0)",
)
args = parser.parse_args()
mum = None
linmaster = None
lin_dev = None
try:
banner("Connecting to MUM / LIN")
mum = MelexisUniversalMaster()
mum.open_all(args.host)
power_control = mum.get_device(MUM_POWER_DEVICE)
linmaster = mum.get_device(MUM_LIN_DEVICE)
linmaster.setup()
lin_bus = LinBusManager(linmaster)
lin_dev = LinDevice22(lin_bus)
lin_dev.baudrate = LIN_BAUDRATE
lin_dev.nad = args.nad
power_control.power_up()
time.sleep(0.5 * args.slow_factor)
nad = read_nad(lin_dev, args.nad)
lin_dev.nad = nad
banner("Interactive Requirement Validation")
logger.info("Target NAD: 0x%02X", nad)
logger.info("Slow factor: %.2f", args.slow_factor)
logger.info("You will be prompted before and after every test step.")
pause("Press Enter to start from a known OFF baseline...")
# Step 0: Baseline OFF
guided_step(
lin_dev,
"Step 0 - Baseline OFF",
[
"LED should turn OFF quickly.",
"ALMLEDState should become OFF.",
],
{
"red": 0,
"green": 0,
"blue": 0,
"intensity": 0,
"update": 0,
"mode": 0,
"duration": 0,
"lid_from": nad,
"lid_to": nad,
},
1.0 * args.slow_factor,
)
# 1) Mode behavior checks
guided_step(
lin_dev,
"Step 1 - Mode 0 Immediate Setpoint",
[
"Color/intensity should change immediately.",
"No visible fade; direct jump to requested setpoint.",
],
{
"red": 0,
"green": 180,
"blue": 80,
"intensity": 200,
"update": 0,
"mode": 0,
"duration": 10,
"lid_from": nad,
"lid_to": nad,
},
1.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 2 - Mode 1 Fade RGB + Intensity (2.0 s)",
[
"RGB and intensity should both transition smoothly.",
"Transition duration should be close to 2.0 s (Duration=10).",
],
{
"red": 255,
"green": 40,
"blue": 0,
"intensity": 220,
"update": 0,
"mode": 1,
"duration": 10,
"lid_from": nad,
"lid_to": nad,
},
3.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 3 - Mode 2 Immediate Color + Faded Intensity (2.0 s)",
[
"Color should jump immediately to the new RGB target.",
"Only intensity should ramp over ~2.0 s.",
],
{
"red": 0,
"green": 0,
"blue": 255,
"intensity": 50,
"update": 0,
"mode": 2,
"duration": 10,
"lid_from": nad,
"lid_to": nad,
},
3.0 * args.slow_factor,
)
# 2) Update save/apply/discard checks
guided_step(
lin_dev,
"Step 4 - Update=1 Save (must NOT apply)",
[
"LED output should remain unchanged after this command.",
"No visible color/intensity change should occur.",
],
{
"red": 0,
"green": 255,
"blue": 0,
"intensity": 255,
"update": 1,
"mode": 1,
"duration": 10,
"lid_from": nad,
"lid_to": nad,
},
1.5 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 5 - Update=2 Apply Saved",
[
"Saved command from Step 4 should execute now.",
"Payload in this Apply frame should be ignored by ECU logic.",
"You should see saved behavior (mode/duration/RGBI from Step 4).",
],
{
"red": 7,
"green": 7,
"blue": 7,
"intensity": 7,
"update": 2,
"mode": 0,
"duration": 0,
"lid_from": nad,
"lid_to": nad,
},
3.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 6 - Update=3 Discard Saved",
[
"Saved buffer should be cleared.",
"This discard command itself should not change output.",
],
{
"red": 0,
"green": 0,
"blue": 0,
"intensity": 0,
"update": 3,
"mode": 0,
"duration": 0,
"lid_from": nad,
"lid_to": nad,
},
1.5 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 7 - Update=2 After Discard",
[
"No saved command should exist now.",
"Apply should behave like a no-op (no new visible action).",
],
{
"red": 123,
"green": 12,
"blue": 45,
"intensity": 200,
"update": 2,
"mode": 1,
"duration": 5,
"lid_from": nad,
"lid_to": nad,
},
2.0 * args.slow_factor,
)
# 3) Duration scaling checks
guided_step(
lin_dev,
"Step 8 - Duration=1 (expect ~0.2 s)",
[
"Transition should complete very quickly (~0.2 s).",
],
{
"red": 255,
"green": 0,
"blue": 0,
"intensity": 200,
"update": 0,
"mode": 1,
"duration": 1,
"lid_from": nad,
"lid_to": nad,
},
1.5 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 9 - Duration=5 (expect ~1.0 s)",
[
"Transition should take around 1.0 s.",
"Visibly slower than Step 8.",
],
{
"red": 0,
"green": 255,
"blue": 0,
"intensity": 200,
"update": 0,
"mode": 1,
"duration": 5,
"lid_from": nad,
"lid_to": nad,
},
2.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 10 - Duration=10 (expect ~2.0 s)",
[
"Transition should take around 2.0 s.",
"Visibly slower than Step 9.",
],
{
"red": 0,
"green": 0,
"blue": 255,
"intensity": 200,
"update": 0,
"mode": 1,
"duration": 10,
"lid_from": nad,
"lid_to": nad,
},
3.0 * args.slow_factor,
)
# 4) LID selection checks
guided_step(
lin_dev,
"Step 11 - LID Single-Node Select (From=To=NAD)",
[
"This node should react (it is explicitly selected).",
],
{
"red": 255,
"green": 120,
"blue": 0,
"intensity": 180,
"update": 0,
"mode": 0,
"duration": 0,
"lid_from": nad,
"lid_to": nad,
},
1.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 12 - LID Broadcast Select (From=0, To=255)",
[
"This node should react (broadcast range).",
],
{
"red": 120,
"green": 0,
"blue": 255,
"intensity": 180,
"update": 0,
"mode": 0,
"duration": 0,
"lid_from": 0,
"lid_to": 255,
},
1.0 * args.slow_factor,
)
guided_step(
lin_dev,
"Step 13 - LID Invalid Range (From > To)",
[
"Node should ignore this command.",
"No visible output change is expected.",
],
{
"red": 255,
"green": 255,
"blue": 255,
"intensity": 255,
"update": 0,
"mode": 0,
"duration": 0,
"lid_from": 20,
"lid_to": 10,
},
1.5 * args.slow_factor,
)
pause("All checks done. Press Enter to send final OFF cleanup...")
send_req(
lin_dev,
red=0,
green=0,
blue=0,
intensity=0,
update=0,
mode=0,
duration=0,
lid_from=nad,
lid_to=nad,
)
observe_state(lin_dev, 1.0 * args.slow_factor)
banner("Test sequence completed")
except KeyboardInterrupt:
logger.info("Interrupted by user")
finally:
try:
if lin_dev is not None:
# Best effort: leave node OFF
send_req(
lin_dev,
red=0,
green=0,
blue=0,
intensity=0,
update=0,
mode=0,
duration=0,
lid_from=lin_dev.nad,
lid_to=lin_dev.nad,
)
except Exception:
pass
try:
if linmaster is not None:
linmaster.teardown()
except Exception:
pass
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,260 @@
#!/usr/bin/env python3
"""
LIN Auto-Addressing Test - Matching babylin behavior
This test replicates the exact babylin sequence that successfully changed NAD.
Key observations from babylin log:
1. Uses FreeFormat frame (ID 0x3C)
2. Frame structure: [NAD, PCI, SID, SupID_LSB, SupID_MSB, Subf, Param1, Param2]
3. Uses LIN 1.x Classic checksum
4. Loops the auto-addressing schedule multiple times (6+ iterations in babylin log)
5. NAD change happens after several iterations
"""
import argparse
import logging
import time
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
level=logging.INFO,
format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
def read_status(lin_dev):
"""Read ALM_Status frame"""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=LIN_FRAME_ID_ALM_STATUS,
data_length=4,
data=None
)
if response and len(response) > 0:
return response[0], response
return None, None
except Exception as e:
logger.error(f"Failed to read status: {e}")
return None, None
def send_bsm_frame(transport_layer, subfunction, param1, param2):
"""
Send BSM-SNPD diagnostic frame with Classic checksum.
Uses ld_put_raw(), which transmits with the LIN 1.x Classic checksum (as babylin does).
send_message() uses the Enhanced checksum, which the firmware rejects for this frame.
"""
try:
data = bytearray([
BSM_NAD_BROADCAST,
BSM_PCI,
BSM_SID,
BSM_SUPPLIER_ID_LSB,
BSM_SUPPLIER_ID_MSB,
subfunction,
param1,
param2
])
transport_layer.ld_put_raw(data=data, baudrate=LIN_BAUDRATE)
return True
except Exception as e:
logger.error(f"Failed to send BSM frame: {e}")
return False
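# Reference only, not called by this test: a minimal sketch of the LIN 1.x Classic
# checksum that ld_put_raw() is expected to apply on the wire, assuming the standard
# algorithm (8-bit sum of the data bytes with carry wrap-around, then inversion).
# The Enhanced checksum would additionally fold the protected frame ID into the sum.
def classic_checksum_sketch(data):
    chk = 0
    for byte in data:
        chk += byte
        if chk > 0xFF:
            chk -= 0xFF  # wrap the carry back into the low byte
    return (~chk) & 0xFF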
def poll_status_frames(lin_dev, duration_seconds=AUTOADDRESSING_POLL_DURATION):
"""
Poll status frames for a specified duration, matching babylin's Pub_serv schedule.
This acts as a keepalive and gives the ECU time to process.
"""
start_time = time.time()
poll_count = 0
while (time.time() - start_time) < duration_seconds:
try:
lin_dev.send_message(
master_to_slave=False,
frame_id=LIN_FRAME_ID_ALM_STATUS,
data_length=4,
data=[]
)
poll_count += 1
time.sleep(AUTOADDRESSING_STATUS_POLL_INTERVAL)
except Exception:
# Ignore timeout errors during polling
time.sleep(AUTOADDRESSING_STATUS_POLL_INTERVAL)
logger.debug(f" Polled status {poll_count} times over {duration_seconds:.1f}s")
def run_auto_addressing_sequence(transport_layer, target_nad):
"""
Run one complete auto-addressing sequence matching babylin.
Babylin sequence:
1. INIT (subf 0x01)
2. Wait 50ms
3. 16x NAD assignments (subf 0x02) with 20ms delays
4. STORE (subf 0x03)
5. FINALIZE (subf 0x04)
Args:
transport_layer: LIN transport layer for sending frames
target_nad: Target NAD to assign (will be placed first in sequence)
"""
# Step 1: Initialize auto-addressing mode
logger.debug(" INIT (0x01)")
if not send_bsm_frame(transport_layer, BSM_SUBF_INIT, 0x02, 0xFF):
return False
time.sleep(BSM_INIT_DELAY)
# Step 2: Send 16 NAD assignment frames
# Put target NAD first in sequence to ensure it gets assigned
nad_sequence = list(VALID_NAD_RANGE)
# Move target_nad to the front of the sequence
if target_nad in nad_sequence:
nad_sequence.remove(target_nad)
nad_sequence.insert(0, target_nad)
for nad in nad_sequence:
logger.debug(f" ASSIGN NAD 0x{nad:02X} (0x02)")
if not send_bsm_frame(transport_layer, BSM_SUBF_ASSIGN, 0x02, nad):
return False
time.sleep(BSM_FRAME_DELAY)
# Step 3: Store configuration
logger.debug(" STORE (0x03)")
if not send_bsm_frame(transport_layer, BSM_SUBF_STORE, 0x02, 0xFF):
return False
time.sleep(BSM_FRAME_DELAY)
# Step 4: Finalize
logger.debug(" FINALIZE (0x04)")
if not send_bsm_frame(transport_layer, BSM_SUBF_FINALIZE, 0x02, 0xFF):
return False
time.sleep(BSM_FRAME_DELAY)
return True
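# Rough timing per iteration (illustrative, assuming BSM_INIT_DELAY=0.05 s,
# BSM_FRAME_DELAY=0.02 s and a 16-entry VALID_NAD_RANGE as described above):
# 0.05 + 16*0.02 + 2*0.02 ~= 0.41 s of deliberate delay, plus frame transmission time.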
def main():
parser = argparse.ArgumentParser(description='LIN Auto-Addressing Test')
parser.add_argument('--host', default=MUM_HOST,
help=f'MUM IP address (default: {MUM_HOST})')
parser.add_argument('--iterations', type=int, default=AUTOADDRESSING_DEFAULT_ITERATIONS,
help=f'Number of auto-addressing iterations (default: {AUTOADDRESSING_DEFAULT_ITERATIONS})')
parser.add_argument('--check-interval', type=int, default=0,
help='Check status every N iterations (0=only at end)')
args = parser.parse_args()
try:
logger.info(f"Connecting to MUM at {args.host}...")
# Initialize MUM
mum = MelexisUniversalMaster()
mum.open_all(args.host)
power_control = mum.get_device(MUM_POWER_DEVICE)
linmaster = mum.get_device(MUM_LIN_DEVICE)
linmaster.setup()
# Initialize LIN
lin_bus = LinBusManager(linmaster)
lin_dev = LinDevice22(lin_bus)
lin_dev.baudrate = LIN_BAUDRATE
# Get transport layer for sending with Classic checksum
transport_layer = lin_dev.get_device("bus/transport_layer")
# Power up
power_control.power_up()
time.sleep(0.5)
logger.info("=" * 70)
logger.info("MUM connected, LIN bus ready")
# Read initial status
initial_nad, _ = read_status(lin_dev)
if initial_nad:
logger.info(f"Initial NAD: 0x{initial_nad:02X}")
# Calculate target NAD (different from initial NAD)
valid_nads = list(VALID_NAD_RANGE)
if initial_nad and initial_nad in valid_nads:
valid_nads.remove(initial_nad)
target_nad = valid_nads[0] # Pick the first available NAD
logger.info(f"Target NAD: 0x{target_nad:02X}")
logger.info("=" * 70)
logger.info(f"Running {args.iterations} auto-addressing iterations...")
logger.info("(Like babylin: iterate multiple times, then check result)")
logger.info("=" * 70)
# Run iterations with status polling (like babylin's schedule switching)
for iteration in range(1, args.iterations + 1):
logger.info(f"Iteration {iteration}/{args.iterations}")
# Run BSM sequence (like babylin's LIN_AA schedule)
if not run_auto_addressing_sequence(transport_layer, target_nad):
logger.error("Auto-addressing sequence failed")
break
# Poll status frames between iterations (like babylin's Pub_serv schedule)
# This gives ECU time to process and keeps communication alive
logger.debug(f" Status polling between iterations...")
poll_status_frames(lin_dev, duration_seconds=2.0)
# Check status at intervals if requested
if args.check_interval > 0 and iteration % args.check_interval == 0:
nad, _ = read_status(lin_dev)
if nad:
logger.info(f" After iteration {iteration}: NAD = 0x{nad:02X}")
if initial_nad and nad != initial_nad:
logger.info("=" * 70)
logger.info(f"SUCCESS! NAD changed from 0x{initial_nad:02X} to 0x{nad:02X}")
logger.info(f"Change occurred after {iteration} iterations")
logger.info("=" * 70)
break
# Final status check
logger.info("=" * 70)
logger.info("Checking final status...")
time.sleep(1.0)
final_nad, final_data = read_status(lin_dev)
if final_nad:
data_hex = ' '.join(f'{b:02X}' for b in final_data)
logger.info(f"Final NAD: 0x{final_nad:02X}, Data: {data_hex}")
if initial_nad and final_nad != initial_nad:
logger.info("=" * 70)
logger.info(f"SUCCESS! NAD changed from 0x{initial_nad:02X} to 0x{final_nad:02X}")
logger.info("=" * 70)
else:
logger.info(f"NAD unchanged (still 0x{final_nad:02X})")
logger.info("=" * 70)
linmaster.teardown()
logger.info("Done")
except KeyboardInterrupt:
logger.info("\nInterrupted by user")
try:
linmaster.teardown()
except Exception:
pass
except Exception as e:
logger.error(f"Error: {e}", exc_info=True)
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -0,0 +1,230 @@
#!/usr/bin/env python3
"""
LIN LED Control Test
This test verifies LIN communication by controlling the LED on the board.
It will fade through different colors (Red, Green, Blue) to verify that
frames are being received correctly.
Frame structure (ALM_Req_A, ID=0x0A, 8 bytes):
- Byte 0: AmbLightColourRed (0-255)
- Byte 1: AmbLightColourGreen (0-255)
- Byte 2: AmbLightColourBlue (0-255)
- Byte 3: AmbLightIntensity (0-255)
- Byte 4: AmbLightUpdate[1:0] | (AmbLightMode[5:0] << 2)
- Byte 5: AmbLightDuration (0-255)
- Byte 6: AmbLightLIDFrom (NAD range start; set equal to LIDTo to target one node)
- Byte 7: AmbLightLIDTo (NAD range end)
"""
import argparse
import logging
import time
import math
from pylin import LinBusManager, LinDevice22
from pymumclient import MelexisUniversalMaster
from config import *
logging.basicConfig(
level=logging.INFO,
format='%(asctime)-15s %(levelname)-8s %(message)s'
)
logger = logging.getLogger(__name__)
def read_alm_status(lin_dev):
"""Read ALM_Status frame and return (ALMNadNo, raw_bytes)."""
try:
response = lin_dev.send_message(
master_to_slave=False,
frame_id=ALM_STATUS_FRAME['frame_id'],
data_length=ALM_STATUS_FRAME['length'],
data=None
)
if response and len(response) >= ALM_STATUS_FRAME['length']:
parsed = unpack_frame(ALM_STATUS_FRAME, response)
return parsed['ALMNadNo'], response
return None, None
except Exception as e:
logger.error(f"Failed to read ALM_Status: {e}")
return None, None
def set_led_color(lin_dev, nad, red, green, blue, intensity,
update=0, mode=0, duration=0):
"""
Set LED color and intensity via ALM_Req_A frame.
The node responds only if AmbLightLIDFrom <= ALMNadNo <= AmbLightLIDTo.
Setting both to the same NAD targets a single node.
"""
try:
data = pack_frame(ALM_REQ_A_FRAME,
AmbLightColourRed=red,
AmbLightColourGreen=green,
AmbLightColourBlue=blue,
AmbLightIntensity=intensity,
AmbLightUpdate=update,
AmbLightMode=mode,
AmbLightDuration=duration,
AmbLightLIDFrom=nad,
AmbLightLIDTo=nad,
)
lin_dev.send_message(
master_to_slave=True,
frame_id=ALM_REQ_A_FRAME['frame_id'],
data_length=ALM_REQ_A_FRAME['length'],
data=data,
)
return True
except Exception as e:
logger.error(f"Failed to set LED color: {e}")
return False
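# Illustrative call: set_led_color(lin_dev, 0x11, 255, 0, 0, 200) drives only the node
# whose ALMNadNo is 0x11 (0x11 is just an example NAD) to red at intensity 200/255.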
def fade_test(lin_dev, nad, duration_per_color=5.0):
"""
Fade through Red, Green, and Blue colors.
Args:
lin_dev: LinDevice22 instance
nad: Node address
duration_per_color: How long to fade each color (seconds)
"""
colors = [
("Red", 255, 0, 0),
("Green", 0, 255, 0),
("Blue", 0, 0, 255),
]
steps = 50 # Number of fade steps
delay = duration_per_color / steps
for color_name, r_max, g_max, b_max in colors:
logger.info(f"Fading {color_name}...")
# Fade in
for step in range(steps + 1):
progress = step / steps
# Use sine wave for smoother fade
brightness = math.sin(progress * math.pi / 2)
red = int(r_max * brightness)
green = int(g_max * brightness)
blue = int(b_max * brightness)
intensity = int(100 * brightness)
set_led_color(lin_dev, nad, red, green, blue, intensity)
time.sleep(delay)
# Fade out
for step in range(steps, -1, -1):
progress = step / steps
brightness = math.sin(progress * math.pi / 2)
red = int(r_max * brightness)
green = int(g_max * brightness)
blue = int(b_max * brightness)
intensity = int(100 * brightness)
set_led_color(lin_dev, nad, red, green, blue, intensity)
time.sleep(delay)
def main():
parser = argparse.ArgumentParser(description='LIN LED Control Test')
parser.add_argument('--host', default=MUM_HOST,
help=f'MUM IP address (default: {MUM_HOST})')
parser.add_argument('--nad', type=lambda x: int(x,0), default=LED_DEFAULT_NAD,
help=f'Node address to control (default: 0x{LED_DEFAULT_NAD:02X})')
parser.add_argument('--cycles', type=int, default=3,
help='Number of fade cycles (default: 3)')
parser.add_argument('--duration', type=float, default=3.0,
help='Duration per color in seconds (default: 3.0)')
args = parser.parse_args()
try:
logger.info(f"Connecting to MUM at {args.host}...")
# Setup MUM and LIN
mum = MelexisUniversalMaster()
mum.open_all(args.host)
power_control = mum.get_device(MUM_POWER_DEVICE)
linmaster = mum.get_device(MUM_LIN_DEVICE)
linmaster.setup()
lin_bus = LinBusManager(linmaster)
lin_dev = LinDevice22(lin_bus)
lin_dev.baudrate = LIN_BAUDRATE
lin_dev.nad = args.nad
power_control.power_up()
time.sleep(0.5)
logger.info("MUM connected and LIN bus ready")
logger.info("=" * 70)
# Read current NAD
logger.info("Reading current NAD from ALM_Status...")
current_nad, status_data = read_alm_status(lin_dev)
if current_nad is not None:
data_hex = ' '.join(f'{b:02X}' for b in status_data)
logger.info(f"Current NAD: 0x{current_nad:02X}")
logger.info(f"Full status data: {data_hex}")
else:
logger.warning("Could not read NAD, using command-line NAD")
current_nad = args.nad
logger.info("=" * 70)
logger.info(f"LED FADE TEST")
logger.info(f"Controlling NAD: 0x{current_nad:02X}")
logger.info(f"LIDFrom: 0x{current_nad:02X}, LIDTo: 0x{current_nad:02X}")
logger.info(f"Fade cycles: {args.cycles}")
logger.info(f"Duration per color: {args.duration}s")
logger.info("=" * 70)
# Turn LED off initially
logger.info("Turning LED off...")
set_led_color(lin_dev, current_nad, 0, 0, 0, 0)
time.sleep(1.0)
# Run fade test
for cycle in range(1, args.cycles + 1):
logger.info(f"\nCycle {cycle}/{args.cycles}")
fade_test(lin_dev, current_nad, args.duration)
if cycle < args.cycles:
logger.info("Pausing between cycles...")
time.sleep(1.0)
# Turn LED off at the end
logger.info("\nTurning LED off...")
set_led_color(lin_dev, current_nad, 0, 0, 0, 0)
logger.info("=" * 70)
logger.info("✓ LED TEST COMPLETED")
logger.info("=" * 70)
logger.info("Tearing down...")
linmaster.teardown()
logger.info("Done (ECU still powered)")
except KeyboardInterrupt:
logger.info("")
logger.info("Interrupted by user")
logger.info("Turning LED off...")
try:
set_led_color(lin_dev, args.nad, 0, 0, 0, 0)
linmaster.teardown()
except Exception:
pass
except Exception as e:
logger.error(f"Error: {e}", exc_info=True)
if __name__ == "__main__":
main()

Binary file not shown.

View File

@ -1,116 +1,116 @@
"""Mock implementation of the BabyLIN SDK wrapper API used by our adapter.
This module provides create_BabyLIN() returning an object with BLC_* methods,
so the real adapter can be exercised without hardware.
Design notes:
- We simulate a single device with one channel and an RX queue per channel.
- Transmit (BLC_mon_set_xmit) echoes payload into the RX queue to mimic loopback.
- Master request (BLC_sendRawMasterRequest) enqueues a deterministic response so
tests can validate request/response logic without randomness.
"""
from dataclasses import dataclass
from typing import List
BL_OK = 0 # Success code matching the real SDK convention
@dataclass
class BLC_FRAME:
"""Minimal frame structure to mirror the SDK's BLC_FRAME used by the adapter."""
frameId: int
lenOfData: int
frameData: bytes
class _MockChannel:
"""Represents a BabyLIN channel with a simple RX queue."""
def __init__(self):
self.rx: List[BLC_FRAME] = [] # FIFO for received frames
class _MockBL:
"""BabyLIN mock exposing the subset of BLC_* APIs our adapter calls."""
def __init__(self):
self.BL_OK = BL_OK
self._ports = ["MOCK_PORT"] # Simulate one discoverable device
self._handle = object() # Opaque handle placeholder
self._channels = [_MockChannel()] # Single channel system
# -----------------------------
# Discovery/open/close
# -----------------------------
def BLC_getBabyLinPorts(self, timeout_ms: int):
"""Return a list of mock ports; timeout not used in mock."""
return list(self._ports)
def BLC_openPort(self, port: str):
"""Return an opaque handle for the given port name."""
return self._handle
def BLC_closeAll(self):
"""Pretend to close; always succeeds."""
return BL_OK
# -----------------------------
# SDF and channel handling
# -----------------------------
def BLC_loadSDF(self, handle, sdf_path: str, download: int):
"""No-op in mock; assume success."""
return BL_OK
def BLC_getChannelCount(self, handle):
"""Report number of channels (1 in mock)."""
return len(self._channels)
def BLC_getChannelHandle(self, handle, idx: int):
"""Return the channel object acting as its own handle."""
return self._channels[idx]
def BLC_sendCommand(self, channel, command: str):
"""Accept any command (e.g., start schedule); always succeed."""
return BL_OK
# -----------------------------
# Transmit/Receive primitives
# -----------------------------
def BLC_mon_set_xmit(self, channel: _MockChannel, frame_id: int, data: bytes, slot_time: int):
"""Echo transmitted payload back to RX to simulate a bus loopback."""
channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=bytes(data)))
return BL_OK
def BLC_getNextFrameTimeout(self, channel: _MockChannel, timeout_ms: int):
"""Pop next frame from RX queue; return None on timeout (empty queue)."""
if channel.rx:
return channel.rx.pop(0)
# Simulate timeout -> real wrapper may raise; we return None for simplicity
return None
def BLC_sendRawMasterRequest(self, channel: _MockChannel, frame_id: int, payload_or_length):
"""Simulate a slave response for a master request.
Supports two call forms to mirror SDK variations:
- (channel, frame_id, bytes): use bytes as the response payload
- (channel, frame_id, length): synthesize payload with a deterministic pattern
"""
if isinstance(payload_or_length, (bytes, bytearray)):
data = bytes(payload_or_length)
else:
length = int(payload_or_length)
# Deterministic pattern: response[i] = (frame_id + i) & 0xFF
data = bytes(((frame_id + i) & 0xFF) for i in range(max(0, min(8, length))))
# Enqueue the response frame as if the slave published it on the bus
channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=data))
return BL_OK
def BLC_getDetailedErrorString(self, rc: int):
"""Provide a friendly error string for non-OK return codes."""
return f"Mock error rc={rc}"
def create_BabyLIN():
"""Factory method matching the real SDK to construct the mock instance."""
return _MockBL()
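# Illustrative self-check (not part of the adapter tests): exercise the loopback path
# using only the BLC_* calls defined above.
if __name__ == "__main__":
    bl = create_BabyLIN()
    handle = bl.BLC_openPort(bl.BLC_getBabyLinPorts(100)[0])
    channel = bl.BLC_getChannelHandle(handle, 0)
    # Transmit echoes into the RX queue, so the next read returns the same payload.
    bl.BLC_mon_set_xmit(channel, 0x0A, bytes([0x01, 0x02, 0x03, 0x04]), 0)
    frame = bl.BLC_getNextFrameTimeout(channel, 100)
    assert frame is not None and frame.frameData == b"\x01\x02\x03\x04"
    bl.BLC_closeAll()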
"""Mock implementation of the BabyLIN SDK wrapper API used by our adapter.
This module provides create_BabyLIN() returning an object with BLC_* methods,
so the real adapter can be exercised without hardware.
Design notes:
- We simulate a single device with one channel and an RX queue per channel.
- Transmit (BLC_mon_set_xmit) echoes payload into the RX queue to mimic loopback.
- Master request (BLC_sendRawMasterRequest) enqueues a deterministic response so
tests can validate request/response logic without randomness.
"""
from dataclasses import dataclass
from typing import List
BL_OK = 0 # Success code matching the real SDK convention
@dataclass
class BLC_FRAME:
"""Minimal frame structure to mirror the SDK's BLC_FRAME used by the adapter."""
frameId: int
lenOfData: int
frameData: bytes
class _MockChannel:
"""Represents a BabyLIN channel with a simple RX queue."""
def __init__(self):
self.rx: List[BLC_FRAME] = [] # FIFO for received frames
class _MockBL:
"""BabyLIN mock exposing the subset of BLC_* APIs our adapter calls."""
def __init__(self):
self.BL_OK = BL_OK
self._ports = ["MOCK_PORT"] # Simulate one discoverable device
self._handle = object() # Opaque handle placeholder
self._channels = [_MockChannel()] # Single channel system
# -----------------------------
# Discovery/open/close
# -----------------------------
def BLC_getBabyLinPorts(self, timeout_ms: int):
"""Return a list of mock ports; timeout not used in mock."""
return list(self._ports)
def BLC_openPort(self, port: str):
"""Return an opaque handle for the given port name."""
return self._handle
def BLC_closeAll(self):
"""Pretend to close; always succeeds."""
return BL_OK
# -----------------------------
# SDF and channel handling
# -----------------------------
def BLC_loadSDF(self, handle, sdf_path: str, download: int):
"""No-op in mock; assume success."""
return BL_OK
def BLC_getChannelCount(self, handle):
"""Report number of channels (1 in mock)."""
return len(self._channels)
def BLC_getChannelHandle(self, handle, idx: int):
"""Return the channel object acting as its own handle."""
return self._channels[idx]
def BLC_sendCommand(self, channel, command: str):
"""Accept any command (e.g., start schedule); always succeed."""
return BL_OK
# -----------------------------
# Transmit/Receive primitives
# -----------------------------
def BLC_mon_set_xmit(self, channel: _MockChannel, frame_id: int, data: bytes, slot_time: int):
"""Echo transmitted payload back to RX to simulate a bus loopback."""
channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=bytes(data)))
return BL_OK
def BLC_getNextFrameTimeout(self, channel: _MockChannel, timeout_ms: int):
"""Pop next frame from RX queue; return None on timeout (empty queue)."""
if channel.rx:
return channel.rx.pop(0)
# Simulate timeout -> real wrapper may raise; we return None for simplicity
return None
def BLC_sendRawMasterRequest(self, channel: _MockChannel, frame_id: int, payload_or_length):
"""Simulate a slave response for a master request.
Supports two call forms to mirror SDK variations:
- (channel, frame_id, bytes): use bytes as the response payload
- (channel, frame_id, length): synthesize payload with a deterministic pattern
"""
if isinstance(payload_or_length, (bytes, bytearray)):
data = bytes(payload_or_length)
else:
length = int(payload_or_length)
# Deterministic pattern: response[i] = (frame_id + i) & 0xFF
data = bytes(((frame_id + i) & 0xFF) for i in range(max(0, min(8, length))))
# Enqueue the response frame as if the slave published it on the bus
channel.rx.append(BLC_FRAME(frameId=frame_id, lenOfData=len(data), frameData=data))
return BL_OK
def BLC_getDetailedErrorString(self, rc: int):
"""Provide a friendly error string for non-OK return codes."""
return f"Mock error rc={rc}"
def create_BabyLIN():
"""Factory method matching the real SDK to construct the mock instance."""
return _MockBL()