Compare commits: 3a6c102b45...master (8 commits)

| SHA1 |
|---|
| 17ce546255 |
| 1126111edb |
| a9053997a1 |
| 27993d72ee |
| 04705ce666 |
| 98629b744d |
| c8c67551fd |
| 2b588b3be2 |

Jenkinsfile — vendored (22 changed lines)
```diff
@@ -58,25 +58,13 @@ wifi_ap_password = DummyAP
             }
         }
 
-        stage('📦 Find & Archive Firmware') {
+        stage('📦 Archive Firmware & FS') {
             steps {
                 dir('Software') {
-                    script {
-                        echo "🔍 Suche nach Firmware (.fw.bin) und Filesystem (.fs.gz) Artefakten..."
-
-                        def firmwareFiles = findFiles(glob: '.pio/build/**/*.fw.bin')
-                        def fsFiles = findFiles(glob: '.pio/build/**/*.fs.gz')
-
-                        if (firmwareFiles.length == 0 && fsFiles.length == 0) {
-                            echo "⚠️ Keine passenden Artefakte (.fw.bin / .fs.gz) gefunden – nichts zu archivieren."
-                        } else {
-                            firmwareFiles.each { echo "📦 Firmware: ${it.path}" }
-                            fsFiles.each { echo "📦 Filesystem: ${it.path}" }
-
-                            def allArtifacts = (firmwareFiles + fsFiles).collect { it.path }
-                            archiveArtifacts artifacts: allArtifacts.join(', ')
-                        }
-                    }
+                    echo "🔍 Archiviere Artefakte (.fw.bin / .fs.gz)…"
+                    archiveArtifacts artifacts: '.pio/build/**/*.fw.bin, .pio/build/**/*.fs.gz',
+                                     allowEmptyArchive: true,
+                                     fingerprint: true
                 }
             }
         }
```
Reverse-Engineering CAN-Bus/.gitignore — new file, vendored (45 lines)
@@ -0,0 +1,45 @@
# Python
__pycache__/
*.py[cod]
*.pyo
*.pyd
*.pkl
*.pklz
*.egg-info/
*.egg
*.manifest
*.spec

# Build
build/
dist/
.eggs/

# Logs (keep the folder, ignore the files)
logs/*
!logs/.gitkeep
*.log

# Virtual environments
venv/
.env/
.venv/

# System files
.DS_Store
Thumbs.db

# Backup files
*.bak
*.tmp
*.swp
*.swo

# Editor/IDE
.vscode/
.idea/

# Project-specific
settings.json
settings.json.bak
tmp/
Reverse-Engineering CAN-Bus/HOW-TO-REVERSE.md — new file (90 lines)
@@ -0,0 +1,90 @@
# HOW-TO_REVERSE – A practical guide to CAN reverse engineering

This how-to is your toolbox for distilling real signals out of raw frames. It is not an oracle but an **experiment protocol**: measure, verify, falsify. Use it together with the GUI/CLI (splitter, explorer, batch analyzer, range/unsupervised fit).

---

## 0) Preparation (collect data like an engineer)
- **Separate states**: *ignition on*, *engine off*, *engine idling*, *pushing the bike*, *rear wheel spinning on the stand*, *short ride*.
- **Toggle actuators**: indicators, brake, lights, fan → generate ground truth for bits/flags.
- Analyze **RX only** if your hardware transmits (TX) in parallel (avoids interference patterns).

---

## 1) Frame and ID level first
- **Repetition rate (Hz)**: cyclic sensor IDs transmit at a stable rate.
  - *Rules of thumb*: WheelSpeed 20–100 Hz, RPM 10–50 Hz, TPS/APS 20–100 Hz, steering angle 50–100 Hz, temperatures 1–10 Hz.
- **Jitter** of the period: spread of the inter-arrival times. Low = clean cycle.
- **DLC stability**: fluctuating payload length → possibly ISO-TP / multiplexing.
- **Change density**: share of frames whose payload changes. Too high → counter/checksum; too low → status or a sluggish signal. (A minimal sketch for these metrics follows below.)
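
A minimal sketch of how these frame-level metrics can be computed with NumPy from a Kettenöler-format log (`logs/run1.log` is just an example path; the tools in this repo compute their own variants of these numbers):

```python
import re
from collections import defaultdict

import numpy as np

# Kettenöler log line: "<ts_ms> <TX|RX> 0x<ID> <DLC> <b0> ... <b7>"
LINE = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+(\d+)\s+((?:[0-9A-Fa-f]{2}\s*)+)")

frames = defaultdict(list)  # CAN id -> list of (t_seconds, payload)
with open("logs/run1.log", errors="ignore") as f:  # example path, adjust to your workdir
    for line in f:
        m = LINE.match(line)
        if m:
            ts, _, cid, _, data = m.groups()
            frames[int(cid, 16)].append((int(ts) / 1000.0, bytes.fromhex(data)))

for cid, rows in sorted(frames.items()):
    t = np.array([r[0] for r in rows])
    if len(t) < 3 or t[-1] <= t[0]:
        continue
    dt = np.diff(t)
    rate = 1.0 / np.mean(dt)                  # repetition rate in Hz
    jitter = float(np.std(dt) / np.mean(dt))  # relative inter-arrival jitter
    payloads = [r[1] for r in rows]
    change_density = sum(a != b for a, b in zip(payloads, payloads[1:])) / (len(payloads) - 1)
    print(f"0x{cid:X}: rate≈{rate:.1f} Hz  jitter≈{jitter:.2f}  change density≈{change_density:.2f}")
```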

---

## 2) Byte/bit signatures
- **Bit-flip rate** per bit: ~50% → flag/event; very regular → pattern/timer.
- **Rolling counter**: 4/8-bit sequences (0..15/255), usually incrementing steadily.
- **Checksum**: a byte that depends deterministically on the other bytes; often in the last position.
- **Endianness**: test 16-bit LE/BE. Monotonic trends/small deltas point to the correct byte order.
- **Quantization**: typical step sizes (e.g. 0.5 °C, 0.25 km/h). (A bit-flip-rate sketch follows below.)
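
A small sketch of the bit-flip-rate idea, assuming the payloads are already available as a list of `bytes` objects (e.g. from a parser like the one in section 1):

```python
import numpy as np

def bit_flip_rates(payloads: list[bytes], dlc: int = 8) -> np.ndarray:
    """Fraction of consecutive frames in which each bit toggles (result shape: dlc x 8, MSB first)."""
    arr = np.array([list(p[:dlc].ljust(dlc, b"\x00")) for p in payloads], dtype=np.uint8)
    bits = np.unpackbits(arr, axis=1).reshape(len(payloads), dlc, 8)
    flips = np.diff(bits.astype(np.int8), axis=0) != 0
    return flips.mean(axis=0)

# ~0.5      -> looks like a flag/event or noisy data bit
# ~0.0      -> constant / padding
# strictly periodic pattern -> counter or timer bit
rates = bit_flip_rates([bytes([i & 0xFF, 0x10, 0x00, 0x7F]) for i in range(100)], dlc=4)
print(np.round(rates, 2))
```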

---

## 3) Physics as a filter (slew rate & limits)
Measure **Δvalue/Δt** robustly (95th percentile, not the maximum):
- **Temperatures**: very sluggish → ΔT/s ≪ 1 °C/s.
- **Vehicle speed**: 0→100 km/h in under 1 s is unrealistic; roughly ≤ 30–50 km/h per second (on the road).
- **RPM**: fast jumps are possible, but no teleporting. 1k→8k in 1–3 s is plausible.
- **Steering angle**: fast but bounded; the **jerk** (ΔΔ/Δt) must not be absurd.

Anything that breaks these checks is rarely the physical signal you are looking for (or your scaling is wrong). A minimal slew-rate check is sketched below.
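
A minimal sketch of such a slew-rate check, assuming a candidate has already been decoded into time/value arrays:

```python
import numpy as np

def passes_slew_check(t_s: np.ndarray, phys: np.ndarray, max_rate: float) -> bool:
    """Robust slew-rate test: the 95th percentile of |Δvalue/Δt| must stay below max_rate."""
    dt = np.diff(t_s)
    ok = dt > 0                      # ignore duplicate timestamps
    slope_p95 = np.percentile(np.abs(np.diff(phys)[ok] / dt[ok]), 95)
    return slope_p95 <= max_rate

# Example: a coolant-temperature candidate should change by well under 1 °C/s.
t = np.linspace(0, 60, 600)
temp = 40 + 5 * np.sin(t / 30)                          # plausible, slow signal
print(passes_slew_check(t, temp, max_rate=1.0))         # True
print(passes_slew_check(t, temp * 100, max_rate=1.0))   # False -> scaling probably off by x100
```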

---

## 4) Correlation & causality
- **Cross-correlation**: RPM ↔ WheelSpeed (with a gear engaged), brake bit ↔ pressure, indicator bit ↔ blink frequency (~1–2 Hz).
- Derive the **gear** from the ratio (RPM/speed) and validate candidates against it.
- Set **event markers** and look for bytes that flip at the same instant. (See the correlation sketch below.)
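
A small sketch for correlating two irregularly sampled candidates, assuming both are available as sorted time/value arrays:

```python
import numpy as np

def correlation_on_common_grid(t1, v1, t2, v2, step_s: float = 0.05) -> float:
    """Resample two irregularly sampled candidates onto a common time grid and correlate them."""
    t0, t_end = max(t1[0], t2[0]), min(t1[-1], t2[-1])
    grid = np.arange(t0, t_end, step_s)
    a = np.interp(grid, t1, v1)
    b = np.interp(grid, t2, v2)
    return float(np.corrcoef(a, b)[0, 1])

# With a gear engaged, an RPM candidate and a wheel-speed candidate should correlate strongly;
# the ratio of the two resampled series then clusters around a few discrete values (one per gear).
```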

---

## 5) Protocol & multiplexing
- **ISO-TP**: patterns `0x10 len …` (First Frame), `0x21…` (Consecutive Frame). Rarely carries individual sensor channels.
- **Multiplexer**: one byte switches the "page" of the payload. Recognizable by its switching behaviour. (A detection heuristic is sketched below.)
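
A rough heuristic sketch for flagging IDs that look like ISO-TP transport traffic rather than cyclic sensor data (the PCI nibble values come from ISO-TP framing: 0x0 single, 0x1 first, 0x2 consecutive, 0x3 flow control):

```python
def looks_like_isotp(payloads: list[bytes], threshold: float = 0.8) -> bool:
    """Heuristic: ISO-TP frames start with a PCI byte -
    0x0L single frame (1<=L<=7), 0x1x first frame, 0x2N consecutive frame, 0x3x flow control."""
    if not payloads:
        return False

    def is_pci(p: bytes) -> bool:
        if not p:
            return False
        hi, lo = p[0] >> 4, p[0] & 0x0F
        return (hi == 0x0 and 1 <= lo <= 7) or hi in (0x1, 0x2, 0x3)

    return sum(1 for p in payloads if is_pci(p)) / len(payloads) >= threshold

# A multiplexed frame looks different: one byte (the selector) cycles through a small,
# fixed set of values while the rest of the payload "jumps" together with it.
```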

---

## 6) Statistical fingerprints
- **Unique ratio** = |unique|/n. Very small → flag/constant; moderate → analog value.
- **Entropy** per byte → data/checksum vs. status.
- **Plateaus/hysteresis**: only updated when Δ ≥ threshold. (A per-byte fingerprint sketch follows below.)
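
A minimal sketch for the per-byte fingerprint (entropy and unique ratio), assuming payloads as a list of `bytes`:

```python
import math
from collections import Counter

def byte_fingerprint(payloads: list[bytes], idx: int) -> tuple[float, float]:
    """Return (entropy in bits, unique ratio) for byte position idx."""
    vals = [p[idx] for p in payloads if len(p) > idx]
    if not vals:
        return 0.0, 0.0
    counts = Counter(vals)
    n = len(vals)
    entropy = -sum(c / n * math.log2(c / n) for c in counts.values())
    return entropy, len(counts) / n

# Rough reading: entropy near 8 bits -> counter/checksum/packed data,
# moderate entropy + moderate unique ratio -> analog value,
# near 0 -> constant or rarely changing status byte.
```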

---

## 7) Estimating scale/offset systematically
- **Scale grid**: decades plus values common in practice (0.0625, 0.1, 0.25, 0.5, 0.75, 1, 2, 5, 10 …).
- **Offset** via **interval coverage**: pick the offset that puts the most samples into [rmin, rmax].
- **Check the sign**: signed/unsigned; allow negative scales if needed. (An interval-coverage sketch follows below.)
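
A minimal sketch of the interval-coverage idea for a single scale taken from the grid; `trace_signal_fitter.py` implements its own variant of this search:

```python
import numpy as np

def best_offset_by_coverage(raw: np.ndarray, scale: float, rmin: float, rmax: float,
                            step: float = 0.5) -> tuple[float, float]:
    """For a fixed scale, pick the offset that puts the most samples into [rmin, rmax]."""
    scaled = raw * scale
    # Only offsets that can move at least one sample into the target range are worth trying.
    candidates = np.arange(rmin - scaled.max(), rmax - scaled.min() + step, step)
    best_off, best_hit = 0.0, -1.0
    for off in candidates:
        hit = np.mean((scaled + off >= rmin) & (scaled + off <= rmax))
        if hit > best_hit:
            best_off, best_hit = float(off), float(hit)
    return best_off, best_hit

# Typical use: loop over the scale grid (0.0625, 0.1, 0.25, 0.5, 1, 2, ...), keep the
# (scale, offset) pair with the highest hit ratio, then sanity-check it with the slew-rate test.
raw = np.random.randint(300, 800, size=500).astype(float)
print(best_offset_by_coverage(raw, scale=0.1, rmin=31, rmax=80))
```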

---

## 8) Workflow cookbook
1. **Split** (logs → traces).
2. **ID explorer/batch**: periodicity, change density, 8/16-bit plots.
3. **Range fit** with physical ranges *and* **slew limits** (Δ/Δt) plus **rate/jitter constraints**.
4. **Cross-checks**: test candidates against other channels (RPM↔Speed, Brake↔Pressure).
5. **Iterate**: refine ranges/constraints, review the plots, adjust your hypotheses.

---

## 9) Typical pitfalls
- Wrong endianness → teleporting values.
- Counter and checksum inside the same 16-bit word → separate them first.
- DLC < 8 → some 16-bit combinations are missing; do not pad with dummies.
- ×10/×100 scaling: Δ/Δt looks absurdly large.
- BCD/ASCII in diagnostics/odometer frames – not physical.

---

## 10) Goal: reports instead of gut feeling
Automate the tests and thresholds. Let scripts write an **analysis report**: "PASS (smooth, low jitter, rate ok)" vs. "FAIL (jitter, slope99 too high, hit ratio too small)".
That minimizes confirmation bias – and makes the results reproducible.
Reverse-Engineering CAN-Bus/README.md — new file (272 lines)
@@ -0,0 +1,272 @@
# Kettenöler – CAN Reverse-Engineering Toolkit

Tool suite (GUI + CLI) for analyzing CAN logs in the **Kettenöler format**.
Features: **split** logs (one file per CAN ID), **exploratory visualization** (8-/16-bit, LE/BE), **batch analyses** across many `.trace` files, **ranking** of plausible signals and **range fit** (linear mapping `phys = raw*scale + offset`), optionally **unsupervised** without a given range.

---

## Features (overview)

* **Unified GUI** (`main.py`) with a global header (workdir, folder structure, log selection).
* **Shared trace selection** in all trace tabs (same panel, synchronized across tabs):

  * **ID Explorer** (multi-select)
  * **Traces batch analysis** (multi-select or a whole folder)
  * **Range fit** (single-select, supervised *or* unsupervised)
* **Splitter**: logs → one `.trace` per CAN ID (`traces/…`, incl. `overview_ids.csv`).
* **Single-ID explorer**: plots of all byte channels (8-bit) and adjacent word combinations (16-bit LE/BE) plus brief statistics.
* **Batch analyzer**: metrics/plots for all `.trace` files in a folder, global ranking.
* **Range/unsupervised fit**:

  * *Supervised*: finds `scale` & `offset` for a target range `[rmin, rmax]` (offset via interval coverage, scale from a plausible set).
  * *Unsupervised*: identifies "quiet" physical candidates without a range (smoothness/variance/rate/span).
* **Output hygiene**: results always live under `analyze_out/<timestamp>_<tool>/…`; optional timestamp subfolders prevent overwriting.
* **Project file** (`Projekt.json`): stores workdir, subfolders, log selection, the active traces folder, etc.
* **"Latest split" button**: jumps to the most recent subfolder of `traces/`.

---

## Repository components

* **GUI**

  * `main.py` – central frontend with tabs (multi-log analysis, ID Explorer, traces batch analysis, range fit).
* **CLI tools**

  * `can_split_by_id.py` – splits logs by CAN ID → `.trace`.
  * `id_signal_explorer.py` – visualizes/analyzes one `.trace` (8-bit, 16-bit LE/BE) + `summary_stats.csv`.
  * `trace_batch_analyzer.py` – batch analysis for many `.trace` files + global ranking.
  * `trace_signal_fitter.py` – **range fit** (scale/offset) **or** **unsupervised fit** (without a range).

> Optional/legacy: `can_universal_signal_finder.py` – the original multi-log analyzer (ranking at the raw-data level).

---

## Installation

* **Python** ≥ 3.10
* Dependencies: `pandas`, `numpy`, `matplotlib`
* Setup:

```bash
python3 -m venv .venv
source .venv/bin/activate  # Windows: .venv\Scripts\activate
pip install -r requirements.txt
```

---

## Log format (Kettenöler)

One line per frame:

```
<timestamp_ms> <TX|RX> 0x<ID_HEX> <DLC> <byte0> <byte1> ... <byte7>
```

Example:

```
123456 RX 0x208 8 11 22 33 44 55 66 77 88
```
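
For reference, such a line can be parsed in Python as follows; the regular expression mirrors the `LOG_PATTERN` used by the CLI tools in this repo:

```python
import re

# Same structure as the LOG_PATTERN used by the CLI tools in this repository.
LOG_PATTERN = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+(\d+)\s+((?:[0-9A-Fa-f]{2}\s+)+)")

line = "123456 RX 0x208 8 11 22 33 44 55 66 77 88 "
m = LOG_PATTERN.match(line)
if m:
    ts_ms = int(m.group(1))                          # 123456
    direction = m.group(2)                           # "RX"
    can_id = int(m.group(3), 16)                     # 0x208
    dlc = int(m.group(4))                            # 8
    data = [int(x, 16) for x in m.group(5).split()]  # [0x11, 0x22, ..., 0x88]
    print(ts_ms, direction, hex(can_id), dlc, data)
```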

---

## Project/folder structure

A **workdir** bundles everything that belongs to one vehicle/project:

```
<Workdir>/
  Projekt.json   # GUI settings
  logs/          # input logs
  traces/        # per-ID .trace (from the split)
  analyze_out/   # results; each run gets its own timestamp subfolder
```

**Naming conventions**

* Split results: `traces/<timestamp?>/0x<ID>_<source log>.trace`
* Outputs: `analyze_out/<YYYYMMDD_HHMMSS>_<tool>/…`

---

## Models folder & Git

If you work per model, e.g.:

```
models/
  Triumph 2023/
    logs/
    traces/
    analyze_out/
    Projekt.json
```

put the following **`.gitignore`** into `models/` so that `traces/` and `analyze_out/` are ignored **in every model subfolder** – `logs/` and the `.json` files stay versioned:

```gitignore
*/traces/
*/traces/**
*/analyze_out/
*/analyze_out/**

traces/
traces/**
analyze_out/
analyze_out/**

# optional: typos
*/analyze.out/
*/analyze.out/**
analyze.out/
analyze.out/**
```

Add a `.gitkeep` to otherwise empty folders such as `logs/` if needed.

---

## Using the GUI

```bash
python3 main.py
```

### Global header (always at the top)

* Choose the **workdir**, **scan logs** → list of all log files found (multi-select).
* Configure the subfolders: `logs`, `traces`, `analyze_out` (all of them **side by side** in the workdir).
* **Save/load the project** (`Projekt.json`).
* When the workdir changes or a project is loaded, the GUI automatically sets the **active traces folder** to `traces/` or to its **most recent** subfolder.

### Unified trace panel (in all trace tabs)

* Left: list of `.trace` files
* Right: **choose traces folder**, **Workdir/traces**, **latest split**, **refresh**, (optionally **all**, **none**)
* Changes to the folder/list take effect **immediately in all tabs**.

### Tab: Multi-log analysis

* Ranking straight from the logs (include/exclude IDs, optional range with `scale/offset`).
* Output: `analyze_out/<ts>_multilog/…`
* Optional: "each log file separately" → one subfolder per log.

### Tab: ID Explorer

* **Split** (from the header log selection): logs → `.trace` into `traces[/<ts>]`, plus `overview_ids.csv`.
  Afterwards the new traces path is **activated automatically**.
* **Single-ID analysis** (multi-select):

  * Plots: Byte\[0..7] (8-bit) + LE/BE for the pairs (0-1 … 6-7)
  * `summary_stats.csv` per trace
  * Output: `analyze_out/<ts>_id_explore/…`

### Tab: Traces batch analysis

* Uses the shared trace list.
* **Without a selection** → the whole folder; **with a selection** → a subset folder is built (hard links/copies) and only that is analyzed.
* Parameters: `--rx-only`, `scale`, `offset`, `range-min/max`, `top`, `--plots`.
* Output:

  * per trace: `*_combostats.csv` (+ plots),
  * global: `summary_top_combinations.csv`
  * under `analyze_out/<ts>_trace_batch/…`

### Tab: Range fit (single-select)

* **Two modes**:

  1. **Supervised** (range min/max set): finds `scale` & `offset`, maximizes the **hit ratio** within the target range.
     Output: `<trace>_encoding_candidates.csv` + phys plots (top N).
  2. **Unsupervised** (range empty): scores candidates by **smoothness**, **span**, **variance**, **rate**, **uniqueness**.
     Output: `<trace>_unsupervised_candidates.csv` + raw plots (top N).
* Options: `RX only`, `allow negative scale` (supervised only), `min. hit ratio`, `min. smoothness`, `plots top N`, `output label`.
* Output: `analyze_out/<ts>_rangefit/…`

---

## CLI quickstart

### 1) Split

```bash
python3 can_split_by_id.py logs/run1.log logs/run2.log \
  --outdir <Workdir>/traces/20250827_1200 \
  --rx-only
```

### 2) Single-ID explorer

```bash
python3 id_signal_explorer.py <Workdir>/traces/20250827_1200/0x208_run1.trace \
  --outdir <Workdir>/analyze_out/20250827_1210_id_explore
```

### 3) Batch analysis

```bash
python3 trace_batch_analyzer.py \
  --traces-dir <Workdir>/traces/20250827_1200 \
  --outdir <Workdir>/analyze_out/20250827_1220_trace_batch \
  --rx-only --plots --top 8 \
  --range-min 31 --range-max 80
```

### 4) Range/unsupervised fit (one `.trace`)

```bash
# Supervised (e.g. coolant 31..80 °C)
python3 trace_signal_fitter.py <trace> \
  --rmin 31 --rmax 80 \
  --outdir <Workdir>/analyze_out/20250827_1230_rangefit \
  --plots-top 8 --min-hit 0.5 --allow-neg-scale

# Unsupervised (no range)
python3 trace_signal_fitter.py <trace> \
  --outdir <Workdir>/analyze_out/20250827_1240_unsupervised \
  --plots-top 8 --min-smooth 0.2
```

---

## Algorithms & heuristics

* **Combinations**:

  * 8-bit: `D0..D7`
  * 16-bit (adjacent): LE & BE for the pairs `(0,1)…(6,7)`
    *(32-bit & bit-packed fields: on the roadmap)*

* **Prefilter** (for "quiet" physical quantities):
  minimum number of samples, not (nearly) constant, no excessive jumps (p95 of |Δ| relative to the span).

* **Range fit**:
  For each candidate `raw`, a set of plausible **scales** is searched; for each `scale` the **offset** is determined via **interval coverage** (`rmin ≤ scale*raw_i + offset ≤ rmax`). Ranking: hit ratio ↓, then smoothness (p95 phys) ↑, rate ↓, n ↓.

* **Unsupervised**:
  **Smoothness** = `1 − clamp(p95(|Δ|)/span, 0..1)`; in addition **span**, **var**, **rate**, **uniq\_ratio**. Ranking is based on these metrics. (A minimal sketch of the smoothness score follows below.)
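
A minimal sketch of this smoothness score (the fitter may differ in details):

```python
import numpy as np

def smoothness(vals) -> float:
    """1 - clamp(p95(|Δ|)/span, 0..1): near 1 for slowly drifting signals, near 0 for jumpy ones."""
    vals = np.asarray(vals, dtype=float)
    span = vals.max() - vals.min()
    if span <= 0 or len(vals) < 3:
        return 0.0
    p95 = np.percentile(np.abs(np.diff(vals)), 95)
    return float(1.0 - min(max(p95 / span, 0.0), 1.0))

print(smoothness(np.cumsum(np.random.normal(0, 1, 1000))))   # random walk -> close to 1
print(smoothness(np.random.randint(0, 256, 1000)))           # counter-/checksum-like -> close to 0
```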

---

## Tips & troubleshooting

* **No candidates (range fit)**: lower `--min-hit`, try `--allow-neg-scale`, double-check the range, use a longer/more varied log.
* **Everything gets filtered out (unsupervised)**: lower `--min-smooth`; possibly enable `--rx-only`.
* **Empty/odd plots**: DLC < 8 → some 16-bit combinations are missing; very rare frames → low rate.
* **Keep the folders clean**: leave the timestamp subfolders enabled; every run gets its own artifacts.

---

## Roadmap

* 32-bit combinations, bit-packed fields.
* Histograms, autocorrelation, outlier detectors.
* Predefined signal profiles (e.g. *WheelSpeed*, *CoolantTemp*).

---

## License / liability

For analysis/reverse-engineering purposes only. Use at your own risk.
Reverse-Engineering CAN-Bus/can_split_by_id.py — new file (95 lines)
@@ -0,0 +1,95 @@

```python
#!/usr/bin/env python3
import re
import sys
import argparse
from pathlib import Path
from collections import defaultdict

LOG_PATTERN = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+(\d+)\s+((?:[0-9A-Fa-f]{2}\s+)+)")

def main():
    ap = argparse.ArgumentParser(description="Split Kettenöler CAN log(s) into per-ID .trace files and build an overview")
    ap.add_argument("logs", nargs="+", help="Input log file(s)")
    ap.add_argument("--outdir", default="traces", help="Output directory for per-ID trace files")
    ap.add_argument("--rx-only", action="store_true", help="Keep only RX frames in traces and stats")
    args = ap.parse_args()

    outdir = Path(args.outdir)
    outdir.mkdir(parents=True, exist_ok=True)

    writers = {}
    stats = defaultdict(lambda: {
        "id_hex": None, "rx": 0, "tx": 0, "count": 0, "first_ts": None, "last_ts": None,
        "first_file": None, "dlc_set": set()
    })

    def get_writer(can_id_hex: str, src_name: str):
        # filename pattern: 0xID_<srcfile>.trace
        safe_src = Path(src_name).name
        fn = outdir / f"{can_id_hex}_{safe_src}.trace"
        if fn not in writers:
            writers[fn] = fn.open("a", encoding="utf-8")
        return writers[fn]

    total = 0
    written = 0
    for p in args.logs:
        with open(p, "r", errors="ignore") as f:
            for line in f:
                m = LOG_PATTERN.match(line)
                if not m:
                    continue
                ts = int(m.group(1))
                dr = m.group(2)
                cid_hex = m.group(3).upper()
                dlc = int(m.group(4))
                data = m.group(5)

                total += 1
                if args.rx_only and dr != "RX":
                    continue

                key = int(cid_hex, 16)
                s = stats[key]
                s["id_hex"] = f"0x{cid_hex}"
                s["count"] += 1
                s["rx"] += 1 if dr == "RX" else 0
                s["tx"] += 1 if dr == "TX" else 0
                s["first_ts"] = ts if s["first_ts"] is None else min(s["first_ts"], ts)
                s["last_ts"] = ts if s["last_ts"] is None else max(s["last_ts"], ts)
                s["first_file"] = s["first_file"] or Path(p).name
                s["dlc_set"].add(dlc)

                w = get_writer(f"0x{cid_hex}", Path(p).name)
                w.write(line)
                written += 1

    for fh in writers.values():
        fh.close()

    # build overview CSV
    import pandas as pd
    rows = []
    for cid, s in stats.items():
        dur_ms = 0 if s["first_ts"] is None else (s["last_ts"] - s["first_ts"])
        rate_hz = (s["rx"] if args.rx_only else s["count"]) / (dur_ms/1000.0) if dur_ms > 0 else 0.0
        rows.append({
            "id_dec": cid,
            "id_hex": s["id_hex"],
            "count": s["count"],
            "rx": s["rx"],
            "tx": s["tx"],
            "duration_s": round(dur_ms/1000.0, 6),
            "rate_hz_est": round(rate_hz, 6),
            "first_file": s["first_file"],
            "dlc_variants": ",".join(sorted(str(x) for x in s["dlc_set"])),
        })
    df = pd.DataFrame(rows).sort_values(["rate_hz_est","count"], ascending=[False, False])
    csv_path = outdir / "overview_ids.csv"
    df.to_csv(csv_path, index=False)

    print(f"Done. Parsed {total} lines, wrote {written} lines into per-ID traces at {outdir}.")
    print(f"Overview: {csv_path}")

if __name__ == "__main__":
    main()
```
Reverse-Engineering CAN-Bus/can_universal_signal_finder.py — new file (272 lines)
@@ -0,0 +1,272 @@

```python
#!/usr/bin/env python3
import re
import sys
import argparse
from pathlib import Path
from typing import List, Tuple, Optional, Dict
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

LOG_PATTERN = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+\d+\s+((?:[0-9A-Fa-f]{2}\s+)+)")

def parse_log(path: Path) -> pd.DataFrame:
    rows = []
    with open(path, "r", errors="ignore") as f:
        for line in f:
            m = LOG_PATTERN.match(line)
            if not m:
                continue
            ts = int(m.group(1))
            direction = m.group(2)
            can_id = int(m.group(3), 16)
            data = [int(x, 16) for x in m.group(4).split() if x.strip()]
            rows.append((path.name, ts, direction, can_id, data))
    df = pd.DataFrame(rows, columns=["file","ts","dir","id","data"])
    if df.empty:
        return df
    # time base per file → seconds from file start
    df["time_s"] = df.groupby("file")["ts"].transform(lambda s: (s - s.min())/1000.0)
    return df

def le16(data: List[int], offset: int) -> Optional[int]:
    if len(data) < offset+2:
        return None
    return data[offset] | (data[offset+1] << 8)

def be16(data: List[int], offset: int) -> Optional[int]:
    if len(data) < offset+2:
        return None
    return (data[offset] << 8) | data[offset+1]

def phys(val: float, scale: float, offs: float) -> float:
    return val*scale + offs

def decode_series(arr_data: List[List[int]], endian: str, offset: int) -> List[Optional[int]]:
    out = []
    for d in arr_data:
        v = le16(d, offset) if endian == "le" else be16(d, offset)
        out.append(v)
    return out

def score_values(vals: np.ndarray) -> Dict[str, float]:
    if len(vals) < 3:
        return {"variance":0.0, "changes":0, "unique_ratio":0.0}
    var = float(np.var(vals))
    changes = int(np.count_nonzero(np.diff(vals)))
    unique_ratio = len(set(vals.tolist()))/len(vals)
    return {"variance":var, "changes":changes, "unique_ratio":unique_ratio}

def analyze(df: pd.DataFrame, include_ids: Optional[List[int]], exclude_ids: Optional[List[int]]):
    # Group by ID and try each 16-bit word
    combos = []
    ids = sorted(df["id"].unique().tolist())
    if include_ids:
        ids = [i for i in ids if i in include_ids]
    if exclude_ids:
        ids = [i for i in ids if i not in exclude_ids]

    for cid in ids:
        grp = df[df["id"]==cid]
        for endian in ("le","be"):
            for off in (0,2,4,6):
                dec = decode_series(grp["data"].tolist(), endian, off)
                # filter Nones
                pairs = [(t, v) for t, v in zip(grp["time_s"].tolist(), dec) if v is not None]
                if len(pairs) < 4:
                    continue
                times = np.array([p[0] for p in pairs], dtype=float)
                vals = np.array([p[1] for p in pairs], dtype=float)
                sc = score_values(vals)
                combos.append({
                    "id": cid,
                    "endian": endian,
                    "offset": off,
                    "n": len(vals),
                    "variance": sc["variance"],
                    "changes": sc["changes"],
                    "unique_ratio": sc["unique_ratio"],
                    "rate_hz": float(len(vals)) / (times.max()-times.min()+1e-9)
                })
    cand_df = pd.DataFrame(combos)
    return cand_df

def range_filter_stats(vals: np.ndarray, scale: float, offs: float, rmin: Optional[float], rmax: Optional[float]) -> Dict[str, float]:
    if vals.size == 0:
        return {"hit_ratio":0.0, "min_phys":np.nan, "max_phys":np.nan}
    phys_vals = vals*scale + offs
    if rmin is None and rmax is None:
        return {"hit_ratio":1.0, "min_phys":float(np.min(phys_vals)), "max_phys":float(np.max(phys_vals))}
    mask = np.ones_like(phys_vals, dtype=bool)
    if rmin is not None:
        mask &= (phys_vals >= rmin)
    if rmax is not None:
        mask &= (phys_vals <= rmax)
    hit_ratio = float(np.count_nonzero(mask))/len(phys_vals)
    return {"hit_ratio":hit_ratio, "min_phys":float(np.min(phys_vals)), "max_phys":float(np.max(phys_vals))}

def export_candidate_timeseries(df: pd.DataFrame, cid: int, endian: str, off: int, scale: float, offs: float, outdir: Path, basename_hint: str):
    sub = df[df["id"]==cid].copy()
    if sub.empty:
        return False, None
    dec = decode_series(sub["data"].tolist(), endian, off)
    sub["raw16"] = dec
    sub = sub.dropna(subset=["raw16"]).copy()
    if sub.empty:
        return False, None

    sub["phys"] = sub["raw16"].astype(float)*scale + offs
    # Save CSV
    csv_path = outdir / f"{basename_hint}_0x{cid:X}_{endian}_off{off}.csv"
    sub[["file","time_s","id","raw16","phys"]].to_csv(csv_path, index=False)

    # Plot (single-plot image)
    plt.figure(figsize=(10,5))
    plt.plot(sub["time_s"].to_numpy(), sub["phys"].to_numpy(), marker="o")
    plt.xlabel("Zeit (s)")
    plt.ylabel("Wert (phys)")
    plt.title(f"{basename_hint} 0x{cid:X} ({endian} @ +{off})")
    plt.grid(True)
    plt.tight_layout()
    img_path = outdir / f"{basename_hint}_0x{cid:X}_{endian}_off{off}.png"
    plt.savefig(img_path, dpi=150)
    plt.close()
    return True, (csv_path, img_path)

def main():
    ap = argparse.ArgumentParser(description="Universal CAN signal finder (WheelSpeed etc.) for Kettenöler logs")
    ap.add_argument("logs", nargs="+", help="Log-Dateien (gleiche Struktur wie Kettenöler)")
    ap.add_argument("--outdir", default="analyze_out", help="Ausgabeverzeichnis")
    ap.add_argument("--top", type=int, default=20, help="Top-N Kandidaten global (nach Variance) exportieren, falls Range-Filter nichts findet")
    ap.add_argument("--include-ids", default="", help="Nur diese IDs (kommagetrennt, z.B. 0x208,0x209)")
    ap.add_argument("--exclude-ids", default="", help="Diese IDs ausschließen (kommagetrennt)")
    ap.add_argument("--scale", type=float, default=1.0, help="Skalierung: phys = raw*scale + offset")
    ap.add_argument("--offset", type=float, default=0.0, help="Offset: phys = raw*scale + offset")
    ap.add_argument("--range-min", type=float, default=None, help="Min physischer Zielbereich (nach Scale/Offset)")
    ap.add_argument("--range-max", type=float, default=None, help="Max physischer Zielbereich (nach Scale/Offset)")
    ap.add_argument("--range-hit-ratio", type=float, default=0.6, help="Mindestanteil der Werte im Zielbereich [0..1]")
    ap.add_argument("--per-id-limit", type=int, default=2, help="Max Anzahl Dekodierungen pro ID (z.B. beste zwei Offsets/Endianness)")

    args = ap.parse_args()

    # Parse include/exclude lists
    def parse_ids(s: str):
        if not s.strip():
            return None
        out = []
        for tok in s.split(","):
            tok = tok.strip()
            if not tok:
                continue
            if tok.lower().startswith("0x"):
                out.append(int(tok,16))
            else:
                out.append(int(tok))
        return out

    include_ids = parse_ids(args.include_ids)
    exclude_ids = parse_ids(args.exclude_ids)

    # Load logs
    frames = []
    for p in args.logs:
        df = parse_log(Path(p))
        if df.empty:
            print(f"Warn: {p} ergab keine Daten oder passte nicht zum Muster.", file=sys.stderr)
        else:
            frames.append(df)
    if not frames:
        print("Keine Daten.", file=sys.stderr)
        sys.exit(2)

    df_all = pd.concat(frames, ignore_index=True)
    outdir = Path(args.outdir)
    outdir.mkdir(parents=True, exist_ok=True)

    # Analyze all combos
    cand = analyze(df_all, include_ids, exclude_ids)
    if cand.empty:
        print("Keine dekodierbaren 16-bit Felder gefunden.", file=sys.stderr)
        sys.exit(3)

    # Range filter pass
    cand = cand.sort_values(["variance","changes","unique_ratio"], ascending=[False, False, False]).reset_index(drop=True)

    # For each candidate row, compute range-hit stats
    hits = []
    for _, row in cand.iterrows():
        cid = int(row["id"])
        endian = row["endian"]
        off = int(row["offset"])

        sub = df_all[df_all["id"]==cid]
        dec = decode_series(sub["data"].tolist(), endian, off)
        vals = np.array([v for v in dec if v is not None], dtype=float)
        if vals.size == 0:
            continue
        rng = range_filter_stats(vals, args.scale, args.offset, args.range_min, args.range_max)
        hits.append((rng["hit_ratio"], rng["min_phys"], rng["max_phys"]))
    if hits:
        cand[["hit_ratio","min_phys","max_phys"]] = pd.DataFrame(hits, index=cand.index)
    else:
        cand["hit_ratio"] = 0.0
        cand["min_phys"] = np.nan
        cand["max_phys"] = np.nan

    # Export global candidate table
    cand_out = outdir / "candidates_global.csv"
    cand.to_csv(cand_out, index=False)
    print(f"Globales Kandidaten-CSV: {cand_out}")

    # Decide which candidates to export as timeseries
    selected = []
    if args.range_min is not None or args.range_max is not None:
        # choose those meeting ratio threshold; group by ID and take best few per ID
        ok = cand[cand["hit_ratio"] >= args.range_hit_ratio].copy()
        if ok.empty:
            print("Range-Filter hat keine Kandidaten gefunden; falle zurück auf Top-N nach Varianz.", file=sys.stderr)
        else:
            # per ID, take best by hit_ratio then variance
            for cid, grp in ok.groupby("id"):
                grp = grp.sort_values(["hit_ratio","variance","changes","unique_ratio"], ascending=[False, False, False, False])
                selected.extend(grp.head(args.per_id_limit).to_dict("records"))
    if not selected:
        # fallback → global top-N by variance (limit per ID)
        per_id_count = {}
        for _, row in cand.iterrows():
            cid = int(row["id"]); per_id_count.setdefault(cid,0)
            if len(selected) >= args.top:
                break
            if per_id_count[cid] >= args.per_id_limit:
                continue
            selected.append(row.to_dict())
            per_id_count[cid] += 1

    # Export per-candidate CSVs and plots
    exp_index = []
    base_hint = "decoded"
    for row in selected:
        cid = int(row["id"])
        endian = row["endian"]
        off = int(row["offset"])
        ok, pair = export_candidate_timeseries(df_all, cid, endian, off, args.scale, args.offset, outdir, base_hint)
        if ok and pair:
            exp_index.append({
                "id": cid,
                "endian": endian,
                "offset": off,
                "csv": str(pair[0]),
                "plot": str(pair[1])
            })

    idx_df = pd.DataFrame(exp_index)
    idx_path = outdir / "exports_index.csv"
    idx_df.to_csv(idx_path, index=False)
    print(f"Export-Index: {idx_path}")

    print("Fertig. Tipp: Mit --range-min/--range-max und --scale/--offset kannst du auf plausible physikalische Bereiche filtern.")
    print("Beispiel: --scale 0.01 --range-min 0 --range-max 250 (wenn raw≈cm/s → km/h)")

if __name__ == "__main__":
    main()
```
Reverse-Engineering CAN-Bus/id_signal_explorer.py — new file (142 lines)
@@ -0,0 +1,142 @@

```python
#!/usr/bin/env python3
import re
import sys
import argparse
from pathlib import Path
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

LOG_PATTERN = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+\d+\s+((?:[0-9A-Fa-f]{2}\s+)+)")

def parse_trace(path: Path) -> pd.DataFrame:
    rows = []
    with open(path, "r", errors="ignore") as f:
        for line in f:
            m = LOG_PATTERN.match(line)
            if not m:
                continue
            ts = int(m.group(1))
            direction = m.group(2)
            can_id = int(m.group(3), 16)
            data = [int(x, 16) for x in m.group(4).split() if x.strip()]
            rows.append((ts, direction, can_id, data))
    df = pd.DataFrame(rows, columns=["ts","dir","id","data"])
    if df.empty:
        return df
    df["time_s"] = (df["ts"] - df["ts"].min())/1000.0
    return df

def be16(b):
    return (b[0]<<8) | b[1]

def le16(b):
    return b[0] | (b[1]<<8)

def main():
    ap = argparse.ArgumentParser(description="Per-ID explorer: generate plots for 8-bit and 16-bit combinations")
    ap.add_argument("trace", help="Single-ID .trace file (from can_split_by_id.py)")
    ap.add_argument("--outdir", default=None, help="Output directory; default: <trace>_explore")
    ap.add_argument("--prefix", default="viz", help="File prefix for exports")
    ap.add_argument("--rx-only", action="store_true", help="Use only RX frames")
    args = ap.parse_args()

    trace = Path(args.trace)
    df = parse_trace(trace)
    if df.empty:
        print("No data in trace.", file=sys.stderr)
        sys.exit(1)

    if args.rx_only:
        df = df[df["dir"]=="RX"].copy()
        if df.empty:
            print("No RX frames.", file=sys.stderr)
            sys.exit(2)

    outdir = Path(args.outdir) if args.outdir else trace.with_suffix("").parent / (trace.stem + "_explore")
    outdir.mkdir(parents=True, exist_ok=True)

    # --- 8-bit channels ---
    for idx in range(8):
        vals = [d[idx] if len(d)>idx else None for d in df["data"].tolist()]
        times = [t for t, v in zip(df["time_s"].tolist(), vals) if v is not None]
        series = [v for v in vals if v is not None]
        if not series:
            continue
        plt.figure(figsize=(10,4))
        plt.plot(times, series, marker=".", linestyle="-")
        plt.xlabel("Zeit (s)")
        plt.ylabel(f"Byte[{idx}] (8-bit)")
        plt.title(f"{trace.name} – 8-bit Byte {idx}")
        plt.grid(True)
        fn = outdir / f"{args.prefix}_byte{idx}.png"
        plt.tight_layout()
        plt.savefig(fn, dpi=150)
        plt.close()

    # --- 16-bit combos ---
    pairs = [(i,i+1) for i in range(7)]
    # LE
    for i,j in pairs:
        times, series = [], []
        for t, d in zip(df["time_s"].tolist(), df["data"].tolist()):
            if len(d) > j:
                series.append(le16([d[i], d[j]])); times.append(t)
        if not series:
            continue
        plt.figure(figsize=(10,4))
        plt.plot(times, series, marker=".", linestyle="-")
        plt.xlabel("Zeit (s)")
        plt.ylabel(f"LE16 @{i}-{j}")
        plt.title(f"{trace.name} – LE16 Bytes {i}-{j}")
        plt.grid(True)
        fn = outdir / f"{args.prefix}_le16_{i}-{j}.png"
        plt.tight_layout()
        plt.savefig(fn, dpi=150)
        plt.close()

    # BE
    for i,j in pairs:
        times, series = [], []
        for t, d in zip(df["time_s"].tolist(), df["data"].tolist()):
            if len(d) > j:
                series.append(be16([d[i], d[j]])); times.append(t)
        if not series:
            continue
        plt.figure(figsize=(10,4))
        plt.plot(times, series, marker=".", linestyle="-")
        plt.xlabel("Zeit (s)")
        plt.ylabel(f"BE16 @{i}-{j}")
        plt.title(f"{trace.name} – BE16 Bytes {i}-{j}")
        plt.grid(True)
        fn = outdir / f"{args.prefix}_be16_{i}-{j}.png"
        plt.tight_layout()
        plt.savefig(fn, dpi=150)
        plt.close()

    # Summary stats
    stats = []
    # 8-bit stats
    for idx in range(8):
        vals = [d[idx] if len(d)>idx else None for d in df["data"].tolist()]
        vals = [v for v in vals if v is not None]
        if not vals:
            continue
        arr = np.array(vals, dtype=float)
        stats.append({"type":"byte8", "slot":idx, "min":float(arr.min()), "max":float(arr.max()), "var":float(arr.var())})
    # 16-bit stats
    for i,j in pairs:
        vals = [le16([d[i],d[j]]) for d in df["data"].tolist() if len(d)>j]
        if vals:
            arr = np.array(vals, dtype=float)
            stats.append({"type":"le16", "slot":f"{i}-{j}", "min":float(arr.min()), "max":float(arr.max()), "var":float(arr.var())})
        vals = [be16([d[i],d[j]]) for d in df["data"].tolist() if len(d)>j]
        if vals:
            arr = np.array(vals, dtype=float)
            stats.append({"type":"be16", "slot":f"{i}-{j}", "min":float(arr.min()), "max":float(arr.max()), "var":float(arr.var())})

    pd.DataFrame(stats).to_csv(outdir / "summary_stats.csv", index=False)
    print(f"Exported 8-bit & 16-bit plots and summary_stats.csv to {outdir}")

if __name__ == "__main__":
    main()
```
Reverse-Engineering CAN-Bus/main.py — new file (973 lines)
@@ -0,0 +1,973 @@

```python
#!/usr/bin/env python3
import json
import os
import sys
import threading
import subprocess
import shutil
from pathlib import Path
from datetime import datetime
import tkinter as tk
from tkinter import ttk, filedialog, messagebox
import tempfile

SCRIPT_NAME = "can_universal_signal_finder.py"
SPLIT_SCRIPT = "can_split_by_id.py"
EXPLORE_SCRIPT = "id_signal_explorer.py"
TRACE_BATCH = "trace_batch_analyzer.py"
RANGE_FITTER = "trace_signal_fitter.py"

LOG_PATTERNS = ("*.log", "*.txt")
TRACE_PATTERNS = ("*.trace",)


# ---------------- helpers ----------------
def now_stamp():
    return datetime.now().strftime("%Y%m%d_%H%M%S")


def find_logs(root: Path, rel_logs_dir: str):
    base = (root / rel_logs_dir) if rel_logs_dir else root
    found = []
    if not base.exists():
        return found
    for pat in LOG_PATTERNS:
        found += [str(p) for p in base.glob(pat)]
        found += [str(p) for p in base.rglob(pat)]  # include subdirs
    return sorted(set(found))


def find_traces(base: Path):
    """List .trace files in the base folder and one level below."""
    files = []
    if not base.exists():
        return files
    for pat in TRACE_PATTERNS:
        files += [str(p) for p in base.glob(pat)]
        files += [str(p) for p in base.glob(f"*/*{pat[1:]}")]  # one level deeper
    return sorted(set(files))


def ensure_dir(p: Path):
    p.mkdir(parents=True, exist_ok=True)
    return p


def latest_subdir(base: Path) -> Path:
    """Most recent subfolder of base, otherwise base itself."""
    if not base.exists():
        return base
    subs = [p for p in base.iterdir() if p.is_dir()]
    if not subs:
        return base
    return max(subs, key=lambda p: p.stat().st_mtime)


# ---------------- shared app state ----------------
class AppState:
    def __init__(self):
        # core paths
        self.workdir = tk.StringVar(value="")
        self.logs_dir = tk.StringVar(value="logs")
        self.traces_dir = tk.StringVar(value="traces")
        self.analyze_out_base = tk.StringVar(value="analyze_out")

        # discovered logs
        self.available_logs = []  # absolute paths
        self.selected_log_indices = []  # indices in header listbox

        # project defaults
        self.timestamp_runs = tk.BooleanVar(value=True)

        # shared traces directory + file list
        self.traces_current_dir = tk.StringVar(value="")  # absolute path of the traces folder currently shown
        self.traces_files = []  # list of .trace files in current_dir (incl. one level deeper)
        self._trace_observers = []  # callbacks that refresh the list

        # hook: rescan whenever the path changes
        self.traces_current_dir.trace_add("write", self._on_traces_dir_changed)

    # --- path helpers ---
    def workdir_path(self) -> Path:
        wd = self.workdir.get().strip() or "."
        return Path(wd)

    def logs_base_path(self) -> Path:
        return self.workdir_path() / (self.logs_dir.get().strip() or "logs")

    def traces_base_path(self) -> Path:
        return self.workdir_path() / (self.traces_dir.get().strip() or "traces")

    def analyze_out_root(self) -> Path:
        return self.workdir_path() / (self.analyze_out_base.get().strip() or "analyze_out")

    # --- traces state ---
    def add_trace_observer(self, cb):
        if cb not in self._trace_observers:
            self._trace_observers.append(cb)

    def _notify_trace_observers(self):
        for cb in list(self._trace_observers):
            try:
                cb(self.traces_files)
            except Exception:
                pass

    def _on_traces_dir_changed(self, *_):
        base = Path(self.traces_current_dir.get().strip() or str(self.traces_base_path()))
        self.traces_files = find_traces(base)
        self._notify_trace_observers()

    def set_traces_dir(self, path: str):
        self.traces_current_dir.set(path)  # automatically triggers scan + notify

    def refresh_traces(self):
        # retrigger write to force refresh
        self._on_traces_dir_changed()

    def set_traces_to_default_or_latest(self):
        base = self.traces_base_path()
        target = latest_subdir(base)
        self.set_traces_dir(str(target))


# ---------------- header (workdir + logs selection) ----------------
class Header(ttk.Frame):
    def __init__(self, master, state: AppState):
        super().__init__(master, padding=8)
        self.state = state
        self._build_ui()

    def _build_ui(self):
        self.columnconfigure(1, weight=1)
        self.columnconfigure(3, weight=1)
        # row 0: workdir + scan
        ttk.Label(self, text="Workdir:").grid(row=0, column=0, sticky="w")
        self.ent_workdir = ttk.Entry(self, textvariable=self.state.workdir)
        self.ent_workdir.grid(row=0, column=1, sticky="ew", padx=6)
        ttk.Button(self, text="Wählen…", command=self.pick_workdir).grid(row=0, column=2, padx=5)
        ttk.Button(self, text="Logs scannen", command=self.scan_logs).grid(row=0, column=3, padx=5)

        # row 1: subfolders + timestamp checkbox
        ttk.Label(self, text="Logs-Unterordner:").grid(row=1, column=0, sticky="w")
        ttk.Entry(self, textvariable=self.state.logs_dir, width=24).grid(row=1, column=1, sticky="w", padx=6)

        ttk.Label(self, text="Traces-Unterordner:").grid(row=1, column=2, sticky="w")
        ttk.Entry(self, textvariable=self.state.traces_dir, width=24).grid(row=1, column=3, sticky="w", padx=6)

        ttk.Label(self, text="Analyze-Output:").grid(row=2, column=0, sticky="w")
        ttk.Entry(self, textvariable=self.state.analyze_out_base, width=24).grid(row=2, column=1, sticky="w", padx=6)
        ttk.Checkbutton(self, text="Zeitstempel-Unterordner pro Run", variable=self.state.timestamp_runs).grid(row=2, column=2, columnspan=2, sticky="w")

        # row 3: logs list
        frm = ttk.LabelFrame(self, text="Gefundene Logdateien (Mehrfachauswahl möglich)")
        frm.grid(row=3, column=0, columnspan=4, sticky="nsew", pady=(8,0))
        self.rowconfigure(3, weight=1)
        frm.columnconfigure(0, weight=1)
        frm.rowconfigure(0, weight=1)

        self.lst_logs = tk.Listbox(frm, height=6, selectmode=tk.EXTENDED)
        self.lst_logs.grid(row=0, column=0, sticky="nsew", padx=(8,4), pady=8)

        btns = ttk.Frame(frm)
        btns.grid(row=0, column=1, sticky="ns", padx=(4,8), pady=8)
        ttk.Button(btns, text="Alle wählen", command=self.select_all).pack(fill="x", pady=2)
        ttk.Button(btns, text="Keine", command=self.select_none).pack(fill="x", pady=2)
        ttk.Separator(btns, orient="horizontal").pack(fill="x", pady=6)
        ttk.Button(btns, text="Manuell hinzufügen…", command=self.add_logs_manual).pack(fill="x", pady=2)
        ttk.Button(btns, text="Entfernen", command=self.remove_selected_logs).pack(fill="x", pady=2)
        ttk.Button(btns, text="Liste leeren", command=self.clear_logs).pack(fill="x", pady=2)
        ttk.Separator(btns, orient="horizontal").pack(fill="x", pady=6)
        ttk.Button(btns, text="Projekt speichern…", command=self.save_project).pack(fill="x", pady=2)
        ttk.Button(btns, text="Projekt laden…", command=self.load_project).pack(fill="x", pady=2)

    # ---- actions ----
    def pick_workdir(self):
        d = filedialog.askdirectory(title="Workdir auswählen")
        if d:
            self.state.workdir.set(d)
            self.scan_logs()
            # also set traces to default/latest automatically
            self.state.set_traces_to_default_or_latest()

    def scan_logs(self):
        wd = self.state.workdir_path()
        logs_dir = self.state.logs_dir.get().strip()
        found = find_logs(wd, logs_dir)
        self.state.available_logs = found
        self.lst_logs.delete(0, tk.END)
        for p in found:
            self.lst_logs.insert(tk.END, p)
        # default-select all
        self.lst_logs.select_set(0, tk.END)
        self.state.selected_log_indices = list(range(len(found)))

    def select_all(self):
        self.lst_logs.select_set(0, tk.END)
        self.state.selected_log_indices = list(range(self.lst_logs.size()))

    def select_none(self):
        self.lst_logs.select_clear(0, tk.END)
        self.state.selected_log_indices = []

    def add_logs_manual(self):
        paths = filedialog.askopenfilenames(title="Logdateien auswählen", filetypes=[("Logfiles","*.log *.txt"),("Alle Dateien","*.*")])
        if not paths: return
        for p in paths:
            if p not in self.state.available_logs:
                self.state.available_logs.append(p)
                self.lst_logs.insert(tk.END, p)
        # if workdir empty, infer from first added
        if not self.state.workdir.get().strip():
            self.state.workdir.set(str(Path(paths[0]).resolve().parent))
            # also traces default/latest
            self.state.set_traces_to_default_or_latest()

    def remove_selected_logs(self):
        sel = list(self.lst_logs.curselection())
        sel.reverse()
        for i in sel:
            p = self.lst_logs.get(i)
            if p in self.state.available_logs:
                self.state.available_logs.remove(p)
            self.lst_logs.delete(i)
        self.state.selected_log_indices = [i for i in range(self.lst_logs.size()) if self.lst_logs.select_includes(i)]

    def clear_logs(self):
        self.state.available_logs = []
        self.lst_logs.delete(0, tk.END)
        self.state.selected_log_indices = []

    def selected_logs(self):
        idx = self.lst_logs.curselection()
        if not idx:
            return []
        return [self.lst_logs.get(i) for i in idx]

    # ---- project save/load ----
    def collect_project(self):
        return {
            "workdir": self.state.workdir.get(),
            "logs_dir": self.state.logs_dir.get(),
            "traces_dir": self.state.traces_dir.get(),
            "analyze_out_base": self.state.analyze_out_base.get(),
            "timestamp_runs": bool(self.state.timestamp_runs.get()),
            "available_logs": self.state.available_logs,
            "selected_indices": list(self.lst_logs.curselection()),
            "traces_current_dir": self.state.traces_current_dir.get(),
        }

    def apply_project(self, cfg):
        self.state.workdir.set(cfg.get("workdir",""))
        self.state.logs_dir.set(cfg.get("logs_dir","logs"))
        self.state.traces_dir.set(cfg.get("traces_dir","traces"))
        self.state.analyze_out_base.set(cfg.get("analyze_out_base","analyze_out"))
        self.state.timestamp_runs.set(cfg.get("timestamp_runs", True))
        # restore logs
        self.scan_logs()
        # If project contained explicit available_logs, merge
        for p in cfg.get("available_logs", []):
            if p not in self.state.available_logs:
                self.state.available_logs.append(p)
                self.lst_logs.insert(tk.END, p)
        # re-select indices if valid
        self.lst_logs.select_clear(0, tk.END)
        for i in cfg.get("selected_indices", []):
            if 0 <= i < self.lst_logs.size():
                self.lst_logs.select_set(i)
        # traces current dir: use it if it exists, otherwise default/latest
        tdir = cfg.get("traces_current_dir", "")
        if tdir and Path(tdir).exists():
            self.state.set_traces_dir(tdir)
        else:
            self.state.set_traces_to_default_or_latest()

    def save_project(self):
        cfg = self.collect_project()
        path = filedialog.asksaveasfilename(title="Projekt speichern", defaultextension=".json", filetypes=[("Projektdatei","*.json")])
        if not path: return
        with open(path, "w", encoding="utf-8") as f:
            json.dump(cfg, f, indent=2)
        messagebox.showinfo("Gespeichert", f"Projekt gespeichert:\n{path}")

    def load_project(self):
        path = filedialog.askopenfilename(title="Projekt laden", filetypes=[("Projektdatei","*.json"),("Alle Dateien","*.*")])
        if not path: return
        with open(path, "r", encoding="utf-8") as f:
            cfg = json.load(f)
        self.apply_project(cfg)
        messagebox.showinfo("Geladen", f"Projekt geladen:\n{path}")


# ---------------- shared Trace Panel ----------------
class TracePanel(ttk.LabelFrame):
    """
    Unified trace selection: list on the left, buttons on the right.
    Uses AppState.traces_current_dir + AppState.traces_files.
    single_select=True => Listbox SINGLE, hides 'All/None'.
    """
    def __init__(self, master, state: AppState, title="Traces", single_select=False, height=10):
        super().__init__(master, text=title)
        self.state = state
        self.single_select = single_select
        self.height = height
        self._build_ui()
        # subscribe to state updates
        self.state.add_trace_observer(self._on_traces_updated)
        # initial fill from state
        self._on_traces_updated(self.state.traces_files)

    def _build_ui(self):
        self.columnconfigure(0, weight=1)
        self.rowconfigure(0, weight=1)

        selectmode = tk.SINGLE if self.single_select else tk.EXTENDED
        self.lst = tk.Listbox(self, height=self.height, selectmode=selectmode)
        self.lst.grid(row=0, column=0, sticky="nsew", padx=(8,4), pady=8)

        btns = ttk.Frame(self)
        btns.grid(row=0, column=1, sticky="ns", padx=(4,8), pady=8)

        ttk.Button(btns, text="Traces-Ordner wählen…", command=self._pick_traces_dir).pack(fill="x", pady=2)
        ttk.Button(btns, text="Workdir/traces", command=self._use_default_traces).pack(fill="x", pady=2)
        ttk.Button(btns, text="Neuester Split", command=self._use_latest_split).pack(fill="x", pady=2)
        ttk.Button(btns, text="Refresh", command=self._refresh_traces).pack(fill="x", pady=6)
        if not self.single_select:
            ttk.Button(btns, text="Alle wählen", command=lambda: self.lst.select_set(0, tk.END)).pack(fill="x", pady=2)
            ttk.Button(btns, text="Keine", command=lambda: self.lst.select_clear(0, tk.END)).pack(fill="x", pady=2)

    # --- state sync ---
    def _on_traces_updated(self, files):
        # refresh list content
        cur_sel_paths = self.get_selected()
        self.lst.delete(0, tk.END)
        for p in files:
            self.lst.insert(tk.END, p)
        # try to restore selection
        if cur_sel_paths:
            path_to_index = {self.lst.get(i): i for i in range(self.lst.size())}
            for p in cur_sel_paths:
                if p in path_to_index:
                    self.lst.select_set(path_to_index[p])

    def _pick_traces_dir(self):
        d = filedialog.askdirectory(title="Traces-Ordner wählen", initialdir=str(self.state.traces_base_path()))
        if d:
            self.state.set_traces_dir(d)

    def _use_default_traces(self):
        # default or latest under Workdir/traces
        self.state.set_traces_to_default_or_latest()

    def _use_latest_split(self):
        base = self.state.traces_base_path()
        target = latest_subdir(base)
        self.state.set_traces_dir(str(target))

    def _refresh_traces(self):
        self.state.refresh_traces()

    def get_selected(self):
        idx = self.lst.curselection()
        return [self.lst.get(i) for i in idx]


# ---------------- Tab 1: Multi-log analysis (ranking optional) ----------------
class TabAnalyze(ttk.Frame):
    def __init__(self, master, state: AppState, header: Header):
        super().__init__(master, padding=10)
        self.state = state
        self.header = header
        self._build_ui()

    def _build_ui(self):
        self.columnconfigure(0, weight=1)
        self.rowconfigure(2, weight=1)

        # params
        params = ttk.LabelFrame(self, text="Analyse-Parameter")
        params.grid(row=0, column=0, sticky="nsew", padx=5, pady=5)
        for c in (1,3):
            params.columnconfigure(c, weight=1)

        ttk.Label(params, text="Include-IDs (z.B. 0x208,0x209):").grid(row=0, column=0, sticky="w")
        self.include_var = tk.StringVar(value="")
        ttk.Entry(params, textvariable=self.include_var).grid(row=0, column=1, sticky="ew", padx=5)

        ttk.Label(params, text="Exclude-IDs:").grid(row=0, column=2, sticky="w")
        self.exclude_var = tk.StringVar(value="")
        ttk.Entry(params, textvariable=self.exclude_var).grid(row=0, column=3, sticky="ew", padx=5)

        ttk.Label(params, text="Scale:").grid(row=1, column=0, sticky="w")
        self.scale_var = tk.DoubleVar(value=1.0)
        ttk.Entry(params, textvariable=self.scale_var, width=12).grid(row=1, column=1, sticky="w", padx=5)

        ttk.Label(params, text="Offset:").grid(row=1, column=2, sticky="w")
        self.offset_var = tk.DoubleVar(value=0.0)
        ttk.Entry(params, textvariable=self.offset_var, width=12).grid(row=1, column=3, sticky="w", padx=5)

        ttk.Label(params, text="Range-Min:").grid(row=2, column=0, sticky="w")
        self.rmin_var = tk.StringVar(value="")
        ttk.Entry(params, textvariable=self.rmin_var, width=12).grid(row=2, column=1, sticky="w", padx=5)

        ttk.Label(params, text="Range-Max:").grid(row=2, column=2, sticky="w")
        self.rmax_var = tk.StringVar(value="")
        ttk.Entry(params, textvariable=self.rmax_var, width=12).grid(row=2, column=3, sticky="w", padx=5)

        ttk.Label(params, text="Range-Hit-Ratio (0..1):").grid(row=3, column=0, sticky="w")
        self.hit_ratio_var = tk.DoubleVar(value=0.6)
        ttk.Entry(params, textvariable=self.hit_ratio_var, width=12).grid(row=3, column=1, sticky="w", padx=5)

        ttk.Label(params, text="Top-N (Fallback):").grid(row=3, column=2, sticky="w")
        self.top_var = tk.IntVar(value=20)
        ttk.Entry(params, textvariable=self.top_var, width=12).grid(row=3, column=3, sticky="w", padx=5)

        ttk.Label(params, text="Per-ID-Limit:").grid(row=4, column=0, sticky="w")
        self.per_id_limit_var = tk.IntVar(value=2)
        ttk.Entry(params, textvariable=self.per_id_limit_var, width=12).grid(row=4, column=1, sticky="w", padx=5)

        self.run_separately_var = tk.BooleanVar(value=False)
        ttk.Checkbutton(params, text="Jede Logdatei separat laufen lassen", variable=self.run_separately_var).grid(row=4, column=2, columnspan=2, sticky="w", padx=5)

        # run + console
        run = ttk.Frame(self)
        run.grid(row=1, column=0, sticky="ew", padx=5, pady=5)
        ttk.Button(run, text="Analyse starten (Ranking)", command=self.on_run).pack(side="left", padx=5)

        out = ttk.LabelFrame(self, text="Ausgabe")
        out.grid(row=2, column=0, sticky="nsew", padx=5, pady=5)
        out.columnconfigure(0, weight=1); out.rowconfigure(0, weight=1)
        self.txt = tk.Text(out, height=12); self.txt.grid(row=0, column=0, sticky="nsew")
        sb = ttk.Scrollbar(out, orient="vertical", command=self.txt.yview); sb.grid(row=0, column=1, sticky="ns")
        self.txt.configure(yscrollcommand=sb.set)

    def on_run(self):
        logs = self.header.selected_logs()
        if not logs:
            messagebox.showwarning("Hinweis", "Bitte oben im Header Logdateien auswählen.")
            return
        t = threading.Thread(target=self._run_worker, args=(logs,), daemon=True)
        self.txt.delete("1.0", tk.END)
        self._append("Starte Analyse…\n")
        t.start()

    def _run_worker(self, logs):
        script_path = Path(__file__).parent / SCRIPT_NAME
        if not script_path.exists():
            self._append(f"[Fehler] Script nicht gefunden: {script_path}\n"); return

        # output root: workdir/analyze_out/<ts>_multilog
        out_root = self.state.analyze_out_root()
        stamp = now_stamp() + "_multilog"
        outdir = ensure_dir(out_root / stamp)

        def build_args():
            args = [sys.executable, str(script_path)]
            if self.include_var.get().strip():
                args += ["--include-ids", self.include_var.get().strip()]
            if self.exclude_var.get().strip():
                args += ["--exclude-ids", self.exclude_var.get().strip()]
            args += ["--scale", str(self.scale_var.get()), "--offset", str(self.offset_var.get())]
            if self.rmin_var.get().strip(): args += ["--range-min", self.rmin_var.get().strip()]
            if self.rmax_var.get().strip(): args += ["--range-max", self.rmax_var.get().strip()]
            args += ["--range-hit-ratio", str(self.hit_ratio_var.get())]
            args += ["--top", str(self.top_var.get()), "--per-id-limit", str(self.per_id_limit_var.get())]
            return args

        if self.run_separately_var.get():
            for p in logs:
                sub = ensure_dir(outdir / Path(p).stem)
                cmd = build_args() + ["--outdir", str(sub), p]
                self._run_cmd(cmd)
        else:
            cmd = build_args() + ["--outdir", str(outdir)] + logs
            self._run_cmd(cmd)

        self._append(f"\nDone. Output: {outdir}\n")

    def _run_cmd(self, cmd):
        self._append(f"\n>>> RUN: {' '.join(cmd)}\n")
        try:
            with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
                for line in proc.stdout: self._append(line)
                rc = proc.wait()
                if rc != 0: self._append(f"[Exit-Code {rc}]\n")
        except Exception as e:
            self._append(f"[Fehler] {e}\n")

    def _append(self, s): self.txt.insert(tk.END, s); self.txt.see(tk.END)


# ---------------- Tab 2: ID Explorer (split + single-ID analyze) ----------------
class TabExplorer(ttk.Frame):
    def __init__(self, master, state: AppState, header: Header):
        super().__init__(master, padding=10)
        self.state = state
        self.header = header
        self._build_ui()

    def _build_ui(self):
        self.columnconfigure(0, weight=1)
        self.rowconfigure(3, weight=1)

        # split controls
        frm_split = ttk.LabelFrame(self, text="Split: Logs → per-ID Traces")
        frm_split.grid(row=0, column=0, sticky="ew", padx=5, pady=5)
```
|
||||
frm_split.columnconfigure(1, weight=1)
|
||||
|
||||
self.rx_only_var = tk.BooleanVar(value=False)
|
||||
self.ts_split_var = tk.BooleanVar(value=True)
|
||||
|
||||
ttk.Label(frm_split, text="Ziel (Workdir/traces[/timestamp])").grid(row=0, column=0, sticky="w", padx=5)
|
||||
ttk.Label(frm_split, textvariable=self.state.traces_dir).grid(row=0, column=1, sticky="w")
|
||||
ttk.Checkbutton(frm_split, text="nur RX", variable=self.rx_only_var).grid(row=1, column=0, sticky="w", padx=5)
|
||||
ttk.Checkbutton(frm_split, text="Zeitstempel-Unterordner", variable=self.ts_split_var).grid(row=1, column=1, sticky="w", padx=5)
|
||||
ttk.Button(frm_split, text="Split starten", command=self.on_split).grid(row=1, column=2, sticky="e", padx=5)
|
||||
|
||||
# unified trace panel (multi-select)
|
||||
self.trace_panel = TracePanel(self, self.state, title="Traces im ausgewählten Ordner", single_select=False, height=10)
|
||||
self.trace_panel.grid(row=1, column=0, sticky="nsew", padx=5, pady=(8,10))
|
||||
|
||||
# single-ID analyze
|
||||
frm_one = ttk.LabelFrame(self, text="Einzel-ID Analyse (Plots + summary_stats)")
|
||||
frm_one.grid(row=2, column=0, sticky="nsew", padx=5, pady=5)
|
||||
frm_one.columnconfigure(1, weight=1)
|
||||
ttk.Label(frm_one, text="Output-Basis (unter Workdir/analyze_out):").grid(row=0, column=0, sticky="w")
|
||||
self.one_out_base = tk.StringVar(value="id_explore")
|
||||
ttk.Entry(frm_one, textvariable=self.one_out_base).grid(row=0, column=1, sticky="ew", padx=5)
|
||||
self.ts_one = tk.BooleanVar(value=True)
|
||||
ttk.Checkbutton(frm_one, text="Zeitstempel-Unterordner", variable=self.ts_one).grid(row=0, column=2, sticky="w", padx=5)
|
||||
ttk.Button(frm_one, text="Analyse starten", command=self.on_one_analyze).grid(row=0, column=3, sticky="e", padx=5)
|
||||
|
||||
# console
|
||||
out = ttk.LabelFrame(self, text="Ausgabe")
|
||||
out.grid(row=3, column=0, sticky="nsew", padx=5, pady=5)
|
||||
out.columnconfigure(0, weight=1); out.rowconfigure(0, weight=1)
|
||||
self.txt = tk.Text(out, height=12); self.txt.grid(row=0, column=0, sticky="nsew")
|
||||
sb = ttk.Scrollbar(out, orient="vertical", command=self.txt.yview); sb.grid(row=0, column=1, sticky="ns")
|
||||
self.txt.configure(yscrollcommand=sb.set)
|
||||
|
||||
def on_split(self):
|
||||
logs = self.header.selected_logs()
|
||||
if not logs:
|
||||
messagebox.showwarning("Hinweis", "Bitte oben im Header Logdateien auswählen."); return
|
||||
outdir = self.state.traces_base_path()
|
||||
if self.ts_split_var.get(): outdir = outdir / now_stamp()
|
||||
ensure_dir(outdir)
|
||||
cmd = [sys.executable, str(Path(__file__).parent / SPLIT_SCRIPT), "--outdir", str(outdir)]
|
||||
if self.rx_only_var.get(): cmd.append("--rx-only")
|
||||
cmd += logs
|
||||
self._run_cmd(cmd)
|
||||
        # after the split: point the global traces folder at the newly created (latest) folder
|
||||
self.state.set_traces_dir(str(outdir))
|
||||
|
||||
def on_one_analyze(self):
|
||||
sel = self.trace_panel.get_selected()
|
||||
if not sel:
|
||||
messagebox.showwarning("Hinweis", "Bitte mindestens eine .trace auswählen."); return
|
||||
out_root = self.state.analyze_out_root()
|
||||
stamp = now_stamp() + "_id_explore" if self.ts_one.get() else "id_explore"
|
||||
outdir = ensure_dir(out_root / stamp)
|
||||
for trace in sel:
|
||||
cmd = [sys.executable, str(Path(__file__).parent / EXPLORE_SCRIPT), "--outdir", str(outdir), trace]
|
||||
self._run_cmd(cmd)
|
||||
self._append(f"\nDone. Output: {outdir}\n")
|
||||
|
||||
def _run_cmd(self, cmd):
|
||||
self._append(f"\n>>> RUN: {' '.join(cmd)}\n")
|
||||
try:
|
||||
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
|
||||
for line in proc.stdout: self._append(line)
|
||||
rc = proc.wait()
|
||||
if rc != 0: self._append(f"[Exit-Code {rc}]\n")
|
||||
except Exception as e:
|
||||
self._append(f"[Fehler] {e}\n")
|
||||
|
||||
def _append(self, s): self.txt.insert(tk.END, s); self.txt.see(tk.END)
|
||||
|
||||
|
||||
# ---------------- Tab 3: Traces Batch-Analyse ----------------
|
||||
class TabTraceBatch(ttk.Frame):
|
||||
def __init__(self, master, state: AppState, header: Header):
|
||||
super().__init__(master, padding=10)
|
||||
self.state = state
|
||||
self.header = header
|
||||
self._build_ui()
|
||||
|
||||
def _build_ui(self):
|
||||
self.columnconfigure(0, weight=1)
|
||||
        self.rowconfigure(4, weight=1)  # let the output console row (row 4) absorb extra vertical space
|
||||
|
||||
# unified trace panel (multi-select)
|
||||
self.trace_panel = TracePanel(self, self.state, title="Traces (Ordner/Subset wählen)", single_select=False, height=10)
|
||||
self.trace_panel.grid(row=0, column=0, sticky="nsew", padx=5, pady=(5,10))
|
||||
|
||||
# Params
|
||||
pr = ttk.LabelFrame(self, text="Analyse-Parameter")
|
||||
pr.grid(row=1, column=0, sticky="nsew", padx=5, pady=5)
|
||||
for c in (1,3):
|
||||
pr.columnconfigure(c, weight=1)
|
||||
|
||||
self.rx_only = tk.BooleanVar(value=False)
|
||||
ttk.Checkbutton(pr, text="nur RX", variable=self.rx_only).grid(row=0, column=0, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(pr, text="Scale").grid(row=0, column=1, sticky="e")
|
||||
self.scale = tk.DoubleVar(value=1.0)
|
||||
ttk.Entry(pr, textvariable=self.scale, width=12).grid(row=0, column=2, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(pr, text="Offset").grid(row=0, column=3, sticky="e")
|
||||
self.offset = tk.DoubleVar(value=0.0)
|
||||
ttk.Entry(pr, textvariable=self.offset, width=12).grid(row=0, column=4, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(pr, text="Range-Min").grid(row=1, column=1, sticky="e")
|
||||
self.rmin = tk.StringVar(value="")
|
||||
ttk.Entry(pr, textvariable=self.rmin, width=12).grid(row=1, column=2, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(pr, text="Range-Max").grid(row=1, column=3, sticky="e")
|
||||
self.rmax = tk.StringVar(value="")
|
||||
ttk.Entry(pr, textvariable=self.rmax, width=12).grid(row=1, column=4, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(pr, text="Top pro Trace").grid(row=2, column=1, sticky="e")
|
||||
self.top = tk.IntVar(value=8)
|
||||
ttk.Entry(pr, textvariable=self.top, width=12).grid(row=2, column=2, sticky="w", padx=5)
|
||||
|
||||
self.use_ts = tk.BooleanVar(value=True)
|
||||
ttk.Checkbutton(pr, text="Zeitstempel-Unterordner", variable=self.use_ts).grid(row=2, column=3, sticky="w", padx=5)
|
||||
|
||||
# Run & console
|
||||
run = ttk.Frame(self)
|
||||
run.grid(row=3, column=0, sticky="ew", padx=5, pady=5)
|
||||
ttk.Button(run, text="Batch starten", command=self.on_run).pack(side="left", padx=5)
|
||||
|
||||
out = ttk.LabelFrame(self, text="Ausgabe")
|
||||
out.grid(row=4, column=0, sticky="nsew", padx=5, pady=5)
|
||||
out.columnconfigure(0, weight=1); out.rowconfigure(0, weight=1)
|
||||
self.txt = tk.Text(out, height=12); self.txt.grid(row=0, column=0, sticky="nsew")
|
||||
sb = ttk.Scrollbar(out, orient="vertical", command=self.txt.yview); sb.grid(row=0, column=1, sticky="ns")
|
||||
self.txt.configure(yscrollcommand=sb.set)
|
||||
|
||||
def on_run(self):
|
||||
        # use the selection or, if it is empty, the whole folder
|
||||
selected = self.trace_panel.get_selected()
|
||||
traces_dir = Path(self.state.traces_current_dir.get().strip() or str(self.state.traces_base_path()))
|
||||
if not traces_dir.exists():
|
||||
messagebox.showwarning("Hinweis", "Bitte gültigen Traces-Ordner wählen."); return
|
||||
|
||||
out_root = self.state.analyze_out_root()
|
||||
label = "trace_batch"
|
||||
stamp = now_stamp() + "_" + label if self.use_ts.get() else label
|
||||
outdir = ensure_dir(out_root / stamp)
|
||||
|
||||
        # if a selection was made, build a temporary subset folder
|
||||
subset_dir = None
|
||||
if selected:
|
||||
subset_dir = ensure_dir(outdir / "_subset")
|
||||
for p in selected:
|
||||
src = Path(p)
|
||||
dst = subset_dir / src.name
|
||||
try:
|
||||
                    # try a hard link first (fast, saves disk space)
|
||||
if dst.exists():
|
||||
dst.unlink()
|
||||
os.link(src, dst)
|
||||
except Exception:
|
||||
                    # fall back to copying
|
||||
shutil.copy2(src, dst)
|
||||
|
||||
run_dir = subset_dir if subset_dir else traces_dir
|
||||
|
||||
cmd = [sys.executable, str(Path(__file__).parent/TRACE_BATCH),
|
||||
"--traces-dir", str(run_dir), "--outdir", str(outdir),
|
||||
"--scale", str(self.scale.get()), "--offset", str(self.offset.get()),
|
||||
"--top", str(self.top.get()), "--plots"]
|
||||
if self.rmin.get().strip(): cmd += ["--range-min", self.rmin.get().strip()]
|
||||
if self.rmax.get().strip(): cmd += ["--range-max", self.rmax.get().strip()]
|
||||
if self.rx_only.get(): cmd.append("--rx-only")
|
||||
|
||||
self._run_cmd(cmd)
|
||||
self._append(f"\nDone. Output: {outdir}\n")
|
||||
|
||||
def _run_cmd(self, cmd):
|
||||
self._append(f"\n>>> RUN: {' '.join(cmd)}\n")
|
||||
try:
|
||||
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
|
||||
for line in proc.stdout: self._append(line)
|
||||
rc = proc.wait()
|
||||
if rc != 0: self._append(f"[Exit-Code {rc}]\n")
|
||||
except Exception as e:
|
||||
self._append(f"[Fehler] {e}\n")
|
||||
|
||||
def _append(self, s): self.txt.insert(tk.END, s); self.txt.see(tk.END)
|
||||
|
||||
# ---------------- Tab 4: Range-Fit (supervised + unsupervised, with physics constraints) ----------------
|
||||
class TabRangeFit(ttk.Frame):
|
||||
def __init__(self, master, state: AppState, header: Header):
|
||||
super().__init__(master, padding=10)
|
||||
self.state = state
|
||||
self.header = header
|
||||
self._last_outdir = None
|
||||
self._build_ui()
|
||||
|
||||
def _build_ui(self):
|
||||
self.columnconfigure(0, weight=1)
|
||||
self.rowconfigure(3, weight=1)
|
||||
|
||||
# unified trace panel (single-select)
|
||||
self.trace_panel = TracePanel(self, self.state, title="Trace wählen (Single)", single_select=True, height=10)
|
||||
self.trace_panel.grid(row=0, column=0, sticky="nsew", padx=5, pady=(5,10))
|
||||
|
||||
# Parameter Frames
|
||||
frm_params = ttk.Frame(self)
|
||||
frm_params.grid(row=1, column=0, sticky="nsew", padx=5, pady=5)
|
||||
for c in range(6):
|
||||
frm_params.columnconfigure(c, weight=1)
|
||||
|
||||
        # --- Supervised (range & physics constraints) ---
|
||||
box_sup = ttk.LabelFrame(frm_params, text="Supervised (Range-Fit) – lasse leer für Unsupervised")
|
||||
box_sup.grid(row=0, column=0, columnspan=6, sticky="nsew", padx=5, pady=5)
|
||||
for c in range(6):
|
||||
box_sup.columnconfigure(c, weight=1)
|
||||
|
||||
ttk.Label(box_sup, text="Range-Min").grid(row=0, column=0, sticky="e")
|
||||
self.rmin = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.rmin, width=12).grid(row=0, column=1, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Range-Max").grid(row=0, column=2, sticky="e")
|
||||
self.rmax = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.rmax, width=12).grid(row=0, column=3, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Min. Hit-Ratio (0..1)").grid(row=0, column=4, sticky="e")
|
||||
self.min_hit = tk.DoubleVar(value=0.5)
|
||||
ttk.Entry(box_sup, textvariable=self.min_hit, width=10).grid(row=0, column=5, sticky="w", padx=5)
|
||||
|
||||
self.allow_neg = tk.BooleanVar(value=False)
|
||||
ttk.Checkbutton(box_sup, text="negative Scale erlauben", variable=self.allow_neg).grid(row=1, column=0, columnspan=2, sticky="w")
|
||||
|
||||
ttk.Label(box_sup, text="Rate-Min (Hz)").grid(row=1, column=2, sticky="e")
|
||||
self.rate_min = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.rate_min, width=10).grid(row=1, column=3, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Rate-Max (Hz)").grid(row=1, column=4, sticky="e")
|
||||
self.rate_max = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.rate_max, width=10).grid(row=1, column=5, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Jitter-Max (ms)").grid(row=2, column=0, sticky="e")
|
||||
self.jitter_max = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.jitter_max, width=10).grid(row=2, column=1, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Max-Slope-Abs (phys/s)").grid(row=2, column=2, sticky="e")
|
||||
self.slope_abs = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.slope_abs, width=12).grid(row=2, column=3, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Max-Slope-Frac (/s)").grid(row=2, column=4, sticky="e")
|
||||
self.slope_frac = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.slope_frac, width=12).grid(row=2, column=5, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Slope-Quantile").grid(row=3, column=0, sticky="e")
|
||||
        self.slope_q = tk.DoubleVar(value=0.95)  # 0.95 or 0.99
|
||||
ttk.Entry(box_sup, textvariable=self.slope_q, width=10).grid(row=3, column=1, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_sup, text="Min-Unique-Ratio").grid(row=3, column=2, sticky="e")
|
||||
self.min_uniq = tk.StringVar(value="")
|
||||
ttk.Entry(box_sup, textvariable=self.min_uniq, width=10).grid(row=3, column=3, sticky="w", padx=5)
|
||||
|
||||
# --- Unsupervised ---
|
||||
box_uns = ttk.LabelFrame(frm_params, text="Unsupervised (ohne Range)")
|
||||
box_uns.grid(row=1, column=0, columnspan=6, sticky="nsew", padx=5, pady=5)
|
||||
for c in range(6):
|
||||
box_uns.columnconfigure(c, weight=1)
|
||||
|
||||
ttk.Label(box_uns, text="Min. Smoothness (0..1)").grid(row=0, column=0, sticky="e")
|
||||
self.min_smooth = tk.DoubleVar(value=0.2)
|
||||
ttk.Entry(box_uns, textvariable=self.min_smooth, width=12).grid(row=0, column=1, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_uns, text="Max-Slope-Frac-RAW (/s)").grid(row=0, column=2, sticky="e")
|
||||
self.max_slope_frac_raw = tk.StringVar(value="")
|
||||
ttk.Entry(box_uns, textvariable=self.max_slope_frac_raw, width=12).grid(row=0, column=3, sticky="w", padx=5)
|
||||
|
||||
        # --- General & output ---
|
||||
box_out = ttk.LabelFrame(frm_params, text="Allgemein & Output")
|
||||
box_out.grid(row=2, column=0, columnspan=6, sticky="nsew", padx=5, pady=5)
|
||||
for c in range(6):
|
||||
box_out.columnconfigure(c, weight=1)
|
||||
|
||||
self.rx_only = tk.BooleanVar(value=False)
|
||||
ttk.Checkbutton(box_out, text="nur RX", variable=self.rx_only).grid(row=0, column=0, sticky="w")
|
||||
|
||||
ttk.Label(box_out, text="Plots Top-N").grid(row=0, column=1, sticky="e")
|
||||
self.plots_top = tk.IntVar(value=8)
|
||||
ttk.Entry(box_out, textvariable=self.plots_top, width=10).grid(row=0, column=2, sticky="w", padx=5)
|
||||
|
||||
ttk.Label(box_out, text="Output-Label").grid(row=0, column=3, sticky="e")
|
||||
self.out_label = tk.StringVar(value="rangefit")
|
||||
ttk.Entry(box_out, textvariable=self.out_label, width=18).grid(row=0, column=4, sticky="w", padx=5)
|
||||
|
||||
self.use_ts = tk.BooleanVar(value=True)
|
||||
ttk.Checkbutton(box_out, text="Zeitstempel-Unterordner", variable=self.use_ts).grid(row=0, column=5, sticky="w")
|
||||
|
||||
        # start button + console + actions
|
||||
frm_run = ttk.Frame(self)
|
||||
frm_run.grid(row=2, column=0, sticky="ew", padx=5, pady=5)
|
||||
ttk.Button(frm_run, text="Start Range-/Unsupervised-Fit", command=self._on_run).pack(side="left", padx=5)
|
||||
ttk.Button(frm_run, text="Report öffnen", command=self._open_last_report).pack(side="left", padx=5)
|
||||
ttk.Button(frm_run, text="Output-Ordner öffnen", command=self._open_last_outdir).pack(side="left", padx=5)
|
||||
|
||||
frm_out = ttk.LabelFrame(self, text="Ausgabe")
|
||||
frm_out.grid(row=3, column=0, sticky="nsew", padx=5, pady=5)
|
||||
frm_out.columnconfigure(0, weight=1); frm_out.rowconfigure(0, weight=1)
|
||||
self.txt = tk.Text(frm_out, height=14); self.txt.grid(row=0, column=0, sticky="nsew")
|
||||
sbo = ttk.Scrollbar(frm_out, orient="vertical", command=self.txt.yview); sbo.grid(row=0, column=1, sticky="ns")
|
||||
self.txt.configure(yscrollcommand=sbo.set)
|
||||
|
||||
# --- helpers ---
|
||||
def _append(self, s):
|
||||
self.txt.insert(tk.END, s); self.txt.see(tk.END)
|
||||
|
||||
def _stamp(self):
|
||||
import datetime as _dt
|
||||
return _dt.datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
|
||||
def _build_outdir(self, supervised: bool) -> Path:
|
||||
out_root = self.state.analyze_out_root()
|
||||
label = (self.out_label.get().strip() or ("rangefit" if supervised else "unsupervised"))
|
||||
stamp = f"{self._stamp()}_{label}" if self.use_ts.get() else label
|
||||
outdir = out_root / stamp
|
||||
outdir.mkdir(parents=True, exist_ok=True)
|
||||
self._last_outdir = outdir
|
||||
return outdir
|
||||
|
||||
def _selected_trace(self):
|
||||
sel = self.trace_panel.get_selected()
|
||||
if not sel:
|
||||
messagebox.showwarning("Hinweis", "Bitte genau eine .trace-Datei auswählen.")
|
||||
return None
|
||||
if len(sel) != 1:
|
||||
messagebox.showwarning("Hinweis", "Range-Fit benötigt genau eine .trace-Datei (Single-Select).")
|
||||
return None
|
||||
return sel[0]
|
||||
|
||||
def _maybe(self, val: str, flag: str, args: list):
|
||||
v = (val or "").strip()
|
||||
if v != "":
|
||||
args += [flag, v]
|
||||
|
||||
def _open_path(self, p: Path):
|
||||
try:
|
||||
if sys.platform.startswith("darwin"):
|
||||
subprocess.Popen(["open", str(p)])
|
||||
elif os.name == "nt":
|
||||
os.startfile(str(p)) # type: ignore
|
||||
else:
|
||||
subprocess.Popen(["xdg-open", str(p)])
|
||||
except Exception as e:
|
||||
messagebox.showwarning("Fehler", f"Konnte nicht öffnen:\n{p}\n{e}")
|
||||
|
||||
def _open_last_outdir(self):
|
||||
if self._last_outdir and self._last_outdir.exists():
|
||||
self._open_path(self._last_outdir)
|
||||
else:
|
||||
messagebox.showinfo("Hinweis", "Noch kein Output-Ordner vorhanden.")
|
||||
|
||||
def _open_last_report(self):
|
||||
if not (self._last_outdir and self._last_outdir.exists()):
|
||||
messagebox.showinfo("Hinweis", "Noch kein Report erzeugt.")
|
||||
return
|
||||
        # try to find a *_report.md in the most recent output directory
|
||||
md = list(Path(self._last_outdir).glob("*_report.md"))
|
||||
if not md:
|
||||
messagebox.showinfo("Hinweis", "Kein Report gefunden.")
|
||||
return
|
||||
self._open_path(md[0])
|
||||
|
||||
def _on_run(self):
|
||||
trace = self._selected_trace()
|
||||
if not trace:
|
||||
return
|
||||
|
||||
# supervised?
|
||||
rmin = self.rmin.get().strip()
|
||||
rmax = self.rmax.get().strip()
|
||||
supervised = bool(rmin) and bool(rmax)
|
||||
|
||||
outdir = self._build_outdir(supervised)
|
||||
|
||||
cmd = [
|
||||
sys.executable,
|
||||
str(Path(__file__).parent / RANGE_FITTER),
|
||||
trace,
|
||||
"--outdir", str(outdir),
|
||||
"--plots-top", str(self.plots_top.get()),
|
||||
]
|
||||
if self.rx_only.get():
|
||||
cmd.append("--rx-only")
|
||||
|
||||
if supervised:
|
||||
cmd += ["--rmin", rmin, "--rmax", rmax, "--min-hit", str(self.min_hit.get())]
|
||||
if self.allow_neg.get():
|
||||
cmd.append("--allow-neg-scale")
|
||||
self._maybe(self.rate_min.get(), "--rate-min", cmd)
|
||||
self._maybe(self.rate_max.get(), "--rate-max", cmd)
|
||||
self._maybe(self.jitter_max.get(), "--jitter-max-ms", cmd)
|
||||
self._maybe(self.slope_abs.get(), "--max-slope-abs", cmd)
|
||||
self._maybe(self.slope_frac.get(), "--max-slope-frac", cmd)
|
||||
cmd += ["--slope-quantile", str(self.slope_q.get())]
|
||||
self._maybe(self.min_uniq.get(), "--min-uniq-ratio", cmd)
|
||||
else:
|
||||
# unsupervised
|
||||
cmd += ["--min-smooth", str(self.min_smooth.get())]
|
||||
self._maybe(self.max_slope_frac_raw.get(), "--max-slope-frac-raw", cmd)
|
||||
cmd += ["--slope-quantile", str(self.slope_q.get())] # wird intern für p95/p99 gewählt
|
||||
|
||||
self._append(f"\n>>> RUN: {' '.join(cmd)}\n")
|
||||
t = threading.Thread(target=self._run_cmd, args=(cmd,), daemon=True)
|
||||
t.start()
|
||||
|
||||
def _run_cmd(self, cmd):
|
||||
try:
|
||||
with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as proc:
|
||||
for line in proc.stdout:
|
||||
self._append(line)
|
||||
rc = proc.wait()
|
||||
if rc != 0:
|
||||
self._append(f"[Exit-Code {rc}]\n")
|
||||
else:
|
||||
self._append(f"\nDone. Output: {self._last_outdir}\n")
|
||||
except Exception as e:
|
||||
self._append(f"[Fehler] {e}\n")
|
||||
|
||||
# ---------------- App Shell ----------------
|
||||
class App(tk.Tk):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
self.title("CAN Universal Signal Finder – GUI")
|
||||
self.geometry("1180x860")
|
||||
self.configure(padx=8, pady=8)
|
||||
|
||||
# shared state
|
||||
self.state = AppState()
|
||||
|
||||
# header (always visible)
|
||||
self.header = Header(self, self.state)
|
||||
self.header.pack(fill="x", side="top")
|
||||
|
||||
# Tabs
|
||||
nb = ttk.Notebook(self)
|
||||
nb.pack(fill="both", expand=True)
|
||||
|
||||
self.tab_analyze = TabAnalyze(nb, self.state, self.header)
|
||||
self.tab_explorer = TabExplorer(nb, self.state, self.header)
|
||||
self.tab_batch = TabTraceBatch(nb, self.state, self.header)
|
||||
self.tab_rangefit = TabRangeFit(nb, self.state, self.header)
|
||||
|
||||
nb.add(self.tab_analyze, text="Multi-Log Analyse")
|
||||
nb.add(self.tab_explorer, text="ID Explorer")
|
||||
nb.add(self.tab_batch, text="Traces Batch-Analyse")
|
||||
nb.add(self.tab_rangefit, text="Range-Fit")
|
||||
|
||||
        # init: set the traces folder to the default or the latest split
|
||||
self.state.set_traces_to_default_or_latest()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app = App()
|
||||
app.mainloop()
|
18
Reverse-Engineering CAN-Bus/models/.gitignore
vendored
Normal file
@@ -0,0 +1,18 @@
# In EVERY immediate subfolder of models/, ignore
# the directories "traces" and "analyze_out"
*/traces/
*/traces/**
*/analyze_out/
*/analyze_out/**

# If such folders are accidentally created directly under models/, ignore them as well:
traces/
traces/**
analyze_out/
analyze_out/**

# (Optional, in case of a typo)
*/analyze.out/
*/analyze.out/**
analyze.out/
analyze.out/**
@@ -0,0 +1,12 @@

possible CAN IDs!? (from a forum somewhere)
Message ID:

0x540 - byte 0 - bits 6...4 - Gear Position - 0 = N, 1-6 = gears 1-6
                 bit 1 - Neutral Light - 1 = on, 0 = off
                 bit 2 - Check engine light????
0x550 - byte 0 - bits 2...0 - Coolant bars on dashboard
               - bit 3 - Warning light - 1 = on, 0 = off
0x570 - bytes 2-3 - Coolant temp - (256 * byte 3 + byte 2) / 10 = Temp in Degrees C
0x518 - Possible revs - divide by 4
0x519 - Similar to 0x518. Possibly TPS, unsure. Doesn't actuate when only the TPS is rotated.
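A minimal Python decode sketch for the two most concrete claims above (illustrative only; the IDs, byte positions and scaling come from an unverified forum post and must be confirmed against your own logs):

def decode_0x540_gear(data: bytes) -> int:
    # bits 6..4 of byte 0 are claimed to hold the gear position (0 = N, 1-6 = gears 1-6)
    return (data[0] >> 4) & 0x07

def decode_0x570_coolant_c(data: bytes) -> float:
    # (256 * byte 3 + byte 2) / 10 = coolant temperature in degrees C, per the note above
    return (256 * data[3] + data[2]) / 10.0

# e.g. a 0x570 frame with byte2 = 0x2C (44) and byte3 = 0x01 gives (256*1 + 44) / 10 = 30.0 degrees C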
@@ -0,0 +1,13 @@
{
  "workdir": "models/Triumph Speed Twin 1200 RS (2025)",
  "logs_dir": "logs",
  "traces_dir": "traces",
  "analyze_out_base": "analyze_out",
  "timestamp_runs": true,
  "available_logs": [
    "models/Triumph Speed Twin 1200 RS (2025)/logs/cantrace-raw-2025-08-27T17-45-27-980Z-1.log"
  ],
  "selected_indices": [
    0
  ]
}
3
Reverse-Engineering CAN-Bus/requirements.txt
Normal file
@@ -0,0 +1,3 @@
pandas>=2.0.0
numpy>=1.24.0
matplotlib>=3.7.0
18
Reverse-Engineering CAN-Bus/start.sh
Executable file
@@ -0,0 +1,18 @@
#!/usr/bin/env bash
set -euo pipefail

# Choose python (allow override with $PYTHON)
PYTHON_BIN="${PYTHON:-python3}"
VENV_DIR=".venv"

if [ ! -d "$VENV_DIR" ]; then
  "$PYTHON_BIN" -m venv "$VENV_DIR"
fi

# shellcheck disable=SC1091
source "$VENV_DIR/bin/activate"

python -m pip install --upgrade pip
pip install -r requirements.txt

exec python main.py "$@"
186
Reverse-Engineering CAN-Bus/trace_batch_analyzer.py
Normal file
@@ -0,0 +1,186 @@
|
||||
#!/usr/bin/env python3
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
LOG_PATTERN = re.compile(r"(\d+)\s+(TX|RX)\s+0x([0-9A-Fa-f]+)\s+\d+\s+((?:[0-9A-Fa-f]{2}\s+)+)")
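# The pattern expects the Kettenöler log layout <timestamp_ms> <TX|RX> 0x<ID> <DLC> <bytes...>,
# e.g. a line such as "1234567 RX 0x208 8 00 1F 02 A0 00 00 00 3C" (values made up for illustration).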
|
||||
|
||||
def parse_trace(path: Path, rx_only=False) -> pd.DataFrame:
|
||||
rows = []
|
||||
with open(path, "r", errors="ignore") as f:
|
||||
for line in f:
|
||||
m = LOG_PATTERN.match(line)
|
||||
if not m:
|
||||
continue
|
||||
ts = int(m.group(1))
|
||||
dr = m.group(2)
|
||||
if rx_only and dr != "RX":
|
||||
continue
|
||||
cid = int(m.group(3), 16)
|
||||
data = [int(x, 16) for x in m.group(4).split() if x.strip()]
|
||||
rows.append((ts, dr, cid, data))
|
||||
df = pd.DataFrame(rows, columns=["ts","dir","id","data"])
|
||||
if df.empty:
|
||||
return df
|
||||
df["time_s"] = (df["ts"] - df["ts"].min())/1000.0
|
||||
return df
|
||||
|
||||
def be16(a,b): return (a<<8)|b
|
||||
def le16(a,b): return a | (b<<8)
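# Quick sanity check of the two byte orders (example values only):
# be16(0x12, 0x34) == 0x1234 (big-endian), le16(0x12, 0x34) == 0x3412 (little-endian).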
|
||||
|
||||
def analyze_one_trace(df: pd.DataFrame, scale=1.0, offs=0.0, rmin=None, rmax=None):
|
||||
"""Return stats for all 8-bit bytes and all adjacent 16-bit pairs (LE/BE)."""
|
||||
stats = []
|
||||
# 8-bit
|
||||
for i in range(8):
|
||||
vals = [d[i] for d in df["data"] if len(d)>i]
|
||||
if not vals: continue
|
||||
arr = np.array(vals, dtype=float)
|
||||
phys = arr*scale + offs
|
||||
hit = np.ones_like(phys, dtype=bool)
|
||||
if rmin is not None: hit &= (phys>=rmin)
|
||||
if rmax is not None: hit &= (phys<=rmax)
|
||||
stats.append({
|
||||
"type":"byte8","slot":str(i),
|
||||
"n":len(arr),
|
||||
"min":float(arr.min()),"max":float(arr.max()),"var":float(arr.var()),
|
||||
"hit_ratio": float(np.count_nonzero(hit))/len(hit) if len(hit)>0 else 0.0,
|
||||
"min_phys": float(phys.min()), "max_phys": float(phys.max())
|
||||
})
|
||||
# 16-bit
|
||||
pairs = [(i,i+1) for i in range(7)]
|
||||
for i,j in pairs:
|
||||
# LE
|
||||
vals = [le16(d[i],d[j]) for d in df["data"] if len(d)>j]
|
||||
if vals:
|
||||
arr = np.array(vals, dtype=float); phys = arr*scale + offs
|
||||
hit = np.ones_like(phys, dtype=bool)
|
||||
if rmin is not None: hit &= (phys>=rmin)
|
||||
if rmax is not None: hit &= (phys<=rmax)
|
||||
stats.append({
|
||||
"type":"le16","slot":f"{i}-{j}",
|
||||
"n":len(arr),
|
||||
"min":float(arr.min()),"max":float(arr.max()),"var":float(arr.var()),
|
||||
"hit_ratio": float(np.count_nonzero(hit))/len(hit) if len(hit)>0 else 0.0,
|
||||
"min_phys": float(phys.min()), "max_phys": float(phys.max())
|
||||
})
|
||||
# BE
|
||||
vals = [be16(d[i],d[j]) for d in df["data"] if len(d)>j]
|
||||
if vals:
|
||||
arr = np.array(vals, dtype=float); phys = arr*scale + offs
|
||||
hit = np.ones_like(phys, dtype=bool)
|
||||
if rmin is not None: hit &= (phys>=rmin)
|
||||
if rmax is not None: hit &= (phys<=rmax)
|
||||
stats.append({
|
||||
"type":"be16","slot":f"{i}-{j}",
|
||||
"n":len(arr),
|
||||
"min":float(arr.min()),"max":float(arr.max()),"var":float(arr.var()),
|
||||
"hit_ratio": float(np.count_nonzero(hit))/len(hit) if len(hit)>0 else 0.0,
|
||||
"min_phys": float(phys.min()), "max_phys": float(phys.max())
|
||||
})
|
||||
return pd.DataFrame(stats)
|
||||
|
||||
def plot_one_trace(df: pd.DataFrame, outdir: Path, prefix: str):
|
||||
outdir.mkdir(parents=True, exist_ok=True)
|
||||
# 8-bit plots
|
||||
for i in range(8):
|
||||
times, series = [], []
|
||||
for t,d in zip(df["time_s"], df["data"]):
|
||||
if len(d)>i:
|
||||
times.append(t); series.append(d[i])
|
||||
if not series: continue
|
||||
import matplotlib.pyplot as plt
|
||||
plt.figure(figsize=(10,4))
|
||||
plt.plot(times, series, marker=".", linestyle="-")
|
||||
plt.xlabel("Zeit (s)"); plt.ylabel(f"Byte[{i}] (8-bit)")
|
||||
plt.title(f"{prefix} – 8-bit Byte {i}")
|
||||
plt.grid(True); plt.tight_layout()
|
||||
plt.savefig(outdir / f"{prefix}_byte{i}.png", dpi=150); plt.close()
|
||||
# 16-bit plots (LE/BE)
|
||||
pairs = [(i,i+1) for i in range(7)]
|
||||
for i,j in pairs:
|
||||
times, series = [], []
|
||||
for t,d in zip(df["time_s"], df["data"]):
|
||||
if len(d)>j: times.append(t); series.append(le16(d[i],d[j]))
|
||||
if series:
|
||||
import matplotlib.pyplot as plt
|
||||
plt.figure(figsize=(10,4))
|
||||
plt.plot(times, series, marker=".", linestyle="-")
|
||||
plt.xlabel("Zeit (s)"); plt.ylabel(f"LE16 @{i}-{j}")
|
||||
plt.title(f"{prefix} – LE16 {i}-{j}")
|
||||
plt.grid(True); plt.tight_layout()
|
||||
plt.savefig(outdir / f"{prefix}_le16_{i}-{j}.png", dpi=150); plt.close()
|
||||
times, series = [], []
|
||||
for t,d in zip(df["time_s"], df["data"]):
|
||||
if len(d)>j: times.append(t); series.append(be16(d[i],d[j]))
|
||||
if series:
|
||||
import matplotlib.pyplot as plt
|
||||
plt.figure(figsize=(10,4))
|
||||
plt.plot(times, series, marker=".", linestyle="-")
|
||||
plt.xlabel("Zeit (s)"); plt.ylabel(f"BE16 @{i}-{j}")
|
||||
plt.title(f"{prefix} – BE16 {i}-{j}")
|
||||
plt.grid(True); plt.tight_layout()
|
||||
plt.savefig(outdir / f"{prefix}_be16_{i}-{j}.png", dpi=150); plt.close()
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(description="Batch analyze per-ID traces and rank 8/16-bit combinations")
|
||||
ap.add_argument("--traces-dir", required=True, help="Directory containing *.trace files")
|
||||
ap.add_argument("--outdir", required=True, help="Output directory for analysis results")
|
||||
ap.add_argument("--rx-only", action="store_true", help="Use RX frames only")
|
||||
ap.add_argument("--plots", action="store_true", help="Also generate plots for each trace")
|
||||
ap.add_argument("--scale", type=float, default=1.0, help="phys = raw*scale + offset")
|
||||
ap.add_argument("--offset", type=float, default=0.0, help="phys = raw*scale + offset")
|
||||
ap.add_argument("--range-min", type=float, default=None, help="physical min (after scale/offset)")
|
||||
ap.add_argument("--range-max", type=float, default=None, help="physical max (after scale/offset)")
|
||||
ap.add_argument("--top", type=int, default=8, help="Export top combos per trace to summary")
|
||||
args = ap.parse_args()
|
||||
|
||||
tdir = Path(args.traces_dir)
|
||||
outdir = Path(args.outdir); outdir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
traces = sorted([p for p in tdir.glob("*.trace")])
|
||||
if not traces:
|
||||
print("No .trace files found.", file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
global_rows = []
|
||||
for tr in traces:
|
||||
df = parse_trace(tr, rx_only=args.rx_only)
|
||||
if df.empty:
|
||||
continue
|
||||
stats = analyze_one_trace(df, args.scale, args.offset, args.range_min, args.range_max)
|
||||
# Ranking: primarily by hit_ratio (if range given), else by variance; break ties by var then n
|
||||
if args.range_min is not None or args.range_max is not None:
|
||||
stats = stats.sort_values(["hit_ratio","var","n"], ascending=[False, False, False])
|
||||
else:
|
||||
stats = stats.sort_values(["var","n"], ascending=[False, False])
|
||||
# write per-trace csv
|
||||
per_csv = outdir / f"{tr.stem}_combostats.csv"
|
||||
stats.to_csv(per_csv, index=False)
|
||||
|
||||
# append top rows with trace id hint
|
||||
stem = tr.stem # e.g., 0x208_log1
|
||||
for _, row in stats.head(args.top).iterrows():
|
||||
r = row.to_dict()
|
||||
r["trace"] = stem
|
||||
global_rows.append(r)
|
||||
|
||||
# plots (optional) into a subdir per trace
|
||||
if args.plots:
|
||||
plot_dir = outdir / f"{tr.stem}_plots"
|
||||
plot_one_trace(df, plot_dir, prefix=tr.stem)
|
||||
|
||||
# global summary
|
||||
if global_rows:
|
||||
gdf = pd.DataFrame(global_rows)
|
||||
gdf.to_csv(outdir / "summary_top_combinations.csv", index=False)
|
||||
print(f"Global summary written: {outdir/'summary_top_combinations.csv'}")
|
||||
|
||||
print(f"Processed {len(traces)} trace files. Results at: {outdir}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
660
Reverse-Engineering CAN-Bus/trace_signal_fitter.py
Normal file
@@ -0,0 +1,660 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
trace_signal_fitter.py – Advanced Range-/Unsupervised-Fit mit Physik-Constraints & Bericht
|
||||
|
||||
Modi:
|
||||
1) Range-Fit (supervised): --rmin/--rmax gesetzt → finde scale & offset, maximiere Hit-Ratio in [rmin, rmax].
|
||||
2) Unsupervised: ohne Range → plausible Rohsignale nach Smoothness/Var/Rate/Span.
|
||||
|
||||
Neu:
|
||||
- Periodizität: Rate (Hz), Jitter (std der Inter-Arrival-Times), CV.
|
||||
- Slew-Rate: p95/p99 von |Δ|/s (supervised in phys-Einheit, unsupervised normiert auf Roh-Span).
|
||||
- Grenzwerte als Argumente (--rate-min/max, --jitter-max-ms, --max-slope-abs, --max-slope-frac, ...).
|
||||
- Zusätzlich signed 16-bit Varianten (le16s/be16s).
|
||||
- JSON + Markdown-Bericht pro Trace mit PASS/FAIL und Begründungen.
|
||||
|
||||
Logformat (Kettenöler):
|
||||
<timestamp_ms> <TX|RX> 0x<ID_HEX> <DLC> <byte0> <byte1> ... <byte7>
|
||||
|
||||
Outputs:
|
||||
- supervised: <trace>_encoding_candidates.csv, Plots, <trace>_report.md, <trace>_report.json
|
||||
- unsupervised: <trace>_unsupervised_candidates.csv, Plots, <trace>_report.md, <trace>_report.json
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
from typing import List, Tuple, Dict, Iterable
|
||||
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
|
||||
# ---------- Parsing ----------
|
||||
|
||||
def parse_trace(path: Path, rx_only: bool = False) -> pd.DataFrame:
|
||||
"""
|
||||
Robustes Parsen des Kettenöler-Formats:
|
||||
<ts_ms> <TX|RX> 0x<ID> <DLC> <b0> <b1> ... (hex)
|
||||
"""
|
||||
rows = []
|
||||
with open(path, "r", errors="ignore") as f:
|
||||
for line in f:
|
||||
parts = line.strip().split()
|
||||
if len(parts) < 4:
|
||||
continue
|
||||
try:
|
||||
ts = int(parts[0])
|
||||
dr = parts[1]
|
||||
if rx_only and dr != "RX":
|
||||
continue
|
||||
                cid = int(parts[2], 16)  # int(..., 16) accepts both "0x1A0" and "1A0"
|
||||
dlc = int(parts[3])
|
||||
bytes_hex = parts[4:4+dlc] if dlc > 0 else []
|
||||
data = []
|
||||
for b in bytes_hex:
|
||||
try:
|
||||
data.append(int(b, 16))
|
||||
except Exception:
|
||||
data.append(0)
|
||||
rows.append((ts, dr, cid, data))
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
df = pd.DataFrame(rows, columns=["ts", "dir", "id", "data"])
|
||||
if df.empty:
|
||||
return df
|
||||
df["time_s"] = (df["ts"] - df["ts"].min()) / 1000.0
|
||||
return df
|
||||
|
||||
|
||||
# ---------- Helpers ----------
|
||||
|
||||
def be16(a: int, b: int) -> int: return (a << 8) | b
|
||||
def le16(a: int, b: int) -> int: return a | (b << 8)
|
||||
def s16(u: int) -> int: return u if u < 0x8000 else u - 0x10000
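# Sign-conversion examples: s16(0x7FFF) == 32767, s16(0x8000) == -32768, s16(0xFFFF) == -1.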
|
||||
|
||||
def p_quant_abs_diff(arr: np.ndarray, q: float) -> float:
|
||||
if arr.size < 2:
|
||||
return 0.0
|
||||
d = np.abs(np.diff(arr))
|
||||
return float(np.percentile(d, q * 100))
|
||||
|
||||
def p_quant(arr: np.ndarray, q: float) -> float:
|
||||
if arr.size == 0:
|
||||
return 0.0
|
||||
return float(np.percentile(arr, q * 100))
|
||||
|
||||
def interarrival_metrics(times: np.ndarray) -> Dict[str, float]:
|
||||
if times.size < 2:
|
||||
return {"rate_hz": 0.0, "period_mean": 0.0, "period_std": 0.0, "jitter_cv": 0.0, "n": int(times.size)}
|
||||
dt = np.diff(times)
|
||||
period_mean = float(np.mean(dt))
|
||||
period_std = float(np.std(dt))
|
||||
rate_hz = 1.0 / period_mean if period_mean > 0 else 0.0
|
||||
jitter_cv = (period_std / period_mean) if period_mean > 0 else 0.0
|
||||
return {"rate_hz": rate_hz, "period_mean": period_mean, "period_std": period_std, "jitter_cv": jitter_cv, "n": int(times.size)}
|
||||
|
||||
def slope_metrics(values: np.ndarray, times: np.ndarray) -> Dict[str, float]:
|
||||
if values.size < 2:
|
||||
return {"slope_p95": 0.0, "slope_p99": 0.0, "jerk_p95": 0.0}
|
||||
dv = np.abs(np.diff(values))
|
||||
dt = np.diff(times)
|
||||
    # avoid division by zero
|
||||
dt = np.where(dt <= 0, np.nan, dt)
|
||||
slope = dv / dt
|
||||
slope = slope[~np.isnan(slope)]
|
||||
if slope.size == 0:
|
||||
return {"slope_p95": 0.0, "slope_p99": 0.0, "jerk_p95": 0.0}
|
||||
jerk = np.abs(np.diff(slope))
|
||||
return {
|
||||
"slope_p95": float(np.percentile(slope, 95)),
|
||||
"slope_p99": float(np.percentile(slope, 99)),
|
||||
"jerk_p95": float(np.percentile(jerk, 95)) if jerk.size > 0 else 0.0,
|
||||
}
|
||||
|
||||
def prefilter(vals: np.ndarray) -> Tuple[bool, Dict[str, float]]:
|
||||
if vals.size < 12:
|
||||
return False, {"reason": "too_few_samples"}
|
||||
uniq = np.unique(vals)
|
||||
if uniq.size <= 2:
|
||||
return False, {"reason": "too_constant"}
|
||||
p95 = p_quant_abs_diff(vals, 0.95)
|
||||
if p95 == 0:
|
||||
return False, {"reason": "no_changes"}
|
||||
r = float(np.percentile(vals, 97) - np.percentile(vals, 3) + 1e-9)
|
||||
if p95 > 0.5 * r:
|
||||
return False, {"reason": "too_jumpi"}
|
||||
return True, {"p95_abs_diff": p95, "span_est": r}
|
||||
|
||||
def try_scaleset() -> List[float]:
|
||||
base = [
|
||||
1e-3, 2e-3, 5e-3,
|
||||
1e-2, 2e-2, 5e-2,
|
||||
0.05, 0.0625, 0.1, 0.125, 0.2, 0.25, 0.5,
|
||||
0.75, 0.8, 1.0, 1.25, 2.0, 5.0, 10.0
|
||||
]
|
||||
return sorted(set(base))
|
||||
|
||||
def interval_best_offset(raw: np.ndarray, scale: float, rmin: float, rmax: float) -> Tuple[float, float]:
|
||||
"""
|
||||
Finde das Offset, das die meisten Werte (scale*raw + offset) in [rmin, rmax] bringt.
|
||||
Sweep über Intervallgrenzen (klassische "interval stabbing" Lösung).
|
||||
"""
|
||||
a = rmin - scale * raw
|
||||
b = rmax - scale * raw
|
||||
lo = np.minimum(a, b)
|
||||
hi = np.maximum(a, b)
|
||||
events = []
|
||||
for L, H in zip(lo, hi):
|
||||
events.append((L, +1))
|
||||
events.append((H, -1))
|
||||
events.sort(key=lambda t: (t[0], -t[1]))
|
||||
best = -1
|
||||
cur = 0
|
||||
best_x = None
|
||||
for x, v in events:
|
||||
cur += v
|
||||
if cur > best:
|
||||
best = cur
|
||||
best_x = x
|
||||
hit_ratio = float(best) / float(len(raw)) if len(raw) else 0.0
|
||||
return float(best_x if best_x is not None else 0.0), hit_ratio
|
||||
|
||||
|
||||
# ---------- Candidate Generation ----------
|
||||
|
||||
def gen_candidates(df: pd.DataFrame) -> Iterable[Tuple[str, np.ndarray, np.ndarray]]:
|
||||
"""
|
||||
Liefert (label, values, times) für:
|
||||
- 8-bit Bytes D0..D7
|
||||
- 16-bit adjazente Paare (LE/BE) + signed Varianten
|
||||
Times wird auf die gefilterten Indizes gemappt (DLC-abhängig).
|
||||
"""
|
||||
times_all = df["time_s"].to_numpy(dtype=float)
|
||||
data = df["data"].tolist()
|
||||
|
||||
# 8-bit
|
||||
for i in range(8):
|
||||
idx = [k for k, d in enumerate(data) if len(d) > i]
|
||||
if len(idx) < 3:
|
||||
continue
|
||||
vals = np.array([data[k][i] for k in idx], dtype=float)
|
||||
t = times_all[idx]
|
||||
yield f"byte[{i}]", vals, t
|
||||
|
||||
    # adjacent 16-bit pairs
|
||||
for i in range(7):
|
||||
j = i + 1
|
||||
idx = [k for k, d in enumerate(data) if len(d) > j]
|
||||
if len(idx) < 3:
|
||||
continue
|
||||
a = [data[k][i] for k in idx]
|
||||
b = [data[k][j] for k in idx]
|
||||
u_le = np.array([le16(x, y) for x, y in zip(a, b)], dtype=float)
|
||||
u_be = np.array([be16(x, y) for x, y in zip(a, b)], dtype=float)
|
||||
s_le = np.array([s16(le16(x, y)) for x, y in zip(a, b)], dtype=float)
|
||||
s_be = np.array([s16(be16(x, y)) for x, y in zip(a, b)], dtype=float)
|
||||
t = times_all[idx]
|
||||
yield f"le16[{i}-{j}]", u_le, t
|
||||
yield f"be16[{i}-{j}]", u_be, t
|
||||
yield f"le16s[{i}-{j}]", s_le, t
|
||||
yield f"be16s[{i}-{j}]", s_be, t
|
||||
|
||||
|
||||
# ---------- Evaluation ----------
|
||||
|
||||
def evaluate_supervised(label: str,
|
||||
vals: np.ndarray,
|
||||
times: np.ndarray,
|
||||
rmin: float,
|
||||
rmax: float,
|
||||
allow_neg_scale: bool,
|
||||
constraints: Dict[str, float]) -> Dict[str, float] | None:
|
||||
ok, meta = prefilter(vals)
|
||||
if not ok:
|
||||
return None
|
||||
|
||||
scales = try_scaleset()
|
||||
if allow_neg_scale:
|
||||
scales += [-s for s in scales if s > 0]
|
||||
|
||||
best = {"hit_ratio": -1.0, "scale": None, "offset": 0.0}
|
||||
for s in scales:
|
||||
o, hr = interval_best_offset(vals, s, rmin, rmax)
|
||||
if hr > best["hit_ratio"]:
|
||||
best = {"scale": s, "offset": float(o), "hit_ratio": hr}
|
||||
|
||||
phys = vals * best["scale"] + best["offset"]
|
||||
within = (phys >= rmin) & (phys <= rmax)
|
||||
in_count = int(np.count_nonzero(within))
|
||||
|
||||
p95_raw = p_quant_abs_diff(vals, 0.95)
|
||||
p95_phys = p_quant_abs_diff(phys, 0.95)
|
||||
|
||||
ia = interarrival_metrics(times[:len(vals)])
|
||||
sm = slope_metrics(phys, times[:len(phys)])
|
||||
|
||||
prange = (rmax - rmin) if (rmax > rmin) else 1.0
|
||||
slope_p95_frac = sm["slope_p95"] / prange
|
||||
slope_p99_frac = sm["slope_p99"] / prange
|
||||
|
||||
failures = []
|
||||
|
||||
if constraints.get("rate_min") is not None and ia["rate_hz"] < constraints["rate_min"] - 1e-9:
|
||||
failures.append(f"rate {ia['rate_hz']:.2f}Hz < min {constraints['rate_min']:.2f}Hz")
|
||||
if constraints.get("rate_max") is not None and ia["rate_hz"] > constraints["rate_max"] + 1e-9:
|
||||
failures.append(f"rate {ia['rate_hz']:.2f}Hz > max {constraints['rate_max']:.2f}Hz")
|
||||
|
||||
if constraints.get("jitter_max_ms") is not None:
|
||||
jitter_ms = ia["period_std"] * 1000.0
|
||||
if jitter_ms > constraints["jitter_max_ms"] + 1e-9:
|
||||
failures.append(f"jitter {jitter_ms:.1f}ms > max {constraints['jitter_max_ms']:.1f}ms")
|
||||
|
||||
def _resolve_abs_slope_limit():
|
||||
if constraints.get("max_slope_abs") is not None:
|
||||
return constraints["max_slope_abs"]
|
||||
if constraints.get("max_slope_frac") is not None:
|
||||
return constraints["max_slope_frac"] * prange
|
||||
return None
|
||||
|
||||
max_s_abs = _resolve_abs_slope_limit()
|
||||
if max_s_abs is not None:
|
||||
q = constraints.get("slope_quantile", 0.95)
|
||||
qv = sm["slope_p95"] if q <= 0.95 else sm["slope_p99"]
|
||||
if qv > max_s_abs + 1e-9:
|
||||
failures.append(f"slope(q={q:.2f}) {qv:.3g} > max {max_s_abs:.3g}")
|
||||
|
||||
uniq_ratio = len(np.unique(vals)) / float(len(vals))
|
||||
if constraints.get("min_uniq_ratio") is not None and uniq_ratio < constraints["min_uniq_ratio"] - 1e-9:
|
||||
failures.append(f"uniq_ratio {uniq_ratio:.3f} < min {constraints['min_uniq_ratio']:.3f}")
|
||||
|
||||
passed = (len(failures) == 0)
|
||||
|
||||
# Quality Score
|
||||
score = best["hit_ratio"]
|
||||
if max_s_abs is not None and max_s_abs > 0:
|
||||
slope_pen = min(sm["slope_p95"] / max_s_abs, 1.0)
|
||||
score *= (1.0 - 0.3 * slope_pen)
|
||||
if constraints.get("jitter_max_ms") is not None:
|
||||
jitter_ms = ia["period_std"] * 1000.0
|
||||
jitter_pen = min(jitter_ms / constraints["jitter_max_ms"], 1.0)
|
||||
score *= (1.0 - 0.2 * jitter_pen)
|
||||
|
||||
return {
|
||||
"label": label,
|
||||
"mode": "range_fit",
|
||||
"n": int(vals.size),
|
||||
"raw_min": float(np.min(vals)),
|
||||
"raw_max": float(np.max(vals)),
|
||||
"raw_var": float(np.var(vals)),
|
||||
"p95_absdiff_raw": float(p95_raw),
|
||||
"scale": float(best["scale"]),
|
||||
"offset": float(best["offset"]),
|
||||
"hit_ratio": float(best["hit_ratio"]),
|
||||
"in_count": in_count,
|
||||
"phys_min": float(np.min(phys)),
|
||||
"phys_max": float(np.max(phys)),
|
||||
"p95_absdiff_phys": float(p95_phys),
|
||||
"span_phys": float(np.percentile(phys, 97) - np.percentile(phys, 3)),
|
||||
"rate_hz_est": float(ia["rate_hz"]),
|
||||
"period_std_ms": float(ia["period_std"] * 1000.0),
|
||||
"jitter_cv": float(ia["jitter_cv"]),
|
||||
"slope_p95_per_s": float(sm["slope_p95"]),
|
||||
"slope_p99_per_s": float(sm["slope_p99"]),
|
||||
"slope_p95_frac": float(slope_p95_frac),
|
||||
"slope_p99_frac": float(slope_p99_frac),
|
||||
"uniq_ratio": float(uniq_ratio),
|
||||
"passed": bool(passed),
|
||||
"fail_reasons": "; ".join(failures),
|
||||
"quality_score": float(score),
|
||||
}
|
||||
|
||||
def evaluate_unsupervised(label: str,
|
||||
vals: np.ndarray,
|
||||
times: np.ndarray,
|
||||
min_smooth: float = 0.2,
|
||||
max_slope_frac_raw: float | None = None,
|
||||
slope_quantile: float = 0.95) -> Dict[str, float] | None:
|
||||
if vals.size < 12:
|
||||
return None
|
||||
p95 = p_quant_abs_diff(vals, 0.95)
|
||||
span = float(np.percentile(vals, 97) - np.percentile(vals, 3) + 1e-9)
|
||||
smooth = 1.0 - min(max(p95 / span, 0.0), 1.0)
|
||||
uniq_ratio = float(len(np.unique(vals))) / float(vals.size)
|
||||
var = float(np.var(vals))
|
||||
|
||||
ia = interarrival_metrics(times[:len(vals)])
|
||||
sm = slope_metrics(vals, times[:len(vals)])
|
||||
slope_q = sm["slope_p95"] if slope_quantile <= 0.95 else sm["slope_p99"]
|
||||
slope_frac_raw = (slope_q / span) if span > 0 else 0.0
|
||||
|
||||
if uniq_ratio <= 0.02:
|
||||
return None
|
||||
if smooth < min_smooth:
|
||||
return None
|
||||
if (max_slope_frac_raw is not None) and (slope_frac_raw > max_slope_frac_raw):
|
||||
return None
|
||||
|
||||
return {
|
||||
"label": label,
|
||||
"mode": "unsupervised",
|
||||
"n": int(vals.size),
|
||||
"raw_min": float(np.min(vals)),
|
||||
"raw_max": float(np.max(vals)),
|
||||
"raw_var": var,
|
||||
"span_raw": span,
|
||||
"p95_absdiff_raw": float(p95),
|
||||
"smoothness": float(smooth),
|
||||
"uniq_ratio": float(uniq_ratio),
|
||||
"rate_hz_est": float(ia["rate_hz"]),
|
||||
"period_std_ms": float(ia["period_std"] * 1000.0),
|
||||
"jitter_cv": float(ia["jitter_cv"]),
|
||||
"slope_q_raw": float(slope_q),
|
||||
"slope_frac_raw": float(slope_frac_raw),
|
||||
}
|
||||
|
||||
|
||||
# ---------- Plot & Report ----------
|
||||
|
||||
def plot_timeseries(times: np.ndarray, series: np.ndarray, out_png: Path, title: str, ylabel: str) -> None:
|
||||
plt.figure(figsize=(10, 4))
|
||||
plt.plot(times[:len(series)], series, marker=".", linestyle="-")
|
||||
plt.xlabel("Zeit (s)")
|
||||
plt.ylabel(ylabel)
|
||||
plt.title(title)
|
||||
plt.grid(True)
|
||||
plt.tight_layout()
|
||||
out_png.parent.mkdir(parents=True, exist_ok=True)
|
||||
plt.savefig(out_png, dpi=150)
|
||||
plt.close()
|
||||
|
||||
def df_to_md_table(df: pd.DataFrame) -> str:
|
||||
"""Robustes Markdown-Table: nutzt to_markdown falls vorhanden, sonst CSV in Codeblock."""
|
||||
try:
|
||||
        return df.to_markdown(index=False)  # may require 'tabulate'
|
||||
except Exception:
|
||||
return "```\n" + df.to_csv(index=False) + "```"
|
||||
|
||||
def write_report_md(path: Path, header: dict, top_rows: pd.DataFrame, failures: pd.DataFrame, mode: str, links: dict) -> None:
|
||||
md = []
|
||||
md.append(f"# Trace Report – {header.get('trace_name','')}")
|
||||
md.append("")
|
||||
md.append(f"- **Mode:** {mode}")
|
||||
for k, v in header.items():
|
||||
if k in ("trace_name",):
|
||||
continue
|
||||
md.append(f"- **{k}**: {v}")
|
||||
md.append("")
|
||||
|
||||
if mode == "range_fit":
|
||||
md.append("## Top-Kandidaten (Range-Fit)")
|
||||
md.append("Hit-Ratio, Slope/Jitter & Score – beste zuerst.\n")
|
||||
if top_rows is not None and not top_rows.empty:
|
||||
md.append(df_to_md_table(top_rows))
|
||||
else:
|
||||
md.append("_Keine Kandidaten über Schwelle._")
|
||||
md.append("")
|
||||
if failures is not None and not failures.empty:
|
||||
md.append("## Ausgeschlossene Kandidaten (Gründe)\n")
|
||||
md.append(df_to_md_table(failures[["label", "fail_reasons"]]))
|
||||
else:
|
||||
md.append("## Top-Kandidaten (Unsupervised)\n")
|
||||
if top_rows is not None and not top_rows.empty:
|
||||
md.append(df_to_md_table(top_rows))
|
||||
else:
|
||||
md.append("_Keine plausiblen Rohsignale._")
|
||||
|
||||
md.append("\n## Artefakte")
|
||||
for k, v in links.items():
|
||||
md.append(f"- **{k}**: `{v}`")
|
||||
path.write_text("\n".join(md), encoding="utf-8")
|
||||
|
||||
|
||||
# ---------- Main ----------
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(description="Range-/Unsupervised-Fit mit physikbasierten Constraints + Bericht")
|
||||
ap.add_argument("trace", help="Pfad zur .trace Datei")
|
||||
|
||||
# supervision
|
||||
ap.add_argument("--rmin", type=float, default=None)
|
||||
ap.add_argument("--rmax", type=float, default=None)
|
||||
ap.add_argument("--allow-neg-scale", action="store_true")
|
||||
|
||||
# shared
|
||||
ap.add_argument("--rx-only", action="store_true")
|
||||
ap.add_argument("--outdir", default=".")
|
||||
ap.add_argument("--plots-top", type=int, default=8)
|
||||
|
||||
# supervised thresholds
|
||||
ap.add_argument("--min-hit", type=float, default=0.5)
|
||||
ap.add_argument("--rate-min", type=float, default=None)
|
||||
ap.add_argument("--rate-max", type=float, default=None)
|
||||
ap.add_argument("--jitter-max-ms", type=float, default=None)
|
||||
ap.add_argument("--max-slope-abs", type=float, default=None, help="Max |Δphys|/s (z. B. °C/s, km/h/s)")
|
||||
ap.add_argument("--max-slope-frac", type=float, default=None, help="Max |Δphys|/s relativ zu (rmax-rmin)")
|
||||
ap.add_argument("--slope-quantile", type=float, default=0.95, help="0.95 oder 0.99")
|
||||
ap.add_argument("--min-uniq-ratio", type=float, default=None)
|
||||
|
||||
# unsupervised thresholds
|
||||
ap.add_argument("--min-smooth", type=float, default=0.2)
|
||||
ap.add_argument("--max-slope-frac-raw", type=float, default=None, help="roh: (|Δraw|/s)/Span")
|
||||
|
||||
args = ap.parse_args()
|
||||
|
||||
trace = Path(args.trace)
|
||||
df = parse_trace(trace, rx_only=args.rx_only)
|
||||
if df.empty:
|
||||
print("Keine Daten in Trace.", file=sys.stderr)
|
||||
sys.exit(2)
|
||||
|
||||
supervised = (args.rmin is not None) and (args.rmax is not None)
|
||||
outdir = Path(args.outdir)
|
||||
outdir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
if supervised:
|
||||
constraints = {
|
||||
"rate_min": args.rate_min,
|
||||
"rate_max": args.rate_max,
|
||||
"jitter_max_ms": args.jitter_max_ms,
|
||||
"max_slope_abs": args.max_slope_abs,
|
||||
"max_slope_frac": args.max_slope_frac,
|
||||
"slope_quantile": args.slope_quantile,
|
||||
"min_uniq_ratio": args.min_uniq_ratio,
|
||||
}
|
||||
results = []
|
||||
rejected = []
|
||||
for label, series, times in gen_candidates(df):
|
||||
r = evaluate_supervised(label, series, times, args.rmin, args.rmax, args.allow_neg_scale, constraints)
|
||||
if r is None:
|
||||
continue
|
||||
if r["hit_ratio"] >= args.min_hit:
|
||||
(results if r["passed"] else rejected).append({**r, "trace": trace.stem})
|
||||
|
||||
if not results and not rejected:
|
||||
print("Keine Kandidaten über Schwelle gefunden.", file=sys.stderr)
|
||||
sys.exit(3)
|
||||
|
||||
df_ok = pd.DataFrame(results).sort_values(
|
||||
["quality_score", "hit_ratio", "p95_absdiff_phys", "rate_hz_est", "n"],
|
||||
ascending=[False, False, True, False, False]
|
||||
)
|
||||
df_rej = pd.DataFrame(rejected)
|
||||
|
||||
csv_path = outdir / f"{trace.stem}_encoding_candidates.csv"
|
||||
if not df_ok.empty:
|
||||
df_ok.to_csv(csv_path, index=False)
|
||||
print(f"Kandidaten-CSV: {csv_path}")
|
||||
|
||||
# Plots für Top-Kandidaten (oder Rejected, falls keine OK)
|
||||
top_for_plots = df_ok if not df_ok.empty else df_rej
|
||||
data = df["data"].tolist()
|
||||
times_all = df["time_s"].to_numpy(dtype=float)
|
||||
|
||||
        def reconstruct_vals(label: str) -> Tuple[np.ndarray, np.ndarray] | None:
|
||||
if label.startswith("byte["):
|
||||
i = int(label.split("[")[1].split("]")[0])
|
||||
idx = [k for k, d in enumerate(data) if len(d) > i]
|
||||
if not idx: return None
|
||||
return np.array([data[k][i] for k in idx], dtype=float), times_all[idx]
|
||||
elif label.startswith(("le16", "be16", "le16s", "be16s")):
|
||||
signed = label.startswith(("le16s", "be16s"))
|
||||
i, j = map(int, label.split("[")[1].split("]")[0].split("-"))
|
||||
idx = [k for k, d in enumerate(data) if len(d) > j]
|
||||
if not idx: return None
|
||||
a = [data[k][i] for k in idx]
|
||||
b = [data[k][j] for k in idx]
|
||||
if label.startswith("le16"):
|
||||
v = [le16(x, y) for x, y in zip(a, b)]
|
||||
else:
|
||||
v = [be16(x, y) for x, y in zip(a, b)]
|
||||
if signed:
|
||||
v = [s16(int(x)) for x in v]
|
||||
return np.array(v, dtype=float), times_all[idx]
|
||||
return None
|
||||
|
||||
for _, row in top_for_plots.head(max(1, args.plots_top)).iterrows():
|
||||
rec = reconstruct_vals(row["label"])
|
||||
if rec is None:
|
||||
continue
|
||||
vals, tt = rec
|
||||
phys = vals * row["scale"] + row["offset"]
|
||||
out_png = outdir / f"{trace.stem}_{row['label'].replace('[','_').replace(']','')}.png"
|
||||
plot_timeseries(tt[:len(phys)], phys, out_png,
|
||||
f"{trace.name} – {row['label']} (scale={row['scale']:.6g}, offset={row['offset']:.6g})",
|
||||
"phys (geschätzt)")
|
||||
|
||||
        # report
|
||||
hdr = {
|
||||
"trace_name": trace.name,
|
||||
"mode": "range_fit",
|
||||
"rmin": args.rmin,
|
||||
"rmax": args.rmax,
|
||||
"min_hit": args.min_hit,
|
||||
"rate_min": args.rate_min,
|
||||
"rate_max": args.rate_max,
|
||||
"jitter_max_ms": args.jitter_max_ms,
|
||||
"max_slope_abs": args.max_slope_abs,
|
||||
"max_slope_frac": args.max_slope_frac,
|
||||
"slope_quantile": args.slope_quantile,
|
||||
}
|
||||
top_view = df_ok.head(12)[
|
||||
["label", "quality_score", "hit_ratio", "scale", "offset",
|
||||
"rate_hz_est", "period_std_ms", "slope_p95_per_s", "slope_p99_per_s",
|
||||
"p95_absdiff_phys", "uniq_ratio"]
|
||||
] if not df_ok.empty else pd.DataFrame()
|
||||
fail_view = df_rej[["label", "fail_reasons"]] if not df_rej.empty else pd.DataFrame()
|
||||
|
||||
md_path = outdir / f"{trace.stem}_report.md"
|
||||
json_path = outdir / f"{trace.stem}_report.json"
|
||||
write_report_md(md_path, hdr, top_view, fail_view, "range_fit",
|
||||
{"candidates_csv": str(csv_path) if not df_ok.empty else "(leer)"})
|
||||
with open(json_path, "w", encoding="utf-8") as f:
|
||||
json.dump({
|
||||
"header": hdr,
|
||||
"accepted": df_ok.to_dict(orient="records"),
|
||||
"rejected": df_rej.to_dict(orient="records"),
|
||||
}, f, ensure_ascii=False, indent=2)
|
||||
print(f"Report: {md_path}")
|
||||
print(f"Report JSON: {json_path}")
|
||||
|
||||
if not df_ok.empty:
|
||||
print("\nTop-Kandidaten:")
|
||||
cols = ["label", "quality_score", "hit_ratio", "scale", "offset",
|
||||
"rate_hz_est", "period_std_ms", "slope_p95_per_s", "slope_p99_per_s"]
|
||||
print(df_ok.head(10)[cols].to_string(index=False))
|
||||
else:
|
||||
print("\nKeine Kandidaten PASS; siehe Gründe in report.")
|
||||
|
||||
else:
|
||||
# Unsupervised
|
||||
results = []
|
||||
for label, series, times in gen_candidates(df):
|
||||
r = evaluate_unsupervised(label, series, times,
|
||||
min_smooth=args.min_smooth,
|
||||
max_slope_frac_raw=args.max_slope_frac_raw,
|
||||
slope_quantile=args.slope_quantile)
|
||||
if r is None:
|
||||
continue
|
||||
r["trace"] = trace.stem
|
||||
results.append(r)
|
||||
|
||||
if not results:
|
||||
print("Keine plausiblen Rohsignale gefunden. Tipp: --min-smooth senken.", file=sys.stderr)
|
||||
sys.exit(3)
|
||||
|
||||
df_res = pd.DataFrame(results).sort_values(
|
||||
["smoothness", "span_raw", "raw_var", "rate_hz_est", "n"],
|
||||
ascending=[False, False, False, False, False]
|
||||
)
|
||||
|
||||
csv_path = outdir / f"{trace.stem}_unsupervised_candidates.csv"
|
||||
df_res.to_csv(csv_path, index=False)
|
||||
print(f"Unsupervised-CSV: {csv_path}")
|
||||
|
||||
# Plots der Top-N (Rohwerte)
|
||||
data = df["data"].tolist()
|
||||
times_all = df["time_s"].to_numpy(dtype=float)
|
||||
|
||||
def reconstruct_raw(label: str) -> Tuple[np.ndarray, np.ndarray] | None:
|
||||
if label.startswith("byte["):
|
||||
i = int(label.split("[")[1].split("]")[0])
|
||||
idx = [k for k, d in enumerate(data) if len(d) > i]
|
||||
if not idx: return None
|
||||
return np.array([data[k][i] for k in idx], dtype=float), times_all[idx]
|
||||
elif label.startswith(("le16", "be16", "le16s", "be16s")):
|
||||
signed = label.startswith(("le16s", "be16s"))
|
||||
i, j = map(int, label.split("[")[1].split("]")[0].split("-"))
|
||||
idx = [k for k, d in enumerate(data) if len(d) > j]
|
||||
if not idx: return None
|
||||
a = [data[k][i] for k in idx]
|
||||
b = [data[k][j] for k in idx]
|
||||
if label.startswith("le16"):
|
||||
v = [le16(x, y) for x, y in zip(a, b)]
|
||||
else:
|
||||
v = [be16(x, y) for x, y in zip(a, b)]
|
||||
if signed:
|
||||
v = [s16(int(x)) for x in v]
|
||||
return np.array(v, dtype=float), times_all[idx]
|
||||
return None
|
||||
|
||||
for _, row in df_res.head(max(1, args.plots_top)).iterrows():
|
||||
rec = reconstruct_raw(row["label"])
|
||||
if rec is None:
|
||||
continue
|
||||
vals, tt = rec
|
||||
out_png = outdir / f"{trace.stem}_{row['label'].replace('[','_').replace(']','')}_raw.png"
|
||||
plot_timeseries(tt[:len(vals)], vals, out_png,
|
||||
f"{trace.name} – {row['label']} (raw)", "raw")
|
||||
|
||||
# Bericht
|
||||
hdr = {
|
||||
"trace_name": trace.name,
|
||||
"mode": "unsupervised",
|
||||
"min_smooth": args.min_smooth,
|
||||
"max_slope_frac_raw": args.max_slope_frac_raw,
|
||||
}
|
||||
top_view = df_res.head(12)[
|
||||
["label", "smoothness", "span_raw", "raw_var",
|
||||
"rate_hz_est", "period_std_ms", "slope_frac_raw", "uniq_ratio"]
|
||||
]
|
||||
md_path = outdir / f"{trace.stem}_report.md"
|
||||
json_path = outdir / f"{trace.stem}_report.json"
|
||||
write_report_md(md_path, hdr, top_view, pd.DataFrame(), "unsupervised",
|
||||
{"candidates_csv": str(csv_path)})
|
||||
with open(json_path, "w", encoding="utf-8") as f:
|
||||
json.dump({
|
||||
"header": hdr,
|
||||
"accepted": df_res.to_dict(orient="records"),
|
||||
}, f, ensure_ascii=False, indent=2)
|
||||
print(f"Report: {md_path}")
|
||||
print(f"Report JSON: {json_path}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -191,6 +191,29 @@
|
||||
</div>
|
||||
</div>
|
||||
</p>
|
||||
<hr />
|
||||
<p>
|
||||
<h4>CAN / OBD2 Trace</h4>
|
||||
<div class="form-group row">
|
||||
<div class="col">
|
||||
<div class="text-center mb-2">
|
||||
<!-- Beide Start-Buttons senden btn-trace-start; Modus kommt als value -->
|
||||
<button id="trace-start" data-wsid="trace-start" value="raw" class="btn-wsevent btn btn-outline-primary">
|
||||
Start CAN-Trace
|
||||
</button>
|
||||
<button id="trace-start-obd" data-wsid="trace-start" value="obd" class="btn-wsevent btn btn-outline-primary ml-2">
|
||||
Start OBD-Trace
|
||||
</button>
|
||||
<button id="trace-stop" class="btn-wsevent btn btn-outline-danger ml-2">
|
||||
Stop
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<textarea id="trace-out" class="form-control" style="font-family:monospace" rows="8" readonly spellcheck="false"></textarea>
|
||||
<small id="trace-status" class="form-text text-muted">Trace inaktiv</small>
|
||||
</div>
|
||||
</div>
|
||||
</p>
|
||||
<!-- Div Group LiveDebug -->
|
||||
<!-- Div Group Device Reboot -->
|
||||
<hr />
|
||||
|
@@ -5,6 +5,106 @@ var statusMapping;
|
||||
var staticMapping;
|
||||
var overlay;
|
||||
|
||||
let traceActive = false;
|
||||
let traceMode = null;
|
||||
let traceFileName = "";
|
||||
let traceUseFsAccess = false;
|
||||
|
||||
// File-System-Access-Stream (Chromium)
|
||||
let traceWriter = null;
|
||||
let traceEncoder = null;
|
||||
let traceWriteQueue = Promise.resolve(); // für geordnete Writes
|
||||
|
||||
// Fallback: In-Memory-Sammeln (für Blob-Download bei STOP)
|
||||
let traceMemParts = [];
|
||||
|
||||
// Textarea & Status
|
||||
const TRACE_MAX_CHARS = 200000; // ~200 KB für die Anzeige
|
||||
|
||||
function $(id) {
|
||||
return document.getElementById(id);
|
||||
}
|
||||
|
||||
|
||||
function nowIsoCompact() {
|
||||
return new Date().toISOString().replace(/[:.]/g, "-");
|
||||
}
|
||||
function genTraceFileName(mode) {
|
||||
return `cantrace-${mode}-${nowIsoCompact()}.log`;
|
||||
}
|
||||
|
||||
function setTraceUI(active, mode, infoText) {
|
||||
traceActive = !!active;
|
||||
traceMode = active ? mode : null;
|
||||
|
||||
const btnRaw = $("trace-start");
|
||||
const btnObd = $("trace-start-obd");
|
||||
const btnStop = $("trace-stop");
|
||||
const status = $("trace-status");
|
||||
|
||||
if (btnRaw) btnRaw.disabled = active;
|
||||
if (btnObd) btnObd.disabled = active;
|
||||
if (btnStop) btnStop.disabled = !active;
|
||||
|
||||
if (status)
|
||||
status.textContent =
|
||||
infoText || (active ? `Trace aktiv (${mode})` : "Trace inaktiv");
|
||||
}
|
||||
|
||||
function traceClear() {
|
||||
const out = $("trace-out");
|
||||
if (out) out.value = "";
|
||||
}
|
||||
|
||||
function traceAppend(text) {
|
||||
const out = $("trace-out");
|
||||
if (!out || !text) return;
|
||||
out.value += text;
|
||||
if (out.value.length > TRACE_MAX_CHARS) {
|
||||
out.value = out.value.slice(-TRACE_MAX_CHARS);
|
||||
}
|
||||
out.scrollTop = out.scrollHeight;
|
||||
}
|
||||
|
||||
function triggerBlobDownload(filename, blob) {
|
||||
const url = URL.createObjectURL(blob);
|
||||
const a = document.createElement("a");
|
||||
a.href = url;
|
||||
a.download = filename;
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
setTimeout(() => {
|
||||
document.body.removeChild(a);
|
||||
URL.revokeObjectURL(url);
|
||||
}, 0);
|
||||
}
|
||||
|
||||
function parseKv(s) {
|
||||
const out = Object.create(null);
|
||||
s.split(";").forEach((part) => {
|
||||
const eq = part.indexOf("=");
|
||||
if (eq > 0) {
|
||||
const k = part.slice(0, eq).trim();
|
||||
const v = part.slice(eq + 1).trim();
|
||||
if (k) out[k] = v;
|
||||
}
|
||||
});
|
||||
return out;
|
||||
}
|
||||
|
||||
// geordnete Writes auf File System Access Writer
|
||||
function writeToFs(chunk) {
|
||||
if (!traceUseFsAccess || !traceWriter) return;
|
||||
const data = traceEncoder
|
||||
? traceEncoder.encode(chunk)
|
||||
: new TextEncoder().encode(chunk);
|
||||
traceWriteQueue = traceWriteQueue
|
||||
.then(() => traceWriter.write(data))
|
||||
.catch(console.error);
|
||||
}
|
||||
|
||||
document.addEventListener("DOMContentLoaded", function () {
|
||||
// Ihr JavaScript-Code hier, einschließlich der onLoad-Funktion
|
||||
overlay = document.getElementById("overlay");
|
||||
@@ -45,16 +145,32 @@ function initSettingInputs() {
|
||||
|
||||
function onOpen(event) {
|
||||
console.log("Connection opened");
|
||||
setTraceUI(false, null, "Verbunden – Trace inaktiv");
|
||||
}
|
||||
|
||||
function onClose(event) {
|
||||
console.log("Connection closed");
|
||||
setTimeout(initWebSocket, 1000);
|
||||
overlay.style.display = "flex";
|
||||
|
||||
// Falls Trace noch aktiv war: lokal finalisieren
|
||||
if (traceActive) {
|
||||
const note = "Trace beendet (Verbindung getrennt)";
|
||||
if (traceUseFsAccess && traceWriter) {
|
||||
traceWriteQueue.then(() => traceWriter.close()).catch(console.error);
|
||||
traceWriter = null;
|
||||
} else if (traceMemParts.length) {
|
||||
const blob = new Blob(traceMemParts, { type: "text/plain" });
|
||||
triggerBlobDownload(traceFileName || "cantrace.log", blob);
|
||||
traceMemParts = [];
|
||||
}
|
||||
setTraceUI(false, null, note);
|
||||
showNotification(note, "warning");
|
||||
}
|
||||
}
|
||||
|
||||
function sendButton(event) {
|
||||
var targetElement = event.target;
|
||||
async function sendButton(event) {
|
||||
const targetElement = event.target;
|
||||
|
||||
if (
|
||||
targetElement.classList.contains("confirm") &&
|
||||
@@ -62,7 +178,46 @@ function sendButton(event) {
|
||||
)
|
||||
return;
|
||||
|
||||
websocket_sendevent("btn-" + targetElement.id, targetElement.value);
|
||||
const wsid = targetElement.dataset.wsid || targetElement.id; // z.B. "trace-start"
|
||||
const val = targetElement.value || "";
|
||||
|
||||
// File-Ziel *vor* dem WS-Start öffnen (nur bei trace-start; wegen User-Gesture!)
|
||||
if (wsid === "trace-start") {
|
||||
const mode = val || "raw";
|
||||
traceFileName = genTraceFileName(mode);
|
||||
|
||||
// Anzeige schon mal leeren
|
||||
traceClear();
|
||||
setTraceUI(false, null, "Trace wird gestartet…");
|
||||
|
||||
traceUseFsAccess = false;
|
||||
traceWriter = null;
|
||||
traceEncoder = null;
|
||||
traceWriteQueue = Promise.resolve();
|
||||
traceMemParts = []; // Fallback-Puffer leeren
|
||||
|
||||
if (window.showSaveFilePicker) {
|
||||
try {
|
||||
const fh = await showSaveFilePicker({
|
||||
suggestedName: traceFileName,
|
||||
types: [
|
||||
{
|
||||
description: "Text Log",
|
||||
accept: { "text/plain": [".log", ".txt"] },
|
||||
},
|
||||
],
|
||||
});
|
||||
traceWriter = await fh.createWritable();
|
||||
traceEncoder = new TextEncoder();
|
||||
traceUseFsAccess = true;
|
||||
} catch (e) {
|
||||
// Nutzer hat evtl. abgebrochen → Fallback in RAM
|
||||
traceUseFsAccess = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
websocket_sendevent("btn-" + wsid, val);
|
||||
}
|
||||
|
||||
function onMessage(event) {
|
||||
@@ -101,6 +256,58 @@ function onMessage(event) {
|
||||
fillValuesToHTML(result);
|
||||
overlay.style.display = "none";
|
||||
}
|
||||
// --- Trace: Start ---
|
||||
else if (data.startsWith("STARTTRACE;")) {
|
||||
const kv = parseKv(data.slice(11)); // mode=..., ts=...
|
||||
const mode = kv.mode || "?";
|
||||
setTraceUI(true, mode, `Trace gestartet (${mode})`);
|
||||
|
||||
// Fallback: wenn kein FS-Access → in RAM sammeln
|
||||
// (sonst haben wir traceWriter bereits im Klick vorbereitet)
|
||||
}
|
||||
// --- Trace: Lines (ggf. mehrere in einer WS-Nachricht) ---
|
||||
else if (data.startsWith("TRACELINE;")) {
|
||||
const payload = data.replace(/TRACELINE;/g, ""); // reiner Text inkl. '\n'
|
||||
traceAppend(payload);
|
||||
if (traceUseFsAccess && traceWriter) {
|
||||
writeToFs(payload);
|
||||
} else {
|
||||
traceMemParts.push(payload);
|
||||
}
|
||||
}
|
||||
// --- Trace: Stop/Summary ---
|
||||
else if (data.startsWith("STOPTRACE;")) {
|
||||
const kv = parseKv(data.slice(10));
|
||||
const msg = `Trace beendet (${kv.mode || "?"}), Zeilen=${
|
||||
kv.lines || "0"
|
||||
}, Drops=${kv.drops || "0"}${kv.reason ? ", Grund=" + kv.reason : ""}`;
|
||||
|
||||
// Datei finalisieren
|
||||
if (traceUseFsAccess && traceWriter) {
|
||||
traceWriteQueue.then(() => traceWriter.close()).catch(console.error);
|
||||
traceWriter = null;
|
||||
} else if (traceMemParts.length) {
|
||||
const blob = new Blob(traceMemParts, { type: "text/plain" });
|
||||
triggerBlobDownload(traceFileName || "cantrace.log", blob);
|
||||
traceMemParts = [];
|
||||
}
|
||||
|
||||
setTraceUI(false, null, msg);
|
||||
showNotification(msg, "info");
|
||||
}
|
||||
// --- Busy/Fehler/Ack ---
|
||||
else if (data.startsWith("TRACEBUSY;")) {
|
||||
const kv = parseKv(data.slice(10));
|
||||
const owner = kv.owner ? " (Owner #" + kv.owner + ")" : "";
|
||||
showNotification("Trace bereits aktiv" + owner, "warning");
|
||||
} else if (data.startsWith("TRACEERROR;")) {
|
||||
const kv = parseKv(data.slice(11));
|
||||
showNotification("Trace-Fehler: " + (kv.msg || "unbekannt"), "danger");
|
||||
} else if (data.startsWith("TRACEACK;")) {
|
||||
// optional
|
||||
const kv = parseKv(data.slice(9));
|
||||
console.log("TRACEACK", kv);
|
||||
}
|
||||
}
|
||||
|
||||
function createMapping(mappingString) {
|
||||
|
@@ -1 +1 @@
|
||||
1.04
|
||||
1.05
|
@@ -26,6 +26,20 @@ struct CanFilter {
|
||||
bool ext; // false = STD(11-bit), true = EXT(29-bit)
|
||||
};
|
||||
|
||||
// =====================
|
||||
// Trace / Logging Types
|
||||
// =====================
|
||||
struct CanLogFrame {
|
||||
uint32_t ts_ms;
|
||||
uint32_t id;
|
||||
bool ext;
|
||||
bool rx; // true = RX, false = TX
|
||||
uint8_t dlc;
|
||||
uint8_t data[8];
|
||||
};
|
||||
|
||||
using CanTraceSink = void (*)(const CanLogFrame& f);
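A trace sink is simply a free function matching this pointer type. A minimal sketch of a sink that dumps every traced frame to the serial console (the Serial output and line format here are illustrative, not part of the firmware):

```cpp
#include <Arduino.h>
#include "can_hal.h"

// Illustrative sink: print each traced frame as one ASCII line on Serial.
static void serialTraceSink(const CanLogFrame &f)
{
    char line[64];
    int off = snprintf(line, sizeof(line), "%lu %s 0x%lX %u",
                       (unsigned long)f.ts_ms, f.rx ? "RX" : "TX",
                       (unsigned long)f.id, f.dlc);
    for (uint8_t i = 0; i < f.dlc && off > 0 && off < (int)sizeof(line) - 4; ++i)
        off += snprintf(line + off, sizeof(line) - off, " %02X", f.data[i]);
    Serial.println(line);
}

// Registration, e.g. in setup() once CAN_HAL_Init() succeeded:
//   CAN_HAL_SetTraceSink(serialTraceSink);
//   CAN_HAL_EnableRawSniffer(true);   // optional: open filters for raw sniffing
```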
|
||||
|
||||
// ==== API ====
|
||||
|
||||
// 1) Einmalige Hardware-Initialisierung + integrierter Loopback-Selftest.
|
||||
@@ -54,6 +68,11 @@ bool CAN_HAL_SetFilters(const CanFilter* list, size_t count);
|
||||
bool CAN_HAL_Read(unsigned long& id, uint8_t& len, uint8_t data[8]); // true = Frame gelesen
|
||||
uint8_t CAN_HAL_Send(unsigned long id, bool ext, uint8_t len, const uint8_t* data); // CAN_OK bei Erfolg
|
||||
|
||||
// Diagnose/Utilities (nutzen MCP-APIs)
|
||||
// Diagnose/Utilities
|
||||
uint8_t CAN_HAL_GetErrorFlags(); // Intern: getError()
|
||||
void CAN_HAL_GetErrorCounters(uint8_t& tec, uint8_t& rec); // TX/RX Error Counter
|
||||
|
||||
// Trace / Sniffer
|
||||
void CAN_HAL_SetTraceSink(CanTraceSink sink);
|
||||
void CAN_HAL_EnableRawSniffer(bool enable);
|
||||
bool CAN_HAL_IsRawSnifferEnabled();
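Typical bring-up order with this API, mirroring how can_native.cpp / can_obd2.cpp use it (a sketch; the concrete config, mask and filter values are examples taken from those modules):

```cpp
CanHalConfig cfg;
cfg.baud = CAN_500KBPS;
cfg.clock = MCP_16MHZ;
cfg.listenOnlyProbeMs = 50;            // brief "is the bus alive?" probe

if (CAN_HAL_Init(cfg))                 // includes the loopback self-test
{
    CAN_HAL_SetStdMask11(0, 0x7F0);    // match 0x7E8..0x7EF (OBD-II responses)
    CAN_HAL_SetStdMask11(1, 0x7F0);
    CanFilter flist[1] = {{0x7E8, false}};
    CAN_HAL_SetFilters(flist, 1);
    CAN_HAL_SetMode(MCP_NORMAL);
}
```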
|
||||
|
@@ -1,6 +1,5 @@
|
||||
#pragma once
|
||||
#include <Arduino.h>
|
||||
#include "can_hal.h"
|
||||
|
||||
// Initialisiert das OBD2-CAN-Profil:
|
||||
// - setzt Masken/Filter für 0x7E8..0x7EF (ECU-Antworten)
|
||||
|
@@ -112,6 +112,7 @@ typedef enum CANSource_e
|
||||
{
|
||||
KTM_890_ADV_R_2021,
|
||||
KTM_1290_SD_R_2023,
|
||||
TRIUMPH_SPEED_TWIN_1200_RS_2025,
|
||||
CANSOURCE_COUNT // <- sentinel (must be last)
|
||||
} CANSource_t;
|
||||
|
||||
|
@@ -46,4 +46,6 @@ void Webserver_Shutdown();
|
||||
void Websocket_PushLiveDebug(String Message);
|
||||
void Websocket_PushNotification(String Message, NotificationType_t type);
|
||||
|
||||
void TRACE_OnObdFrame(uint32_t id, bool rx, const uint8_t *d, uint8_t dlc, const char *note);
|
||||
|
||||
#endif // _WEBUI_H_
|
||||
|
@@ -19,9 +19,16 @@ board = d1_mini
|
||||
framework = arduino
|
||||
upload_speed = 921600
|
||||
|
||||
custom_firmware_version = 1.04
|
||||
custom_firmware_version = 1.07
|
||||
|
||||
; --- C++17 erzwingen (für if constexpr etc.) ---
|
||||
; Entferne evtl. voreingestelltes -std=gnu++11/14 aus dem Core:
|
||||
build_unflags =
|
||||
-std=gnu++11
|
||||
-std=gnu++14
|
||||
; Setze C++17 für alle Envs:
|
||||
build_flags =
|
||||
-std=gnu++17
|
||||
-DWIFI_SSID_CLIENT=${wifi_cred.wifi_ssid_client}
|
||||
-DWIFI_PASSWORD_CLIENT=${wifi_cred.wifi_password_client}
|
||||
-DADMIN_PASSWORD=${wifi_cred.admin_password}
|
||||
@@ -96,7 +103,6 @@ build_flags =
|
||||
-DPCB_REV=${this.custom_pcb_revision}
|
||||
board_build.ldscript = eagle.flash.4m1m.ld
|
||||
|
||||
|
||||
[env:pcb_rev_1-2_serial]
|
||||
extends = env
|
||||
custom_pcb_revision = 2
|
||||
|
@@ -1,24 +1,42 @@
|
||||
#include "can_hal.h"
|
||||
#include "dtc.h"
|
||||
// #include "debugger.h" // optional für Logs
|
||||
#include <string.h> // memcpy, memcmp
|
||||
|
||||
// ==== Interner Zustand/Helper ====
|
||||
// =====================
|
||||
// Interner Zustand/Helper
|
||||
// =====================
|
||||
MCP_CAN CAN0(GPIO_CS_CAN);
|
||||
|
||||
static bool s_ready = false;
|
||||
static uint8_t s_nextFiltSlot = 0; // 0..5 (MCP2515 hat 6 Filter-Slots)
|
||||
static uint16_t s_modeSettleMs = 10; // Default aus Config
|
||||
|
||||
// Trace-Hook
|
||||
static CanTraceSink s_traceSink = nullptr;
|
||||
|
||||
// RAW-Sniffer-Steuerung (Filter offen + Restore der vorherigen Konfig)
|
||||
static bool s_rawSnifferEnabled = false;
|
||||
|
||||
// Spiegel der "Normal"-Konfiguration (damit wir nach RAW wiederherstellen können)
|
||||
static uint16_t s_savedStdMask[2] = {0x000, 0x000};
|
||||
static struct
|
||||
{
|
||||
uint32_t id;
|
||||
bool ext;
|
||||
} s_savedFilt[6];
|
||||
static uint8_t s_savedFiltCount = 0;
|
||||
|
||||
// 11-bit: Lib erwartet (value << 16)
|
||||
static inline uint32_t _std_to_hw(uint16_t v11) { return ((uint32_t)v11) << 16; }
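A few compile-time checks of the `(value << 16)` convention that `_std_to_hw` implements for 11-bit masks and filters (purely illustrative):

```cpp
static_assert(((uint32_t)0x7FFu) << 16 == 0x07FF0000u, "full 11-bit mask");
static_assert(((uint32_t)0x12Du) << 16 == 0x012D0000u, "KTM rear-wheel ID 0x12D");
static_assert(((uint32_t)0x7E8u) << 16 == 0x07E80000u, "OBD-II response ID 0x7E8");
```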
|
||||
|
||||
// „Bestätigter“ Mode-Wechsel mithilfe der Lib-Funktion setMode(newMode)
|
||||
// Viele Forks nutzen intern mcp2515_requestNewMode(); wir retryen kurz.
|
||||
static bool _trySetMode(uint8_t mode, uint16_t settleMs)
|
||||
{
|
||||
const uint32_t t0 = millis();
|
||||
do {
|
||||
if (CAN0.setMode(mode) == CAN_OK) return true;
|
||||
do
|
||||
{
|
||||
if (CAN0.setMode(mode) == CAN_OK)
|
||||
return true;
|
||||
delay(1);
|
||||
} while ((millis() - t0) < settleMs);
|
||||
return false;
|
||||
@@ -27,22 +45,32 @@ static bool _trySetMode(uint8_t mode, uint16_t settleMs)
|
||||
// LOOPBACK-Selftest (ohne Bus)
|
||||
static bool _selftest_loopback(uint16_t windowMs)
|
||||
{
|
||||
if (!_trySetMode(MCP_LOOPBACK, s_modeSettleMs)) return false;
|
||||
if (!_trySetMode(MCP_LOOPBACK, s_modeSettleMs))
|
||||
return false;
|
||||
|
||||
const unsigned long tid = 0x123;
|
||||
uint8_t tx[8] = {0xA5, 0x5A, 0x11, 0x22, 0x33, 0x44, 0x77, 0x88};
|
||||
if (CAN0.sendMsgBuf(tid, 0, 8, tx) != CAN_OK) {
|
||||
if (CAN0.sendMsgBuf(tid, 0, 8, tx) != CAN_OK)
|
||||
{
|
||||
(void)_trySetMode(MCP_NORMAL, s_modeSettleMs);
|
||||
return false;
|
||||
}
|
||||
|
||||
bool got = false;
|
||||
const uint32_t t0 = millis();
|
||||
while ((millis() - t0) < windowMs) {
|
||||
if (CAN0.checkReceive() == CAN_MSGAVAIL) {
|
||||
unsigned long rid; uint8_t len, rx[8];
|
||||
if (CAN0.readMsgBuf(&rid, &len, rx) == CAN_OK) {
|
||||
if (rid == tid && len == 8 && memcmp(tx, rx, 8) == 0) { got = true; break; }
|
||||
while ((millis() - t0) < windowMs)
|
||||
{
|
||||
if (CAN0.checkReceive() == CAN_MSGAVAIL)
|
||||
{
|
||||
unsigned long rid;
|
||||
uint8_t len, rx[8];
|
||||
if (CAN0.readMsgBuf(&rid, &len, rx) == CAN_OK)
|
||||
{
|
||||
if (rid == tid && len == 8 && memcmp(tx, rx, 8) == 0)
|
||||
{
|
||||
got = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
delay(1);
|
||||
@@ -55,60 +83,153 @@ static bool _selftest_loopback(uint16_t windowMs)
|
||||
// Optional: kurzer ListenOnly-Hörtest (nur Heuristik, keine DTC-Änderung)
|
||||
static void _probe_listen_only(uint16_t ms)
|
||||
{
|
||||
if (ms == 0) return;
|
||||
if (!_trySetMode(MCP_LISTENONLY, s_modeSettleMs)) return;
|
||||
if (ms == 0)
|
||||
return;
|
||||
if (!_trySetMode(MCP_LISTENONLY, s_modeSettleMs))
|
||||
return;
|
||||
const uint32_t t0 = millis();
|
||||
while ((millis() - t0) < ms) {
|
||||
if (CAN0.checkReceive() == CAN_MSGAVAIL) break;
|
||||
while ((millis() - t0) < ms)
|
||||
{
|
||||
if (CAN0.checkReceive() == CAN_MSGAVAIL)
|
||||
break;
|
||||
delay(1);
|
||||
}
|
||||
(void)_trySetMode(MCP_NORMAL, s_modeSettleMs);
|
||||
}
|
||||
|
||||
// ==== Öffentliche API ====
|
||||
// Offen konfigurieren (RAW-Sniffer)
|
||||
static bool _apply_open_filters()
|
||||
{
|
||||
if (!_trySetMode(MODE_CONFIG, s_modeSettleMs))
|
||||
return false;
|
||||
|
||||
// Masken 0 -> alles durchlassen
|
||||
CAN0.init_Mask(0, 0, _std_to_hw(0x000));
|
||||
CAN0.init_Mask(1, 0, _std_to_hw(0x000));
|
||||
// Filter egal
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
CAN0.init_Filt(i, 0, _std_to_hw(0x000));
|
||||
}
|
||||
s_nextFiltSlot = 0;
|
||||
|
||||
return _trySetMode(MCP_NORMAL, s_modeSettleMs);
|
||||
}
|
||||
|
||||
// Gespeicherte Normal-Konfiguration anwenden
|
||||
static bool _apply_saved_filters()
|
||||
{
|
||||
if (!_trySetMode(MODE_CONFIG, s_modeSettleMs))
|
||||
return false;
|
||||
|
||||
CAN0.init_Mask(0, 0, _std_to_hw(s_savedStdMask[0]));
|
||||
CAN0.init_Mask(1, 0, _std_to_hw(s_savedStdMask[1]));
|
||||
|
||||
// Erst alle Filter neutralisieren
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
CAN0.init_Filt(i, 0, _std_to_hw(0x000));
|
||||
}
|
||||
|
||||
// Dann gespeicherte Filter wieder setzen
|
||||
s_nextFiltSlot = 0;
|
||||
for (uint8_t i = 0; i < s_savedFiltCount && s_nextFiltSlot < 6; ++i)
|
||||
{
|
||||
const auto &F = s_savedFilt[i];
|
||||
const uint32_t hwId = F.ext ? F.id : _std_to_hw((uint16_t)F.id);
|
||||
CAN0.init_Filt(s_nextFiltSlot++, F.ext ? 1 : 0, hwId);
|
||||
}
|
||||
|
||||
return _trySetMode(MCP_NORMAL, s_modeSettleMs);
|
||||
}
|
||||
|
||||
// =====================
|
||||
// Öffentliche API
|
||||
// =====================
|
||||
|
||||
void CAN_HAL_SetTraceSink(CanTraceSink sink)
|
||||
{
|
||||
s_traceSink = sink;
|
||||
}
|
||||
|
||||
void CAN_HAL_EnableRawSniffer(bool enable)
|
||||
{
|
||||
if (enable == s_rawSnifferEnabled)
|
||||
return;
|
||||
|
||||
if (enable)
|
||||
{
|
||||
// Auf RAW öffnen
|
||||
if (_apply_open_filters())
|
||||
{
|
||||
s_rawSnifferEnabled = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Falls es nicht klappt, lieber Defekt melden als im Zwischending zu bleiben
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Gespeicherte "Normal"-Konfiguration wieder aktivieren
|
||||
if (_apply_saved_filters())
|
||||
{
|
||||
s_rawSnifferEnabled = false;
|
||||
}
|
||||
else
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool CAN_HAL_IsRawSnifferEnabled()
|
||||
{
|
||||
return s_rawSnifferEnabled;
|
||||
}
|
||||
|
||||
bool CAN_HAL_Init(const CanHalConfig &cfg)
|
||||
{
|
||||
s_ready = false;
|
||||
s_modeSettleMs = cfg.modeSettleMs ? cfg.modeSettleMs : 10;
|
||||
s_traceSink = nullptr;
|
||||
s_rawSnifferEnabled = false;
|
||||
|
||||
// 1) SPI/MCP starten
|
||||
// HIER: MCP_STDEXT statt MCP_STD, damit die Lib nicht ins default/Failure läuft
|
||||
if (CAN0.begin(MCP_STDEXT, cfg.baud, cfg.clock) != CAN_OK) {
|
||||
// 1) SPI/MCP starten (STDEXT ist robust gegen Fehlpfade in Lib-Forks)
|
||||
if (CAN0.begin(MCP_STDEXT, cfg.baud, cfg.clock) != CAN_OK)
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
|
||||
// 2) Loopback‑Selftest (ohne Bus)
|
||||
if (!_selftest_loopback(20)) {
|
||||
// 2) Loopback-Selftest (ohne Bus)
|
||||
if (!_selftest_loopback(20))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
|
||||
// 3) Optional Listen‑Only‑Probe (nur Info)
|
||||
// 3) Optional Listen-Only-Probe (nur Info)
|
||||
_probe_listen_only(cfg.listenOnlyProbeMs);
|
||||
|
||||
// 4) Default: Filter/Masks neutral, Mode NORMAL
|
||||
// -> Für Masken/Filter müssen wir in CONFIG sein (hier: MODE_CONFIG laut deiner Lib)
|
||||
if (!_trySetMode(MODE_CONFIG, s_modeSettleMs)) {
|
||||
// 4) Default: Filter/Masks offen, Mode NORMAL
|
||||
if (!_apply_open_filters())
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
|
||||
// weit offen (STD)
|
||||
CAN0.init_Mask(0, 0, _std_to_hw(0x000));
|
||||
CAN0.init_Mask(1, 0, _std_to_hw(0x000));
|
||||
for (uint8_t i = 0; i < 6; ++i) {
|
||||
CAN0.init_Filt(i, 0, _std_to_hw(0x000));
|
||||
}
|
||||
s_nextFiltSlot = 0;
|
||||
|
||||
if (!_trySetMode(MCP_NORMAL, s_modeSettleMs)) {
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
// Initiale "Normal"-Spiegelung: alles offen (bis die App später echte Filter setzt)
|
||||
s_savedStdMask[0] = 0x000;
|
||||
s_savedStdMask[1] = 0x000;
|
||||
s_savedFiltCount = 0;
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
s_savedFilt[i].id = 0;
|
||||
s_savedFilt[i].ext = false;
|
||||
}
|
||||
|
||||
// Erfolgreich
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, false);
|
||||
s_ready = true;
|
||||
return true;
|
||||
@@ -119,18 +240,30 @@ bool CAN_HAL_IsReady() { return s_ready; }
|
||||
bool CAN_HAL_SetMode(uint8_t mode)
|
||||
{
|
||||
const bool ok = _trySetMode(mode, s_modeSettleMs);
|
||||
if (!ok) MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
if (!ok)
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return ok;
|
||||
}
|
||||
|
||||
bool CAN_HAL_SetMask(uint8_t bank, bool ext, uint32_t rawMask)
|
||||
{
|
||||
if (bank > 1) return false;
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG)) return false;
|
||||
if (bank > 1)
|
||||
return false;
|
||||
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG))
|
||||
return false;
|
||||
|
||||
const bool ok = (CAN0.init_Mask(bank, ext ? 1 : 0, rawMask) == CAN_OK);
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL)) {
|
||||
// Spiegeln (nur STD-11 Spiegel führen wir – ext-Masken selten; bei ext ignorieren)
|
||||
if (!ext)
|
||||
{
|
||||
const uint16_t m11 = (uint16_t)(rawMask >> 16);
|
||||
s_savedStdMask[bank] = m11;
|
||||
}
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
@@ -144,27 +277,42 @@ bool CAN_HAL_SetStdMask11(uint8_t bank, uint16_t mask11)
|
||||
|
||||
void CAN_HAL_ClearFilters()
|
||||
{
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG)) {
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return;
|
||||
}
|
||||
|
||||
CAN0.init_Mask(0, 0, _std_to_hw(0x000));
|
||||
CAN0.init_Mask(1, 0, _std_to_hw(0x000));
|
||||
for (uint8_t i = 0; i < 6; ++i) {
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
CAN0.init_Filt(i, 0, _std_to_hw(0x000));
|
||||
}
|
||||
s_nextFiltSlot = 0;
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL)) {
|
||||
// Spiegel auch zurücksetzen
|
||||
s_savedStdMask[0] = 0x000;
|
||||
s_savedStdMask[1] = 0x000;
|
||||
s_savedFiltCount = 0;
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
s_savedFilt[i].id = 0;
|
||||
s_savedFilt[i].ext = false;
|
||||
}
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
}
|
||||
}
|
||||
|
||||
bool CAN_HAL_AddFilter(const CanFilter &f)
|
||||
{
|
||||
if (s_nextFiltSlot >= 6) return false;
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG)) {
|
||||
if (s_nextFiltSlot >= 6)
|
||||
return false;
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
@@ -173,7 +321,19 @@ bool CAN_HAL_AddFilter(const CanFilter& f)
|
||||
const uint8_t slot = s_nextFiltSlot++;
|
||||
const bool ok = (CAN0.init_Filt(slot, f.ext ? 1 : 0, hwId) == CAN_OK);
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL)) {
|
||||
// Spiegeln
|
||||
if (ok)
|
||||
{
|
||||
if (s_savedFiltCount < 6)
|
||||
{
|
||||
s_savedFilt[s_savedFiltCount].id = f.ext ? f.id : (uint32_t)((uint16_t)f.id);
|
||||
s_savedFilt[s_savedFiltCount].ext = f.ext;
|
||||
++s_savedFiltCount;
|
||||
}
|
||||
}
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
@@ -182,25 +342,38 @@ bool CAN_HAL_AddFilter(const CanFilter& f)
|
||||
|
||||
bool CAN_HAL_SetFilters(const CanFilter *list, size_t count)
|
||||
{
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG)) {
|
||||
if (!CAN_HAL_SetMode(MODE_CONFIG))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Slots zurücksetzen
|
||||
s_nextFiltSlot = 0;
|
||||
for (uint8_t i = 0; i < 6; ++i) {
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{
|
||||
CAN0.init_Filt(i, 0, _std_to_hw(0x000));
|
||||
}
|
||||
|
||||
// Setzen
|
||||
for (size_t i = 0; i < count && s_nextFiltSlot < 6; ++i) {
|
||||
for (size_t i = 0; i < count && s_nextFiltSlot < 6; ++i)
|
||||
{
|
||||
const auto &f = list[i];
|
||||
const uint32_t hwId = f.ext ? f.id : _std_to_hw((uint16_t)f.id);
|
||||
CAN0.init_Filt(s_nextFiltSlot++, f.ext ? 1 : 0, hwId);
|
||||
}
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL)) {
|
||||
// Spiegel aktualisieren
|
||||
s_savedFiltCount = 0;
|
||||
for (size_t i = 0; i < count && i < 6; ++i)
|
||||
{
|
||||
s_savedFilt[i].id = list[i].ext ? list[i].id : (uint32_t)((uint16_t)list[i].id);
|
||||
s_savedFilt[i].ext = list[i].ext;
|
||||
++s_savedFiltCount;
|
||||
}
|
||||
|
||||
if (!CAN_HAL_SetMode(MCP_NORMAL))
|
||||
{
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
@@ -209,20 +382,65 @@ bool CAN_HAL_SetFilters(const CanFilter* list, size_t count)
|
||||
|
||||
bool CAN_HAL_Read(unsigned long &id, uint8_t &len, uint8_t data[8])
|
||||
{
|
||||
if (CAN0.checkReceive() != CAN_MSGAVAIL) return false;
|
||||
if (CAN0.readMsgBuf(&id, &len, data) != CAN_OK) {
|
||||
if (CAN0.checkReceive() != CAN_MSGAVAIL)
|
||||
return false;
|
||||
|
||||
if (CAN0.readMsgBuf(&id, &len, data) != CAN_OK)
|
||||
{
|
||||
// Echte Lese-Fehler -> vermutlich SPI/Controller-Problem
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
return false;
|
||||
}
|
||||
|
||||
// MCP_CAN schreibt Flags in das ID-Wort:
|
||||
// bit31 = EXT, bit30 = RTR, Rest = rohe ID (11 oder 29 Bit)
|
||||
const bool ext = (id & 0x80000000UL) != 0;
|
||||
const bool rtr = (id & 0x40000000UL) != 0; // aktuell nur informativ
|
||||
|
||||
// "Saubere" ID für Aufrufer herstellen
|
||||
const uint32_t clean_id = ext ? (id & 0x1FFFFFFFUL) : (id & 0x7FFUL);
|
||||
id = clean_id;
|
||||
|
||||
// Trace-Hook (RX)
|
||||
if (s_traceSink)
|
||||
{
|
||||
CanLogFrame f{};
|
||||
f.ts_ms = millis();
|
||||
f.id = clean_id;
|
||||
f.ext = ext;
|
||||
f.rx = true;
|
||||
f.dlc = len;
|
||||
if (len)
|
||||
memcpy(f.data, data, len);
|
||||
s_traceSink(f);
|
||||
}
|
||||
|
||||
// Optional: Wenn du RTR-Frames speziell behandeln willst, könntest du hier
|
||||
// (rtr==true) markieren/loggen oder len=0 erzwingen. Für jetzt: einfach durchreichen.
|
||||
(void)rtr;
|
||||
|
||||
return true;
|
||||
}
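Following the bit layout described in the comment above (bit31 = EXT, bit30 = RTR), the masking reduces the library's ID word to a clean CAN ID; a few illustrative compile-time checks:

```cpp
static_assert((0x98DAF110UL & 0x80000000UL) != 0,            "bit31 marks an extended frame");
static_assert((0x98DAF110UL & 0x1FFFFFFFUL) == 0x18DAF110UL, "29-bit ID after masking");
static_assert((0x400007E8UL & 0x40000000UL) != 0,            "bit30 marks an RTR frame");
static_assert((0x400007E8UL & 0x7FFUL) == 0x7E8UL,           "11-bit ID after masking");
```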
|
||||
|
||||
uint8_t CAN_HAL_Send(unsigned long id, bool ext, uint8_t len, const uint8_t *data)
|
||||
{
|
||||
// Sende-Fehler (CAN_FAILTX) müssen nicht zwingend Transceiver-Defekte sein (z. B. Bus-Off).
|
||||
// Höhere Ebene kann bei Bedarf DTCs setzen. Hier nur durchreichen.
|
||||
return CAN0.sendMsgBuf(id, ext ? 1 : 0, len, const_cast<uint8_t*>(data));
|
||||
// Senden
|
||||
uint8_t st = CAN0.sendMsgBuf(id, ext ? 1 : 0, len, const_cast<uint8_t *>(data));
|
||||
|
||||
// Trace-Hook (TX) nur bei Erfolg loggen – optional: immer loggen
|
||||
if (st == CAN_OK && s_traceSink)
|
||||
{
|
||||
CanLogFrame f{};
|
||||
f.ts_ms = millis();
|
||||
f.id = id;
|
||||
f.ext = ext;
|
||||
f.rx = false;
|
||||
f.dlc = len;
|
||||
if (len)
|
||||
memcpy(f.data, data, len);
|
||||
s_traceSink(f);
|
||||
}
|
||||
return st;
|
||||
}
|
||||
|
||||
// ==== Diagnose/Utilities ====
|
||||
|
@@ -1,115 +1,209 @@
|
||||
// can_native.cpp – Mehrmodell-Setup (Integer-only), Triumph nutzt NUR Kanal B (W23)
|
||||
|
||||
#include "can_native.h"
|
||||
#include "globals.h" // für LubeConfig, etc.
|
||||
#include "globals.h" // enthält LubeConfig.CANSource
|
||||
#include "dtc.h"
|
||||
#include "debugger.h"
|
||||
|
||||
// ===== Bike-spezifische Konstanten =====
|
||||
// Faktor zur Umrechnung der Rohdaten -> km/h (aus deinem bisherigen Code)
|
||||
// ====================== Gemeinsame Konstanten / Helpers ======================
|
||||
|
||||
// KTM-Faktoren: raw/FACTOR -> km/h
|
||||
static constexpr uint16_t FACTOR_RWP_KMH_890ADV = 18;
|
||||
static constexpr uint16_t FACTOR_RWP_KMH_1290SD = 18;
|
||||
|
||||
// Erwartete CAN-ID(s) für die genutzten Bikes (11-bit)
|
||||
static constexpr uint16_t ID_KTM_REAR_WHEEL = 0x12D; // aus deinem Filter-Setup
|
||||
// Triumph 0x208: Fit ≈ 0.0073 km/h/LSB -> exakt 73/10000 km/h/LSB
|
||||
// mm/s = km/h * 1_000_000 / 3600 -> 73/36 mm/s pro LSB (bei EINEM 16-Bit-Wert)
|
||||
static constexpr uint16_t TRI_MMPS_NUM = 73;
|
||||
static constexpr uint16_t TRI_MMPS_DEN_SINGLE = 36; // EIN Kanal (W23)
|
||||
|
||||
// ===== Interner Status =====
|
||||
// Gemeinsamer Integrations-/Alive-Status
|
||||
static uint32_t s_lastIntegrateMs = 0;
|
||||
static uint32_t s_lastRxMs = 0; // für DTC_NO_CAN_SIGNAL
|
||||
static uint32_t s_lastSpeed_mmps = 0; // mm pro Sekunde (Rear Wheel)
|
||||
static uint32_t s_lastSpeed_mmps = 0; // aktuelle Geschwindigkeit [mm/s]
|
||||
|
||||
// Hilfsfunktion: aus km/h -> mm/s
|
||||
static inline uint32_t kmh_to_mmps(uint16_t kmh)
|
||||
// mm = (mm/s * ms) / 1000
|
||||
static inline uint32_t integrate_mm(uint32_t v_mmps, uint32_t dt_ms)
|
||||
{
|
||||
// 1 km/h = 1'000'000 mm / 3600 s
|
||||
return (uint32_t)kmh * 1000000UL / 3600UL;
|
||||
return (uint64_t)v_mmps * dt_ms / 1000ULL;
|
||||
}
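A quick numeric check of the integration step (values illustrative): 50 km/h is 13 888 mm/s, so a 50 ms tick adds about 0.69 m.

```cpp
static_assert(13888ULL * 50ULL / 1000ULL == 694ULL, "50 km/h over 50 ms -> 694 mm");
```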
|
||||
|
||||
// Hilfsfunktion: aus Rohdaten -> mm/s je nach Bike-Konfiguration
|
||||
static uint32_t parse_speed_mmps_from_frame(uint8_t dlc, const uint8_t data[8])
|
||||
{
|
||||
if (dlc < 7)
|
||||
return 0; // wir brauchen data[5] & data[6]
|
||||
uint16_t raw = (uint16_t)data[5] << 8 | data[6];
|
||||
// ========================== Modell-Decoder (Integer) =========================
|
||||
|
||||
// --- KTM: 11-bit ID 0x12D, Speed in data[5..6] (BE), raw/FACTOR -> km/h -> mm/s
|
||||
static uint32_t dec_ktm_rearwheel_mmps(uint8_t dlc, const uint8_t data[8], uint8_t bikeVariant /*0=890,1=1290*/)
|
||||
{
|
||||
if (dlc < 7) return 0; // benötigt data[5], data[6]
|
||||
const uint16_t raw = (uint16_t(data[5]) << 8) | data[6];
|
||||
|
||||
uint16_t factor = FACTOR_RWP_KMH_890ADV;
|
||||
if (bikeVariant == 1) factor = FACTOR_RWP_KMH_1290SD;
|
||||
|
||||
// mm/s = (raw/factor) * 1_000_000 / 3600 -> reine Integer-Mathe:
|
||||
const uint64_t num = (uint64_t)raw * 1000000ULL; // 64-bit, damit raw * 1'000'000 nicht überläuft
|
||||
const uint64_t kmh_times1e6 = num / factor;
|
||||
return (uint32_t)(kmh_times1e6 / 3600ULL);
|
||||
}
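Worked check of the KTM path (values illustrative): raw = 900 with FACTOR 18 corresponds to 50 km/h, i.e. 13 888 mm/s.

```cpp
static_assert((900ULL * 1000000ULL / 18ULL) / 3600ULL == 13888ULL, "KTM decode example");
```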
|
||||
|
||||
// --- Triumph: 11-bit ID 0x208, NUR Kanal B = W23 (B2..B3, Little-Endian)
|
||||
static uint32_t dec_triumph_0x208_w23_mmps(uint8_t dlc, const uint8_t data[8], uint8_t /*unused*/)
|
||||
{
|
||||
if (dlc < 4) return 0;
|
||||
|
||||
// W23 = (B2) + 256*(B3), LE
|
||||
const uint16_t W23 = (uint16_t)data[2] | ((uint16_t)data[3] << 8);
|
||||
|
||||
if (W23 == 0) return 0;
|
||||
|
||||
// mm/s = (W23 * 73) / 36 — rundendes Integer-Divide
|
||||
return ( (uint32_t)W23 * TRI_MMPS_NUM + (TRI_MMPS_DEN_SINGLE/2) ) / TRI_MMPS_DEN_SINGLE;
|
||||
}
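Worked check of the W23 rounding divide (values illustrative): W23 = 1000 corresponds to 7.3 km/h, i.e. roughly 2028 mm/s.

```cpp
static_assert((1000ULL * 73ULL + 18ULL) / 36ULL == 2028ULL, "Triumph W23 decode example");
```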
|
||||
|
||||
// ============================ Modell-Registry ================================
|
||||
struct ModelSpec
|
||||
{
|
||||
// Erwartete 11-bit CAN-ID, min DLC, ob Extended (false=Standard)
|
||||
uint16_t can_id;
|
||||
uint8_t min_dlc;
|
||||
bool ext;
|
||||
|
||||
// Decoder-Funktion → mm/s (Integer). bikeVariant: optionale Untervariante.
|
||||
uint32_t (*decode_mmps)(uint8_t dlc, const uint8_t data[8], uint8_t bikeVariant);
|
||||
|
||||
// Optionaler Untervarianten-Index (z.B. 0=890ADV, 1=1290SD)
|
||||
uint8_t bikeVariant;
|
||||
};
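Adding another bike then only needs a decoder plus one extra case in getSpec(); a hypothetical sketch (the enum value, CAN ID and scaling below are invented for illustration):

```cpp
// Hypothetical: LE16 in data[0..1], 0.1 km/h per LSB -> mm/s
static uint32_t dec_example_bike_mmps(uint8_t dlc, const uint8_t data[8], uint8_t)
{
    if (dlc < 2) return 0;
    const uint16_t raw = (uint16_t)data[0] | ((uint16_t)data[1] << 8);
    return (uint32_t)((uint64_t)raw * 100000ULL / 3600ULL);
}
// In getSpec():
//   case EXAMPLE_BIKE_2026:
//       out = { /*can_id=*/0x2A0, /*min_dlc=*/2, /*ext=*/false, dec_example_bike_mmps, 0 };
//       return true;
```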
|
||||
|
||||
// Konkrete Modelle (einfach erweiterbar)
|
||||
static constexpr uint16_t ID_KTM_REAR_WHEEL = 0x12D;
|
||||
static constexpr uint16_t ID_TRIUMPH_SPEED = 0x208;
|
||||
|
||||
static uint32_t trampoline_ktm_890(uint8_t dlc, const uint8_t data[8], uint8_t) {
|
||||
return dec_ktm_rearwheel_mmps(dlc, data, 0);
|
||||
}
|
||||
static uint32_t trampoline_ktm_1290(uint8_t dlc, const uint8_t data[8], uint8_t) {
|
||||
return dec_ktm_rearwheel_mmps(dlc, data, 1);
|
||||
}
|
||||
static uint32_t trampoline_triumph_w23(uint8_t dlc, const uint8_t data[8], uint8_t) {
|
||||
return dec_triumph_0x208_w23_mmps(dlc, data, 0);
|
||||
}
|
||||
|
||||
// getSpec(): mappt LubeConfig.CANSource → ModelSpec
|
||||
static bool getSpec(ModelSpec &out)
|
||||
{
|
||||
switch (LubeConfig.CANSource)
|
||||
{
|
||||
case KTM_890_ADV_R_2021:
|
||||
// (raw / FACTOR) km/h -> mm/s
|
||||
// Deine Kommentare: raw * 500 -> cm/s — hier sauber über kmh_to_mmps
|
||||
return (((uint32_t)raw * 1000000UL) / FACTOR_RWP_KMH_890ADV) / 3600UL;
|
||||
out = { ID_KTM_REAR_WHEEL, 7, false, trampoline_ktm_890, 0 };
|
||||
return true;
|
||||
|
||||
case KTM_1290_SD_R_2023:
|
||||
return (((uint32_t)raw * 1000000UL) / FACTOR_RWP_KMH_1290SD) / 3600UL;
|
||||
out = { ID_KTM_REAR_WHEEL, 7, false, trampoline_ktm_1290, 1 };
|
||||
return true;
|
||||
|
||||
case TRIUMPH_SPEED_TWIN_1200_RS_2025:
|
||||
// Triumph nutzt NUR W23 (Hinterrad-Kanal B)
|
||||
out = { ID_TRIUMPH_SPEED, 4, false, trampoline_triumph_w23, 0 };
|
||||
return true;
|
||||
|
||||
default:
|
||||
return 0;
|
||||
return false; // unbekannt → optional generisch behandeln
|
||||
}
|
||||
}
|
||||
|
||||
// ============================== Initialisierung ==============================
|
||||
bool Init_CAN_Native()
|
||||
{
|
||||
// 1) HAL bereitstellen (Selftest inklusive). Nur initialisieren, wenn noch nicht ready.
|
||||
// HAL bereitstellen
|
||||
if (!CAN_HAL_IsReady())
|
||||
{
|
||||
CanHalConfig cfg;
|
||||
cfg.baud = CAN_500KBPS;
|
||||
cfg.clock = MCP_16MHZ;
|
||||
cfg.listenOnlyProbeMs = 50; // kurzer, unkritischer „Bus lebt?“-Blick
|
||||
cfg.listenOnlyProbeMs = 50;
|
||||
|
||||
if (!CAN_HAL_Init(cfg))
|
||||
{
|
||||
// Hardware/Selftest failed → native Pfad nicht nutzbar
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, true);
|
||||
Debug_pushMessage("CAN(Native): HAL init failed\n");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// 2) Masken/Filter setzen
|
||||
// Spec laden
|
||||
ModelSpec spec;
|
||||
const bool haveSpec = getSpec(spec);
|
||||
|
||||
// Masken/Filter
|
||||
CAN_HAL_SetStdMask11(0, 0x7FF);
|
||||
CAN_HAL_SetStdMask11(1, 0x7FF);
|
||||
|
||||
CanFilter flist[1] = {{ID_KTM_REAR_WHEEL, false}};
|
||||
if (haveSpec)
|
||||
{
|
||||
CanFilter flist[1] = { { spec.can_id, spec.ext } };
|
||||
CAN_HAL_SetFilters(flist, 1);
|
||||
Debug_pushMessage("CAN(Native): Filter set (ID=0x%03X, minDLC=%u)\n", spec.can_id, spec.min_dlc);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Fallback: beide IDs aktivieren (KTM+Triumph), falls Quelle unbekannt
|
||||
CanFilter flist[2] = { { ID_KTM_REAR_WHEEL, false }, { ID_TRIUMPH_SPEED, false } };
|
||||
CAN_HAL_SetFilters(flist, 2);
|
||||
Debug_pushMessage("CAN(Native): Fallback filters (KTM=0x%03X, TRI=0x%03X)\n", ID_KTM_REAR_WHEEL, ID_TRIUMPH_SPEED);
|
||||
}
|
||||
|
||||
CAN_HAL_SetMode(MCP_NORMAL);
|
||||
|
||||
// 3) Startzustand/Flags
|
||||
MaintainDTC(DTC_CAN_TRANSCEIVER_FAILED, false);
|
||||
// DTC_NO_CAN_SIGNAL wird in Process_* verwaltet
|
||||
|
||||
// 4) Status resetten
|
||||
s_lastIntegrateMs = millis();
|
||||
s_lastRxMs = 0;
|
||||
s_lastSpeed_mmps = 0;
|
||||
|
||||
Debug_pushMessage("CAN(Native): Filters set (ID=0x%03X), NORMAL mode\n", ID_KTM_REAR_WHEEL);
|
||||
return true;
|
||||
}
|
||||
|
||||
// ============================== Verarbeitung ================================
|
||||
uint32_t Process_CAN_Native_WheelSpeed()
|
||||
{
|
||||
const uint32_t now = millis();
|
||||
uint32_t add_mm = 0;
|
||||
ModelSpec spec;
|
||||
const bool haveSpec = getSpec(spec);
|
||||
|
||||
// 1) Frames non-blocking ziehen und relevante verarbeiten
|
||||
for (uint8_t i = 0; i < 6; ++i)
|
||||
{ // kleine Obergrenze gegen Busy-Loops
|
||||
// Frames non-blocking verarbeiten
|
||||
for (uint8_t i = 0; i < 6; ++i) // kleine Obergrenze gegen Busy-Loops
|
||||
{
|
||||
unsigned long id;
|
||||
uint8_t dlc;
|
||||
uint8_t buf[8];
|
||||
if (!CAN_HAL_Read(id, dlc, buf))
|
||||
break;
|
||||
|
||||
// Wir erwarten 11-bit 0x12D (Filter sind gesetzt, aber doppelter Boden schadet nicht)
|
||||
if (id == ID_KTM_REAR_WHEEL)
|
||||
if (haveSpec)
|
||||
{
|
||||
s_lastSpeed_mmps = parse_speed_mmps_from_frame(dlc, buf);
|
||||
if (id == spec.can_id && dlc >= spec.min_dlc)
|
||||
{
|
||||
s_lastSpeed_mmps = spec.decode_mmps(dlc, buf, spec.bikeVariant);
|
||||
s_lastRxMs = now;
|
||||
// Kein "break": falls mehrere Frames in der Queue sind, nehmen wir das letzte als aktuellsten
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Fallback: KTM prüfen
|
||||
if (id == ID_KTM_REAR_WHEEL && dlc >= 7)
|
||||
{
|
||||
s_lastSpeed_mmps = dec_ktm_rearwheel_mmps(dlc, buf, 0);
|
||||
s_lastRxMs = now;
|
||||
}
|
||||
// Fallback: Triumph prüfen (nur W23)
|
||||
else if (id == ID_TRIUMPH_SPEED && dlc >= 4)
|
||||
{
|
||||
s_lastSpeed_mmps = dec_triumph_0x208_w23_mmps(dlc, buf, 0);
|
||||
s_lastRxMs = now;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2) CAN-Heartbeat -> DTC_NO_CAN_SIGNAL (Warnung, wenn >10s nix mehr kam)
|
||||
// CAN-Heartbeat / DTC
|
||||
if (s_lastRxMs != 0)
|
||||
{
|
||||
const bool stale = (now - s_lastRxMs) > 10000UL;
|
||||
@@ -117,27 +211,18 @@ uint32_t Process_CAN_Native_WheelSpeed()
|
||||
}
|
||||
else
|
||||
{
|
||||
// Seit Start noch kein Frame gesehen -> noch nicht entscheiden, DTC-Logik darf warten
|
||||
// Optional: nach 1s ohne Frames Warnung setzen
|
||||
static uint32_t t0 = now;
|
||||
if (now - t0 > 1000UL)
|
||||
{
|
||||
MaintainDTC(DTC_NO_CAN_SIGNAL, true);
|
||||
}
|
||||
}
|
||||
|
||||
// 3) Integration der Distanz (mm) über dt
|
||||
if (s_lastIntegrateMs == 0)
|
||||
s_lastIntegrateMs = now;
|
||||
// Integration Strecke (mm)
|
||||
if (s_lastIntegrateMs == 0) s_lastIntegrateMs = now;
|
||||
const uint32_t dt_ms = now - s_lastIntegrateMs;
|
||||
s_lastIntegrateMs = now;
|
||||
|
||||
// Wenn seit 600 ms keine neue Geschwindigkeit kam, setze v -> 0 (Stale-Schutz)
|
||||
const bool speedStale = (s_lastRxMs == 0) || ((now - s_lastRxMs) > 600UL);
|
||||
const uint32_t v_mmps = speedStale ? 0 : s_lastSpeed_mmps;
|
||||
const uint32_t v_mmps = speedStale ? 0u : s_lastSpeed_mmps;
|
||||
|
||||
// mm = (mm/s * ms) / 1000
|
||||
add_mm = (uint64_t)v_mmps * dt_ms / 1000ULL;
|
||||
|
||||
return add_mm;
|
||||
return integrate_mm(v_mmps, dt_ms);
|
||||
}
|
||||
|
@@ -1,7 +1,12 @@
|
||||
#include "can_obd2.h"
|
||||
#include "can_hal.h"
|
||||
#include "dtc.h"
|
||||
#include "debugger.h"
|
||||
#include "globals.h" // falls du später Einstellungen brauchst
|
||||
#include "globals.h"
|
||||
#include <stdarg.h>
|
||||
|
||||
// Trace-Sink aus webui.cpp (o.ä.)
|
||||
extern void TRACE_OnObdFrame(uint32_t id, bool rx, const uint8_t *d, uint8_t dlc, const char *note);
|
||||
|
||||
// =======================
|
||||
// Konfiguration (anpassbar)
|
||||
@@ -14,7 +19,7 @@
|
||||
|
||||
// Antwort-Timeout auf eine einzelne Anfrage
|
||||
#ifndef OBD2_RESP_TIMEOUT_MS
|
||||
#define OBD2_RESP_TIMEOUT_MS 60 // ~60 ms
|
||||
#define OBD2_RESP_TIMEOUT_MS 120 // etwas großzügiger für reale ECUs
|
||||
#endif
|
||||
|
||||
// Wenn so lange keine valide Antwort kam, gilt die Geschwindigkeit als stale -> v=0
|
||||
@@ -32,10 +37,21 @@
|
||||
#define OBD2_DEBUG_INTERVAL_MS 1000
|
||||
#endif
|
||||
|
||||
// Max. Delta-Zeit fürs Weg-Integrationsglied (Ausreißer-Klemme)
|
||||
#ifndef OBD2_MAX_DT_MS
|
||||
#define OBD2_MAX_DT_MS 200
|
||||
#endif
|
||||
|
||||
// Erlaube einmaligen Fallback von funktionaler (0x7DF) auf physische Adresse (0x7E0)
|
||||
#ifndef OBD2_ALLOW_PHYSICAL_FALLBACK
|
||||
#define OBD2_ALLOW_PHYSICAL_FALLBACK 1
|
||||
#endif
|
||||
|
||||
// =======================
|
||||
// OBD-II IDs (11-bit)
|
||||
// =======================
|
||||
static constexpr uint16_t OBD_REQ_ID = 0x7DF; // Broadcast-Request
|
||||
static constexpr uint16_t OBD_REQ_ID_FUNCTIONAL = 0x7DF; // Broadcast-Request
|
||||
static constexpr uint16_t OBD_REQ_ID_PHYSICAL = 0x7E0; // Engine ECU (Antwort 0x7E8)
|
||||
static constexpr uint16_t OBD_RESP_MIN = 0x7E8; // ECUs antworten 0x7E8..0x7EF
|
||||
static constexpr uint16_t OBD_RESP_MAX = 0x7EF;
|
||||
|
||||
@@ -79,7 +95,7 @@ static inline void maybeDebug(uint32_t now, const char *fmt, ...)
|
||||
s_lastDbgMs = now;
|
||||
va_list ap;
|
||||
va_start(ap, fmt);
|
||||
Debug_pushMessage(fmt, ap);
|
||||
Debug_pushMessage(fmt, ap); // nimmt va_list
|
||||
va_end(ap);
|
||||
#else
|
||||
(void)now;
|
||||
@@ -113,15 +129,17 @@ bool Init_CAN_OBD2()
|
||||
CAN_HAL_SetStdMask11(0, 0x7F0);
|
||||
CAN_HAL_SetStdMask11(1, 0x7F0);
|
||||
|
||||
CanFilter flist[6] = {
|
||||
CanFilter flist[8] = {
|
||||
{0x7E8, false},
|
||||
{0x7E9, false},
|
||||
{0x7EA, false},
|
||||
{0x7EB, false},
|
||||
{0x7EC, false},
|
||||
{0x7ED, false},
|
||||
{0x7EE, false},
|
||||
{0x7EF, false},
|
||||
};
|
||||
CAN_HAL_SetFilters(flist, 6);
|
||||
CAN_HAL_SetFilters(flist, 8);
|
||||
|
||||
CAN_HAL_SetMode(MCP_NORMAL);
|
||||
|
||||
@@ -151,7 +169,11 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
if (s_state == ObdState::Idle && (now - s_lastQueryTime) >= OBD2_QUERY_INTERVAL_MS)
|
||||
{
|
||||
uint8_t req[8] = {0x02, 0x01, 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00}; // Mode 01, PID 0x0D (Speed)
|
||||
const uint8_t st = CAN_HAL_Send(OBD_REQ_ID, /*ext=*/false, 8, req);
|
||||
|
||||
// Trace: geplanter Request (functional)
|
||||
TRACE_OnObdFrame(OBD_REQ_ID_FUNCTIONAL, /*rx=*/false, req, 8, "req 01 0D (functional)");
|
||||
|
||||
uint8_t st = CAN_HAL_Send(OBD_REQ_ID_FUNCTIONAL, /*ext=*/false, 8, req);
|
||||
s_lastQueryTime = now;
|
||||
|
||||
if (st == CAN_OK)
|
||||
@@ -161,11 +183,25 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
}
|
||||
else
|
||||
{
|
||||
#if OBD2_ALLOW_PHYSICAL_FALLBACK
|
||||
// einmalig physisch versuchen (0x7E0 → Antwort 0x7E8)
|
||||
TRACE_OnObdFrame(OBD_REQ_ID_PHYSICAL, /*rx=*/false, req, 8, "req 01 0D (physical)");
|
||||
st = CAN_HAL_Send(OBD_REQ_ID_PHYSICAL, /*ext=*/false, 8, req);
|
||||
s_lastQueryTime = now;
|
||||
if (st == CAN_OK)
|
||||
{
|
||||
s_state = ObdState::Waiting;
|
||||
s_requestDeadline = now + OBD2_RESP_TIMEOUT_MS;
|
||||
}
|
||||
else
|
||||
#endif
|
||||
{
|
||||
// Senden fehlgeschlagen -> harter Timeout-DTC
|
||||
MaintainDTC(DTC_OBD2_CAN_TIMEOUT, true);
|
||||
maybeDebug(now, "OBD2-CAN send failed (%u)\n", st);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 2) Non-blocking Receive: wenige Frames pro Tick ziehen
|
||||
for (uint8_t i = 0; i < OBD2_MAX_READS_PER_CALL; ++i)
|
||||
@@ -182,9 +218,9 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
|
||||
// Erwartete Formate:
|
||||
// - Einfache Antwort: 0x41 0x0D <A> ...
|
||||
// - Mit Längen-Byte: 0x03 0x41 0x0D <A> ...
|
||||
// - Mit Längen-Byte: 0x03/0x04 0x41 0x0D <A> ...
|
||||
uint8_t modeResp = 0, pid = 0, speedKmh = 0;
|
||||
if (rx[0] == 0x03 && len >= 4 && rx[1] == 0x41 && rx[2] == 0x0D)
|
||||
if ((rx[0] == 0x03 || rx[0] == 0x04) && len >= 4 && rx[1] == 0x41 && rx[2] == 0x0D)
|
||||
{
|
||||
modeResp = rx[1];
|
||||
pid = rx[2];
|
||||
@@ -198,7 +234,9 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
}
|
||||
else
|
||||
{
|
||||
continue; // anderes PID/Format ignorieren
|
||||
// Nicht das gesuchte PID → optional trotzdem loggen
|
||||
TRACE_OnObdFrame(rxId, /*rx=*/true, rx, len, "other OBD resp");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (modeResp == 0x41 && pid == 0x0D)
|
||||
@@ -211,10 +249,19 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
MaintainDTC(DTC_OBD2_CAN_TIMEOUT, false);
|
||||
MaintainDTC(DTC_OBD2_CAN_NO_RESPONSE, false);
|
||||
|
||||
char note[40];
|
||||
snprintf(note, sizeof(note), "speed=%ukmh", (unsigned)speedKmh);
|
||||
TRACE_OnObdFrame(rxId, /*rx=*/true, rx, len, note);
|
||||
|
||||
maybeDebug(now, "OBD2 speed: %u km/h (%lu mm/s)\n",
|
||||
(unsigned)speedKmh, (unsigned long)s_lastSpeedMMps);
|
||||
break; // eine valide Antwort pro Zyklus reicht
|
||||
}
|
||||
else
|
||||
{
|
||||
// ist zwar OBD-II Antwort, aber nicht unser PID – optional loggen
|
||||
TRACE_OnObdFrame(rxId, /*rx=*/true, rx, len, "other OBD resp");
|
||||
}
|
||||
}
|
||||
|
||||
// 3) Offene Anfrage: Timeout prüfen
|
||||
@@ -223,13 +270,17 @@ uint32_t Process_CAN_OBD2_Speed()
|
||||
// Keine passende Antwort erhalten
|
||||
MaintainDTC(DTC_OBD2_CAN_NO_RESPONSE, true);
|
||||
s_state = ObdState::Idle;
|
||||
TRACE_OnObdFrame(0x000, /*rx=*/true, nullptr, 0, "timeout 01 0D");
|
||||
}
|
||||
|
||||
// 4) Integration (mm) über dt
|
||||
uint32_t add_mm = 0;
|
||||
if (s_lastIntegrateMs == 0)
|
||||
s_lastIntegrateMs = now;
|
||||
const uint32_t dt_ms = now - s_lastIntegrateMs;
|
||||
|
||||
uint32_t raw_dt = now - s_lastIntegrateMs;
|
||||
if (raw_dt > OBD2_MAX_DT_MS) raw_dt = OBD2_MAX_DT_MS; // Ausreißer klemmen
|
||||
const uint32_t dt_ms = raw_dt;
|
||||
s_lastIntegrateMs = now;
|
||||
|
||||
// Stale-Schutz: wenn lange keine Antwort -> v=0
|
||||
|
@@ -46,6 +46,7 @@ const char *const GPSBaudRateString[GPSBAUDRATE_COUNT] = {
|
||||
const char *const CANSourceString[CANSOURCE_COUNT] = {
|
||||
"KTM 890 Adventure R (2021)",
|
||||
"KTM 1290 Superduke R (2023)",
|
||||
"Triumph Speed Twin 1200 RS (2025)",
|
||||
};
|
||||
|
||||
// ---- Centralized, safe getters ----
|
||||
|
@@ -13,11 +13,13 @@
|
||||
|
||||
#include "webui.h"
|
||||
#include "common.h"
|
||||
#include "can_hal.h" // <-- für CanLogFrame, Trace-Sink
|
||||
#include <memory> // std::unique_ptr
|
||||
#include <cstring> // strlen, strncpy, memcpy
|
||||
#include <algorithm> // std::clamp
|
||||
|
||||
AsyncWebServer webServer(80);
|
||||
AsyncWebSocket webSocket("/ws");
|
||||
|
||||
const char *PARAM_MESSAGE = "message";
|
||||
|
||||
@@ -52,8 +54,6 @@ void WebserverEERestore_Callback(AsyncWebServerRequest *request, const String &f
|
||||
void WebServerEEJSON_Callback(AsyncWebServerRequest *request);
|
||||
void GetFlashVersion(char *buff, size_t buff_size);
|
||||
|
||||
AsyncWebSocket webSocket("/ws");
|
||||
|
||||
void WebsocketEvent_Callback(AsyncWebSocket *server, AsyncWebSocketClient *client, AwsEventType type, void *arg, uint8_t *data, size_t len);
|
||||
void Websocket_HandleMessage(void *arg, uint8_t *data, size_t len);
|
||||
void Websocket_RefreshClientData_DTCs(uint32_t client_id);
|
||||
@@ -65,7 +65,10 @@ void parseWebsocketString(char *data, char *identifierBuffer, size_t identifierB
|
||||
int findIndexByString(const char *searchString, const char *const *array, int arraySize);
|
||||
|
||||
// ---------- small helpers (safety) ----------
|
||||
static inline const char *nz(const char *p) { return p ? p : ""; }
|
||||
static inline const char *nz(const char *p)
|
||||
{
|
||||
return p ? p : "";
|
||||
}
|
||||
|
||||
static inline String tableStr(const char *const *tbl, int idx, int size)
|
||||
{
|
||||
@@ -92,6 +95,290 @@ static inline bool validIndex(int idx, int size)
|
||||
return idx >= 0 && idx < size;
|
||||
}
|
||||
|
||||
// =====================================================================
|
||||
// WebSocket-basierter Trace
|
||||
// =====================================================================
|
||||
|
||||
enum class TraceMode
|
||||
{
|
||||
None,
|
||||
Raw,
|
||||
Obd
|
||||
};
|
||||
|
||||
static TraceMode g_traceMode = TraceMode::None;
|
||||
static uint32_t g_traceOwnerId = 0; // WS-Client-ID des Starters
|
||||
static uint32_t g_traceStartMs = 0;
|
||||
static uint32_t g_traceLines = 0;
|
||||
static uint32_t g_traceDrops = 0;
|
||||
|
||||
// Aktueller WS-Client während WS_EVT_DATA (für HandleMessage)
|
||||
static AsyncWebSocketClient *g_wsCurrentClient = nullptr;
|
||||
|
||||
// Ringpuffer (verlusttolerant)
|
||||
// ---- Dynamischer Ringpuffer, um BSS klein zu halten ----
|
||||
#ifndef TRACE_FMT_BUFSZ
|
||||
#define TRACE_FMT_BUFSZ 128 // Puffer für ASCII-Zeile (unabhängig vom Ring)
|
||||
#endif
|
||||
#ifndef TRACE_DEFAULT_LINES
|
||||
#define TRACE_DEFAULT_LINES 64 // 64 x 128 = 8KB
|
||||
#endif
|
||||
#ifndef TRACE_DEFAULT_LINE_MAX
|
||||
#define TRACE_DEFAULT_LINE_MAX 128
|
||||
#endif
|
||||
|
||||
static char *g_ring = nullptr; // contiguous: lines * lineSize
|
||||
static uint16_t g_ringLines = 0;
|
||||
static uint16_t g_lineSize = 0;
|
||||
static uint16_t g_head = 0, g_tail = 0;
|
||||
|
||||
static inline bool ring_alloc(uint16_t lines, uint16_t lineSize)
|
||||
{
|
||||
size_t bytes = (size_t)lines * (size_t)lineSize;
|
||||
g_ring = (char *)malloc(bytes);
|
||||
if (!g_ring)
|
||||
return false;
|
||||
g_ringLines = lines;
|
||||
g_lineSize = lineSize;
|
||||
g_head = g_tail = 0;
|
||||
return true;
|
||||
}
|
||||
static inline void ring_free()
|
||||
{
|
||||
if (g_ring)
|
||||
free(g_ring);
|
||||
g_ring = nullptr;
|
||||
g_ringLines = 0;
|
||||
g_lineSize = 0;
|
||||
g_head = g_tail = 0;
|
||||
}
|
||||
static inline bool ring_empty() { return g_head == g_tail; }
|
||||
static inline bool ring_full() { return (uint16_t)(g_head - g_tail) >= g_ringLines; } // head/tail laufen frei durch; voll, sobald der Abstand die Slot-Anzahl erreicht
|
||||
|
||||
static inline char *ring_slot(uint16_t idx) { return g_ring + ((idx % g_ringLines) * g_lineSize); }
|
||||
|
||||
static inline void ring_push_line(const char *s)
|
||||
{
|
||||
if (!g_ring)
|
||||
return;
|
||||
if (ring_full())
|
||||
{
|
||||
g_tail++;
|
||||
g_traceDrops++;
|
||||
}
|
||||
char *dst = ring_slot(g_head);
|
||||
strncpy(dst, s, g_lineSize - 1);
|
||||
dst[g_lineSize - 1] = '\0';
|
||||
g_head++;
|
||||
}
|
||||
static inline const char *ring_front() { return ring_empty() ? "" : ring_slot(g_tail); }
|
||||
static inline void ring_pop()
|
||||
{
|
||||
if (!ring_empty())
|
||||
g_tail++;
|
||||
}
|
||||
|
||||
// Fallback: direkt senden, falls kein Ring (zu wenig RAM)
|
||||
static inline void trace_emit_line_direct_or_ring(const char *s)
|
||||
{
|
||||
if (g_ring)
|
||||
{ // wenn Ring existiert -> dort ablegen
|
||||
ring_push_line(s);
|
||||
return;
|
||||
}
|
||||
// sonst: direkt an den Owner senden (Best-Effort)
|
||||
if (g_traceOwnerId && webSocket.availableForWrite(g_traceOwnerId))
|
||||
{
|
||||
String payload;
|
||||
payload.reserve(strlen(s) + 16);
|
||||
payload += "TRACELINE;";
|
||||
payload += s;
|
||||
payload += "\n";
|
||||
webSocket.text(g_traceOwnerId, payload);
|
||||
}
|
||||
else
|
||||
{
|
||||
g_traceDrops++;
|
||||
}
|
||||
}
|
||||
|
||||
// ASCII-Formatter (ID 3-stellig 11-bit, 8-stellig 29-bit)
|
||||
static void TRACE_FormatLine(char *dst, size_t n, const CanLogFrame &f, const char *note)
|
||||
{
|
||||
int off = snprintf(dst, n, "%lu %s 0x%0*lX %u ",
|
||||
(unsigned long)f.ts_ms,
|
||||
f.rx ? "RX" : "TX",
|
||||
f.ext ? 8 : 3, (unsigned long)f.id,
|
||||
f.dlc);
|
||||
for (uint8_t i = 0; i < f.dlc && off < (int)n - 3; i++)
|
||||
off += snprintf(dst + off, n - off, "%02X ", f.data[i]);
|
||||
if (note && *note)
|
||||
snprintf(dst + off, n - off, "; %s", note);
|
||||
}
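An illustrative call showing the resulting line format (timestamp and payload invented; 0x32 = 50 km/h to match the note):

```cpp
CanLogFrame f{};
f.ts_ms = 123456; f.id = 0x7E8; f.ext = false; f.rx = true; f.dlc = 8;
const uint8_t d[8] = {0x03, 0x41, 0x0D, 0x32, 0x00, 0x00, 0x00, 0x00};
memcpy(f.data, d, 8);

char line[TRACE_FMT_BUFSZ];
TRACE_FormatLine(line, sizeof(line), f, "speed=50kmh");
// line == "123456 RX 0x7E8 8 03 41 0D 32 00 00 00 00 ; speed=50kmh"
```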
|
||||
|
||||
// Sinks ---------------------------------------------------------------
|
||||
|
||||
// RAW (wird vom HAL bei RX/TX gerufen)
|
||||
static void TRACE_SinkRaw(const CanLogFrame &f)
|
||||
{
|
||||
if (g_traceMode != TraceMode::Raw || g_traceOwnerId == 0)
|
||||
return;
|
||||
char buf[TRACE_FMT_BUFSZ];
|
||||
TRACE_FormatLine(buf, sizeof(buf), f, nullptr);
|
||||
trace_emit_line_direct_or_ring(buf);
|
||||
g_traceLines++;
|
||||
}
/**
 * OBD trace hook:
 * Called from can_obd2.cpp to hand individual OBD frames (as ASCII) to the
 * WebSocket trace. The implementation lives in webui.cpp.
 *
 * @param id   CAN ID (11-bit for OBD)
 * @param rx   true = received, false = transmitted
 * @param d    data pointer (may be nullptr if dlc == 0)
 * @param dlc  data length (0..8)
 * @param note optional annotation (e.g. "Mode01 PID 0x0D")
 */
void TRACE_OnObdFrame(uint32_t id, bool rx, const uint8_t *d, uint8_t dlc, const char *note)
{
  if (g_traceMode != TraceMode::Obd || g_traceOwnerId == 0)
    return;
  CanLogFrame f{};
  f.ts_ms = millis();
  f.id = id;
  f.ext = false;
  f.rx = rx;
  f.dlc = dlc;
  if (d && dlc)
    memcpy(f.data, d, dlc);
  char buf[TRACE_FMT_BUFSZ];
  TRACE_FormatLine(buf, sizeof(buf), f, note);
  trace_emit_line_direct_or_ring(buf);
  g_traceLines++;
}
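
// Hypothetical sketch of a call site in can_obd2.cpp (not part of this diff): the
// buffer contents and CAN IDs are illustration values, only TRACE_OnObdFrame itself
// and its parameter order come from the declaration above.
//
//   uint8_t reply[8] = {0x03, 0x41, 0x0D, 0x3C, 0x00, 0x00, 0x00, 0x00}; // example Mode 01 speed response
//   TRACE_OnObdFrame(0x7E8, true, reply, 8, "Mode01 PID 0x0D");   // received ECU reply
//   TRACE_OnObdFrame(0x7DF, false, nullptr, 0, "Mode01 PID 0x0D"); // logged request; nullptr allowed when dlc == 0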

// Is the trace owner still connected?
static inline bool trace_owner_online()
{
  return (g_traceOwnerId != 0) && webSocket.hasClient(g_traceOwnerId);
}

// Pump: send in batches, respect WebSocket backpressure
static void TRACE_PumpWs()
{
  if (!g_ring)
    return; // in direct-send mode there is no ring buffer to pump
  if (g_traceMode == TraceMode::None || g_traceOwnerId == 0)
    return;
  if (!trace_owner_online())
    return;
  if (!webSocket.availableForWrite(g_traceOwnerId))
    return;

  String payload;
  payload.reserve(2048);
  int sent = 0;

  // bundle several lines into one WebSocket message
  while (!ring_empty() && sent < 32)
  {
    payload += "TRACELINE;";
    payload += ring_front();
    payload += "\n";
    ring_pop();
    sent++;
  }

  if (payload.length())
    webSocket.text(g_traceOwnerId, payload);
}
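
// For illustration, one pump pass produces a single WebSocket text message holding up
// to 32 prefixed, newline-terminated lines, roughly like this (values made up):
//
//   TRACELINE;123456 RX 0x280 8 00 1A 2B 00 00 00 00 00
//   TRACELINE;123458 RX 0x1A0 4 05 C8 00 10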

static void TRACE_StopWs(const char *reason)
{
  if (g_traceMode == TraceMode::None)
    return;

  // detach the hooks
  CAN_HAL_SetTraceSink(nullptr);
  CAN_HAL_EnableRawSniffer(false);

  // final summary to the owner (if still online)
  if (trace_owner_online())
  {
    String end = "STOPTRACE;mode=";
    end += (g_traceMode == TraceMode::Raw ? "raw" : "obd");
    end += ";lines=";
    end += String(g_traceLines);
    end += ";drops=";
    end += String(g_traceDrops);
    if (reason)
    {
      end += ";reason=";
      end += reason;
    }
    webSocket.text(g_traceOwnerId, end);
  }

  // release the ring buffer
  ring_free();
  // reset
  g_traceMode = TraceMode::None;
  g_traceOwnerId = 0;
  g_traceStartMs = 0;
  g_traceLines = 0;
  g_traceDrops = 0;
  g_head = g_tail = 0;
}

static void TRACE_StartWs(TraceMode m, uint32_t ownerId)
{
  // if already active -> stop cleanly first
  if (g_traceMode != TraceMode::None)
  {
    TRACE_StopWs("restart");
  }

  g_traceMode = m;
  g_traceOwnerId = ownerId;
  g_traceStartMs = millis();
  g_traceLines = 0;
  g_traceDrops = 0;

  // free any existing ring buffer, just to be safe
  ring_free();

  // try 64 x 128 bytes (8 KB)
  if (!ring_alloc(TRACE_DEFAULT_LINES, TRACE_DEFAULT_LINE_MAX))
  {
    // fallback: 48 x 112 ~ 5.3 KB
    if (!ring_alloc(48, 112))
    {
      // minimum: 32 x 96 ~ 3 KB
      (void)ring_alloc(32, 96); // if this fails too -> g_ring stays nullptr
    }
  }
  if (m == TraceMode::Raw)
  {
    CAN_HAL_SetTraceSink(TRACE_SinkRaw);
    CAN_HAL_EnableRawSniffer(true);
  }
  else
  { // Obd
    CAN_HAL_SetTraceSink(nullptr);
    CAN_HAL_EnableRawSniffer(false);
  }

  String hdr = "STARTTRACE;mode=";
  hdr += (m == TraceMode::Raw ? "raw" : "obd");
  hdr += ";ts=";
  hdr += String(g_traceStartMs);
  webSocket.text(ownerId, hdr);
}
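
// Taken together with TRACE_StopWs above, the owning client therefore sees a trace
// session framed roughly like this (counter values are illustrative):
//
//   STARTTRACE;mode=raw;ts=123456
//   TRACELINE;123500 RX 0x280 8 00 1A 2B 00 00 00 00 00   (repeated, batched by TRACE_PumpWs)
//   STOPTRACE;mode=raw;lines=420;drops=3;reason=user-stop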

// =====================================================================
// WebUI
// =====================================================================

/**
 * @brief Initializes the web-based user interface (WebUI) for the ChainLube application.
 *
@@ -139,10 +426,8 @@ void initWebUI()
                { request->redirect("/index.htm"); });
  webServer.onNotFound(WebserverNotFound_Callback);
  webServer.on("/eejson", HTTP_GET, WebServerEEJSON_Callback);
  webServer.on(
      "/doUpdate", HTTP_POST, [](AsyncWebServerRequest *request) {}, WebserverFirmwareUpdate_Callback);
  webServer.on(
      "/eeRestore", HTTP_POST, [](AsyncWebServerRequest *request) {}, WebserverEERestore_Callback);
  webServer.on("/doUpdate", HTTP_POST, [](AsyncWebServerRequest *request) {}, WebserverFirmwareUpdate_Callback);
  webServer.on("/eeRestore", HTTP_POST, [](AsyncWebServerRequest *request) {}, WebserverEERestore_Callback);

  // Start the web server
  webServer.begin();
@@ -218,6 +503,29 @@ void Webserver_Process()
    }
  }
}

  // pump the trace (safe, backpressure-aware)
  TRACE_PumpWs();

  // Watchdog: owner gone -> stop after 10 s (in addition to the disconnect stop)
  static uint32_t ownerMissingSince = 0;
  if (g_traceMode != TraceMode::None)
  {
    if (!trace_owner_online())
    {
      if (ownerMissingSince == 0)
        ownerMissingSince = millis();
      if (millis() - ownerMissingSince > 10000)
      {
        TRACE_StopWs("owner-timeout");
        ownerMissingSince = 0;
      }
    }
    else
    {
      ownerMissingSince = 0;
    }
  }
}

/**
@@ -235,6 +543,11 @@ void Webserver_Process()
 */
void Webserver_Shutdown()
{
  // stop an active trace on shutdown
  if (g_traceMode != TraceMode::None)
  {
    TRACE_StopWs("shutdown");
  }
  if (webSocket.count() > 0)
    webSocket.closeAll();
  webServer.end();
@@ -298,7 +611,6 @@ void GetFlashVersion(char *buff, size_t buff_size)
 */
void WebserverFirmwareUpdate_Callback(AsyncWebServerRequest *request, const String &filename, size_t index, uint8_t *data, size_t len, bool final)
{

  if (!index)
  {
    Debug_pushMessage("Update\n");
@@ -337,6 +649,7 @@ void WebserverFirmwareUpdate_Callback(AsyncWebServerRequest *request, const Stri
    }
  }
}

void WebserverEERestore_Callback(AsyncWebServerRequest *request,
                                 const String &filename,
                                 size_t index,
@@ -440,8 +753,8 @@ void WebserverEERestore_Callback(AsyncWebServerRequest *request,
      LubeConfig.RimDiameter_Inch = clamp_u32(json["config"]["RimDiameter_Inch"].as<uint32_t>(), 0, 30);
      LubeConfig.DistancePerRevolution_mm = clamp_u32(json["config"]["DistancePerRevolution_mm"].as<uint32_t>(), 0, 10000);
      LubeConfig.BleedingPulses = clamp_u16(json["config"]["BleedingPulses"].as<uint16_t>(), 0, 1000);
      LubeConfig.WashMode_Distance = json["config"]["WashMode_Distance"].as<uint16_t>(); // adjust limits if needed
      LubeConfig.WashMode_Interval = json["config"]["WashMode_Interval"].as<uint16_t>(); // adjust limits if needed
      LubeConfig.WashMode_Distance = json["config"]["WashMode_Distance"].as<uint16_t>();
      LubeConfig.WashMode_Interval = json["config"]["WashMode_Interval"].as<uint16_t>();
      LubeConfig.LED_Mode_Flash = json["config"]["LED_Mode_Flash"].as<bool>();
      LubeConfig.LED_Max_Brightness = json["config"]["LED_Max_Brightness"].as<uint8_t>();
      LubeConfig.LED_Min_Brightness = json["config"]["LED_Min_Brightness"].as<uint8_t>();
@@ -483,15 +796,12 @@ void WebserverEERestore_Callback(AsyncWebServerRequest *request,
      PersistenceData.odometer = json["persis"]["odometer"].as<uint32_t>();
      PersistenceData.checksum = json["persis"]["checksum"].as<uint32_t>();

      // Optional: sanity auto-correction in RAM (no EEPROM writes here!)
      {
        uint32_t sanity = ConfigSanityCheck(true);
        if (sanity > 0)
        {
          MaintainDTC(DTC_EEPROM_CFG_SANITY, true, sanity);
          Debug_pushMessage("Restore: ConfigSanity corrected (mask=0x%08lX)\n", sanity);
        }
      }

      ee_done = true;
    }
@@ -554,6 +864,10 @@ void WebServerEEJSON_Callback(AsyncWebServerRequest *request)
  request->send(response);
}

// =====================================================================
// WebSocket Handling
// =====================================================================

/**
 * @brief Callback function for handling WebSocket events.
 *
@@ -584,10 +898,17 @@ void WebsocketEvent_Callback(AsyncWebSocket *server, AsyncWebSocketClient *clien
  }
  case WS_EVT_DISCONNECT:
    Debug_pushMessage("WebSocket client #%u disconnected\n", client->id());
    // if this client is the trace owner: stop the trace immediately
    if (g_traceOwnerId == client->id())
      TRACE_StopWs("owner-disconnect");
    break;

  case WS_EVT_DATA:
    g_wsCurrentClient = client; // for HandleMessage -> owner ID
    Websocket_HandleMessage(arg, data, len);
    g_wsCurrentClient = nullptr;
    break;

  case WS_EVT_PONG:
  case WS_EVT_ERROR:
    break;
@@ -614,19 +935,69 @@ void Websocket_HandleMessage(void *arg, uint8_t *data, size_t len)
    memcpy(buf.get(), data, len);
    buf[len] = '\0';

    Debug_pushMessage("Websocket-Message (len: %d): %s\n", (int)len, buf.get());
    const uint32_t senderId = g_wsCurrentClient ? g_wsCurrentClient->id() : 0;
    Debug_pushMessage("Websocket-Message from #%u (len: %d): %s\n", (unsigned)senderId, (int)len, buf.get());

    // handle trace control commands directly here (they need senderId)
    if (strncmp(buf.get(), "btn-", 4) == 0)
    {
      // Format: "btn-<identifier>[:<value>]"
      char identifier[32];
      char value[64];
      parseWebsocketString((char *)buf.get() + 4, identifier, sizeof(identifier), value, sizeof(value));

      if (strcmp(identifier, "trace-start") == 0)
      {
        // lock: only start if no trace is active
        if (g_traceMode != TraceMode::None)
        {
          String busy = "TRACEBUSY;owner=";
          busy += String(g_traceOwnerId);
          if (senderId)
            webSocket.text(senderId, busy);
        }
        else
        {
          TraceMode m = TraceMode::None;
          if (!strcmp(value, "raw"))
            m = TraceMode::Raw;
          else if (!strcmp(value, "obd"))
            m = TraceMode::Obd;

          if (m == TraceMode::None)
          {
            if (senderId)
              webSocket.text(senderId, "TRACEERROR;msg=mode-missing");
          }
          else
          {
            TRACE_StartWs(m, senderId);
          }
        }
        return;
      }
      else if (strcmp(identifier, "trace-stop") == 0)
      {
        // anyone may stop
        TRACE_StopWs("user-stop");
        // optional: ACK to the sender (the owner receives STOPTRACE anyway)
        if (senderId)
          webSocket.text(senderId, "TRACEACK;cmd=stop");
        return;
      }

      // otherwise: fall through to the "normal" button handler
      Websocket_HandleButtons((uint8_t *)buf.get() + 4);
      return;
    }
    else if (strncmp(buf.get(), "set-", 4) == 0)
    {
      Websocket_HandleSettings((uint8_t *)buf.get() + 4);
      return;
    }
    else
    {
      Debug_pushMessage("Got unknown Websocket-Message '%s' from client\n", buf.get());
      Debug_pushMessage("Got unknown Websocket-Message '%s'\n", buf.get());
    }
  }
}
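
// For reference, the trace-related control messages this handler accepts from the
// browser, and the replies it produces, assuming parseWebsocketString splits on the
// ':' shown in the format comment above (client behaviour sketched, not part of the diff):
//
//   btn-trace-start:raw   starts a RAW trace; the sender gets the STARTTRACE header,
//                         or TRACEBUSY;owner=<id> if a trace is already running
//   btn-trace-start:obd   same, but for the OBD trace mode
//   btn-trace-start       without a value is answered with TRACEERROR;msg=mode-missing
//   btn-trace-stop        stops any running trace; the sender gets TRACEACK;cmd=stop,
//                         the owner receives the STOPTRACE summary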
@@ -855,7 +1226,6 @@ void Websocket_RefreshClientData_DTCs(uint32_t client_id)
 */
void Websocket_RefreshClientData_Status(uint32_t client_id, bool send_mapping)
{

  if (send_mapping)
  {
    if (client_id > 0)
@@ -865,12 +1235,11 @@ void Websocket_RefreshClientData_Status(uint32_t client_id, bool send_mapping)
  }

  String temp = "STATUS:";

  temp.concat(String(ToString(globals.systemStatus)) + ";");

  // Guard against division by zero (capacity==0)
  uint32_t cap = LubeConfig.tankCapacity_ml;
  uint32_t remain10 = (PersistenceData.tankRemain_microL / 10); // keep your original math
  uint32_t remain10 = (PersistenceData.tankRemain_microL / 10);
  uint32_t ratio = (cap > 0) ? (remain10 / cap) : 0;
  temp.concat(String(ratio) + ";");