current files from jetson board

This commit is contained in:
Michael Weig 2026-03-05 12:13:27 +01:00
parent 537b452449
commit de12c1407c
7 changed files with 234 additions and 41 deletions

View File

@ -895,18 +895,6 @@
"display_name": "Python 3 (ipykernel)", "display_name": "Python 3 (ipykernel)",
"language": "python", "language": "python",
"name": "python3" "name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.10"
} }
}, },
"nbformat": 4, "nbformat": 4,

View File

@ -705,18 +705,6 @@
"display_name": "Python 3 (ipykernel)", "display_name": "Python 3 (ipykernel)",
"language": "python", "language": "python",
"name": "python3" "name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.10"
} }
}, },
"nbformat": 4, "nbformat": 4,

View File

@ -4,15 +4,15 @@ database:
key: _Id key: _Id
model: model:
path: "/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/xgb_model_3_groupK.joblib" path: "/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/cnn_crossVal_EarlyFusion_V2_0103.keras"
scaler: scaler:
use_scaling: true use_scaling: true
path: "/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/normalizer_min_max_global.pkl" path: "/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/scaler_crossVal_EarlyFusion_V2_0103.joblib"
mqtt: mqtt:
enabled: true enabled: true
host: "141.75.215.233" host: "141.75.223.13"
port: 1883 port: 1883
topic: "PREDICTION" topic: "PREDICTION"
client_id: "jetson-board" client_id: "jetson-board"
@ -107,4 +107,4 @@ fallback:
Blink_mean_dur: 0.38857142857142857 Blink_mean_dur: 0.38857142857142857
Blink_median_dur: 0.2 Blink_median_dur: 0.2
Pupil_mean: 3.2823675201416016 Pupil_mean: 3.2823675201416016
Pupil_IPA: 0.0036347377340156025 Pupil_IPA: 0.0036347377340156025

View File

@ -5,7 +5,7 @@ StartLimitIntervalSec=0
[Service] [Service]
Type=oneshot Type=oneshot
User=edgekit User=edgekit
ExecStart=~/anaconda3/envs/p310_FS/bin/python /home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/predict_sample.py ExecStart=/home/edgekit/anaconda3/envs/p310_FS_TF/bin/python /home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/predict_sample.py
[Install] [Install]
WantedBy=multi-user.target WantedBy=multi-user.target

View File

@ -2,10 +2,11 @@
Description=Run predict sample every 5 seconds Description=Run predict sample every 5 seconds
[Timer] [Timer]
OnBootSec=5 OnActiveSec=60
OnUnitActiveSec=5 OnUnitActiveSec=5
AccuracySec=1s AccuracySec=1s
Unit=predict.service Unit=predict.service
[Install] [Install]
WantedBy=timers.target WantedBy=timers.target

View File

@ -7,7 +7,6 @@ import sys
import yaml import yaml
import pickle import pickle
sys.path.append('/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/tools') sys.path.append('/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/tools')
import db_helpers import db_helpers
import joblib import joblib
import paho.mqtt.client as mqtt import paho.mqtt.client as mqtt
@ -54,8 +53,9 @@ def callModel(sample, model_path):
if suffix in {".pkl", ".joblib"}: if suffix in {".pkl", ".joblib"}:
model = _load_serialized(model_path) model = _load_serialized(model_path)
elif suffix == ".keras": elif suffix == ".keras":
import tensorflow as tf import tensorflow
model = tf.keras.models.load_model(model_path) tensorflow.get_logger().setLevel("ERROR")
model = tensorflow.keras.models.load_model(model_path)
else: else:
raise ValueError(f"Unsupported model format: {suffix}. Use .pkl, .joblib, or .keras.") raise ValueError(f"Unsupported model format: {suffix}. Use .pkl, .joblib, or .keras.")
@ -65,9 +65,7 @@ def callModel(sample, model_path):
if suffix == ".keras": if suffix == ".keras":
x_full = x x_full = x
# Future model (35 features): keep this call when your new model is active. prediction = (model.predict(x_full[:, :35], verbose=0) > 0.5).astype(int)
# prediction = model.predict(x_full[:, :35], verbose=0)
prediction = model.predict(x_full[:, :20], verbose=0)
else: else:
if hasattr(model, "predict"): if hasattr(model, "predict"):
@ -126,10 +124,12 @@ def sendMessage(config_file_path, message):
# Serialize the message to JSON # Serialize the message to JSON
payload = json.dumps(message, ensure_ascii=False) payload = json.dumps(message, ensure_ascii=False)
print(payload) # for debugging purposes print(payload)
# publish via MQTT using config parameters above.
client = mqtt.Client(client_id=mqtt_cfg.get("client_id", "predictor-01")) client = mqtt.Client(client_id=mqtt_cfg.get("client_id", "predictor-01"),
callback_api_version=mqtt.CallbackAPIVersion.VERSION2)
callback_api_version=mqtt.CallbackAPIVersion.VERSION2
if "username" in mqtt_cfg and mqtt_cfg.get("username"): if "username" in mqtt_cfg and mqtt_cfg.get("username"):
client.username_pw_set(mqtt_cfg["username"], mqtt_cfg.get("password")) client.username_pw_set(mqtt_cfg["username"], mqtt_cfg.get("password"))
client.connect(mqtt_cfg.get("host", "localhost"), int(mqtt_cfg.get("port", 1883)), 60) client.connect(mqtt_cfg.get("host", "localhost"), int(mqtt_cfg.get("port", 1883)), 60)
@ -207,7 +207,7 @@ def scale_sample(sample, use_scaling=False, scaler_path=None):
return df.iloc[0] if isinstance(sample, pd.Series) else df return df.iloc[0] if isinstance(sample, pd.Series) else df
def main(): def main():
pd.set_option('future.no_silent_downcasting', True) pd.set_option('future.no_silent_downcasting', True) # kann ggf raus
config_file_path = Path("/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/config.yaml") config_file_path = Path("/home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/config.yaml")
with config_file_path.open("r", encoding="utf-8") as f: with config_file_path.open("r", encoding="utf-8") as f:

View File

@ -0,0 +1,216 @@
# Predict Service and Timer Documentation
## Overview
This setup uses **systemd services and timers** to repeatedly execute a
Python script that runs a prediction on the latest sample and publishes
the result as an MQTT message.
The systemd unit files are typically stored in:
/etc/systemd/system/
For this setup, the relevant files are:
/etc/systemd/system/predict.service
/etc/systemd/system/predict.timer
These files define the service execution and the timer scheduling.
- `predict.service` -- defines how the Python script is executed
- `predict.timer` -- schedules the repeated execution of the service
The timer triggers the service **every 5 seconds** after the first
activation.
------------------------------------------------------------------------
# Systemd Timer
File: `predict.timer`
``` ini
[Unit]
Description=Run predict sample every 5 seconds
[Timer]
OnActiveSec=60
OnUnitActiveSec=5
AccuracySec=1s
Unit=predict.service
[Install]
WantedBy=timers.target
```
## Behavior
- **OnActiveSec=60**\
    The first execution is scheduled **60 seconds after the timer is activated**.
- **OnUnitActiveSec=5**\
After the service has run once, it will be triggered again **every 5
seconds**.
- **AccuracySec=1s**\
Allows systemd to schedule the timer with **1 second precision**.
- **Unit=predict.service**\
Defines which service should be triggered by the timer.
------------------------------------------------------------------------
# Systemd Service
File: `predict.service`
``` ini
[Unit]
Description=Predict latest sample and send message
After=network.target
StartLimitIntervalSec=0
[Service]
Type=oneshot
User=edgekit
ExecStart=/home/edgekit/anaconda3/envs/p310_FS_TF/bin/python /home/edgekit/MSY_FS/fahrsimulator_msy2526_ai/predict_pipeline/predict_sample.py
[Install]
WantedBy=multi-user.target
```
## Behavior
- **Type=oneshot**\
The service runs the script once and then exits.
- **User=edgekit**\
The script is executed under the `edgekit` user.
- **ExecStart**\
Executes the Python script using the specified conda environment.
- **After=network.target**\
Ensures the service only runs after the network is available.
------------------------------------------------------------------------
# Execution Flow
1. The **timer starts** after it is enabled.
2. After **60 seconds**, the first execution happens (this delay allows the camera processing initialization to complete).
3. The timer triggers `predict.service`.
4. The service runs `predict_sample.py`.
5. Once the script finishes, the service exits.
6. The timer triggers the service again **every 5 seconds**.
------------------------------------------------------------------------
# Debugging and Monitoring
## View Live Output
All `print()` output from the Python script is written to the **systemd
journal**.
Follow the output live with:
``` bash
journalctl -u predict.service -f
```
This command is typically the most useful for debugging.
------------------------------------------------------------------------
# Common Systemd Commands
## Check Service Status
``` bash
systemctl status predict.service
```
Shows the last execution result and recent log lines.
------------------------------------------------------------------------
## Check Timer Status
``` bash
systemctl status predict.timer
```
Shows when the timer last ran and when it will run next.
------------------------------------------------------------------------
## List All Timers
``` bash
systemctl list-timers
```
Displays all active timers and their next scheduled execution.
------------------------------------------------------------------------
# Manual Execution
To run the service manually once:
``` bash
systemctl start predict.service
```
------------------------------------------------------------------------
# Restarting the Systemd Units
## Restart the Service
``` bash
systemctl restart predict.service
```
## Restart the Timer
``` bash
systemctl restart predict.timer
```
------------------------------------------------------------------------
## Reload Systemd After Changes
If `.service` or `.timer` files were modified:
``` bash
systemctl daemon-reload
systemctl restart predict.timer
systemctl restart predict.service
```
------------------------------------------------------------------------
## Enabling the Timer
To ensure the timer starts automatically on system boot:
``` bash
systemctl enable predict.timer
systemctl start predict.timer
```
------------------------------------------------------------------------
# Summary
- `predict.timer` schedules periodic execution.
- `predict.service` runs the Python prediction script.
- The script runs **every 5 seconds** after the initial delay.
- Logs and script output are available through:
``` bash
journalctl -u predict.service -f
```