c8e3016fd6
- Implement `sigen_daemon.py` to poll Sigenergy plant metrics and store snapshots. - Create `web_daemon.py` for serving a web interface with various endpoints. - Add debug scripts: - `debug_duplicates.py` to find duplicate target times in forecast data. - `debug_energy_forecast.py` to print baseline energy forecast curves. - `debug_oracle_evaluations.py` to run the oracle evaluator. - `debug_sigen.py` to inspect stored Sigenergy plant snapshots. - `debug_weather.py` to trace resolved truth data. - `modbus_test.py` for exploring Sigenergy plants or inverters over Modbus TCP. - Introduce `oracle_evaluator.py` for evaluating stored oracle predictions against actuals. - Add TCN training scripts in `tcn` directory for training usage sequence models.
199 lines
5.7 KiB
Python
199 lines
5.7 KiB
Python
#!/usr/bin/env python3
|
|
"""Debug script to inspect stored Sigenergy plant snapshots."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
from datetime import timezone
|
|
import json
|
|
|
|
from gibil.classes.env_loader import EnvLoader
|
|
from gibil.classes.sigen.store import SigenStore
|
|
|
|
|
|
def main() -> None:
    """Entry point: load the environment, parse CLI options, and print the view.

    Dispatches to the raw-snapshot printer or the rollup printer depending
    on the ``--view`` option.
    """
    EnvLoader().load()
    args = parse_args()
    store = SigenStore.from_env()

    if args.view == "raw":
        print_raw_snapshots(load_raw_snapshots(store, args.limit))
        return

    print_rollup(load_rollup(store, args.view, args.limit), args.view)
|
|
|
|
|
|
def parse_args() -> argparse.Namespace:
|
|
parser = argparse.ArgumentParser(
|
|
description="Inspect stored Sigenergy plant snapshots."
|
|
)
|
|
parser.add_argument(
|
|
"--view",
|
|
choices=("raw", "1m", "15m", "1h"),
|
|
default="raw",
|
|
help="View to inspect. Defaults to raw.",
|
|
)
|
|
parser.add_argument(
|
|
"--limit",
|
|
type=int,
|
|
default=10,
|
|
help="Number of most recent rows/buckets to show. Defaults to 10.",
|
|
)
|
|
return parser.parse_args()
|
|
|
|
|
|
def load_raw_snapshots(store: SigenStore, limit: int) -> list[tuple]:
    """Fetch the ``limit`` most recent raw snapshot rows, newest first.

    Each row is a 12-tuple in the exact column order of the SELECT below;
    ``print_raw_snapshots`` unpacks rows in the same order.

    NOTE(review): reaches into the store's private ``_connection()`` helper —
    SigenStore does not appear to expose a public ad-hoc query API here.
    """
    with store._connection() as connection:
        with connection.cursor() as cursor:
            # %s placeholder style suggests a DB-API driver such as psycopg;
            # the limit is passed as a bound parameter, not interpolated.
            cursor.execute(
                """
                SELECT
                    observed_at,
                    received_at,
                    solar_power_w,
                    load_power_w,
                    battery_soc_pct,
                    battery_power_w,
                    grid_import_w,
                    grid_export_w,
                    plant_active_power_w,
                    accumulated_pv_energy_kwh,
                    daily_consumed_energy_kwh,
                    raw_values
                FROM sigen_plant_snapshots
                ORDER BY observed_at DESC
                LIMIT %s
                """,
                (limit,),
            )
            return cursor.fetchall()
|
|
|
|
|
|
def load_rollup(store: SigenStore, view: str, limit: int) -> list[tuple]:
    """Fetch the ``limit`` most recent aggregated buckets for a rollup view.

    ``view`` must be one of ``"1m"``, ``"15m"``, ``"1h"``; any other value
    raises ``KeyError`` from the mapping lookup below. Rows are 11-tuples in
    the SELECT's column order, matching ``print_rollup``'s unpacking.
    """
    # Map the CLI view name to its database relation. Interpolating the name
    # into the SQL (f-string) is safe only because it comes from this closed
    # dict, never from user input directly.
    view_name = {
        "1m": "sigen_plant_snapshots_1m",
        "15m": "sigen_plant_snapshots_15m",
        "1h": "sigen_plant_snapshots_1h",
    }[view]

    # NOTE(review): relies on the store's private ``_connection()`` helper,
    # same as load_raw_snapshots.
    with store._connection() as connection:
        with connection.cursor() as cursor:
            cursor.execute(
                f"""
                SELECT
                    bucket,
                    sample_count,
                    avg_solar_power_w,
                    max_solar_power_w,
                    avg_load_power_w,
                    max_load_power_w,
                    avg_grid_import_w,
                    max_grid_import_w,
                    avg_grid_export_w,
                    max_grid_export_w,
                    avg_battery_soc_pct
                FROM {view_name}
                ORDER BY bucket DESC
                LIMIT %s
                """,
                (limit,),
            )
            return cursor.fetchall()
|
|
|
|
|
|
def print_raw_snapshots(rows: list[tuple]) -> None:
    """Print one formatted line per raw snapshot row.

    Rows must follow the column order produced by ``load_raw_snapshots``.
    Any ``*_error`` keys found in a row's ``raw_values`` mapping are dumped
    on an extra indented line.
    """
    print(f"raw_snapshots={len(rows)}")
    for row in rows:
        (
            observed_at,
            received_at,
            solar_power_w,
            load_power_w,
            battery_soc_pct,
            battery_power_w,
            grid_import_w,
            grid_export_w,
            plant_active_power_w,
            accumulated_pv_energy_kwh,
            daily_consumed_energy_kwh,
            raw_values,
        ) = row
        # Ingestion lag: wall-clock delay between observation and storage.
        lag_seconds = (received_at - observed_at).total_seconds()
        fields = [
            _fmt_time(observed_at),
            f"solar={_fmt_w(solar_power_w)}",
            f"load={_fmt_w(load_power_w)}",
            f"soc={_fmt_pct(battery_soc_pct)}",
            f"battery={_fmt_w(battery_power_w)}",
            f"import={_fmt_w(grid_import_w)}",
            f"export={_fmt_w(grid_export_w)}",
            f"plant={_fmt_w(plant_active_power_w)}",
            f"pv_total={_fmt_kwh(accumulated_pv_energy_kwh)}",
            f"load_today={_fmt_kwh(daily_consumed_energy_kwh)}",
            f"lag_s={lag_seconds:.1f}",
        ]
        print(" ".join(fields))
        if not raw_values:
            continue
        errors = {
            key: value
            for key, value in raw_values.items()
            if key.endswith("_error")
        }
        if errors:
            # default=str keeps non-JSON-serializable values printable.
            print(f"    errors={json.dumps(errors, default=str)}")
|
|
|
|
|
|
def print_rollup(rows: list[tuple], view: str) -> None:
    """Print one formatted summary line per rollup bucket.

    Rows must follow the column order produced by ``load_rollup``; ``view``
    is echoed in the header line (e.g. ``1h_buckets=10``).
    """
    print(f"{view}_buckets={len(rows)}")
    for row in rows:
        (
            bucket,
            sample_count,
            avg_solar_power_w,
            max_solar_power_w,
            avg_load_power_w,
            max_load_power_w,
            avg_grid_import_w,
            max_grid_import_w,
            avg_grid_export_w,
            max_grid_export_w,
            avg_battery_soc_pct,
        ) = row
        fields = [
            _fmt_time(bucket),
            # :4 right-aligns the count so columns line up across lines.
            f"samples={sample_count:4}",
            f"solar_avg={_fmt_w(avg_solar_power_w)}",
            f"solar_max={_fmt_w(max_solar_power_w)}",
            f"load_avg={_fmt_w(avg_load_power_w)}",
            f"load_max={_fmt_w(max_load_power_w)}",
            f"import_avg={_fmt_w(avg_grid_import_w)}",
            f"import_max={_fmt_w(max_grid_import_w)}",
            f"export_avg={_fmt_w(avg_grid_export_w)}",
            f"export_max={_fmt_w(max_grid_export_w)}",
            f"soc_avg={_fmt_pct(avg_battery_soc_pct)}",
        ]
        print(" ".join(fields))
|
|
|
|
|
|
def _fmt_time(value) -> str:
|
|
return value.astimezone(timezone.utc).isoformat(timespec="seconds")
|
|
|
|
|
|
def _fmt_w(value) -> str:
|
|
if value is None:
|
|
return "None"
|
|
return f"{value:.0f}W"
|
|
|
|
|
|
def _fmt_pct(value) -> str:
|
|
if value is None:
|
|
return "None"
|
|
return f"{value:.1f}%"
|
|
|
|
|
|
def _fmt_kwh(value) -> str:
|
|
if value is None:
|
|
return "None"
|
|
return f"{value:.2f}kWh"
|
|
|
|
|
|
# Allow running this debug script directly from the command line.
if __name__ == "__main__":
    main()
|