Add new daemons and debug scripts for Sigenergy and Oracle functionalities
- Implement `sigen_daemon.py` to poll Sigenergy plant metrics and store snapshots.
- Create `web_daemon.py` to serve a web interface with various endpoints.
- Add debug scripts:
  - `debug_duplicates.py` to find duplicate target times in forecast data.
  - `debug_energy_forecast.py` to print baseline energy forecast curves.
  - `debug_oracle_evaluations.py` to run the oracle evaluator.
  - `debug_sigen.py` to inspect stored Sigenergy plant snapshots.
  - `debug_weather.py` to trace resolved truth data.
  - `modbus_test.py` to explore Sigenergy plants or inverters over Modbus TCP.
- Introduce `oracle_evaluator.py` to evaluate stored oracle predictions against actuals.
- Add TCN training scripts in the `tcn` directory for training usage sequence models.
This commit is contained in:
@@ -0,0 +1,15 @@
|
||||
from gibil.classes.oracle.builder import EnergyForecastBuilder, EnergyOracleBuilder
|
||||
from gibil.classes.oracle.config import EnergyForecastConfig
|
||||
from gibil.classes.oracle.display import OracleDisplay
|
||||
from gibil.classes.oracle.quality_display import OracleQualityDisplay
|
||||
from gibil.classes.oracle.store import OracleStore, OracleStoreConfig
|
||||
|
||||
# Public API of the oracle package (kept alphabetical).
# `EnergyForecastBuilder` is a backwards-compatible alias of `EnergyOracleBuilder`.
__all__ = [
    "EnergyForecastBuilder",
    "EnergyForecastConfig",
    "EnergyOracleBuilder",
    "OracleDisplay",
    "OracleQualityDisplay",
    "OracleStore",
    "OracleStoreConfig",
]
|
||||
@@ -0,0 +1,191 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from gibil.classes.models import NetPowerForecastRun, PowerForecastPoint, PowerForecastRun
|
||||
from gibil.classes.oracle.config import EnergyForecastConfig
|
||||
from gibil.classes.predictors.net_forecaster import NetPowerForecaster
|
||||
from gibil.classes.predictors.solar_rolling_regression import RollingSolarRegressionOracle
|
||||
from gibil.classes.predictors.usage_daily import DailyUsageOracle
|
||||
from gibil.classes.sigen.store import SigenStore
|
||||
from gibil.classes.weather.store import WeatherStore
|
||||
|
||||
|
||||
class EnergyOracleBuilder:
    """Builds production, load, and net oracle curves.

    Combines a rolling solar regression forecast with a daily usage
    profile forecast, resamples the solar curve onto the configured
    oracle time grid, aligns the load forecast to the same grid, and
    derives the net (production minus consumption) run.
    """

    def __init__(
        self,
        weather_store: WeatherStore,
        sigen_store: SigenStore,
        config: EnergyForecastConfig,
    ) -> None:
        self.weather_store = weather_store
        self.sigen_store = sigen_store
        self.config = config

    @classmethod
    def from_env(cls) -> "EnergyOracleBuilder":
        """Construct a builder whose stores and config come from environment variables."""
        return cls(
            weather_store=WeatherStore.from_env(),
            sigen_store=SigenStore.from_env(),
            config=EnergyForecastConfig.from_env(),
        )

    def build(self) -> tuple[PowerForecastRun, PowerForecastRun, NetPowerForecastRun]:
        """Return ``(solar_run, load_run, net_run)`` issued at "now" (UTC)."""
        issued_at = datetime.now(timezone.utc)
        hourly_solar_run = RollingSolarRegressionOracle(
            weather_store=self.weather_store,
            sigen_store=self.sigen_store,
            config=self.config,
        ).forecast(issued_at=issued_at)
        # The solar oracle emits coarse points; resample onto the finer
        # oracle grid so the load forecast can share the same target times.
        solar_run = self._resample_power_run(
            hourly_solar_run,
            issued_at=issued_at,
            step_minutes=self.config.oracle_step_minutes,
        )
        load_run = DailyUsageOracle(
            sigen_store=self.sigen_store,
            config=self.config,
        ).forecast(
            target_times=[point.target_at for point in solar_run.points],
            issued_at=issued_at,
        )
        net_run = NetPowerForecaster().combine(solar_run, load_run)
        return solar_run, load_run, net_run

    def _resample_power_run(
        self,
        run: PowerForecastRun,
        issued_at: datetime,
        step_minutes: int,
    ) -> PowerForecastRun:
        """Resample *run* onto a regular ``step_minutes`` grid via linear interpolation.

        Returns the original run unchanged when resampling is impossible
        (non-positive step, fewer than two points) or yields no samples.
        """
        if step_minutes <= 0 or len(run.points) < 2:
            return run

        points = sorted(run.points, key=lambda point: point.target_at)
        # Do not extrapolate beyond the source run, and cap at the horizon.
        end_at = min(
            points[-1].target_at,
            issued_at + timedelta(hours=self.config.horizon_hours),
        )
        target_at = self._ceil_time(issued_at, step_minutes)
        sampled_points: list[PowerForecastPoint] = []

        while target_at <= end_at:
            point = self._interpolate_power_point(points, target_at, issued_at)
            if point is not None:
                sampled_points.append(point)
            target_at += timedelta(minutes=step_minutes)

        # Anchor the curve at issued_at so the chart starts at "now".
        current_point = self._current_power_point(points, issued_at)
        if current_point is not None:
            sampled_points.insert(0, current_point)

        if not sampled_points:
            return run

        return PowerForecastRun(
            issued_at=run.issued_at,
            kind=run.kind,
            source=run.source,
            model_version=f"{run.model_version}_sampled_{step_minutes}m",
            points=sampled_points,
        )

    def _interpolate_power_point(
        self,
        points: list[PowerForecastPoint],
        target_at: datetime,
        issued_at: datetime,
    ) -> PowerForecastPoint | None:
        """Linearly interpolate a point at *target_at* between its two neighbours.

        Returns ``None`` when *target_at* falls outside the span of *points*
        (callers treat that as "skip this sample"). *points* must be sorted.
        """
        if target_at < points[0].target_at or target_at > points[-1].target_at:
            return None

        for index in range(len(points) - 1):
            left = points[index]
            right = points[index + 1]
            if left.target_at <= target_at <= right.target_at:
                ratio = self._time_ratio(left.target_at, right.target_at, target_at)
                p10 = self._lerp(left.p10_power_w, right.p10_power_w, ratio)
                p50 = self._lerp(left.p50_power_w, right.p50_power_w, ratio)
                p90 = self._lerp(left.p90_power_w, right.p90_power_w, ratio)
                return PowerForecastPoint(
                    target_at=target_at,
                    horizon_minutes=max(
                        0, round((target_at - issued_at).total_seconds() / 60)
                    ),
                    expected_power_w=p50,
                    p10_power_w=p10,
                    p50_power_w=p50,
                    p90_power_w=p90,
                    confidence=self._lerp(left.confidence, right.confidence, ratio),
                    # Provenance is taken from the left neighbour.
                    source=left.source,
                    model_version=left.model_version,
                    metadata={
                        "interpolated": True,
                        "left_target_at": left.target_at.isoformat(),
                        "right_target_at": right.target_at.isoformat(),
                    },
                )

        return None

    def _current_power_point(
        self,
        points: list[PowerForecastPoint],
        issued_at: datetime,
    ) -> PowerForecastPoint | None:
        """Clone the earliest forecast point onto *issued_at* (horizon 0)."""
        if not points:
            return None

        first = points[0]
        return PowerForecastPoint(
            target_at=issued_at,
            horizon_minutes=0,
            expected_power_w=first.p50_power_w,
            p10_power_w=first.p10_power_w,
            p50_power_w=first.p50_power_w,
            p90_power_w=first.p90_power_w,
            confidence=first.confidence,
            source=first.source,
            model_version=first.model_version,
            metadata={
                "interpolated": True,
                "anchored_to": first.target_at.isoformat(),
            },
        )

    def _ceil_time(self, value: datetime, step_minutes: int) -> datetime:
        """Round *value* up to the next epoch-aligned ``step_minutes`` boundary (UTC)."""
        step_seconds = step_minutes * 60
        timestamp = value.timestamp()
        remainder = timestamp % step_seconds
        if remainder:
            timestamp += step_seconds - remainder
        return datetime.fromtimestamp(timestamp, timezone.utc)

    def _time_ratio(
        self,
        left: datetime,
        right: datetime,
        value: datetime,
    ) -> float:
        """Return the fraction of the way *value* lies from *left* to *right* (0.0 on a degenerate span)."""
        span = (right - left).total_seconds()
        if span <= 0:
            return 0.0
        return (value - left).total_seconds() / span

    def _lerp(self, left: float, right: float, ratio: float) -> float:
        """Linear interpolation between *left* and *right* at *ratio*."""
        return left + (right - left) * ratio


# Backwards-compatible alias for callers using the pre-rename import path.
EnergyForecastBuilder = EnergyOracleBuilder
|
||||
@@ -0,0 +1,60 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from os import environ
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class EnergyForecastConfig:
    """Tunable parameters for the energy forecasting oracles.

    Field defaults double as the fallback values used by :meth:`from_env`
    when the corresponding ``ASTRAPE_*`` environment variable is unset,
    so each default is defined in exactly one place.
    """

    horizon_hours: int = 24
    oracle_step_minutes: int = 15
    fallback_solar_peak_w: float = 10000
    solar_peak_headroom: float = 1.05
    solar_scale: float = 1.0
    solar_training_days: int = 30
    solar_min_training_samples: int = 24
    solar_ridge_lambda: float = 0.1
    load_lookback_minutes: int = 30
    load_profile_days: int = 30
    load_profile_bucket_minutes: int = 15
    load_profile_min_samples: int = 5
    load_recent_blend: float = 0.35
    local_timezone: str = "Europe/Stockholm"

    @classmethod
    def from_env(cls) -> "EnergyForecastConfig":
        """Build a config from ``ASTRAPE_*`` environment variables.

        Unset variables fall back to the dataclass field defaults; this
        avoids the previous duplication of every default as a string
        literal, which could silently drift from the field definition.
        """
        defaults = cls()
        # environ.get returns the default object untouched when the key is
        # missing, so int()/float() see either the env string or the typed
        # field default — both convert correctly.
        return cls(
            horizon_hours=int(
                environ.get("ASTRAPE_ENERGY_FORECAST_HOURS", defaults.horizon_hours)
            ),
            oracle_step_minutes=int(
                environ.get("ASTRAPE_ORACLE_STEP_MINUTES", defaults.oracle_step_minutes)
            ),
            fallback_solar_peak_w=float(
                environ.get("ASTRAPE_SOLAR_PEAK_W", defaults.fallback_solar_peak_w)
            ),
            solar_peak_headroom=float(
                environ.get("ASTRAPE_SOLAR_PEAK_HEADROOM", defaults.solar_peak_headroom)
            ),
            solar_scale=float(
                environ.get("ASTRAPE_SOLAR_FORECAST_SCALE", defaults.solar_scale)
            ),
            solar_training_days=int(
                environ.get("ASTRAPE_SOLAR_TRAINING_DAYS", defaults.solar_training_days)
            ),
            solar_min_training_samples=int(
                environ.get(
                    "ASTRAPE_SOLAR_MIN_TRAINING_SAMPLES",
                    defaults.solar_min_training_samples,
                )
            ),
            solar_ridge_lambda=float(
                environ.get("ASTRAPE_SOLAR_RIDGE_LAMBDA", defaults.solar_ridge_lambda)
            ),
            load_lookback_minutes=int(
                environ.get(
                    "ASTRAPE_LOAD_LOOKBACK_MINUTES", defaults.load_lookback_minutes
                )
            ),
            load_profile_days=int(
                environ.get("ASTRAPE_LOAD_PROFILE_DAYS", defaults.load_profile_days)
            ),
            load_profile_bucket_minutes=int(
                environ.get(
                    "ASTRAPE_LOAD_PROFILE_BUCKET_MINUTES",
                    defaults.load_profile_bucket_minutes,
                )
            ),
            load_profile_min_samples=int(
                environ.get(
                    "ASTRAPE_LOAD_PROFILE_MIN_SAMPLES",
                    defaults.load_profile_min_samples,
                )
            ),
            load_recent_blend=float(
                environ.get("ASTRAPE_LOAD_RECENT_BLEND", defaults.load_recent_blend)
            ),
            # ASTRAPE_LOCAL_TIMEZONE wins, then TZ, then the field default.
            local_timezone=environ.get(
                "ASTRAPE_LOCAL_TIMEZONE",
                environ.get("TZ", defaults.local_timezone),
            ),
        )
|
||||
@@ -0,0 +1,434 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from dataclasses import asdict
|
||||
from datetime import datetime
|
||||
|
||||
from gibil.classes.oracle.builder import EnergyOracleBuilder
|
||||
from gibil.classes.models import (
|
||||
NetPowerForecastPoint,
|
||||
PowerForecastPoint,
|
||||
PowerForecastRun,
|
||||
)
|
||||
from gibil.classes.oracle.store import OracleStore
|
||||
|
||||
|
||||
class OracleDisplay:
    """Renders energy oracle curves for the Astrape web UI.

    ``render`` returns a self-contained HTML/JS panel; ``data_payload``
    builds the JSON that panel fetches from ``/api/oracle``.
    """

    def render(self) -> str:
        """Return the oracle chart panel markup plus its inline script.

        The script registers itself on ``window.astrapeModules`` and calls
        ``init()`` immediately, which re-fetches ``/api/oracle`` every 5
        seconds and redraws the canvas chart.
        """
        return """
<section class="panel oracle-panel" data-module="oracle-display">
  <div class="panel-heading">
    <div>
      <h2>Energy Oracle</h2>
      <p>Solar, usage, and net power projection curves</p>
    </div>
    <div class="control-row">
      <div id="oracle-legend" class="legend-control"></div>
      <label>
        Curve
        <select id="oracle-variable">
          <option value="net">Net power</option>
          <option value="history">Past net predictions</option>
          <option value="solar">Solar production</option>
          <option value="load">Consumption</option>
        </select>
      </label>
    </div>
  </div>
  <div class="chart-shell">
    <canvas id="oracle-chart" width="1100" height="420"></canvas>
  </div>
</section>
<script>
window.astrapeModules = window.astrapeModules || {};
window.astrapeModules.oracleDisplay = (() => {
  const colors = {
    actual: "#34d399",
    historical: "#a78bfa",
    p10: "#60a5fa",
    p50: "#f8fafc",
    p90: "#fbbf24",
    safe: "#fb7185"
  };

  function init() {
    document.getElementById("oracle-variable").addEventListener("change", render);
    refresh();
    setInterval(refresh, 5000);
  }

  async function refresh() {
    const response = await fetch("/api/oracle", { cache: "no-store" });
    window.astrapeOracleData = await response.json();
    render();
  }

  function render() {
    const payload = window.astrapeOracleData || {};
    const variable = document.getElementById("oracle-variable").value;
    const series = buildSeries(payload, variable);
    renderLegend(series);
    drawChart(series, payload);
  }

  function renderLegend(series) {
    const legend = document.getElementById("oracle-legend");
    legend.innerHTML = "";
    series.forEach((item) => {
      const entry = document.createElement("div");
      entry.className = "horizon-option";
      entry.innerHTML = `
        <span class="legend-swatch" style="${legendSwatchStyle(item)}"></span>
        <span>${item.label}</span>
      `;
      legend.appendChild(entry);
    });
  }

  function legendSwatchStyle(item) {
    if (item.dash) {
      return `background: repeating-linear-gradient(90deg, ${item.color} 0 8px, transparent 8px 13px); border: 1px solid ${item.color};`;
    }
    return `background: ${item.color}`;
  }

  function buildSeries(payload, variable) {
    if (variable === "solar") {
      return [
        { label: "Observed solar", color: colors.actual, width: 3, markers: true, points: actualPoints(payload.actual_points, "solar_power_w", payload.now) },
        { label: "Current solar low", color: colors.p10, width: 2, dash: [6, 5], points: powerPoints(payload.solar_points, "p10_power_w") },
        { label: "Current solar expected", color: colors.p50, width: 3, points: powerPoints(payload.solar_points, "p50_power_w") },
        { label: "Current solar high", color: colors.p90, width: 2, dash: [6, 5], points: powerPoints(payload.solar_points, "p90_power_w") },
        ...historicalPowerSeries(payload.historical_solar_runs || [], "Solar forecast"),
      ];
    }
    if (variable === "load") {
      return [
        { label: "Observed load", color: colors.actual, width: 3, markers: true, points: actualPoints(payload.actual_points, "load_power_w", payload.now) },
        { label: "Current load low", color: colors.p10, width: 2, dash: [6, 5], points: powerPoints(payload.load_points, "p10_power_w") },
        { label: "Current load expected", color: colors.p50, width: 3, points: powerPoints(payload.load_points, "p50_power_w") },
        { label: "Current load high", color: colors.p90, width: 2, dash: [6, 5], points: powerPoints(payload.load_points, "p90_power_w") },
        ...historicalPowerSeries(payload.historical_load_runs || [], "Load forecast"),
      ];
    }
    if (variable === "history") {
      return [
        { label: "Observed net", color: colors.actual, width: 3, markers: true, points: actualPoints(payload.actual_points, "net_power_w", payload.now) },
        ...historicalNetSeries(payload.historical_net_runs || []),
      ];
    }
    return [
      { label: "Observed net", color: colors.actual, width: 3, markers: true, points: actualPoints(payload.actual_points, "net_power_w", payload.now) },
      { label: "Current net low", color: colors.p10, width: 2, dash: [6, 5], points: netPoints(payload.net_points, "p10_net_power_w") },
      { label: "Current net expected", color: colors.p50, width: 3, points: netPoints(payload.net_points, "p50_net_power_w") },
      { label: "Current net high", color: colors.p90, width: 2, dash: [6, 5], points: netPoints(payload.net_points, "p90_net_power_w") },
      ...historicalNetSeries(payload.historical_net_runs || []),
    ];
  }

  function historicalNetSeries(runs) {
    const palette = ["#a78bfa", "#c084fc", "#818cf8", "#38bdf8", "#f472b6", "#f59e0b"];
    return runs.map((run, index) => ({
      label: `Net forecast ${formatLag(run)}`,
      color: palette[index % palette.length],
      width: 2,
      dash: [3, 5],
      points: (run.points || []).map((point) => ({
        target_at: point.target_at,
        value: point.p50_net_power_w ?? point.expected_net_power_w
      })).filter((point) => new Date(point.target_at).getTime() >= new Date(run.issued_at).getTime())
    }));
  }

  function historicalPowerSeries(runs, labelPrefix) {
    const palette = ["#a78bfa", "#c084fc", "#818cf8", "#38bdf8", "#f472b6", "#f59e0b"];
    return runs.map((run, index) => ({
      label: `${labelPrefix} ${formatLag(run)}`,
      color: palette[index % palette.length],
      width: 2,
      dash: [3, 5],
      points: (run.points || []).map((point) => ({
        target_at: point.target_at,
        value: point.p50_power_w ?? point.expected_power_w
      })).filter((point) => new Date(point.target_at).getTime() >= new Date(run.issued_at).getTime())
    }));
  }

  function formatLag(run) {
    if (run.lag_hours) return `${run.lag_hours}h ago`;
    return `issued ${formatIssuedAge(run.issued_at)}`;
  }

  function formatIssuedAge(issuedAt) {
    const ageMs = Math.max(0, new Date(window.astrapeOracleData.now).getTime() - new Date(issuedAt).getTime());
    const minutes = Math.round(ageMs / 60000);
    if (minutes < 60) return `${minutes}m ago`;
    return `${Math.round(minutes / 60)}h ago`;
  }

  function actualPoints(points, key, nowIso) {
    const parsedNow = new Date(nowIso).getTime();
    const now = Number.isFinite(parsedNow) ? parsedNow : Date.now();
    return (points || [])
      .filter((point) => new Date(point.target_at).getTime() <= now)
      .map((point) => ({ target_at: point.target_at, value: point[key] }));
  }

  function powerPoints(points, key) {
    return (points || []).map((point) => ({ target_at: point.target_at, value: point[key] }));
  }

  function netPoints(points, key) {
    return (points || []).map((point) => ({ target_at: point.target_at, value: point[key] }));
  }

  function drawChart(series, payload) {
    const canvas = document.getElementById("oracle-chart");
    const ctx = canvas.getContext("2d");
    ctx.clearRect(0, 0, canvas.width, canvas.height);

    const allPoints = series.flatMap((item) => item.points).filter((point) => point.value !== null);
    if (!allPoints.length) return;

    const ys = allPoints.map((point) => point.value);
    ys.push(0);
    const windowBounds = oracleAlignedBounds(payload.now);
    const bounds = {
      minX: windowBounds.minX,
      maxX: windowBounds.maxX,
      minY: Math.min(...ys),
      maxY: Math.max(...ys),
    };
    if (bounds.minY === bounds.maxY) {
      bounds.minY -= 1;
      bounds.maxY += 1;
    }

    drawAxes(ctx, canvas, bounds);
    drawZeroLine(ctx, canvas, bounds);
    drawNowMarker(ctx, canvas, bounds, windowBounds.nowX);
    series.forEach((item) => drawSeries(ctx, canvas, bounds, item));
  }

  function drawAxes(ctx, canvas, bounds) {
    const margin = chartMargin();
    ctx.strokeStyle = "#94a3b8";
    ctx.lineWidth = 1;
    ctx.beginPath();
    ctx.moveTo(margin.left, margin.top);
    ctx.lineTo(margin.left, canvas.height - margin.bottom);
    ctx.lineTo(canvas.width - margin.right, canvas.height - margin.bottom);
    ctx.stroke();
    ctx.fillStyle = "#94a3b8";
    ctx.font = "12px system-ui";
    ctx.fillText(`${Math.round(bounds.maxY)} W`, 10, margin.top + 4);
    ctx.fillText(`${Math.round(bounds.minY)} W`, 10, canvas.height - margin.bottom);
  }

  function drawZeroLine(ctx, canvas, bounds) {
    if (bounds.minY > 0 || bounds.maxY < 0) return;
    const margin = chartMargin();
    const y = scale(0, bounds.minY, bounds.maxY, canvas.height - margin.bottom, margin.top);
    ctx.save();
    ctx.strokeStyle = "#475569";
    ctx.lineWidth = 1;
    ctx.setLineDash([4, 4]);
    ctx.beginPath();
    ctx.moveTo(margin.left, y);
    ctx.lineTo(canvas.width - margin.right, y);
    ctx.stroke();
    ctx.restore();
  }

  function drawNowMarker(ctx, canvas, bounds, now) {
    if (now < bounds.minX || now > bounds.maxX) return;
    const margin = chartMargin();
    const x = scale(now, bounds.minX, bounds.maxX, margin.left, canvas.width - margin.right);
    ctx.save();
    ctx.strokeStyle = "#f8fafc";
    ctx.lineWidth = 1;
    ctx.setLineDash([5, 5]);
    ctx.beginPath();
    ctx.moveTo(x, margin.top);
    ctx.lineTo(x, canvas.height - margin.bottom);
    ctx.stroke();
    ctx.setLineDash([]);
    ctx.fillStyle = "#f8fafc";
    ctx.font = "12px system-ui";
    ctx.fillText("now", Math.min(x + 8, canvas.width - margin.right - 28), margin.top + 14);
    ctx.restore();
  }

  function drawSeries(ctx, canvas, bounds, series) {
    const points = series.points.filter((point) => point.value !== null);
    if (!points.length) return;
    const margin = chartMargin();
    ctx.strokeStyle = series.color;
    ctx.lineWidth = series.width;
    ctx.setLineDash(series.dash || []);
    ctx.beginPath();
    points.forEach((point, index) => {
      const x = scale(new Date(point.target_at).getTime(), bounds.minX, bounds.maxX, margin.left, canvas.width - margin.right);
      const y = scale(point.value, bounds.minY, bounds.maxY, canvas.height - margin.bottom, margin.top);
      if (index === 0) ctx.moveTo(x, y);
      else ctx.lineTo(x, y);
    });
    ctx.stroke();
    ctx.setLineDash([]);

    if (series.markers || points.length < 12) {
      ctx.fillStyle = series.color;
      points.forEach((point) => {
        const x = scale(new Date(point.target_at).getTime(), bounds.minX, bounds.maxX, margin.left, canvas.width - margin.right);
        const y = scale(point.value, bounds.minY, bounds.maxY, canvas.height - margin.bottom, margin.top);
        if (x < margin.left || x > canvas.width - margin.right) return;
        ctx.beginPath();
        ctx.arc(x, y, 3.5, 0, Math.PI * 2);
        ctx.fill();
      });
    }
  }

  function scale(value, inMin, inMax, outMin, outMax) {
    if (inMin === inMax) return (outMin + outMax) / 2;
    return outMin + ((value - inMin) / (inMax - inMin)) * (outMax - outMin);
  }

  function chartMargin() {
    return { top: 24, right: 28, bottom: 34, left: 64 };
  }

  function oracleAlignedBounds(nowIso) {
    const parsedNow = new Date(nowIso).getTime();
    const now = Number.isFinite(parsedNow) ? parsedNow : Date.now();
    return {
      minX: now - 24 * 60 * 60 * 1000,
      maxX: now + 48 * 60 * 60 * 1000,
      nowX: now
    };
  }

  return { init };
})();
window.astrapeModules.oracleDisplay.init();
</script>
"""

    def data_payload(self) -> str:
        """Build the JSON payload for the oracle chart (served at ``/api/oracle``)."""
        builder = EnergyOracleBuilder.from_env()
        solar_run, load_run, net_run = builder.build()
        actual_points = builder.sigen_store.load_recent_actual_points()
        # Historical runs are optional context: if the oracle store is not
        # configured or unreachable, serve empty history rather than failing
        # the whole payload.
        try:
            oracle_store = OracleStore.from_env()
            historical_net_runs = oracle_store.load_lagged_net_runs()
            historical_solar_runs = oracle_store.load_lagged_power_runs("solar")
            historical_load_runs = oracle_store.load_lagged_power_runs("load")
        except Exception:
            historical_net_runs = []
            historical_solar_runs = []
            historical_load_runs = []
        return json.dumps(
            {
                "issued_at": self._iso(net_run.issued_at),
                # "now" mirrors issued_at; the frontend anchors its chart
                # window and "now" marker on this value.
                "now": self._iso(net_run.issued_at),
                "solar_model": solar_run.model_version,
                "load_model": load_run.model_version,
                "solar_points": [
                    self._power_point(point) for point in solar_run.points
                ],
                "load_points": [
                    self._power_point(point) for point in load_run.points
                ],
                "net_points": [self._net_point(point) for point in net_run.points],
                "actual_points": [
                    self._actual_point(point) for point in actual_points
                ],
                "historical_net_runs": [
                    self._historical_net_run(run) for run in historical_net_runs
                ],
                "historical_solar_runs": [
                    self._historical_power_run(run) for run in historical_solar_runs
                ],
                "historical_load_runs": [
                    self._historical_power_run(run) for run in historical_load_runs
                ],
            }
        )

    def _power_point(self, point: PowerForecastPoint) -> dict[str, object]:
        """Serialize one power forecast point into JSON-safe primitives."""
        return {
            "target_at": self._iso(point.target_at),
            "horizon_minutes": point.horizon_minutes,
            "expected_power_w": point.expected_power_w,
            "p10_power_w": point.p10_power_w,
            "p50_power_w": point.p50_power_w,
            "p90_power_w": point.p90_power_w,
            "confidence": point.confidence,
            "source": point.source,
            "model_version": point.model_version,
            "metadata": point.metadata,
        }

    def _net_point(self, point: NetPowerForecastPoint) -> dict[str, object]:
        """Serialize a net forecast point via dataclass asdict.

        The ``target_at`` datetime is overridden with its ISO string so the
        dict is json.dumps-encodable.
        """
        return asdict(point) | {"target_at": self._iso(point.target_at)}

    def _actual_point(self, point: dict[str, object]) -> dict[str, object]:
        """Serialize one observed-sample row from the sigen store.

        Assumes ``point["target_at"]`` is a datetime (it is passed to
        ``_iso``) — TODO confirm against SigenStore.load_recent_actual_points.
        """
        return {
            "target_at": self._iso(point["target_at"]),
            "solar_power_w": point["solar_power_w"],
            "load_power_w": point["load_power_w"],
            "net_power_w": point["net_power_w"],
            "grid_import_w": point["grid_import_w"],
            "grid_export_w": point["grid_export_w"],
            "sample_count": point["sample_count"],
        }

    def _historical_net_run(self, run: dict[str, object]) -> dict[str, object]:
        """Serialize one lagged net-forecast run loaded from the oracle store.

        NOTE(review): access is mixed between ``point[...]`` (raises on a
        missing key) and ``point.get(...)`` (yields None) — older stored rows
        lacking a required key will raise KeyError; confirm this is intended.
        """
        return {
            "lag_hours": run.get("lag_hours"),
            "issued_at": self._iso(run["issued_at"]),
            "points": [
                {
                    "target_at": self._iso(point["target_at"]),
                    "horizon_minutes": point["horizon_minutes"],
                    "expected_net_power_w": point["expected_net_power_w"],
                    "safe_net_power_w": point["safe_net_power_w"],
                    "p10_net_power_w": point.get("p10_net_power_w"),
                    "p50_net_power_w": point.get("p50_net_power_w"),
                    "p90_net_power_w": point.get("p90_net_power_w"),
                    "solar_p50_power_w": point["solar_p50_power_w"],
                    "load_p50_power_w": point["load_p50_power_w"],
                    "solar_p10_power_w": point["solar_p10_power_w"],
                    "solar_p90_power_w": point.get("solar_p90_power_w"),
                    "load_p10_power_w": point.get("load_p10_power_w"),
                    "load_p90_power_w": point["load_p90_power_w"],
                }
                for point in run["points"]
            ],
        }

    def _historical_power_run(self, run: dict[str, object]) -> dict[str, object]:
        """Serialize one lagged solar/load forecast run loaded from the oracle store."""
        return {
            "lag_hours": run.get("lag_hours"),
            "issued_at": self._iso(run["issued_at"]),
            "kind": run["kind"],
            "source": run["source"],
            "model_version": run["model_version"],
            "points": [
                {
                    "target_at": self._iso(point["target_at"]),
                    "horizon_minutes": point["horizon_minutes"],
                    "expected_power_w": point["expected_power_w"],
                    "p10_power_w": point["p10_power_w"],
                    "p50_power_w": point["p50_power_w"],
                    "p90_power_w": point["p90_power_w"],
                    "confidence": point["confidence"],
                }
                for point in run["points"]
            ],
        }

    def _iso(self, value: datetime) -> str:
        """ISO-8601 string for a datetime."""
        return value.isoformat()
|
||||
@@ -0,0 +1,152 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from datetime import timedelta
|
||||
|
||||
from gibil.classes.oracle.store import OracleStore
|
||||
|
||||
|
||||
class OracleQualityDisplay:
|
||||
"""Renders oracle prediction quality tables."""
|
||||
|
||||
def render(self) -> str:
|
||||
return """
|
||||
<section class="panel oracle-quality-panel" data-module="oracle-quality-display">
|
||||
<div class="panel-heading">
|
||||
<div>
|
||||
<h2>Oracle Quality</h2>
|
||||
<p>Prediction error by model and horizon</p>
|
||||
</div>
|
||||
<div class="control-row">
|
||||
<label>
|
||||
Window
|
||||
<select id="quality-lookback">
|
||||
<option value="24">24 hours</option>
|
||||
<option value="168" selected>7 days</option>
|
||||
<option value="720">30 days</option>
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="table-shell">
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Kind</th>
|
||||
<th>Model</th>
|
||||
<th>Horizon</th>
|
||||
<th>Samples</th>
|
||||
<th>Bias</th>
|
||||
<th>MAE</th>
|
||||
<th>Median AE</th>
|
||||
<th>MAPE</th>
|
||||
<th>Coverage</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody id="quality-rows"></tbody>
|
||||
</table>
|
||||
</div>
|
||||
</section>
|
||||
<script>
|
||||
window.astrapeModules = window.astrapeModules || {};
|
||||
window.astrapeModules.oracleQualityDisplay = (() => {
|
||||
function init() {
|
||||
document.getElementById("quality-lookback").addEventListener("change", refresh);
|
||||
refresh();
|
||||
setInterval(refresh, 10000);
|
||||
}
|
||||
|
||||
async function refresh() {
|
||||
const lookback = document.getElementById("quality-lookback").value;
|
||||
const response = await fetch(`/api/oracle-quality?lookback_hours=${lookback}`, { cache: "no-store" });
|
||||
const payload = await response.json();
|
||||
render(payload.rows || []);
|
||||
}
|
||||
|
||||
function render(rows) {
|
||||
const tbody = document.getElementById("quality-rows");
|
||||
if (!rows.length) {
|
||||
tbody.innerHTML = `<tr><td colspan="9">No evaluated oracle predictions yet.</td></tr>`;
|
||||
return;
|
||||
}
|
||||
tbody.innerHTML = rows.map((row) => `
|
||||
<tr>
|
||||
<td>${escapeHtml(row.kind)}</td>
|
||||
<td>${escapeHtml(row.model_version)}</td>
|
||||
<td>${formatHorizon(row)}</td>
|
||||
<td>${row.evaluated_count}</td>
|
||||
<td class="${biasClass(row.mean_error_w)}">${formatW(row.mean_error_w)}</td>
|
||||
<td>${formatW(row.mean_absolute_error_w)}</td>
|
||||
<td>${formatW(row.median_absolute_error_w)}</td>
|
||||
<td>${formatPct(row.mean_absolute_pct_error)}</td>
|
||||
<td>${formatPct(row.interval_coverage)}</td>
|
||||
</tr>
|
||||
`).join("");
|
||||
}
|
||||
|
||||
function formatHorizon(row) {
|
||||
if (row.horizon_label) return row.horizon_label;
|
||||
return `${row.min_horizon_minutes}-${row.max_horizon_minutes}m`;
|
||||
}
|
||||
|
||||
function formatW(value) {
|
||||
if (value === null || value === undefined) return "n/a";
|
||||
return `${Math.round(Number(value))} W`;
|
||||
}
|
||||
|
||||
function formatPct(value) {
|
||||
if (value === null || value === undefined) return "n/a";
|
||||
return `${(Number(value) * 100).toFixed(1)}%`;
|
||||
}
|
||||
|
||||
function biasClass(value) {
|
||||
if (value === null || value === undefined) return "";
|
||||
const absolute = Math.abs(Number(value));
|
||||
if (absolute < 250) return "metric-good";
|
||||
if (absolute < 1000) return "metric-warn";
|
||||
return "metric-bad";
|
||||
}
|
||||
|
||||
function escapeHtml(value) {
|
||||
return String(value ?? "")
|
||||
.replace(/&/g, "&")
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/"/g, """)
|
||||
.replace(/'/g, "'");
|
||||
}
|
||||
|
||||
return { init };
|
||||
})();
|
||||
window.astrapeModules.oracleQualityDisplay.init();
|
||||
</script>
|
||||
"""
|
||||
|
||||
def data_payload(self, lookback_hours: float = 168) -> str:
|
||||
try:
|
||||
rows = OracleStore.from_env().load_evaluation_summary(
|
||||
lookback=timedelta(hours=lookback_hours)
|
||||
)
|
||||
except Exception:
|
||||
rows = []
|
||||
|
||||
return json.dumps(
|
||||
{
|
||||
"lookback_hours": lookback_hours,
|
||||
"rows": [self._row(row) for row in rows],
|
||||
}
|
||||
)
|
||||
|
||||
def _row(self, row: dict[str, object]) -> dict[str, object]:
    """Return *row* with every value coerced to a JSON-safe scalar."""
    sanitized: dict[str, object] = {}
    for key, value in row.items():
        sanitized[key] = self._json_value(value)
    return sanitized
|
||||
|
||||
def _json_value(self, value: object) -> object:
    """Coerce a database cell to a JSON-serializable scalar.

    Native JSON scalars (None, str, int, float, bool) pass through
    untouched.  Anything float-convertible (e.g. Decimal) becomes a
    float; everything else (e.g. datetime) falls back to ``str``.
    """
    if value is None:
        return value
    if isinstance(value, (str, int, float, bool)):
        return value
    try:
        coerced = float(value)
    except (TypeError, ValueError):
        return str(value)
    return coerced
|
||||
@@ -0,0 +1,888 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import contextmanager
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from os import environ
|
||||
from typing import Iterator
|
||||
|
||||
from gibil.classes.models import NetPowerForecastRun, PowerForecastRun
|
||||
|
||||
|
||||
class OracleStoreConfigurationError(RuntimeError):
    """Raised when oracle storage cannot be used (missing configuration or driver)."""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class OracleStoreConfig:
    """Connection settings for the oracle store."""

    # Database DSN passed verbatim to the driver.
    database_url: str

    @classmethod
    def from_env(cls) -> "OracleStoreConfig":
        """Build the config from ``ASTRAPE_DATABASE_URL``.

        Raises OracleStoreConfigurationError when the variable is unset
        or empty.
        """
        url = environ.get("ASTRAPE_DATABASE_URL")
        if url:
            return cls(database_url=url)
        raise OracleStoreConfigurationError(
            "ASTRAPE_DATABASE_URL is required for oracle storage"
        )
|
||||
|
||||
|
||||
class OracleStore:
|
||||
"""Persists generated oracle projection curves for later evaluation."""
|
||||
|
||||
def __init__(self, config: OracleStoreConfig) -> None:
    """Remember the connection settings; no database connection is opened here."""
    self.config = config
|
||||
|
||||
@classmethod
def from_env(cls) -> "OracleStore":
    """Construct a store whose settings come from the environment.

    Delegates to OracleStoreConfig.from_env, so the same configuration
    error is raised when ASTRAPE_DATABASE_URL is missing.
    """
    config = OracleStoreConfig.from_env()
    return cls(config)
|
||||
|
||||
def initialize(self) -> None:
    """Idempotently create the TimescaleDB schema used for oracle storage.

    Creates the timescaledb extension, the raw forecast tables (per-kind
    power points and combined net points), the evaluation table, turns
    the time-series tables into hypertables, and adds a query index.
    Every statement is IF NOT EXISTS / additive, so repeated calls are
    safe and double as a lightweight migration mechanism.
    """
    with self._connection() as connection:
        with connection.cursor() as cursor:
            # The extension must exist before create_hypertable() below.
            cursor.execute("CREATE EXTENSION IF NOT EXISTS timescaledb")
            # Raw per-kind (e.g. solar/load) forecast points, keyed by
            # issue time, target time, kind, source and model version.
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS oracle_power_forecast_points (
                    issued_at TIMESTAMPTZ NOT NULL,
                    target_at TIMESTAMPTZ NOT NULL,
                    kind TEXT NOT NULL,
                    source TEXT NOT NULL,
                    model_version TEXT NOT NULL,
                    horizon_minutes INTEGER NOT NULL,
                    expected_power_w DOUBLE PRECISION NOT NULL,
                    p10_power_w DOUBLE PRECISION NOT NULL,
                    p50_power_w DOUBLE PRECISION NOT NULL,
                    p90_power_w DOUBLE PRECISION NOT NULL,
                    confidence DOUBLE PRECISION NOT NULL,
                    inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(),
                    PRIMARY KEY (issued_at, target_at, kind, source, model_version)
                )
                """
            )
            cursor.execute(
                """
                SELECT create_hypertable(
                    'oracle_power_forecast_points',
                    'target_at',
                    if_not_exists => TRUE
                )
                """
            )
            # Combined net forecast points plus the component solar/load
            # curves they were derived from.
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS oracle_net_forecast_points (
                    issued_at TIMESTAMPTZ NOT NULL,
                    target_at TIMESTAMPTZ NOT NULL,
                    source TEXT NOT NULL,
                    horizon_minutes INTEGER NOT NULL,
                    expected_net_power_w DOUBLE PRECISION NOT NULL,
                    safe_net_power_w DOUBLE PRECISION NOT NULL,
                    p10_net_power_w DOUBLE PRECISION,
                    p50_net_power_w DOUBLE PRECISION,
                    p90_net_power_w DOUBLE PRECISION,
                    solar_p50_power_w DOUBLE PRECISION NOT NULL,
                    load_p50_power_w DOUBLE PRECISION NOT NULL,
                    solar_p10_power_w DOUBLE PRECISION NOT NULL,
                    solar_p90_power_w DOUBLE PRECISION,
                    load_p10_power_w DOUBLE PRECISION,
                    load_p90_power_w DOUBLE PRECISION NOT NULL,
                    inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(),
                    PRIMARY KEY (issued_at, target_at, source)
                )
                """
            )
            # Columns added after the table first shipped; ALTER ... IF NOT
            # EXISTS lets older deployments pick them up without a
            # migration tool.
            cursor.execute(
                """
                ALTER TABLE oracle_net_forecast_points
                ADD COLUMN IF NOT EXISTS p10_net_power_w DOUBLE PRECISION
                """
            )
            cursor.execute(
                """
                ALTER TABLE oracle_net_forecast_points
                ADD COLUMN IF NOT EXISTS p50_net_power_w DOUBLE PRECISION
                """
            )
            cursor.execute(
                """
                ALTER TABLE oracle_net_forecast_points
                ADD COLUMN IF NOT EXISTS p90_net_power_w DOUBLE PRECISION
                """
            )
            cursor.execute(
                """
                ALTER TABLE oracle_net_forecast_points
                ADD COLUMN IF NOT EXISTS solar_p90_power_w DOUBLE PRECISION
                """
            )
            cursor.execute(
                """
                ALTER TABLE oracle_net_forecast_points
                ADD COLUMN IF NOT EXISTS load_p10_power_w DOUBLE PRECISION
                """
            )
            cursor.execute(
                """
                SELECT create_hypertable(
                    'oracle_net_forecast_points',
                    'target_at',
                    if_not_exists => TRUE
                )
                """
            )
            # Evaluation results: one row per stored forecast point once
            # actuals exist; sample_count = 0 marks "no actuals yet" rows
            # that the evaluator retries later.
            cursor.execute(
                """
                CREATE TABLE IF NOT EXISTS oracle_forecast_evaluations (
                    issued_at TIMESTAMPTZ NOT NULL,
                    target_at TIMESTAMPTZ NOT NULL,
                    kind TEXT NOT NULL,
                    source TEXT NOT NULL,
                    model_version TEXT NOT NULL,
                    horizon_minutes INTEGER NOT NULL,
                    expected_power_w DOUBLE PRECISION NOT NULL,
                    p10_power_w DOUBLE PRECISION,
                    p50_power_w DOUBLE PRECISION,
                    p90_power_w DOUBLE PRECISION,
                    realized_power_w DOUBLE PRECISION,
                    error_w DOUBLE PRECISION,
                    absolute_error_w DOUBLE PRECISION,
                    absolute_pct_error DOUBLE PRECISION,
                    covered_by_p10_p90 BOOLEAN,
                    sample_count INTEGER NOT NULL DEFAULT 0,
                    evaluated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
                    inserted_at TIMESTAMPTZ NOT NULL DEFAULT now(),
                    updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
                    PRIMARY KEY (
                        issued_at,
                        target_at,
                        kind,
                        source,
                        model_version
                    )
                )
                """
            )
            cursor.execute(
                """
                SELECT create_hypertable(
                    'oracle_forecast_evaluations',
                    'target_at',
                    if_not_exists => TRUE
                )
                """
            )
            # Serves the per-kind/horizon summary queries.
            cursor.execute(
                """
                CREATE INDEX IF NOT EXISTS oracle_forecast_evaluations_kind_horizon_idx
                ON oracle_forecast_evaluations (
                    kind,
                    horizon_minutes,
                    target_at DESC
                )
                """
            )
        connection.commit()
|
||||
|
||||
def save_runs(
    self,
    solar_run: PowerForecastRun,
    load_run: PowerForecastRun,
    net_run: NetPowerForecastRun,
) -> int:
    """Upsert one complete forecast generation (solar, load, net) into storage.

    Rows are keyed by issue/target time (plus kind/source/model_version for
    the power rows), so re-saving the same run overwrites it in place.

    Returns the total number of rows written (power rows + net rows).
    """
    self.initialize()  # schema is created lazily before the first write
    # Flatten both power runs into executemany() parameter tuples.
    power_rows = [
        (
            run.issued_at,
            point.target_at,
            run.kind.value,
            run.source,
            run.model_version,
            point.horizon_minutes,
            point.expected_power_w,
            point.p10_power_w,
            point.p50_power_w,
            point.p90_power_w,
            point.confidence,
        )
        for run in (solar_run, load_run)
        for point in run.points
    ]
    net_rows = [
        (
            net_run.issued_at,
            point.target_at,
            net_run.source,
            point.horizon_minutes,
            point.expected_net_power_w,
            point.safe_net_power_w,
            point.p10_net_power_w,
            point.p50_net_power_w,
            point.p90_net_power_w,
            point.solar_p50_power_w,
            point.load_p50_power_w,
            point.solar_p10_power_w,
            point.solar_p90_power_w,
            point.load_p10_power_w,
            point.load_p90_power_w,
        )
        for point in net_run.points
    ]

    with self._connection() as connection:
        with connection.cursor() as cursor:
            cursor.executemany(
                """
                INSERT INTO oracle_power_forecast_points (
                    issued_at,
                    target_at,
                    kind,
                    source,
                    model_version,
                    horizon_minutes,
                    expected_power_w,
                    p10_power_w,
                    p50_power_w,
                    p90_power_w,
                    confidence
                )
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (issued_at, target_at, kind, source, model_version)
                DO UPDATE SET
                    horizon_minutes = EXCLUDED.horizon_minutes,
                    expected_power_w = EXCLUDED.expected_power_w,
                    p10_power_w = EXCLUDED.p10_power_w,
                    p50_power_w = EXCLUDED.p50_power_w,
                    p90_power_w = EXCLUDED.p90_power_w,
                    confidence = EXCLUDED.confidence,
                    inserted_at = now()
                """,
                power_rows,
            )
            cursor.executemany(
                """
                INSERT INTO oracle_net_forecast_points (
                    issued_at,
                    target_at,
                    source,
                    horizon_minutes,
                    expected_net_power_w,
                    safe_net_power_w,
                    p10_net_power_w,
                    p50_net_power_w,
                    p90_net_power_w,
                    solar_p50_power_w,
                    load_p50_power_w,
                    solar_p10_power_w,
                    solar_p90_power_w,
                    load_p10_power_w,
                    load_p90_power_w
                )
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (issued_at, target_at, source)
                DO UPDATE SET
                    horizon_minutes = EXCLUDED.horizon_minutes,
                    expected_net_power_w = EXCLUDED.expected_net_power_w,
                    safe_net_power_w = EXCLUDED.safe_net_power_w,
                    p10_net_power_w = EXCLUDED.p10_net_power_w,
                    p50_net_power_w = EXCLUDED.p50_net_power_w,
                    p90_net_power_w = EXCLUDED.p90_net_power_w,
                    solar_p50_power_w = EXCLUDED.solar_p50_power_w,
                    load_p50_power_w = EXCLUDED.load_p50_power_w,
                    solar_p10_power_w = EXCLUDED.solar_p10_power_w,
                    solar_p90_power_w = EXCLUDED.solar_p90_power_w,
                    load_p10_power_w = EXCLUDED.load_p10_power_w,
                    load_p90_power_w = EXCLUDED.load_p90_power_w,
                    inserted_at = now()
                """,
                net_rows,
            )
        connection.commit()

    return len(power_rows) + len(net_rows)
|
||||
|
||||
def load_recent_net_runs(
    self,
    lookback: timedelta = timedelta(hours=6),
    limit: int = 6,
) -> list[dict[str, object]]:
    """Convenience wrapper: lagged net runs restricted to *lookback*.

    Keeps only the standard lag points (1/2/6/24/48 h) that fall inside
    the lookback window and delegates to load_lagged_net_runs with the
    default 45-minute matching tolerance.
    """
    lookback_hours = lookback.total_seconds() / 3600
    eligible_lags = [lag for lag in (1, 2, 6, 24, 48) if lag <= lookback_hours]
    return self.load_lagged_net_runs(
        lag_hours=eligible_lags,
        tolerance=timedelta(minutes=45),
        limit=limit,
    )
|
||||
|
||||
def load_lagged_net_runs(
    self,
    lag_hours: list[int] | None = None,
    tolerance: timedelta = timedelta(minutes=45),
    limit: int = 5,
) -> list[dict[str, object]]:
    """Load one stored net-forecast run per requested issue-time lag.

    For each lag in *lag_hours* (default 1/2/6/24/48 hours ago) the run
    whose issued_at is closest to now-lag and within *tolerance* is
    selected; a run matched by an earlier lag is not reused, so every
    returned run is distinct.  At most *limit* runs are returned, each a
    dict with "lag_hours", "issued_at" and the forward-looking "points"
    of that run ordered by target time.
    """
    if lag_hours is None:
        lag_hours = [1, 2, 6, 24, 48]

    now = datetime.now(timezone.utc)
    selected: list[tuple[int, datetime]] = []  # (lag, matched issued_at)
    used_issued_at: set[datetime] = set()  # prevents duplicate runs

    with self._connection() as connection:
        with connection.cursor() as cursor:
            for lag_hour in lag_hours:
                target_issued_at = now - timedelta(hours=lag_hour)
                # Closest issue time to the desired lag, inside tolerance.
                cursor.execute(
                    """
                    SELECT issued_at
                    FROM oracle_net_forecast_points
                    WHERE issued_at BETWEEN %s AND %s
                    GROUP BY issued_at
                    ORDER BY abs(extract(epoch FROM (issued_at - %s)))
                    LIMIT 1
                    """,
                    (
                        target_issued_at - tolerance,
                        target_issued_at + tolerance,
                        target_issued_at,
                    ),
                )
                row = cursor.fetchone()
                if row is None or row[0] in used_issued_at:
                    continue
                selected.append((lag_hour, row[0]))
                used_issued_at.add(row[0])
                if len(selected) >= limit:
                    break

            runs: list[dict[str, object]] = []
            for lag_hour, issued_at in selected:
                # Only points at/after the issue time.  COALESCE keeps rows
                # written before the p10/p50 columns existed usable.
                cursor.execute(
                    """
                    SELECT
                        target_at,
                        horizon_minutes,
                        expected_net_power_w,
                        safe_net_power_w,
                        COALESCE(p10_net_power_w, safe_net_power_w),
                        COALESCE(p50_net_power_w, expected_net_power_w),
                        p90_net_power_w,
                        solar_p50_power_w,
                        load_p50_power_w,
                        solar_p10_power_w,
                        solar_p90_power_w,
                        load_p10_power_w,
                        load_p90_power_w
                    FROM oracle_net_forecast_points
                    WHERE issued_at = %s
                    AND target_at >= %s
                    ORDER BY target_at
                    """,
                    (issued_at, issued_at),
                )
                points = cursor.fetchall()
                if not points:
                    continue
                runs.append(
                    {
                        "lag_hours": lag_hour,
                        "issued_at": issued_at,
                        "points": [
                            {
                                "target_at": row[0],
                                "horizon_minutes": row[1],
                                "expected_net_power_w": row[2],
                                "safe_net_power_w": row[3],
                                "p10_net_power_w": row[4],
                                "p50_net_power_w": row[5],
                                "p90_net_power_w": row[6],
                                "solar_p50_power_w": row[7],
                                "load_p50_power_w": row[8],
                                "solar_p10_power_w": row[9],
                                "solar_p90_power_w": row[10],
                                "load_p10_power_w": row[11],
                                "load_p90_power_w": row[12],
                            }
                            for row in points
                        ],
                    }
                )

    return runs
|
||||
|
||||
def load_lagged_power_runs(
    self,
    kind: str,
    lag_hours: list[int] | None = None,
    tolerance: timedelta = timedelta(minutes=45),
    limit: int = 5,
) -> list[dict[str, object]]:
    """Load one stored power-forecast run of *kind* per requested lag.

    Mirrors load_lagged_net_runs but for the per-kind power table, so a
    run is identified by (issued_at, kind, source, model_version).  For
    each lag the run closest to now-lag within *tolerance* is chosen;
    already-chosen runs are skipped.  Returns at most *limit* runs, each
    with its metadata and forward-looking points ordered by target time.

    Raises ValueError for any *kind* other than 'solar' or 'load'.
    """
    if kind not in {"solar", "load"}:
        raise ValueError("kind must be 'solar' or 'load'")
    if lag_hours is None:
        lag_hours = [1, 2, 6, 24, 48]

    now = datetime.now(timezone.utc)
    # (lag, issued_at, kind, source, model_version) per matched run.
    selected: list[tuple[int, datetime, str, str, str]] = []
    used_keys: set[tuple[datetime, str, str, str]] = set()

    with self._connection() as connection:
        with connection.cursor() as cursor:
            for lag_hour in lag_hours:
                target_issued_at = now - timedelta(hours=lag_hour)
                # Closest run of this kind to the desired lag.
                cursor.execute(
                    """
                    SELECT issued_at, kind, source, model_version
                    FROM oracle_power_forecast_points
                    WHERE kind = %s
                    AND issued_at BETWEEN %s AND %s
                    GROUP BY issued_at, kind, source, model_version
                    ORDER BY abs(extract(epoch FROM (issued_at - %s)))
                    LIMIT 1
                    """,
                    (
                        kind,
                        target_issued_at - tolerance,
                        target_issued_at + tolerance,
                        target_issued_at,
                    ),
                )
                row = cursor.fetchone()
                if row is None:
                    continue
                key = (row[0], row[1], row[2], row[3])
                if key in used_keys:
                    continue
                selected.append((lag_hour, row[0], row[1], row[2], row[3]))
                used_keys.add(key)
                if len(selected) >= limit:
                    break

            runs: list[dict[str, object]] = []
            for lag_hour, issued_at, run_kind, source, model_version in selected:
                # Forward-looking points of the selected run only.
                cursor.execute(
                    """
                    SELECT
                        target_at,
                        horizon_minutes,
                        expected_power_w,
                        p10_power_w,
                        p50_power_w,
                        p90_power_w,
                        confidence
                    FROM oracle_power_forecast_points
                    WHERE issued_at = %s
                    AND kind = %s
                    AND source = %s
                    AND model_version = %s
                    AND target_at >= %s
                    ORDER BY target_at
                    """,
                    (issued_at, run_kind, source, model_version, issued_at),
                )
                points = cursor.fetchall()
                if not points:
                    continue
                runs.append(
                    {
                        "lag_hours": lag_hour,
                        "issued_at": issued_at,
                        "kind": run_kind,
                        "source": source,
                        "model_version": model_version,
                        "points": [
                            {
                                "target_at": row[0],
                                "horizon_minutes": row[1],
                                "expected_power_w": row[2],
                                "p10_power_w": row[3],
                                "p50_power_w": row[4],
                                "p90_power_w": row[5],
                                "confidence": row[6],
                            }
                            for row in points
                        ],
                    }
                )

    return runs
|
||||
|
||||
def evaluate_due_forecasts(
    self,
    actual_window: timedelta = timedelta(minutes=5),
    lookback: timedelta = timedelta(days=7),
    limit: int = 1000,
) -> int:
    """Score stored forecasts whose target time has already passed.

    Evaluates power forecasts first, then spends whatever is left of
    *limit* on net forecasts, all in a single committed transaction.
    *actual_window* is the span after the target time over which actuals
    are averaged; *lookback* bounds how far back targets are considered.

    Returns the total number of evaluation rows written.
    """
    self.initialize()
    window_start = datetime.now(timezone.utc) - lookback
    evaluated_net = 0

    with self._connection() as connection:
        with connection.cursor() as cursor:
            evaluated_power = self._evaluate_due_power_forecasts(
                cursor=cursor,
                actual_window=actual_window,
                start_at=window_start,
                limit=limit,
            )
            # Net forecasts only get the budget power did not consume.
            budget_left = max(limit - evaluated_power, 0)
            if budget_left > 0:
                evaluated_net = self._evaluate_due_net_forecasts(
                    cursor=cursor,
                    actual_window=actual_window,
                    start_at=window_start,
                    limit=budget_left,
                )
        connection.commit()

    return evaluated_power + evaluated_net
|
||||
|
||||
def load_evaluation_summary(
    self,
    lookback: timedelta = timedelta(days=7),
) -> list[dict[str, object]]:
    """Aggregate evaluation metrics per (kind, source, model, horizon bucket).

    Only rows with a realized value inside the *lookback* window count.
    Horizon buckets are 0-2h, 2-4h, 4-8h, 8-16h and 16-24h; per bucket
    the mean/median (absolute) error, mean absolute percentage error and
    the p10-p90 interval coverage rate are computed.
    """
    start_at = datetime.now(timezone.utc) - lookback
    with self._connection() as connection:
        with connection.cursor() as cursor:
            cursor.execute(
                """
                WITH bucketed AS (
                    SELECT
                        *,
                        CASE
                            WHEN horizon_minutes < 120 THEN 1
                            WHEN horizon_minutes < 240 THEN 2
                            WHEN horizon_minutes < 480 THEN 3
                            WHEN horizon_minutes < 960 THEN 4
                            ELSE 5
                        END AS horizon_bucket,
                        CASE
                            WHEN horizon_minutes < 120 THEN '0-2h'
                            WHEN horizon_minutes < 240 THEN '2-4h'
                            WHEN horizon_minutes < 480 THEN '4-8h'
                            WHEN horizon_minutes < 960 THEN '8-16h'
                            ELSE '16-24h'
                        END AS horizon_label
                    FROM oracle_forecast_evaluations
                    WHERE target_at >= %s
                    AND realized_power_w IS NOT NULL
                )
                SELECT
                    kind,
                    source,
                    model_version,
                    horizon_bucket,
                    horizon_label,
                    min(horizon_minutes) AS min_horizon_minutes,
                    max(horizon_minutes) AS max_horizon_minutes,
                    count(*) AS evaluated_count,
                    avg(error_w) AS mean_error_w,
                    avg(absolute_error_w) AS mean_absolute_error_w,
                    percentile_cont(0.50) WITHIN GROUP (
                        ORDER BY absolute_error_w
                    ) AS median_absolute_error_w,
                    avg(absolute_pct_error) AS mean_absolute_pct_error,
                    avg(
                        CASE
                            WHEN covered_by_p10_p90 IS NULL THEN NULL
                            WHEN covered_by_p10_p90 THEN 1.0
                            ELSE 0.0
                        END
                    ) AS interval_coverage
                FROM bucketed
                GROUP BY kind, source, model_version, horizon_bucket, horizon_label
                ORDER BY kind, source, model_version, horizon_bucket
                """,
                (start_at,),
            )
            rows = cursor.fetchall()

    return [
        {
            "kind": row[0],
            "source": row[1],
            "model_version": row[2],
            "horizon_bucket": row[3],
            "horizon_label": row[4],
            "min_horizon_minutes": row[5],
            "max_horizon_minutes": row[6],
            "evaluated_count": row[7],
            "mean_error_w": row[8],
            "mean_absolute_error_w": row[9],
            "median_absolute_error_w": row[10],
            "mean_absolute_pct_error": row[11],
            "interval_coverage": row[12],
        }
        for row in rows
    ]
|
||||
|
||||
def _evaluate_due_power_forecasts(
    self,
    cursor: object,
    actual_window: timedelta,
    start_at: datetime,
    limit: int,
) -> int:
    """Evaluate up to *limit* overdue per-kind power forecast points.

    Pending points (no evaluation yet, or one with sample_count = 0, i.e.
    no actuals were available last time) whose target time plus
    *actual_window* has passed are joined against sigen_plant_snapshots;
    the matching kind's power samples inside the window are averaged and
    the error/coverage metrics are upserted into
    oracle_forecast_evaluations.  Returns the number of rows written.
    Does not commit; runs on the caller's *cursor*/transaction.
    """
    cursor.execute(
        """
        WITH candidates AS (
            SELECT
                forecast.issued_at,
                forecast.target_at,
                forecast.kind,
                forecast.source,
                forecast.model_version,
                forecast.horizon_minutes,
                forecast.expected_power_w,
                forecast.p10_power_w,
                forecast.p50_power_w,
                forecast.p90_power_w
            FROM oracle_power_forecast_points AS forecast
            LEFT JOIN oracle_forecast_evaluations AS evaluation
                ON evaluation.issued_at = forecast.issued_at
                AND evaluation.target_at = forecast.target_at
                AND evaluation.kind = forecast.kind
                AND evaluation.source = forecast.source
                AND evaluation.model_version = forecast.model_version
            WHERE forecast.target_at >= %s
            AND forecast.target_at <= now() - %s
            AND (
                evaluation.issued_at IS NULL
                OR evaluation.sample_count = 0
            )
            ORDER BY forecast.target_at, forecast.issued_at
            LIMIT %s
        ),
        realized AS (
            SELECT
                candidates.*,
                actual.realized_power_w,
                actual.sample_count
            FROM candidates
            LEFT JOIN LATERAL (
                SELECT
                    avg(
                        CASE candidates.kind
                            WHEN 'solar' THEN snapshot.solar_power_w
                            WHEN 'load' THEN snapshot.load_power_w
                            ELSE NULL
                        END
                    ) AS realized_power_w,
                    count(*) FILTER (
                        WHERE CASE candidates.kind
                            WHEN 'solar' THEN snapshot.solar_power_w
                            WHEN 'load' THEN snapshot.load_power_w
                            ELSE NULL
                        END IS NOT NULL
                    ) AS sample_count
                FROM sigen_plant_snapshots AS snapshot
                WHERE snapshot.observed_at >= candidates.target_at
                AND snapshot.observed_at < candidates.target_at + %s
            ) AS actual ON TRUE
        )
        INSERT INTO oracle_forecast_evaluations (
            issued_at,
            target_at,
            kind,
            source,
            model_version,
            horizon_minutes,
            expected_power_w,
            p10_power_w,
            p50_power_w,
            p90_power_w,
            realized_power_w,
            error_w,
            absolute_error_w,
            absolute_pct_error,
            covered_by_p10_p90,
            sample_count,
            evaluated_at
        )
        SELECT
            issued_at,
            target_at,
            kind,
            source,
            model_version,
            horizon_minutes,
            expected_power_w,
            p10_power_w,
            p50_power_w,
            p90_power_w,
            realized_power_w,
            realized_power_w - p50_power_w,
            abs(realized_power_w - p50_power_w),
            CASE
                WHEN abs(realized_power_w) < 1 THEN NULL
                ELSE abs(realized_power_w - p50_power_w) / abs(realized_power_w)
            END,
            CASE
                WHEN realized_power_w IS NULL THEN NULL
                ELSE realized_power_w BETWEEN p10_power_w AND p90_power_w
            END,
            COALESCE(sample_count, 0),
            now()
        FROM realized
        ON CONFLICT (
            issued_at,
            target_at,
            kind,
            source,
            model_version
        )
        DO UPDATE SET
            horizon_minutes = EXCLUDED.horizon_minutes,
            expected_power_w = EXCLUDED.expected_power_w,
            p10_power_w = EXCLUDED.p10_power_w,
            p50_power_w = EXCLUDED.p50_power_w,
            p90_power_w = EXCLUDED.p90_power_w,
            realized_power_w = EXCLUDED.realized_power_w,
            error_w = EXCLUDED.error_w,
            absolute_error_w = EXCLUDED.absolute_error_w,
            absolute_pct_error = EXCLUDED.absolute_pct_error,
            covered_by_p10_p90 = EXCLUDED.covered_by_p10_p90,
            sample_count = EXCLUDED.sample_count,
            evaluated_at = EXCLUDED.evaluated_at,
            updated_at = now()
        """,
        (start_at, actual_window, limit, actual_window),
    )
    return cursor.rowcount
|
||||
|
||||
def _evaluate_due_net_forecasts(
    self,
    cursor: object,
    actual_window: timedelta,
    start_at: datetime,
    limit: int,
) -> int:
    """Evaluate up to *limit* overdue net forecast points.

    Net forecasts are scored as kind 'net' / model 'net_forecaster_v1' in
    the shared oracle_forecast_evaluations table; the realized value is
    avg(solar_power_w - load_power_w) from sigen_plant_snapshots over the
    *actual_window* after the target time.  COALESCE maps legacy rows
    (written before the p10/p50 columns existed) onto the safe/expected
    values.  Returns the number of rows written.  Does not commit; runs
    on the caller's *cursor*/transaction.
    """
    cursor.execute(
        """
        WITH candidates AS (
            SELECT
                forecast.issued_at,
                forecast.target_at,
                'net'::text AS kind,
                forecast.source,
                'net_forecaster_v1'::text AS model_version,
                forecast.horizon_minutes,
                forecast.expected_net_power_w AS expected_power_w,
                COALESCE(forecast.p10_net_power_w, forecast.safe_net_power_w)
                    AS p10_power_w,
                COALESCE(forecast.p50_net_power_w, forecast.expected_net_power_w)
                    AS p50_power_w,
                forecast.p90_net_power_w AS p90_power_w
            FROM oracle_net_forecast_points AS forecast
            LEFT JOIN oracle_forecast_evaluations AS evaluation
                ON evaluation.issued_at = forecast.issued_at
                AND evaluation.target_at = forecast.target_at
                AND evaluation.kind = 'net'
                AND evaluation.source = forecast.source
                AND evaluation.model_version = 'net_forecaster_v1'
            WHERE forecast.target_at >= %s
            AND forecast.target_at <= now() - %s
            AND (
                evaluation.issued_at IS NULL
                OR evaluation.sample_count = 0
            )
            ORDER BY forecast.target_at, forecast.issued_at
            LIMIT %s
        ),
        realized AS (
            SELECT
                candidates.*,
                actual.realized_power_w,
                actual.sample_count
            FROM candidates
            LEFT JOIN LATERAL (
                SELECT
                    avg(snapshot.solar_power_w - snapshot.load_power_w)
                        AS realized_power_w,
                    count(*) FILTER (
                        WHERE snapshot.solar_power_w IS NOT NULL
                        AND snapshot.load_power_w IS NOT NULL
                    ) AS sample_count
                FROM sigen_plant_snapshots AS snapshot
                WHERE snapshot.observed_at >= candidates.target_at
                AND snapshot.observed_at < candidates.target_at + %s
            ) AS actual ON TRUE
        )
        INSERT INTO oracle_forecast_evaluations (
            issued_at,
            target_at,
            kind,
            source,
            model_version,
            horizon_minutes,
            expected_power_w,
            p10_power_w,
            p50_power_w,
            p90_power_w,
            realized_power_w,
            error_w,
            absolute_error_w,
            absolute_pct_error,
            covered_by_p10_p90,
            sample_count,
            evaluated_at
        )
        SELECT
            issued_at,
            target_at,
            kind,
            source,
            model_version,
            horizon_minutes,
            expected_power_w,
            p10_power_w,
            p50_power_w,
            p90_power_w,
            realized_power_w,
            realized_power_w - p50_power_w,
            abs(realized_power_w - p50_power_w),
            CASE
                WHEN abs(realized_power_w) < 1 THEN NULL
                ELSE abs(realized_power_w - p50_power_w) / abs(realized_power_w)
            END,
            CASE
                WHEN realized_power_w IS NULL OR p90_power_w IS NULL THEN NULL
                ELSE realized_power_w BETWEEN p10_power_w AND p90_power_w
            END,
            COALESCE(sample_count, 0),
            now()
        FROM realized
        ON CONFLICT (
            issued_at,
            target_at,
            kind,
            source,
            model_version
        )
        DO UPDATE SET
            horizon_minutes = EXCLUDED.horizon_minutes,
            expected_power_w = EXCLUDED.expected_power_w,
            p10_power_w = EXCLUDED.p10_power_w,
            p50_power_w = EXCLUDED.p50_power_w,
            p90_power_w = EXCLUDED.p90_power_w,
            realized_power_w = EXCLUDED.realized_power_w,
            error_w = EXCLUDED.error_w,
            absolute_error_w = EXCLUDED.absolute_error_w,
            absolute_pct_error = EXCLUDED.absolute_pct_error,
            covered_by_p10_p90 = EXCLUDED.covered_by_p10_p90,
            sample_count = EXCLUDED.sample_count,
            evaluated_at = EXCLUDED.evaluated_at,
            updated_at = now()
        """,
        (start_at, actual_window, limit, actual_window),
    )
    return cursor.rowcount
|
||||
|
||||
@contextmanager
def _connection(self) -> Iterator[object]:
    """Yield an open database connection to the configured database_url.

    The psycopg import is deferred so the module stays importable without
    the driver installed; a missing driver surfaces as
    OracleStoreConfigurationError.  The connection is closed (and its
    transaction finalized) by psycopg's context manager on exit.
    """
    try:
        import psycopg
    except ImportError as error:
        raise OracleStoreConfigurationError(
            "Install dependencies with `python3 -m pip install -r requirements.txt`"
        ) from error

    with psycopg.connect(self.config.database_url) as connection:
        yield connection
|
||||
Reference in New Issue
Block a user