ch.eick: Additions to KI_Prognose
git-svn-id: https://svn.fhem.de/fhem/trunk@28420 2b470e98-0d58-463d-a4d8-8e2adae1ed80
This commit is contained in:
parent 494e536b78
commit 444664e709
268
fhem/contrib/ch.eick/Photovoltaik/99_myUtils.pm_Ergänzungen.txt
Normal file
@@ -0,0 +1,268 @@
use strict;
use warnings;

use UConv;      # used for wunderground conversion

use Blocking;   # for sendmail

sub
MyUtils_Initialize($$)
{
  my ($hash) = @_;
}

# Enter your functions below _this_ line.
###################################################


############################################################################################################
######## Separate and create DbRep readings
############################################################################################################
sub splitReading {
  my ($name,$reading,$value) = @_;
  my $hash = $defs{$name};

  if($reading =~ /^.*SqlResultRow_.*$/ and
     $value   =~ /^(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)\|(.*)\|(.*)/ ) {

    my $TIMESTAMP = "$1-$2-$3 $4:$5:$6";
    my $READING   = "$7";
    my $VALUE     = "$8";

    setReadingsVal($hash,$READING,$VALUE,$TIMESTAMP);
  }
  return;
}
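
# Illustrative note (not part of the original file): splitReading is intended to be
# attached to a DbRep device via its userExitFn attribute, so that every value of the
# form "YYYY-MM-DD HH:MM:SS|<reading>|<value>" delivered in a SqlResultRow_* reading
# is stored as a reading of its own. The device name and regex below are assumptions.
#
#   attr LogDBRep_PV_KI_Prognose userExitFn splitReading .*:SqlResultRow_.*
#
# A value such as "2024-01-25 14:00:00|Yield_fc0_14|1234" would then create the
# reading "Yield_fc0_14" with value 1234 and the given timestamp.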


###########################################################
## Kostal Plenticore authentication PV_Anlage_1
###########################################################

use Encode qw(decode encode);
use PBKDF2::Tiny qw/derive verify/;
use Digest::SHA qw(sha256 hmac_sha256);
use Crypt::URandom qw( urandom );
use Crypt::AuthEnc::GCM;

######################################################
# {KeyValue("read|store","PW_PV_Anlage_1_API_user","<password>")}
######################################################
sub KeyValue {
  my ($step, $index, $value) = @_;
  my $key     = getUniqueId().$index;
  my $e_value = "";
  my $error;

  if (eval "use Digest::MD5;1") {
    $key  = Digest::MD5::md5_hex(unpack "H*", $key);
    $key .= Digest::MD5::md5_hex($key);
  }

  if ($step eq "read") {
    ($error, $value) = getKeyValue($index);

    if ( defined($error) ) {
      Log3 $index,3, "$index, can't read key from FhemUtils/uniqueID: $error";
      return undef;
    }

    if ( defined($value) ) {
      my $dec_value = '';

      for my $char (map { pack('C', hex($_)) } ($value =~ /(..)/g)) {
        my $decode = chop($key);
        $dec_value .= chr(ord($char)^ord($decode));
        $key = $decode.$key;
      }
      return $dec_value;
    }
    else {
      Log3 $index,3,"$index, no key found in FhemUtils/uniqueID";
      return undef;
    }
  }

  if ($step eq "store") {
    for my $char (split //, $value) {
      my $encode = chop($key);
      $e_value .= sprintf("%.2x",ord($char)^ord($encode));
      $key = $encode.$key;
    }
    $error = setKeyValue($index, $e_value);
    return "error while saving key : $error" if(defined($error));
    return "Key successfully saved in FhemUtils/uniqueID Key $index";
  }
};
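
# Illustrative usage (a sketch; the key name and password are placeholders): the value
# is XOR-obfuscated against a key derived from getUniqueId() and stored hex-encoded via
# setKeyValue, so it never appears in clear text in the configuration.
#
#   {KeyValue("store","PW_PV_Anlage_1_API_user","mySecretPassword")}   # store once
#   {KeyValue("read","PW_PV_Anlage_1_API_user")}                       # returns the clear text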


###########################################################
# {plenticore_auth("WR_1_API","[user|master]","auth_randomString64","auth_nonce","auth_salt","auth_rounds","auth_token","auth_transactionId")}
# {plenticore_auth("start|finish|session","user","WR_1_API","TESMUWZnwkJZbnpF","TE2MUWZnwkJZbnpFQ5ulCfolNNdAD0vT","DbAC0R85jwF0rh+r","29000","1376720346bea40cdf770a8f84b5975cfeb20c5e6ac6d89b7862df3ca9695e43","acafc66c0e1975293d35512a1e4bcceea55840b3109a703514e75b5ebce9b7c5")}
###########################################################
sub plenticore_auth {
  my ($step, $user, $logdevice, $randomString, $nonce, $salt, $rounds, $transactionId, $token) = @_;

  my $verbose = AttrVal($logdevice,"verbose",0) ;

  my $PASSWD = KeyValue("read","PW_".$logdevice."_".$user);

  if ($verbose >= 3) {
    Log3 $logdevice,3,"====Start plenticore_auth==============================";
    Log3 $logdevice,3,"auth_step         : ".$step;
    Log3 $logdevice,3,"auth_user         : ".$user;
    Log3 $logdevice,3,"auth_device       : ".$logdevice;
    Log3 $logdevice,3,"auth_KeyValue read: PW_".$logdevice."_".$user;
  };

  if($step eq "start")
  {
    my @chars = ('0'..'9', 'A'..'Z', 'a'..'z');
    my $len = 12;
    my $string;

    if ($verbose >= 3) {
      Log3 $logdevice,3,"====End arguments======================================";
    };

    while($len--){ $string .= $chars[rand @chars] };
    $string = encode("UTF-8", $string);
    $string = decode("UTF-8", $string);
    my $u = encode_base64($string);
    $u =~ s/\n$//g;

    my $message = '{"nonce": "'.$u.'","username": "'.$user.'"}';

    if ($verbose >= 3) {
      Log3 $logdevice,3,"auth_nonce        : ".$u;
      Log3 $logdevice,3,"auth_return       : ".$message;
      Log3 $logdevice,3,"====End output=========================================";
    };

    CommandSetReading(undef, $logdevice." auth_randomString64 ".$u) ;

    return $message;
  }

  ######### This code is identical for finish and session #################
  my $bitSalt = decode_base64($salt);
  my $r  = derive( 'SHA-256', $PASSWD, $bitSalt, $rounds );
  my $ck = encode('UTF-8', "Client Key");
  my $s  = hmac_sha256($ck, $r);
  my $underscore = sha256($s);
  my $d = "n=".$user.",r=".$randomString.",r=".$nonce.",s=".$salt.",i=".$rounds.",c=biws,r=".$nonce;

  if ($verbose >= 3) {
    Log3 $logdevice,3,"auth_randomString : ".$randomString;
    Log3 $logdevice,3,"auth_nonce        : ".$nonce;
    Log3 $logdevice,3,"auth_salt         : ".$salt;
    Log3 $logdevice,3,"auth_rounds       : ".$rounds;
    Log3 $logdevice,3,"auth_transactionId: ".$transactionId;
  };

  if($step eq "finish")
  {
    Log3 $logdevice,3,"====End arguments======================================";

    my $sk = encode('UTF-8', "Server Key");
    my $c  = hmac_sha256($sk, $r);
    my $pd = encode('UTF-8', $d);
    my $p  = hmac_sha256($pd, $c);
    my $gd = encode('UTF-8', $d);
    my $g  = hmac_sha256($gd, $underscore);
    my $f  = "";
    my $g1 = "";
    my $s1 = "";
    my $f1 = "";
    my $j  = 0;
    for($j=0; $j<length($g); $j++) {
      $g1 = substr($g,$j,1);
      $s1 = substr($s,$j,1);
      $f1 = $s1 ^ $g1 ;
      $f  = $f.$f1;
    }
    my $pe = encode_base64($f);
    $pe =~ s/\n$//g;     # fix: strip the trailing \n from the string, cause unknown
    my $proof = decode('UTF-8', $pe);

    my $message = '{"transactionId": "'.$transactionId.'", "proof": "'.$proof.'"}';

    if ($verbose >= 3) {
      Log3 $logdevice,3,"auth_proof        : ".$proof;
      Log3 $logdevice,3,"auth_return       : ".$message;
      Log3 $logdevice,3,"====End output=========================================";
    };

    return $message;
  }

  if($step eq "session")
  {
    Log3 $logdevice,3,"auth_token        : ".$token;
    Log3 $logdevice,3,"====End arguments======================================";

    my $sk = encode('UTF-8', "Session Key");
    my $dd = encode('UTF-8', $d);
    my $protocol_key = hmac_sha256($sk, $dd, $s, $underscore);

    ## Test only my $t = "7244ba6f73c8cdc47b232e1311451939";
    my $t = $token;
    $t =~ s/([a-fA-F0-9][a-fA-F0-9])/chr(hex($1))/eg;
    my $e2 = Crypt::AuthEnc::GCM->new("AES", $protocol_key, $t);

    my $tt = encode('UTF-8', $token);
    if($user eq "master")
    {
      $PASSWD = KeyValue("read","PW_".$logdevice."_".$user."_Service");   ## this is the service key
      $tt = encode('UTF-8', $token.":".$PASSWD);
      if ($verbose >= 3) {
        Log3 $logdevice,3,"using master      : PW_".$logdevice."_".$user."_Service";
      }
    }

    my $e2ct    = $e2->encrypt_add($tt);
    my $authtag = $e2->encrypt_done();

    $tt = encode_base64($t);
    $tt =~ s/\n$//g;     # fix: strip the trailing \n from the string, cause unknown
    my $iv = decode('UTF-8', $tt);

    my $aa = encode_base64($authtag);
    $aa =~ s/\n$//g;     # fix: strip the trailing \n from the string, cause unknown
    $authtag = decode('UTF-8', $aa);

    my $pp = encode_base64($e2ct);
    $pp =~ s/\n//g;      # fix: strip \n from the string, cause unknown
    my $payload = decode('UTF-8', $pp);

    my $message = '{"transactionId": "'.$transactionId.'", "iv": "'.$iv.'", "tag": "'.$authtag.'", "payload": "'.$payload.'"}';

    if ($verbose >= 3) {
      Log3 $logdevice,3,"auth_iv           : ".$iv;
      Log3 $logdevice,3,"auth_authtag      : ".$authtag;
      Log3 $logdevice,3,"auth_payload      : ".$payload;
      Log3 $logdevice,3,"auth_return       : ".$message;
      Log3 $logdevice,3,"====End output=========================================";
    };

    return $message;
  }
};
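
# Illustrative call order (a sketch, not part of the original file): the three steps
# mirror the inverter's SCRAM-like handshake. Each call returns the JSON body for the
# corresponding auth request; nonce, salt, rounds, transactionId and token are taken
# from the inverter's responses. All argument values below are placeholders.
#
#   {plenticore_auth("start",  "user","WR_1_API")}
#   {plenticore_auth("finish", "user","WR_1_API",$randomString,$nonce,$salt,$rounds,$transactionId)}
#   {plenticore_auth("session","user","WR_1_API",$randomString,$nonce,$salt,$rounds,$transactionId,$token)}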


#### Prevent gaps in the log
sub
addLog($$$) {
  my ($logdb, $logdevice, $reading) = @_;   # device and reading to be used
  my $logentry  = ReadingsVal($logdevice,$reading,"invalid reading");
  my $timestamp = strftime "%Y-%m-%d %H:%M:%S", localtime;

# if ($reading =~ m,state,i) {
  if ($reading eq 'state') {
    fhem "set ".$logdb." addCacheLine ".$timestamp."|".$logdevice."|addlog|".$logentry."|".$reading."|".$logentry."|";
  } else {
    fhem "set ".$logdb." addCacheLine ".$timestamp."|".$logdevice."|addlog|".$reading.": ".$logentry."|".$reading."|".$logentry."|";
  }
}
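
# Illustrative usage (a sketch; device and reading names are placeholders): calling
# addLog periodically, e.g. from an at device, pushes the current reading value into
# the DbLog cache so that rarely changing readings still produce database entries and
# plots show no gaps:
#
#   defmod at_addLog_Yield at +*01:00:00 { addLog("LogDB","WR_ctl","Yield_fc0_day") }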

1;
File diff suppressed because it is too large
375
fhem/contrib/ch.eick/Photovoltaik/KI_Prognose/PV_KI_Prognose.py
Normal file
@@ -0,0 +1,375 @@
#!/usr/bin/python3
# coding: utf-8

# Version that creates a forecast based on the measured values - analysis version
# Analysis via random forest regressor

import fhem
import json

# read the command-line parameters
import sys
DbLog   = sys.argv[1]
web     = sys.argv[2]
webport = 8083
DbRep   = sys.argv[3]
WRname  = sys.argv[4]
WRread  = sys.argv[5]


try:
    with open('/opt/fhem/python/pwd_fhem.json', 'r') as f:
        credentials = json.load(f)
    fhem_user = credentials["username"]
    fhem_pass = credentials["password"]
    fh = fhem.Fhem(web, protocol="http", port=webport, username=fhem_user, password=fhem_pass)
    print("PV_KI_Prognose running - start")
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running start")
except Exception as e:
    print('Something went wrong: {}'.format(e))


try:
    with open('/opt/fhem/python/pwd_sql.json', 'r') as f:
        credentials = json.load(f)
except Exception as e:
    print('Something went wrong: {}'.format(e))


verbose = fh.get_device_attribute(DbRep, "verbose")

if (verbose >= 4):
    print("PV_KI_Prognose running - start")
    print("PV_KI_Prognose DbLog ", DbLog, "/fhem")
    print("PV_KI_Prognose Fhem  ", web, ":", webport)


Inverter_Max_Power = fh.get_device_reading("WR_1_Speicher_1_ExternControl", "SpeicherMidday_Inverter_Max_Power")
# Inverter_Max_Power = fh.get_device_reading(WRname, "SpeicherMidday_Inverter_Max_Power")

if (verbose >= 4):
    print("Inverter_Max_Power {}".format(Inverter_Max_Power["Value"]))

import pandas as pd
import numpy as np
from sqlalchemy import create_engine
import pymysql

# applies to both relevant tables
db_connection_str = 'mysql+pymysql://'+credentials["username"]+':'+credentials["password"]+'@'+DbLog+'/fhem'
db_connection = create_engine(db_connection_str)

if (verbose >= 3):
    print("PV_KI_Prognose running - connected to "+DbLog)
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running connected to "+DbLog)

import datetime
from datetime import date, timedelta

today = datetime.datetime.now()
de = today.strftime("%Y-%m-%d 00:00:00")
# print(de)

# all weather data excluding the start day of the forecast
dflern = pd.read_sql('SELECT * FROM dwdfull WHERE TIMESTAMP < '+"'"+de+"'", con=db_connection)
dfask  = pd.read_sql('SELECT * FROM dwdfull WHERE TIMESTAMP >= '+"'"+de+"'", con=db_connection)

dfhour_start = pd.read_sql('SELECT min(hour(TIMESTAMP)) AS VALUE FROM dwdfull WHERE date(TIMESTAMP) = '+"'"+today.strftime("%Y-%m-%d")+"'", con=db_connection)
dfhour_stop  = pd.read_sql('SELECT max(hour(TIMESTAMP)) AS VALUE FROM dwdfull WHERE date(TIMESTAMP) = '+"'"+today.strftime("%Y-%m-%d")+"'", con=db_connection)
dfhours = dfhour_stop['VALUE'].values[0] - dfhour_start['VALUE'].values[0] + 1

if (verbose >= 3):
    print("PV_KI_Prognose running - dwdfull read from DbLog "+DbLog)
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running dwdfull read from DbLog "+DbLog)

# Rad1h = global irradiation
# TTT   = temperature 2 m above surface [°C]
# Neff  = effective cloud cover
# R101  = probability of precipitation > 0.1 mm during the last hour
# SunD1 = sunshine duration during the last hour
# VV    = visibility
# N     = total cloud cover
# DD    = wind direction
# RRS1c = snow-rain equivalent during the last hour

columns = ['Rad1h','Neff','R101','TTT','DD','SunAz','SunAlt','SunD1','VV','N','RRS1c']

# now start the analysis
from sklearn.ensemble import RandomForestRegressor

if (verbose >= 3):
    print("PV_KI_Prognose running - RandomForestRegressor loading")
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running RandomForestRegressor loading")

clf = RandomForestRegressor(n_estimators = 4000, bootstrap=True, random_state = 42)

if (verbose >= 3):
    print("PV_KI_Prognose running - RandomForestRegressor loaded")
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running RandomForestRegressor loaded")

# train the model
df = dflern[:]

if (verbose >= 3):
    print("PV_KI_Prognose running - RandomForestRegressor trained")
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running RandomForestRegressor trained")

# relate the learned features to the yield
clf.fit(df[columns], df['yield'])

if (verbose >= 3):
    print("PV_KI_Prognose running - RandomForestRegressor fitted with yield")
    fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running RandomForestRegressor fitted with yield")

if (verbose >= 4):
    print("PV_KI_Prognose running - RandomForestRegressor read statistics")
    # read and display statistics
    # get numerical feature importances
    importances = list(clf.feature_importances_)
    # list of tuples with variable and importance
    feature_importances = [(feature, round(importance, 2)) for feature, importance in zip(columns, importances)]
    # sort the feature importances by most important first
    feature_importances = sorted(feature_importances, key = lambda x: x[1], reverse = True)
    # print out the features and importances
    [print('Variable: {:20} Importance: {}'.format(*pair)) for pair in feature_importances]

# always create a forecast for today and tomorrow
start_date = datetime.datetime.now()
delta = timedelta(days=1)
end_date = start_date + delta

Prognose_faktor = 1   # if the forecast is generally off, this factor can be used to correct it

loop_hour  = 0
loop_date  = start_date
loop_count = 0

while loop_date <= end_date:
    # daily maximum data
    middayhigh = 0                # flag: has the daily maximum been exceeded
    middayhigh_start = "00:00"
    middayhigh_stop  = "00:00"
    middayhigh_tmp       = 0
    middayhigh_start_tmp = 0
    middayhigh_stop_tmp  = 0

    # reset the daily counters for each forecast day
    Prognose_max       = 0
    Prognose_pre       = 0
    Prognose_4h        = 0
    Prognose_rest      = 0
    Prognose_morning   = 0
    Prognose_afternoon = 0
    Prognose_day       = 0

    # delete the previous forecast for this day
    sql = "DELETE FROM history WHERE DEVICE = '"+WRname+"' AND TIMESTAMP >= '"+str(loop_date.strftime("%Y-%m-%d"))+" 00:00:00' AND READING = '"+WRread+str(loop_count)+"' ;"
    db_connection.execute(str(sql))

    if (verbose >= 3):
        print("PV_KI_Prognose running - old forecast deleted")
        fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running old forecast deleted")

    New_year  = str(loop_date.year)
    New_month = str(loop_date.month)
    New_day   = str(loop_date.day)
    New_hour  = loop_date.hour

    if (verbose >= 4):
        print("--------------------------------------------")
        print("Forecast fc%d %s" % (loop_count, loop_date.strftime("%Y-%m-%d")))

    fcolumns = columns[:]
    fcolumns.insert(0, 'TIMESTAMP')
    fcolumns.append('yield')

    # fetch the values for the day being processed
    query = 'year == "'+New_year+'" and month == "'+New_month+'" and day == "'+New_day+'"'
    dfq = dfask.query(query)[fcolumns].reset_index()

    # create the forecast for the day
    predict = clf.predict(dfq[columns])

    # process each individual hour of the forecast

    Prognose_pre = 0

    if (verbose >= 3):
        print("PV_KI_Prognose running - start forecast")
        fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running start forecast")

    for loop_hour in range(dfhours):

        parms = dfq.iloc[loop_hour].values
        list = parms.reshape(1, -1)
        date = loop_date.strftime("%Y-%m-%d")

        # Here the forecast is raised slightly, because the values predicted so far
        # have been too low. This may change once more reference data is available.
        #
        # In addition, consecutive hours are interpolated, which makes the sums look more accurate.
        Prognose     = int(round((Prognose_pre + predict[loop_hour]*Prognose_faktor)/2))
        Prognose_pre = int(round(predict[loop_hour]*Prognose_faktor))
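        # Worked example (assumed numbers, not from the original): with Prognose_faktor = 1,
        # a previous raw prediction of 1200 and a current raw prediction of 1800 yield
        # Prognose = round((1200 + 1800)/2) = 1500 for this hour, while Prognose_pre is set
        # to 1800 and carried into the next iteration.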

        # values that are too small are discarded
        if (Prognose < 20):
            if (verbose >= 4):
                print("Forecast value too small")
            Prognose = 0

        # values that are too large are capped
        # note: the yield forecast values refer to the end of the hour
        if (Prognose > 0):
            timestamp = date+" %02d:00:00" % (dfhour_start['VALUE'].values[0]+loop_hour)
            Limit = int(round(dfask.loc[dfask['TIMESTAMP'] == timestamp].yield_max.values[0],0))
            if (verbose >= 4):
                # for display the value is shown shifted one hour earlier
                print(dfhour_start['VALUE'].values[0]+loop_hour-1, Prognose, Limit)

            if (Prognose > Limit):
                if (verbose >= 4):
                    print("Forecast value too high : " + str(Prognose)+" > " + str(Limit))
                Prognose = Limit

        ## here the detection of the midday peak begins
        if ( middayhigh == 0 and Prognose > Inverter_Max_Power["Value"] ):
            middayhigh = 1
            # the start is moved forward by one hour
            middayhigh_start_tmp = loop_hour-1
        ## some iterations later the midday peak ends here
        if ( middayhigh == 1 and Prognose < Inverter_Max_Power["Value"] and middayhigh_stop_tmp == 0 ):
            middayhigh_stop_tmp = loop_hour
        ## check whether there was a short power dip, which should be skipped
        if ( middayhigh == 1 and Prognose > Inverter_Max_Power["Value"] and middayhigh_stop != "00:00" ):
            # that was a short dip; the peak should continue for a while longer
            middayhigh_stop_tmp = 0

        ## this is then the real end of the midday peak
        if ( middayhigh == 1
             and middayhigh_stop_tmp != 0
             and middayhigh_stop_tmp == loop_hour):

            ## how long is the detected midday peak
            middayhigh_tmp = middayhigh_stop_tmp - middayhigh_start_tmp
            if ( middayhigh_tmp > 4 ):   # the midday peak is getting too long
                if (verbose >= 4):       # print the times found so far
                    print("Middayhigh too long-------------------")
                    print("Middayhigh_start %02d:00" % (dfhour_start['VALUE'].values[0]+middayhigh_start_tmp))
                    print("Middayhigh_stop  %02d:00" % (dfhour_start['VALUE'].values[0]+middayhigh_stop_tmp))
                    print("--------------------------------------------")
                ## now the midday-peak window is shortened,
                ## trimming a bit more at the start than at the end
                middayhigh_start_tmp = middayhigh_start_tmp + round(middayhigh_tmp/3-0.2)   # shortened by whole hours
                middayhigh_stop_tmp  = middayhigh_stop_tmp  - round(middayhigh_tmp/6-0.2)
                if (verbose >= 4):   # report the shortening
                    print("Middayhigh cut about %d h" % (round(middayhigh_tmp/3-0.2)+round(middayhigh_tmp/6-0.2)) )

            ## format the new midday-peak times
            middayhigh_start = "%02d:00" % (dfhour_start['VALUE'].values[0]+middayhigh_start_tmp)
            middayhigh_stop  = "%02d:00" % (dfhour_start['VALUE'].values[0]+middayhigh_stop_tmp)

        ## end if (middayhigh == 1...

        ### build the forecast sums ###

        if (Prognose > Prognose_max):
            Prognose_max = Prognose
            Prognose_max_time = "%02d:00" % (dfhour_start['VALUE'].values[0]+loop_hour-1)

        # sum over the next 4 hours
        if ( dfhour_start['VALUE'].values[0]+loop_hour > New_hour
             and dfhour_start['VALUE'].values[0]+loop_hour <= New_hour+3):
            Prognose_4h += Prognose

        # sum for the rest of the day
        if (dfhour_start['VALUE'].values[0]+loop_hour > New_hour):
            Prognose_rest += Prognose

        # sum for the morning
        if (dfhour_start['VALUE'].values[0]+loop_hour < 13):
            Prognose_morning += Prognose

        # sum for the afternoon
        if (dfhour_start['VALUE'].values[0]+loop_hour >= 13):
            Prognose_afternoon += Prognose

        # sum for the whole day
        Prognose_day += Prognose

        ######################################################################

        # display the forecast and write it into the dwdfull table
        if (loop_hour-1 >= 0):

            # note: the value is written to the database one hour earlier
            timestamp = date+" "+"%02d:00:00" % (dfhour_start['VALUE'].values[0]+loop_hour-1)
            sql = "UPDATE dwdfull SET forecast ="+str(Prognose)+" WHERE TIMESTAMP = '"+timestamp+"' AND hour ="+str(dfhour_start['VALUE'].values[0]+loop_hour-1)+";"
            db_connection.execute(str(sql))

            sql = "INSERT INTO history (TIMESTAMP, DEVICE, TYPE ,READING ,VALUE) VALUES('"+timestamp+"','"+WRname+"','addlog','"+WRread+str(loop_count)+"','"+str(Prognose)+"') ;"
            db_connection.execute(str(sql))

            # write the forecast values to FHEM
            reading = WRread+str(loop_count)+"_%02d" % (dfhour_start['VALUE'].values[0]+loop_hour-1)
            fh.send_cmd("setreading "+WRname+" "+reading+" "+str(Prognose))

            if (verbose >= 3):
                print("%s %02d %d" % (reading, dfhour_start['VALUE'].values[0]+loop_hour-1, Prognose))

        # at the end of the forecast, write all values into the readings
        if (loop_hour == dfhours-1):
            if (loop_date.day == start_date.day):
                # write these values for the current day
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_max "+str(Prognose_max))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_max_time "+str(Prognose_max_time))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh "+str(middayhigh))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh_start "+str(middayhigh_start))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh_stop "+str(middayhigh_stop))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_4h "+str(Prognose_4h))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_rest "+str(Prognose_rest))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_morning "+str(Prognose_morning))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_afternoon "+str(Prognose_afternoon))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_day "+str(Prognose_day))

            if (loop_date.day != start_date.day):
                # for further forecast days only these values are relevant
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_max "+str(Prognose_max))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_max_time "+str(Prognose_max_time))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh "+str(middayhigh))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh_start "+str(middayhigh_start))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_middayhigh_stop "+str(middayhigh_stop))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_morning "+str(Prognose_morning))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_afternoon "+str(Prognose_afternoon))
                fh.send_cmd("setreading "+WRname+" "+WRread+str(loop_count)+"_day "+str(Prognose_day))

            if (verbose >= 3):
                # format a bit for the log
                print("--------------------------------------------")
                print("max of/at", Prognose_max, Prognose_max_time)
                print("Middayhigh_start", middayhigh_start)
                print("Middayhigh_stop ", middayhigh_stop)
                print("4h        ", Prognose_4h)
                print("rest      ", Prognose_rest)
                print("morning   ", Prognose_morning)
                print("afternoon ", Prognose_afternoon)
                print("day       ", Prognose_day)
                print("--------------------------------------------")

    if (verbose >= 3):
        print("PV_KI_Prognose running - forecast written to FHEM")
        fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose running forecast written")

    loop_date  += delta
    loop_count += 1


if (verbose >= 3):
    print("PV_KI_Prognose done")

# finally write a trigger to FHEM
fh.send_cmd("setreading "+DbRep+" PV_KI_Prognose done")

@@ -0,0 +1,16 @@
defmod LogDBRep_PV_KI_Prognose DbRep LogDB
attr LogDBRep_PV_KI_Prognose DbLogExclude .*
attr LogDBRep_PV_KI_Prognose comment Version 2023.02.23 12:00\
\
This prepares the data for the AI PV power forecast\
\
sqlCmd call dwd_load(curdate(),'none');;\
[none|show] to display the result\
\
executeAfterProc:\
<absolute script path> <DbLog IP address> <FHEM IP address> <DbRep name> <inverter name> <reading name prefix>
attr LogDBRep_PV_KI_Prognose executeAfterProc "/opt/fhem/python/bin/PV_KI_Prognose.py 192.168.178.40 192.168.178.40 LogDBRep_PV_KI_Prognose WR_ctl Yield_fc"
attr LogDBRep_PV_KI_Prognose room System
attr LogDBRep_PV_KI_Prognose verbose 3

setstate LogDBRep_PV_KI_Prognose 2024-01-25 14:07:24 sqlCmd call dwd_load(curdate(),'none');;
@@ -0,0 +1,4 @@
{
  "username": "< username for FHEM >",
  "password": "< password for FHEM >"
}
@@ -0,0 +1,4 @@
{
  "username": "< fhemuser for MySQL >",
  "password": "< password for MySQL >"
}