diff --git a/fhem/CHANGED b/fhem/CHANGED
index 2b90ccc08..b0d54634d 100644
--- a/fhem/CHANGED
+++ b/fhem/CHANGED
@@ -1,5 +1,7 @@
# Add changes at the top of the list. Keep it in ASCII, and 80-char wide.
# Do not insert empty lines here, update check depends on it.
+ - feature: 93_DbRep: command exportToFile or attribute "expimpfile" accepts
+ option "MAXLINES="
- bugfix: 09_CUL_FHTTK: removed low batt information from state, moved to
reading batteryState
- feature: 59_Weather: add limit forecast, selection between daily and hourly
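
A minimal usage sketch for the new option (illustration only, not part of the
patch; the device name Rep.Export and the file path are assumptions):

    set Rep.Export exportToFile /opt/fhem/log/export.csv MAXLINES=10000
    attr Rep.Export expimpfile /opt/fhem/log/export.csv MAXLINES=10000

With a numeric MAXLINES value the export is written to export_part1.csv,
export_part2.csv, ... with at most that many lines per file; without the
option a single file is written as before.
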
diff --git a/fhem/FHEM/93_DbRep.pm b/fhem/FHEM/93_DbRep.pm
index 0ec71d01a..62900e2ba 100644
--- a/fhem/FHEM/93_DbRep.pm
+++ b/fhem/FHEM/93_DbRep.pm
@@ -57,6 +57,7 @@ no if $] >= 5.017011, warnings => 'experimental::smartmatch';
# Versions History intern
our %DbRep_vNotesIntern = (
+ "8.11.0" => "24.01.2019 command exportToFile or attribute \"expimpfile\" accepts option \"MAXLINES=\" ",
"8.10.1" => "23.01.2019 change DbRep_charfilter to eliminate \xc2",
"8.10.0" => "19.01.2019 sqlCmd, dbValue may input SQL session variables, Forum:#96082 ",
"8.9.10" => "18.01.2019 fix warnings Malformed UTF-8 character during importFromFile, Forum:#96056 ",
@@ -132,6 +133,7 @@ our %DbRep_vNotesIntern = (
# Versions History extern:
our %DbRep_vNotesExtern = (
+ "8.11.0" => "24.01.2019 command exportToFile or attribute \"expimpfile\" accepts option \"MAXLINES=\" ",
"8.10.0" => "19.01.2019 In commands sqlCmd, dbValue you may now use SQL session variables like \"SET \@open:=NULL,\@closed:=NULL; SELECT ...\", Forum:#96082",
"8.9.0" => "07.11.2018 new command set delDoublets added. This command allows to delete multiple occuring identical records. ",
"8.8.0" => "06.11.2018 new attribute 'fastStart'. Usually every DbRep-device is making a short connect to its database when "
@@ -412,6 +414,7 @@ sub DbRep_Set($@) {
my $name = $a[0];
my $opt = $a[1];
my $prop = $a[2];
+ my $prop1 = $a[3];
my $dbh = $hash->{DBH};
my $dblogdevice = $hash->{HELPER}{DBLOGDEVICE};
$hash->{dbloghash} = $defs{$dblogdevice};
@@ -747,11 +750,12 @@ sub DbRep_Set($@) {
} elsif ($opt eq "exportToFile" && $hash->{ROLE} ne "Agent") {
$hash->{LASTCMD} = $prop?"$opt $prop":"$opt";
- my $f = $prop if($prop);
- if (!AttrVal($hash->{NAME}, "expimpfile", "") && !$f) {
- return "\"$opt\" needs a file as an argument or the attribute \"expimpfile\" (path and filename) to be set !";
+ my $f = ($prop && $prop !~ /MAXLINES=/)?$prop:AttrVal($name,"expimpfile","");
+ my $e = $prop1?" $prop1":"";
+ if (!$f) {
+ return "\"$opt\" needs a file as argument or the attribute \"expimpfile\" (path and filename) to be set !";
}
- DbRep_Main($hash,$opt,$f);
+ DbRep_Main($hash,$opt,$f.$e);
} elsif ($opt eq "importFromFile" && $hash->{ROLE} ne "Agent") {
$hash->{LASTCMD} = $prop?"$opt $prop":"$opt";
@@ -5204,12 +5208,37 @@ sub expfile_DoParse($) {
return "$name|''|''|$err|''|''|''";
}
- $rsf =~ s/[:\s]/_/g;
- my $outfile = $file?$file:AttrVal($name, "expimpfile", undef);
- $outfile =~ s/%TSB/$rsf/g;
+
+ my $ml;
+ my $part = ".";
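+ # evaluate option "MAXLINES=<n>": if set to a number, the export is split
+ # into several files ("_part1", "_part2", ...) of at most <n> lines each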
+ if($file =~ /MAXLINES=/) {
+ my ($arrayref, $hashref) = parseParams($file);
+ my @a = @{$arrayref};
+ my %h = %{$hashref};
+ $file = $a[0];
+ if(!$file) {
+ ($arrayref, undef) = parseParams(AttrVal($name,"expimpfile",""));
+ @a = @{$arrayref};
+ $file = $a[0];
+ }
+ $ml = $h{MAXLINES};
+ if($ml !~ /^\d+$/) {
+ undef $ml;
+ } else {
+ $part = "_part1.";
+ }
+ }
+
+ Log3 ($name, 4, "DbRep $name - Export data to file: $file ".($ml?"split into parts of $ml lines":"") );
+
+ $rsf =~ s/[:\s]/_/g;
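+ # split the file name into base and extension so the "_part<n>" suffix can
+ # be inserted in front of the extension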
+ my ($f,$e) = split(/\./,$file);
+ $e = $e?$e:"";
+ $f =~ s/%TSB/$rsf/g;
my @t = localtime;
- $outfile = ResolveDateWildcards($outfile, @t);
- if (open(FH, ">", "$outfile")) {
+ $f = ResolveDateWildcards($f, @t);
+ my $outfile = $f.$part.$e;
+ if (open(FH, ">", $outfile)) {
binmode (FH);
} else {
$err = encode_base64("could not open ".$outfile.": ".$!,"");
@@ -5229,7 +5258,8 @@ sub expfile_DoParse($) {
# DB-Abfrage zeilenweise für jeden Array-Eintrag
my $arrstr;
- my $nrows = 0;
+ my ($nrows,$frows) = (0,0);
+ my $p = 2;
my $addon = "ORDER BY TIMESTAMP";
no warnings 'uninitialized';
foreach my $row (@ts) {
@@ -5260,10 +5290,24 @@ sub expfile_DoParse($) {
Log3 ($name, 5, "DbRep $name -> write row: @$row");
# Anzahl der Datensätze
$nrows++;
- }
-
+ $frows++;
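+ # MAXLINES limit reached: close the current part file and open the next one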
+ if($ml && $frows >= $ml) {
+ Log3 ($name, 3, "DbRep $name - Number of exported datasets from $hash->{DATABASE} to file $outfile: ".$frows);
+ close(FH);
+ $outfile = $f."_part$p.".$e;
+ if (open(FH, ">", $outfile)) {
+ binmode (FH);
+ } else {
+ $err = encode_base64("could not open ".$outfile.": ".$!,"");
+ return "$name|''|''|$err|''|''|''";
+ }
+ $p++;
+ $frows = 0;
+ }
+ }
}
close(FH);
+ Log3 ($name, 3, "DbRep $name - Number of exported datasets from $hash->{DATABASE} to file $outfile: ".$frows);
# SQL-Laufzeit ermitteln
my $rt = tv_interval($st);
@@ -5313,16 +5357,13 @@ sub expfile_ParseDone($) {
my $ds = $device." -- " if ($device);
my $rds = $reading." -- " if ($reading);
- my $export_string = $ds.$rds." -- ROWS EXPORTED TO FILE -- ";
+ my $export_string = $ds.$rds." -- ROWS EXPORTED TO FILE(S) -- ";
my $state = $erread?$erread:"done";
readingsBeginUpdate($hash);
ReadingsBulkUpdateValue ($hash, $export_string, $nrows);
ReadingsBulkUpdateTimeState($hash,$brt,$rt,$state);
readingsEndUpdate($hash, 1);
-
- my $rows = $ds.$rds.$nrows;
- Log3 ($name, 3, "DbRep $name - Number of exported datasets from $hash->{DATABASE} to file $outfile: ".$rows);
delete($hash->{HELPER}{RUNNING_PID});
@@ -10936,13 +10977,18 @@ return;
contained in exception list defined by attribute "readingPreventFromDel".
-