60_allergy: alternative data source with Attribute alternative3Day
git-svn-id: https://svn.fhem.de/fhem/trunk@26280 2b470e98-0d58-463d-a4d8-8e2adae1ed80
parent 625bb522e8
commit 91431ee665

@@ -1,5 +1,6 @@
# Add changes at the top of the list. Keep it in ASCII, and 80-char wide.
# Do not insert empty lines here, update check depends on it.
- feature: 60_allergy: alternative data source with Attribute alternative3Day
- feature: 49_SSCAM: allow placeholders #CAM, #DATE, #TIME, #FILE, #CTIME
- bugfix: 50_Signalbot: update contacts on startup, signal-cli 0.10.9
- feature: 96_Snapcast: group volume now can be set

@@ -27,39 +27,32 @@ use utf8;
my %pollen_types = ( 0 => "Unknown",
1 => "Ahorn",
2 => "Ambrosia",
3 => "Beifuss",
4 => "Birke",
5 => "Brennnessel",
6 => "Buche",
7 => "Eiche",
8 => "Erle",
9 => "Esche",
10 => "Fichte",
11 => "Flieder",
12 => "Gaensefuss",
13 => "Gerste",
14 => "Graeser",
15 => "Hafer",
16 => "Hasel",
17 => "Holunder",
18 => "Hopfen",
19 => "Kiefer",
20 => "Linde",
21 => "Loewenzahn",
22 => "Mais",
23 => "Nessel",
24 => "Pappel",
25 => "Platane",
26 => "Raps",
27 => "Roggen",
28 => "Rotbuche",
29 => "Spitzwegerich",
30 => "Tanne",
31 => "Ulme",
32 => "Weide",
33 => "Weizen", );
1 => "Erle",
2 => "Birke",
3 => "Hasel",
4 => "Esche",
5 => "Graeser",
6 => "Ambrosia",
7 => "Beifuss",
15 => "Brennnessel",
16 => "Plantane",
17 => "Zypressen",
18 => "Oelbaum",
23 => "Pilzsporen",
291 => "Roggen",
294 => "Ulme",
296 => "Weide",
297 => "Eiche",
298 => "Buche",
313 => "Manna-Esche",
318 => "Hopfenbuche",
320 => "Wegerich",
322 => "Knoeterich",
324 => "Pappel",
326 => "Edelkastanie",
355 => "Linde",
356 => "Ampfer",
361 => "Hainbuche", );
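
For orientation, a minimal sketch (not part of the commit) of how the parser further down resolves an id from the alternative feed against this map, falling back to "Unknown" for unmapped ids:

# Sketch only: map a poll_id delivered by the alternative source to a reading name.
my $pollenid  = 296;        # as found in $pollenentry->{poll_id} in allergy_ParseExtended below
my $pollenkey = 'Unknown';
$pollenkey = $pollen_types{$pollenid} if( defined($pollen_types{$pollenid}) );   # -> "Weide"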

@@ -78,7 +71,7 @@ sub allergy_Initialize($) {
"updateEmpty:1,0 ".
"levelsFormat ".
"weekdaysFormat ".
#"extended5Day:1,0 ".
"alternative3Day:1,0 ".
$readingFnAttributes;
}

@@ -170,8 +163,8 @@ sub allergy_GetUpdate($) {
my $url="http://www.allergie.hexal.de/pollenflug/xml-interface-neu/pollen_de_7tage.php?plz=".$hash->{helper}{ZIPCODE};
if(AttrVal($name, "extended5Day", "0") eq "DEPRECATED") {
$url="https://pollenwarner-live.herokuapp.com/pollen/".$hash->{helper}{ZIPCODE};
if(AttrVal($name, "alternative3Day", "0") eq "1") {
$url="https://www.pollenwarndienst.at/index.php?eID=appinterface&action=getContamination&type=zip&value=".$hash->{helper}{ZIPCODE}."&show_polls=1%2C2%2C3%2C4%2C5%2C6%2C7%2C15%2C16%2C17%2C18%2C23%2C291%2C294%2C296%2C297%2C298%2C313%2C318%2C320%2C322%2C324%2C326%2C355%2C356%2C361&country_id=7&personal_contamination=false&lang_code=de&lang_id=0&pure_json=1&cordova=1&pasyfo=0";
Log3 ($name, 4, "Getting URL $url");
HttpUtils_NonblockingGet({
url => $url,

@@ -328,47 +321,47 @@ sub allergy_ParseExtended($$$)
readingsBeginUpdate($hash); # Start update readings
my $city = $json->{region};
readingsBulkUpdate($hash, "city", allergy_utf8clean($city)) if($json->{region});
my $day = $json->{date};
readingsBulkUpdate($hash, "date", $day) if($json->{date});
Log3 $name, 4, "Received data for postcode ".$json->{region};
my $city = $json->{result}->{locationtitle};
readingsBulkUpdate($hash, "city", allergy_utf8clean($city)) if($json->{result}->{locationtitle});
my $day = $json->{result}->{contamination_date_3};
readingsBulkUpdate($hash, "date", $day) if($json->{result}->{contamination_date_3});
Log3 $name, 4, "Received data for postcode ".$json->{result}->{locationtitle};
my @daymax;
return undef if(!defined($json->{polls}));
return undef if(!defined($json->{result}->{contamination}));
#Log3 $name, 1, "found polls ".ref($json->{polls});
foreach my $pollenid ( keys %{$json->{polls}}) {
my $pollenid = $json->{polls}->{$pollenid}->{id};
foreach my $pollenentry ( @{$json->{result}->{contamination}}) {
my $pollenid = $pollenentry->{poll_id};
#Log3 $name, 1, "polls step ".$pollenid;
my $pollenkey = 'Unknown';
$pollenkey = $pollen_types{$pollenid} if( defined($pollen_types{$pollenid}) );
return undef if(!defined($json->{polls}->{$pollenid}->{forecast}));
return undef if(!defined($pollenentry->{contamination_1}));
#Log3 $name, 1, "forecast ";
return undef if(ref($json->{polls}->{$pollenid}->{forecast}) ne "ARRAY");
#return undef if(ref($json->{polls}->{$pollenid}->{forecast}) ne "ARRAY");
#my @forecast = $json->{polls}->{$pollenid}->{forecast};
my $daycode = 0;
while(defined($json->{polls}->{$pollenid}->{forecast}[$daycode])) {
my $daycode = 1;
while(defined($pollenentry->{'contamination_'.$daycode})) {
my $pollendata = int($json->{polls}->{$pollenid}->{forecast}[$daycode]);
my $pollendata = int($pollenentry->{'contamination_'.$daycode});
#Log3 $name, 1, "forecast array".ref($pollendata);
if (( AttrVal($hash->{NAME}, "updateEmpty", 0 ) gt 0 or $pollendata gt 0) and ( AttrVal($hash->{NAME}, "updateIgnored", 0 ) gt 0 or ( index(AttrVal($hash->{NAME}, "ignoreList", ""), $pollenkey ) == -1 )))
{
readingsBulkUpdate($hash, "fc".($daycode+1)."_".$pollenkey, $levels[$pollendata]);
$daymax[$daycode] = $pollendata if(!defined($daymax[$daycode]) || $pollendata gt $daymax[$daycode]);
Log3 $name, 4, "Received pollen level for ".$pollenkey.": day".($daycode+1)." level ".$pollendata;
readingsBulkUpdate($hash, "fc".($daycode)."_".$pollenkey, $levels[$pollendata]);
$daymax[$daycode-1] = $pollendata if(!defined($daymax[$daycode-1]) || $pollendata gt $daymax[$daycode-1]);
Log3 $name, 4, "Received pollen level for ".$pollenkey.": day".($daycode)." level ".$pollendata;
}
else
{
fhem( "deletereading $name fc".($daycode+1)."_".$pollenkey, 1 );
Log3 $name, 5, "Received pollen level for ".$pollenkey.": day".($daycode+1)." level ".$pollendata." (ignored)";
fhem( "deletereading $name fc".($daycode)."_".$pollenkey, 1 );
Log3 $name, 5, "Received pollen level for ".$pollenkey.": day".($daycode)." level ".$pollendata." (ignored)";
}
$daymax[$daycode] = 0 if(!defined($daymax[$daycode]));
$daymax[$daycode-1] = 0 if(!defined($daymax[$daycode-1]));
$daycode++;
}
}
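
Condensed, the new code path for the alternative source amounts to the following (a sketch, not a verbatim excerpt: the updateEmpty/ignoreList handling and the deletereading branch are omitted, and @levels, @daymax, $hash and $json are as in the module). Each contamination entry carries its poll_id plus the values contamination_1 to contamination_3 for the next three days, which end up in fc1_ to fc3_ readings:

# Sketch of the new parsing loop for the pollenwarndienst.at JSON.
foreach my $pollenentry ( @{$json->{result}->{contamination}} ) {
  my $pollenid  = $pollenentry->{poll_id};
  my $pollenkey = defined($pollen_types{$pollenid}) ? $pollen_types{$pollenid} : 'Unknown';

  my $daycode = 1;                                            # day index now starts at 1, not 0
  while( defined($pollenentry->{'contamination_'.$daycode}) ) {
    my $pollendata = int($pollenentry->{'contamination_'.$daycode});
    readingsBulkUpdate($hash, "fc".$daycode."_".$pollenkey, $levels[$pollendata]);
    $daymax[$daycode-1] = $pollendata
      if( !defined($daymax[$daycode-1]) || $pollendata > $daymax[$daycode-1] );
    $daycode++;
  }
}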

@@ -408,14 +401,14 @@ sub allergy_Attr($$$)
$attrVal = 1;
} else {
$attr{$name}{$attrName} = 0;
allergy_GetUpdate($hash);
InternalTimer( gettimeofday() + 2, "allergy_GetUpdate", $hash);
}
}
elsif ($attrName eq "extended5Day") {
elsif ($attrName eq "alternative3Day") {
fhem("deletereading $name fc.*", 1);
fhem("deletereading $name date", 1);
my $hash = $defs{$name};
allergy_GetUpdate($hash);
InternalTimer( gettimeofday() + 2, "allergy_GetUpdate", $hash);
}

if( $cmd eq "set" ) {

@@ -555,6 +548,10 @@ sub allergy_utf8clean($) {
<br>
Localize Weekdays by adding them comma separated (default: Sun,Mon,Tue,Wed,Thu,Fr,Sat)
</li><br>
<li><code>alternative3Day (Standard: 0|1)</code>
<br>
Alternative data source with 3 day forecast.
</li><br>
</ul>
</ul>
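
As a usage note (the device name is a placeholder, not taken from the commit): setting the attribute switches the module to the pollenwarndienst.at feed, and the Attr handler above deletes the existing fc.* and date readings and schedules a fresh fetch about two seconds later.

# Switch a hypothetical device "Pollen" to the alternative 3-day source:
fhem("attr Pollen alternative3Day 1");
# Back to the default 7-day source:
fhem("attr Pollen alternative3Day 0");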

@@ -635,6 +632,10 @@ sub allergy_utf8clean($) {
<br>
Lokalisierte Wochentage, durch Kommas getrennt.
</li><br>
<li><code>alternative3Day (Standard: 0|1)</code>
<br>
Alternative Datenquelle mit 3-Tage-Vorhersage.
</li><br>
</ul>
</ul>

@@ -5,12 +5,12 @@
#
# 2019 Markus Moises < vorname at nachname . de >
#
# This module provides livetracking data from OwnTracks, OpenPaths, Life360 and Swarm (FourSquare)
# This module provides livetracking data from OwnTracks, Life360 and Swarm (FourSquare)
#
#
##############################################################################
#
# define <name> livetracking <life360_user> <life360_pass> <openpaths_key> <openpaths_secret> <swarm_token>
# define <name> livetracking <life360_user> <life360_pass> <swarm_token>
#
##############################################################################
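
For illustration, some define variants the argument parsing further down still accepts (names and credentials are placeholders; OwnTracks needs no define argument, it is attached via the owntracksDevice attribute):

# Life360 plus Swarm (three arguments):
#   define tracker livetracking jane@example.org s3cretpass SWARMTOKEN123
# Life360 only (the first argument must contain "@"):
#   define tracker livetracking jane@example.org s3cretpass
# Swarm only (a single argument):
#   define tracker livetracking SWARMTOKEN123
fhem("define tracker livetracking jane\@example.org s3cretpass SWARMTOKEN123");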

@@ -58,6 +58,7 @@ sub livetracking_Initialize($) {
"interval ".
"home ".
"swarmHome ".
"swarmHomeName ".
"owntracksDevice ".
"beacon_0 ".
"beacon_1 ".

@@ -79,6 +80,7 @@ sub livetracking_Initialize($) {
"zonename_7 ".
"zonename_8 ".
"zonename_9 ".
"clearPlace:0,1 ".
"batteryWarning:5,10,15,20,25,30,35,40 ".
"addressLanguage:de,en,fr,es,it,nl ".
"addressReading:0,1 ".

@@ -95,59 +97,26 @@ sub livetracking_Define($$$) {
my ($hash, $def) = @_;
my @a = split("[ \t][ \t]*", $def);
return "syntax: define <name> livetracking <life360_user> <life360_pass> <openpaths_key> <openpaths_secret> <swarm_token>" if(int(@a) < 2 || int(@a) > 7 );
return "syntax: define <name> livetracking <life360_user> <life360_pass> <swarm_token>" if(int(@a) < 2 || int(@a) > 7 );
my $name = $hash->{NAME};
#$hash->{OAuth_exists} = $libcheck_hasOAuth if($libcheck_hasOAuth);
if(int(@a) == 4 ) {
if ($a[2] =~ /@/) {
$hash->{helper}{life360_user} = $a[2];
$hash->{helper}{life360_pass} = $a[3];
} else {
$hash->{helper}{openpaths_key} = $a[2];# if($hash->{OAuth_exists});
$hash->{helper}{openpaths_secret} = $a[3];# if($hash->{OAuth_exists});
}
}
elsif(int(@a) == 3 ) {
$hash->{helper}{swarm_token} = $a[2];
}
elsif(int(@a) == 5 ) {
if ($a[2] =~ /@/) {
$hash->{helper}{life360_user} = $a[2];
$hash->{helper}{life360_pass} = $a[3];
} else {
$hash->{helper}{openpaths_key} = $a[2];# if($hash->{OAuth_exists});
$hash->{helper}{openpaths_secret} = $a[3];# if($hash->{OAuth_exists});
}
$hash->{helper}{swarm_token} = $a[4];
}
elsif(int(@a) == 7 ) {
$hash->{helper}{life360_user} = $a[2];
$hash->{helper}{life360_pass} = $a[3];
$hash->{helper}{openpaths_key} = $a[4];# if($hash->{OAuth_exists});
$hash->{helper}{openpaths_secret} = $a[5];# if($hash->{OAuth_exists});
$hash->{helper}{swarm_token} = $a[6];
}

my $req = eval
{
require XML::Simple;
XML::Simple->import();
1;
};

if($req)
{
$hash->{NOTIFYDEV} = AttrVal($name, "owntracksDevice" , "owntracks");
}
else
{
$hash->{STATE} = "XML::Simple is required!";
$attr{$name}{disable} = "1";
return undef;
}
$hash->{NOTIFYDEV} = AttrVal($name, "owntracksDevice" , undef);

# my $resolve = inet_aton("api.foursquare.com");
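
With the XML::Simple gate removed, NOTIFYDEV is now taken straight from the owntracksDevice attribute (undef if unset) instead of defaulting to "owntracks", so the attribute should name the FHEM device that delivers the OwnTracks messages. A hypothetical example (device names are placeholders):

# Point the tracker at the device that receives the OwnTracks payloads,
# e.g. an MQTT device subscribed to the OwnTracks topic (names assumed):
fhem("attr tracker owntracksDevice owntracks_phone");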

@@ -160,16 +129,6 @@ sub livetracking_Define($$$) {
InternalTimer( gettimeofday() + 60, "livetracking_GetSwarm", $hash, 0) if(defined($hash->{helper}{swarm_token}));
# $resolve = inet_aton("openpaths.cc");
# if(!defined($resolve) && defined($hash->{helper}{openpaths_key}))
# {
# $hash->{STATE} = "DNS error";
# InternalTimer( gettimeofday() + 1800, "livetracking_GetAll", $hash, 0);
# return undef;
# }
InternalTimer( gettimeofday() + 90, "livetracking_GetOpenPaths", $hash, 0) if(defined($hash->{helper}{openpaths_key}));
if (!defined($attr{$name}{stateFormat}))
{

@@ -252,7 +211,6 @@ sub livetracking_Get($@) {
my $usage = "Unknown argument $command, choose one of All:noArg";
$usage .= " OpenPaths:noArg" if(defined($hash->{helper}{openpaths_key}));
$usage .= " Swarm:noArg" if(defined($hash->{helper}{swarm_token}));
$usage .= " owntracksLocation:noArg owntracksSteps:noArg" if(defined($attr{$name}{owntracksDevice}));
$usage .= " address";

@@ -271,10 +229,6 @@ sub livetracking_Get($@) {
{
livetracking_GetAll($hash);
}
elsif($command eq "OpenPaths")
{
livetracking_GetOpenPaths($hash);
}
elsif($command eq "Swarm")
{
livetracking_GetSwarm($hash);

@@ -423,16 +377,6 @@ sub livetracking_GetAll($) {
InternalTimer( gettimeofday() + 5, "livetracking_GetSwarm", $hash, 0) if(defined($hash->{helper}{swarm_token}));
# $resolve = inet_aton("openpaths.cc");
# if(!defined($resolve) && defined($hash->{helper}{openpaths_key}))
# {
# $hash->{STATE} = "DNS error";
# InternalTimer( gettimeofday() + 3600, "livetracking_GetAll", $hash, 0);
# return undef;
# }
InternalTimer( gettimeofday() + 10, "livetracking_GetOpenPaths", $hash, 0) if(defined($hash->{helper}{openpaths_key}));
InternalTimer( gettimeofday() + 20, "livetracking_GetLife360", $hash, 0) if(defined($hash->{helper}{life360_user}));

@@ -491,72 +435,6 @@ sub livetracking_GetLife360($) {
}

sub livetracking_GetOpenPaths($) {
my ($hash) = @_;
my $name = $hash->{NAME};
#RemoveInternalTimer($hash);
RemoveInternalTimer($hash, "livetracking_GetOpenPaths");
if(AttrVal($name, "disable", 0) eq 1)
{
Log3 ($name, 4, "livetracking $name is disabled, data update cancelled.");
return undef;
}
if(!defined($hash->{helper}{openpaths_key}))
{
return undef;
}
my $nonce = "";
for (my $i=0;$i<32;$i++) {
my $r = int(rand(62));
if ($r<10) { $r += 48; }
elsif ($r<36) { $r += 55; }
else { $r += 61; }
$nonce .= chr($r);
}
my $request = Net::OAuth->request("request token")->new(
consumer_key => $hash->{helper}{openpaths_key},
consumer_secret => $hash->{helper}{openpaths_secret},
request_url => 'https://openpaths.cc/api/1',
request_method => 'GET',
signature_method => 'HMAC-SHA1',
timestamp => livetracking_roundfunc(time()),
nonce => $nonce,
);
$request->sign;
my $lastupdate = livetracking_roundfunc(ReadingsVal($name,".lastOpenPaths",time()-3600));
my $url = $request->to_url."&start_time=".$lastupdate."&num_points=50"; # start_time/end_time/num_points
Log3 ($name, 4, "livetracking OpenPaths URL: ".$url);
HttpUtils_NonblockingGet({
url => $url,
timeout => 10,
noshutdown => 1,
hash => $hash,
type => 'openpathsdata',
callback => \&livetracking_dispatch,
});
my $interval = AttrVal($hash->{NAME}, "interval", 1800);
#RemoveInternalTimer($hash);
InternalTimer( gettimeofday() + $interval, "livetracking_GetOpenPaths", $hash, 0);
$hash->{UPDATED} = FmtDateTime(time());
return undef;
}

sub livetracking_GetSwarm($) {
my ($hash) = @_;
my $name = $hash->{NAME};

@@ -720,92 +598,6 @@ sub livetracking_ParseLife360($$) {
}

sub livetracking_ParseOpenPaths($$) {
my ($hash,$json) = @_;
my $name = $hash->{NAME};
my $updated = 0;
my $lastreading = ReadingsVal($name,".lastOpenPaths",time()-300);
my $device = ReadingsVal($name,"deviceOpenPaths","");
my $os = ReadingsVal($name,"osOpenPaths","");
my $version = ReadingsVal($name,"versionOpenPaths","");
my $altitude = ReadingsVal($name,"altitude","0");
my $altitudeRound = AttrVal($hash->{NAME}, "roundAltitude", 1);
Log3 ($name, 6, "$name OpenPaths data: /n".Dumper($json));
foreach my $dataset (@{$json})
{
Log3 ($name, 5, "$name OpenPaths: at ".FmtDateTime($dataset->{t})." / ".$dataset->{lat}.",".$dataset->{lon});
$lastreading = $dataset->{t}+1;
readingsBeginUpdate($hash); # Begin update readings
$hash->{".updateTimestamp"} = FmtDateTime($dataset->{t});
my $changeindex = 0;
readingsBulkUpdate($hash, "latitude", sprintf("%.5f", $dataset->{lat}));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
readingsBulkUpdate($hash, "longitude", sprintf("%.5f", $dataset->{lon}));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
readingsBulkUpdate($hash, "location", sprintf("%.5f", $dataset->{lat}).",".sprintf("%.5f", $dataset->{lon}));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
if(defined($dataset->{alt}) && $dataset->{alt} ne '')
{
my $newaltitude = livetracking_roundfunc($dataset->{alt}/$altitudeRound)*$altitudeRound;
#Log3 ($name, 0, "$name SwarmRound: ".$dataset->{alt}."/".$altitudeRound." = ".livetracking_roundfunc($dataset->{alt}/$altitudeRound)." *".$altitudeRound);
if($altitude ne $newaltitude)
{
readingsBulkUpdate($hash, "altitude", int($newaltitude));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
$altitude = $newaltitude;
}
}
if(defined($dataset->{device}) && $dataset->{device} ne $device)
{
readingsBulkUpdate($hash, "deviceOpenPaths", $dataset->{device});
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
}
if(defined($dataset->{os}) && $dataset->{os} ne $os)
{
readingsBulkUpdate($hash, "osOpenPaths", $dataset->{os});
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
}
if(defined($dataset->{version}) && $dataset->{version} ne $version)
{
readingsBulkUpdate($hash, "versionOpenPaths", $dataset->{version});
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
}
if(defined($attr{$name}{home}))
{
readingsBulkUpdate($hash, "distance", livetracking_distance($hash,$dataset->{lat}.",".$dataset->{lon},$attr{$name}{home}));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{t});
}
$updated = 1;
readingsEndUpdate($hash, 1); # End update readings
}
if($updated == 1)
{
readingsSingleUpdate($hash,".lastOpenPaths",$lastreading,1);
$hash->{helper}{lastOpenPaths} = $lastreading;
}
return undef;
}

sub livetracking_ParseSwarm($$) {
my ($hash,$json) = @_;
my $name = $hash->{NAME};

@@ -835,11 +627,12 @@ sub livetracking_ParseSwarm($$) {
my $loc = sprintf("%.5f", $dataset->{venue}->{location}->{lat}).",".sprintf("%.5f", $dataset->{venue}->{location}->{lng});
if(defined($attr{$name}{swarmHome}) and defined($attr{$name}{home}))
{
my $shl = $attr{$name}{swarmHome};
my $home = $attr{$name}{home};
$loc =~ s/$shl/$home/g;
if(defined(AttrVal($name, "home", undef))){
if(defined(AttrVal($name, "swarmHomeName", undef)) or defined(AttrVal($name, "swarmHome", undef))){
if($place eq AttrVal($name, "swarmHomeName", "undef") or $loc eq AttrVal($name, "swarmHome", "-")){
$loc = $attr{$name}{home};
}
}
}
readingsBulkUpdate($hash, "latitude", sprintf("%.5f", $dataset->{venue}->{location}->{lat}));
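
Per the new check above, a Swarm checkin is remapped to the real home coordinates when either its venue name matches swarmHomeName or its (deliberately faked) coordinates match swarmHome. A possible configuration (device name, venue name and coordinates are placeholders):

# Remap the Swarm "fake home" checkin back to the real home position:
fhem("attr tracker home 52.52001,13.40495");        # real home coordinates
fhem("attr tracker swarmHome 52.51900,13.40400");   # faked coordinates Swarm reports for the private home
fhem("attr tracker swarmHomeName Home Sweet Home"); # or match by venue name instead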

@@ -1050,6 +843,16 @@ sub livetracking_ParseOwnTracks
readingsBulkUpdate($hash, "connection", (($dataset->{conn} eq "m")?"mobile":($dataset->{conn} eq "w")?"wifi":($dataset->{conn} eq "o")?"offline":"unknown"));
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{tst});
}
if(defined($dataset->{SSID}))
{
readingsBulkUpdate($hash, "connection_ssid", $dataset->{SSID});
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{tst});
}
if(defined($dataset->{BSSID}))
{
readingsBulkUpdate($hash, "connection_bssid", $dataset->{BSSID});
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{tst});
}
if(defined($dataset->{p}) and $dataset->{p} > 0)
{
readingsBulkUpdate($hash, "pressure", sprintf("%.2f", $dataset->{p}*10));

@@ -1080,7 +883,14 @@ sub livetracking_ParseOwnTracks
}
else
{
#fhem( "deletereading $name place" ) if(ReadingsVal($name,"place","undefined") eq $dataset->{desc});
#fhem( "deletereading $name place" ) if(ReadingsVal($name,"place","") eq $dataset->{desc});
if(defined($attr{$name}{clearPlace}) and $attr{$name}{clearPlace} == 1)
{
if(ReadingsVal( $name, "place", "" ) eq $place){ #exit place
readingsBulkUpdate($hash, "place", "-");
$hash->{CHANGETIME}[$changeindex++] = FmtDateTime($dataset->{tst});
}
}
foreach my $placenumber (@placenumbers)
{
readingsBulkUpdate($hash, "zone_".$placenumber,"inactive");

@@ -1253,8 +1063,6 @@ sub livetracking_dispatch($$$)
if( $param->{type} eq 'life360data' ) {
livetracking_ParseLife360($hash,$json);
} elsif( $param->{type} eq 'openpathsdata' ) {
livetracking_ParseOpenPaths($hash,$json);
} elsif( $param->{type} eq 'swarmdata' ) {
livetracking_ParseSwarm($hash,$json);
}

@@ -1926,7 +1734,7 @@ sub livetracking_utf8clean($) {
<ul>
<code>define <name> livetracking <...></code>
<br>
Example: <code>define livetrackingdata livetracking [life360_email] [life360_pass] [openpaths_key] [openpaths_secret] [swarm_token]</code><br/>
Example: <code>define livetrackingdata livetracking [life360_email] [life360_pass] [swarm_token]</code><br/>
Any combination of these services can be defined as long as their order is correct.
<br>
<li><code>...</code>

@@ -1940,11 +1748,7 @@ sub livetracking_utf8clean($) {
<ul>
<li><a name="#All">All</a>
<br/>
Manually trigger a data update for all sources (OpenPaths/Swarm)
</li><br>
<li><a name="#OpenPaths">OpenPaths</a>
<br/>
Manually trigger a data update for OpenPaths
Manually trigger a data update for all sources (Life360/Swarm)
</li><br>
<li><a name="#Life360">Life360</a>
<br/>

@@ -2083,9 +1887,9 @@ sub livetracking_utf8clean($) {
<br>
Home location
</li><br>
<li><a name="swarmHome">swarmHome</a> (lat,lon)
<li><a name="swarmHome">swarmHome/SwarmHomeName</a> (lat,lon)
<br>
Fake home location (that is assigned to private homes for security reasons) of your Swarm home (exact position)
Fake home location or name (that is assigned to private homes for security reasons) of your Swarm home (exact position)
</li><br>
<li><a name="filterAccuracy">filterAccuracy</a> (m)
<br>