HotNoob / PythonProtocolGateway

Python Protocol Gateway reads data via Modbus RTU or other protocols and translates it for MQTT. In the long run, Python Protocol Gateway will become a general-purpose protocol gateway that translates between more than just Modbus and MQTT. Supports Growatt, EG4, Sigineer, SOK, PACE.
Apache License 2.0

custom date parsing #34

Closed: utdrmac closed this 2 months ago

utdrmac commented 2 months ago

Figured I'd share my code changes. I was frustrated for quite some time because the SectionProxy config does not allow the mqtt class to read global settings.

root@eg4monitor:~/PythonProtocolGateway# diff -ur classes/transports/mqtt.py classes/transports/mqtt_custom.py
--- classes/transports/mqtt.py  2024-05-03 10:42:57.881082043 -0500
+++ classes/transports/mqtt_custom.py   2024-05-03 14:05:28.387262768 -0500
@@ -4,6 +4,7 @@
 import time
 import json
 import warnings
+import datetime

 import paho.mqtt.client
 import paho.mqtt.properties
@@ -17,7 +18,7 @@
 from ..protocol_settings import registry_map_entry, WriteMode, Registry_Type

-class mqtt(transport_base):
+class mqtt_custom(transport_base):
     ''' for future; this will hold mqtt transport'''
     host : str
     port : int = 1883
@@ -57,6 +58,12 @@
         self.json = strtobool(settings.get('json', self.json))
         self.reconnect_delay = settings.getint('reconnect_delay', fallback=7)
         self.max_precision = settings.getint('max_precision', fallback=self.max_precision)
+        self.combine_timestamp = settings.getboolean('combine_timestamp', fallback=False)
+
+        # Timestamp mapping config
+        self.timestamp_map = {}
+        for i in ['year', 'month', 'date', 'hour', 'minute', 'second']:
+            self.timestamp_map[i] = settings.get(i)

         if not isinstance( self.reconnect_delay , int) or self.reconnect_delay < 1: #minumum 1 second
             self.reconnect_delay = 1
@@ -162,11 +169,34 @@

         self._log.info("write data to mqtt transport")
         self._log.info(data)
+
         #have to send this every loop, because mqtt doesnt disconnect when HA restarts. HA bug.
         info = self.client.publish(self.base_topic + "/availability","online", qos=0,retain=True)
         if info.rc == MQTT_ERR_NO_CONN:
             self.connected = False

+        if self.combine_timestamp:
+            """ Some inverters provide a timestamp, but it is split into multiple data points.
+                This functionality allows for creating a new timestamp data point, using the
+                documented-name from the registry maps.
+            """
+            tmap = self.timestamp_map
+
+            # Construct timestamp string ISO-8601
+            year = data[tmap['year']]
+            if year < 50:
+                year += 2000
+
+            ts = datetime.datetime(year, data[tmap['month']], data[tmap['date']],
+                data[tmap['hour']], data[tmap['minute']], data[tmap['second']])
+
+            # Add new timestamp to data
+            data["timestamp"] = ts.isoformat()
+
+            # Remove timestamp parts from original data dict
+            for k in tmap:
+                del(data[tmap[k]])
+
         if(self.json):
             # Serializing json
             json_object = json.dumps(data, indent=4)
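
For reference, here is a minimal standalone sketch of what the combine_timestamp logic above does, with hypothetical register values; the time_* key names mirror the documented_name entries used in the config below and are only for illustration.

import datetime

# Hypothetical data dict as it might arrive from the inverter registers
data = {
    "soc": 48,
    "time_year": 24, "time_month": 5, "time_date": 3,
    "time_hour": 14, "time_minute": 47, "time_second": 0,
}

# Mapping of timestamp parts to documented_name keys (mirrors the config)
timestamp_map = {
    "year": "time_year", "month": "time_month", "date": "time_date",
    "hour": "time_hour", "minute": "time_minute", "second": "time_second",
}

# Two-digit years below 50 are treated as 20xx, matching the patch
year = data[timestamp_map["year"]]
if year < 50:
    year += 2000

ts = datetime.datetime(
    year,
    data[timestamp_map["month"]],
    data[timestamp_map["date"]],
    data[timestamp_map["hour"]],
    data[timestamp_map["minute"]],
    data[timestamp_map["second"]],
)

# Replace the split fields with a single ISO-8601 timestamp
data["timestamp"] = ts.isoformat()
for key in timestamp_map.values():
    del data[key]

print(data)  # {'soc': 48, 'timestamp': '2024-05-03T14:47:00'}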

And the added sections in config.cfg:

[transport.1]
...
json = true
...
# Do you want a single timestamp created?
combine_timestamp = true

# Timestamp mapping for combine_timestamp
# Use the documented_name from the protocol
# registry map to create a mapping.
year = time_year
month = time_month
date = time_date
hour = time_hour
minute = time_minute
second = time_second
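
As a rough illustration of how those keys end up in the transport, here is a sketch of reading the same section with configparser; the section and file names are taken from the example above, but the project's real loader differs, so this only shows how the keys map into timestamp_map.

import configparser

cfg = configparser.ConfigParser()
cfg.read("config.cfg")

settings = cfg["transport.1"]
combine_timestamp = settings.getboolean("combine_timestamp", fallback=False)

# Build the part-name -> documented_name mapping, as in the patched __init__
timestamp_map = {}
for part in ["year", "month", "date", "hour", "minute", "second"]:
    timestamp_map[part] = settings.get(part)

print(combine_timestamp)  # True
print(timestamp_map)      # {'year': 'time_year', 'month': 'time_month', ...}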

Which generates MQTT events:

/home/inverter {
    "pv1_voltage": 176.9,
    "pv2_voltage": 192.20000000000002,
    "pv3_voltage": 5.800000000000001,
    "soc": 48,
    "ppv1": 301.0,
    "ppv2": 83.0,
    "ppv3": 0.0,
    "pcharge": 269.0,
    "pdischarge": 0.0,
    "peps_l1n": 44.0,
    "peps_l2n": 64.0,
    "timestamp": "2024-05-03T14:47:00"
}

and now I can parse this in Telegraf using the json data format and send it on to InfluxDB.
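
For completeness, a sketch of the Telegraf side, assuming an mqtt_consumer input subscribed to the topic shown above and an InfluxDB 1.x output; the broker address, topic, database name, and URL are placeholders, and json_time_key / json_time_format are optional settings of Telegraf's JSON parser that turn the combined timestamp into the metric time.

[[inputs.mqtt_consumer]]
  servers = ["tcp://localhost:1883"]
  topics = ["home/inverter"]
  data_format = "json"
  ## Use the combined timestamp from the payload as the metric time
  json_time_key = "timestamp"
  json_time_format = "2006-01-02T15:04:05"

[[outputs.influxdb]]
  urls = ["http://localhost:8086"]
  database = "inverter"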