I have a weather station based on a Raspberry Pi that sends its data to two different DBs on another Raspberry Pi (for now everything is on the local network). Let's call the weather Raspberry Pi weather_client and the other one db_server.
On the db_server I have one MySQL DB and one InfluxDB:
mysql port: 3306
influxDB port: 8086
At some point the weather_client will no longer be on the local network, so I started looking into SSH tunneling, and for now I have the below working solution for reading MySQL queries:
import os
import time
#import datetime
import MySQLdb
from datetime import datetime
from sshtunnel import SSHTunnelForwarder
import influxdb_client
from influxdb_client import InfluxDBClient, Point, WritePrecision, WriteOptions
from influxdb_client.client.write_api import SYNCHRONOUS
# Flux query: last 5 minutes of temperature readings from the BME280 sensor.
# NOTE(review): the original string contained `from(bucket:bucket)`, which is
# not valid Flux — the bucket must be given as a string literal (the Python
# variable `bucket` is never interpolated into a plain '...' string).  The
# bucket name "test_enviro" from the InfluxDB configuration is used here.
query = 'from(bucket:"test_enviro")\
|> range(start: -5m)\
|> filter(fn:(r) => r._measurement == "rpi-bme280")\
|> filter(fn:(r) => r.location == "xxxx")\
|> filter(fn:(r) => r._field == "temperature")\
|> aggregateWindow(every: v.windowPeriod, fn: last, createEmpty: false)\
|> yield(name: "last")'

# The DB handles (db, db_cursor, query_api) are created in the __main__
# section below.  The original module-level `global db_cursor` / `global db`
# statements were removed: `global` is only meaningful inside a function and
# is a no-op at module scope.
def read_from_mysqldb():
    """Fetch and print the newest row of the wind_data table.

    Relies on the module-level ``db_cursor`` created in the __main__
    section.  Prints a diagnostic instead of raising on any DB error.
    """
    try:
        db_cursor.execute(
            "SELECT * FROM wind_data ORDER BY wind_data.id DESC LIMIT 1")
        result = db_cursor.fetchall()
        # fetchall() returns a (possibly empty) sequence, never None, so
        # test for emptiness instead of the original `is not None`.
        if result:
            row = result[0]
            print('| time: ', row[5], '| GrafanaId: ', row[1],
                  '| location: ', row[2], '| speed: ', row[3],
                  '| direction: ', row[4], '| id: ', row[0])
    except Exception as exc:
        # Was a bare `except:` that hid the cause; report it instead.
        print("read error", exc)
def read_from_influxdb():
    """Run the module-level Flux ``query`` and print (field, value) pairs.

    Relies on the module-level names ``query_api``, ``org`` and ``query``.
    NOTE(review): the __main__ section assigned ``_query_api``, so the name
    ``query_api`` used here was never defined — presumably why this call is
    commented out in main(); the bare except masked the NameError.
    """
    try:
        tables = query_api.query(org=org, query=query)
        results = [(record.get_field(), record.get_value())
                   for table in tables
                   for record in table.records]
        print(results)
    except Exception as exc:
        # Was a bare `except:` that hid the cause; report it instead.
        print("read error", exc)
def main():
    """Poll the MySQL database every 15 seconds until interrupted."""
    poll_seconds = 15
    while True:
        read_from_mysqldb()
        # read_from_influxdb()
        time.sleep(poll_seconds)
# SSH tunnel to db_server: forwards a random local port to the remote MySQL
# service.  Note that only ONE remote endpoint (port 3306) is declared here,
# so only MySQL is reachable through this tunnel — InfluxDB (8086) is not.
server = SSHTunnelForwarder(
    ('192.168.0.100', 22),
    ssh_username="pi",
    ssh_password="xxxxxx",
    remote_bind_address=("192.168.0.100", 3306)
)
# Configure InfluxDB connection variables
token = os.environ.get("INFLUXDB_TOKEN")  # None when the env var is unset
org = "mymeteo"
bucket = "test_enviro"
if __name__ == '__main__':
    server.start()
    print(server.local_bind_port)  # show assigned local port
    # The forwarded port is bound on the LOCAL machine, so clients must
    # connect to 127.0.0.1 — the same address the MySQL connection below
    # already uses.  (The original pointed the URL at the remote IP.)
    # NOTE(review): the single tunnel above forwards only to port 3306
    # (MySQL); InfluxDB is not actually reachable through this URL until a
    # second remote endpoint for port 8086 is added.
    url = "http://127.0.0.1:" + str(server.local_bind_port)
    try:
        client = InfluxDBClient(url=url, token=token)
        # Bind the exact name read_from_influxdb() looks up; the original
        # assigned `_query_api`, leaving `query_api` undefined.
        query_api = client.query_api()
        db = MySQLdb.connect(host="127.0.0.1", port=server.local_bind_port,
                             user="root", password="xxxx", database="xxxxx")
        db_cursor = db.cursor()
    except Exception as exc:
        print("Keine Verbindung zum Server...", exc)
    else:
        # Only start polling when the connections were actually
        # established; the original called main() unconditionally and then
        # failed on the undefined cursor after a failed connect.
        try:
            main()
        except KeyboardInterrupt:
            print("bye bye...")
        finally:
            # Always release DB resources, even on an unexpected error.
            db_cursor.close()
            db.close()
    server.stop()
Is it possible to use the same tunnel to reach the two DBs on their different ports?
I tried this, but it gives me an assertion error:
File "/usr/local/lib/python3.9/dist-packages/sshtunnel.py", line 149, in check_addresses
assert all(isinstance(x, (tuple, string_types)) for x in address_list)
AssertionError
Changes in the sshtunnel line:
# One SSH tunnel can forward several remote endpoints, but sshtunnel expects
# the PLURAL keyword `remote_bind_addresses` for a list; the singular
# `remote_bind_address` takes a single (host, port) tuple, which is why
# check_addresses raises the AssertionError when given a list.
tunnels = [("192.168.0.100", 3306),
           ("192.168.0.100", 8086)]
server = SSHTunnelForwarder(
    ('192.168.0.100', 22),
    ssh_username="pi",
    ssh_password="password",
    remote_bind_addresses=tunnels,
)
# After server.start(), the assigned local ports are in the matching plural
# attribute server.local_bind_ports, in the same order as `tunnels`
# (local_bind_ports[0] -> MySQL, local_bind_ports[1] -> InfluxDB).
How can I do this? Or maybe my thinking is wrong and a better solution would be to send the data to the DBs via PHP?