I currently save data to a database with this script. I have to run dump1090 as a subprocess. Basically, it pulls data from the JSON endpoint of the HTTP server. Is there a better way to do this? Also, why not add functionality to save the data to CSV or Excel?
import json
import sqlite3
import subprocess
import time
from datetime import datetime, timezone
DATABASE_FILE = 'flight_data.db'
def drop_table():
    """Drop the flight_data table if it exists, giving each run a clean slate.

    Raises:
        sqlite3.Error: if the database file cannot be opened or the DDL fails.
    """
    conn = sqlite3.connect(DATABASE_FILE)
    try:
        # Connection.execute creates an implicit cursor; commit the DDL.
        conn.execute('DROP TABLE IF EXISTS flight_data')
        conn.commit()
    finally:
        # Close even if execute/commit raises, so the handle is never leaked.
        conn.close()
def create_table():
    """Create the flight_data table (no-op if it already exists).

    Columns mirror the fields of one dump1090 data.json aircraft record,
    plus the collection timestamp.

    Raises:
        sqlite3.Error: if the database file cannot be opened or the DDL fails.
    """
    conn = sqlite3.connect(DATABASE_FILE)
    try:
        conn.execute('''
            CREATE TABLE IF NOT EXISTS flight_data (
                timestamp TEXT,
                hex TEXT,
                flight TEXT,
                lat REAL,
                lon REAL,
                altitude INTEGER,
                track INTEGER,
                speed INTEGER
            )
        ''')
        conn.commit()
    finally:
        # Close even on failure so the connection is never leaked.
        conn.close()
def insert_data(timestamp, data_element):
    """Insert one aircraft record into flight_data.

    Args:
        timestamp: collection time string stored alongside the record.
        data_element: one aircraft record serialized as a JSON string
            (as produced by dump1090's data.json).

    Raises:
        json.JSONDecodeError: if data_element is not valid JSON.
        sqlite3.Error: if the insert fails.
    """
    # Parse before opening the connection so a bad payload never
    # leaves a dangling handle.
    data = json.loads(data_element)
    conn = sqlite3.connect(DATABASE_FILE)
    try:
        # dump1090 records may omit optional fields (e.g. no callsign or
        # position yet) — .get() stores NULL instead of raising KeyError.
        conn.execute('''
            INSERT INTO flight_data (
                timestamp, hex, flight, lat, lon, altitude, track, speed
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
        ''', (
            timestamp,
            data.get('hex'),
            data.get('flight'),
            data.get('lat'),
            data.get('lon'),
            data.get('altitude'),
            data.get('track'),
            data.get('speed'),
        ))
        conn.commit()
    finally:
        # Close even on failure so the connection is never leaked.
        conn.close()
def run_curl_and_save():
    """Fetch the current aircraft list from dump1090's HTTP JSON endpoint
    and store every record with a single shared timestamp.

    Errors from curl or from malformed JSON are reported and swallowed so
    the polling loop in __main__ keeps running.
    """
    try:
        result = subprocess.run(
            ['curl', 'http://localhost:8080/data.json'],
            capture_output=True, text=True, check=True)
        data = json.loads(result.stdout)
        # utcnow() is deprecated (3.12+); use an aware UTC datetime instead.
        # The rendered string format is unchanged.
        timestamp = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')
        for record in data:
            insert_data(timestamp, json.dumps(record))
    except subprocess.CalledProcessError as e:
        print(f"Error: {e}")
    except json.JSONDecodeError as e:
        # The server may be mid-write and return a truncated document;
        # skip this polling cycle rather than crash the loop.
        print(f"Error: invalid JSON from dump1090: {e}")
if __name__ == "__main__":
    # Start the dump1090 decoder with its network interface enabled so the
    # HTTP JSON endpoint on port 8080 becomes available.
    decoder_proc = subprocess.Popen(['./dump1090', '--net', '--interactive'])
    time.sleep(4)  # give the embedded HTTP server a moment to come up
    try:
        # Rebuild the table from scratch on every run, then poll forever.
        drop_table()
        create_table()
        while True:
            run_curl_and_save()
            time.sleep(10)
    except KeyboardInterrupt:
        print("Terminating the script.")
    finally:
        # Always shut the decoder down, even on an unexpected error.
        decoder_proc.terminate()
        decoder_proc.wait()
The output format is CSV-like. You can also read the complete stream from the remote port 30003; if you contact me via private message, we can implement a real collector system: zio.rick@gmail.com.
Have fun.
I currently save data to a database with this script. I have to run dump1090 as a subprocess. Basically, it pulls data from the JSON endpoint of the HTTP server. Is there a better way to do this? Also, why not add functionality to save the data to CSV or Excel?