Send live data to local influx and send to front end (#1299)
### Changelist 
Sending live data to local influx
### Testing Done
It works.
### Resolved Tickets
None.
larakawasme authored Jun 7, 2024
1 parent d5d250b commit cea553c
Showing 3 changed files with 96 additions and 62 deletions.
17 changes: 14 additions & 3 deletions firmware/quadruna/VC/src/io/io_telemMessage.c
@@ -47,15 +47,16 @@ bool io_telemMessage_pushMsgtoQueue(CanMsg *rx_msg)
{
uint8_t proto_buffer[QUEUE_SIZE] = { 0 };

// filter messages
if (rx_msg->std_id != 111)
// Filter messages: currently faults, warnings, and BMS (to verify things work when running normally)
if (rx_msg->std_id != 111 && rx_msg->std_id != 205 && rx_msg->std_id != 206 && rx_msg->std_id != 207 &&
    rx_msg->std_id != 208)
{
return false;
}
// send it over the correct UART functionality
pb_ostream_t stream = pb_ostream_from_buffer(proto_buffer, sizeof(proto_buffer));

// filling in fields
// filling in fields
if (rx_msg->dlc > 8)
return false;
t_message.can_id = (int32_t)(rx_msg->std_id);
@@ -68,6 +69,16 @@ bool io_telemMessage_pushMsgtoQueue(CanMsg *rx_msg)
t_message.message_6 = rx_msg->data[6];
t_message.message_7 = rx_msg->data[7];

// t_message.can_id = 111;
// t_message.message_0 = 117;
// t_message.message_1 = 49;
// t_message.message_2 = 0;
// t_message.message_3 = 0;
// t_message.message_4 = 0;
// t_message.message_5 = 236;
// t_message.message_6 = 202;
// t_message.message_7 = 0;
t_message.time_stamp = (int32_t)io_time_getCurrentMs();
// encoding message

proto_status = pb_encode(&stream, TelemMessage_fields, &t_message);
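The corrected condition above amounts to an allow-list: a message is forwarded only if its standard ID is one of the fault/warning/BMS IDs, and everything else is dropped. Below is a minimal sketch of that membership check, for illustration only; the ID values come from the diff, while the set literal and helper name are hypothetical and not project code.

```python
# Allow-list filter sketch: forward only fault/warning/BMS CAN IDs.
# The IDs are taken from the diff above; the helper itself is hypothetical.
ALLOWED_CAN_IDS = {111, 205, 206, 207, 208}

def should_forward(std_id: int) -> bool:
    """Mirror of the C condition: drop any ID not in the allow-list."""
    return std_id in ALLOWED_CAN_IDS

assert should_forward(111) and should_forward(208)
assert not should_forward(100)
```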
2 changes: 1 addition & 1 deletion software/hardware_tests/data_rate_test_tx.py
@@ -12,7 +12,7 @@ def generate_data_rate(target_data_rate_kbps):
try:
while True:
msg = str(int(tick_count)) + "\n"
ser.write(msg) # 10 characters at 8 bits each
ser.write(msg.encode('utf-8')) # Encode as UTF-8 bytes
# 80 bits

time.sleep(delay)
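The inline comments reason about data rate from message size: each "&lt;tick&gt;\n" message is roughly 10 characters, i.e. about 80 bits on the wire. A rough sketch of how that maps to an inter-message delay is shown below; since the full body of `generate_data_rate()` is not shown here, the helper name and formula are assumptions, not project code.

```python
# Sketch: delay between messages needed to approximate a target data rate,
# assuming each message is a fixed number of bits (~80 bits per the comment above).
def delay_for_rate(bits_per_message: int, target_data_rate_kbps: float) -> float:
    """Seconds to wait between messages to approximate the target data rate."""
    return bits_per_message / (target_data_rate_kbps * 1000)

print(delay_for_rate(80, 57.6))  # ~0.00139 s between messages at 57.6 kbps
```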
139 changes: 81 additions & 58 deletions software/tracksight/backend/app/signal_util.py
@@ -12,6 +12,7 @@
import logging
import time
import pandas as pd
from queue import Queue
from tzlocal import get_localzone

from generated import telem_pb2
@@ -23,33 +24,39 @@
0,
os.path.abspath(
os.path.join(
os.path.dirname(__file__), "../../../../../scripts/code_generation/"
os.path.dirname(__file__), "../../../../scripts/code_generation/"
)
),
)
bus_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../../../can_bus/quadruna")
os.path.join(os.path.dirname(__file__), "../../../../can_bus/quadruna")
)
# from jsoncan.src.json_parsing.json_can_parsing import JsonCanParser
# from jsoncan.src.can_database import CanDatabase
print("sys.path[0]:", sys.path[0])
print(bus_path)

VALID_PACKET_SIZES = {255, 165, 253}
from jsoncan.src.json_parsing.json_can_parsing import JsonCanParser
from jsoncan.src.can_database import CanDatabase

INVALID_PACKET_SIZES = {255, 165, 253}


class SignalUtil:
available_signals = {}
client_signals = {}
is_setup = False
signal_df = pd.DataFrame(columns=['time', 'signal', 'value', 'unit'])
max_df_size = 1  # Number of buffered rows before flushing to Influx


@classmethod
def setup(cls, port: str, app):
# cls.ser = serial.Serial(port=port, baudrate=57600, timeout=1)
# cls.ser.reset_input_buffer()
# cls.ser.reset_output_buffer()
cls.ser = serial.Serial(port=port, baudrate=57600, timeout=1)
cls.ser.reset_input_buffer()
cls.ser.reset_output_buffer()
cls.is_setup = True
cls.app = app

# can_db = JsonCanParser(bus_path).make_database()
cls.can_db = JsonCanParser(bus_path).make_database()

@classmethod
def read_messages(cls):
@@ -64,71 +71,87 @@ def read_messages(cls):
while True:
# TODO: Lara: Upload actual signals instead!

# packet_size = int.from_bytes(cls.ser.read(1), byteorder="little")
# logger.info(f"Received data: {packet_size}")
# if packet_size in VALID_PACKET_SIZES:
# continue
packet_size = int.from_bytes(cls.ser.read(1), byteorder="little")
logger.info(f"Received data: {packet_size}")
if packet_size in INVALID_PACKET_SIZES:
continue

# if (
# last_bit == 0 and packet_size != 0
# ): # the size will be different due to 0 not often being include
if (
last_bit == 0 and packet_size != 0
):  # the size will be different because 0 is often not included

# # Read in UART message and parse the protobuf
# bytes_read = cls.ser.read(packet_size)
# message_received = telem_pb2.TelemMessage()
# message_received.ParseFromString(bytes_read)
# Read in UART message and parse the protobuf
bytes_read = cls.ser.read(packet_size)
message_received = telem_pb2.TelemMessage()
message_received.ParseFromString(bytes_read)

# # Make data array out of ints
# data_array = cls.make_bytes(message_received)
# Make data array out of ints
data_array = cls.make_bytes(message_received)

# # Unpack the data and add the id and meta data
# signal_list = cls.can_db.unpack(message_received.can_id, data_array)
# Unpack the data and add the id and meta data
signal_list = cls.can_db.unpack(message_received.can_id, data_array)

# for single_signal in signal_list:
for single_signal in signal_list:

# # Add the time stamp
# single_signal["timestamp"] = message_received.time_stamp
# signal_name = single_signal["name"]
# Add the time stamp and get name
single_signal["timestamp"] = message_received.time_stamp
signal_name = single_signal["name"]

# # Update the list of availble signals and add it to client signals
# if signal_name not in cls.available_signals:
# cls.available_signals[signal_name] = True
# cls.client_signals[signal_name] = []
# Update the list of available signals and add it to client signals
if signal_name not in cls.available_signals:
cls.available_signals[signal_name] = True
cls.client_signals[signal_name] = []

# # Emit the message
# flask_socketio.emit("signal_response", single_signal)

# Ensure the value is numeric (convert to int)
value = int(single_signal["value"])

# else:
# last_bit = packet_size

if "Signal" not in cls.available_signals:
cls.available_signals["Signal"] = True
cls.client_signals["Signal"] = []
# print(single_signal)  # Uncomment to verify received signals by printing them to the terminal

# signal = {
# # "timestamp": "2024",
# "name": "Signal",
# "value": 3,
# "unit": "W",
# }
# with cls.app.app_context():
# Create a DataFrame for the new signal
new_signal_df = pd.DataFrame([{
"time": pd.Timestamp.now(tz=get_localzone()),#TODO: Make time more accurate in mili since start
"value": value,
"unit": single_signal["unit"],
"signal": single_signal["name"]
}])

signals = {"time": [], "signal": [], "value": [], "unit": []}
for _ in range(10):
timestamp = pd.Timestamp.now(tz=get_localzone())
signals["time"].append(timestamp)
signals["signal"].append("Test")
signals["value"].append(2)
signals["unit"].append("kW")

InfluxHandler.write(
pd.DataFrame(data=signals),
measurement="live",
)
# Filter out empty or all-NA columns before concatenation
cls.signal_df = cls.signal_df.dropna(axis=1, how='all')
new_signal_df = new_signal_df.dropna(axis=1, how='all')
# Concatenate the new signal DataFrame with the existing one
cls.signal_df = pd.concat([cls.signal_df, new_signal_df], ignore_index=True)

time.sleep(1)

# Emit the message
if len(cls.signal_df) >= cls.max_df_size:
print(cls.signal_df)
InfluxHandler.write(
cls.signal_df, measurement='live'
)

cls.signal_df = pd.DataFrame(columns=['time', 'value', 'unit', 'signal'])
time.sleep(1)

else:
last_bit = packet_size

except Exception as e:
logger.error("Error receiving/sending proto msg: %s", e)
finally:
cls.ser.close()



@classmethod
def make_bytes(cls, message):
"""
Make the byte array out of the messages array.
"""
return bytearray([
message.message_0, message.message_1, message.message_2,
message.message_3, message.message_4, message.message_5,
message.message_6, message.message_7
])
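For reference, here is a condensed sketch of the receive path implemented above: read the one-byte length prefix, skip the known-invalid sizes, parse the protobuf `TelemMessage`, rebuild the 8-byte CAN payload, unpack it through the CAN database, buffer rows in a DataFrame, and flush the batch to Influx. The project dependencies (serial port, `telem_pb2`, `can_db`, `InfluxHandler.write`) are passed in as parameters so the flow reads in one place; the function and parameter names are hypothetical, and this mirrors the diff rather than replacing it.

```python
# Condensed sketch of read_messages() above; parameter names are assumptions.
import time

import pandas as pd
from tzlocal import get_localzone

INVALID_PACKET_SIZES = {255, 165, 253}

def pump_live_signals(ser, telem_pb2, can_db, influx_write, max_rows: int = 1):
    """Read length-prefixed protobuf packets from UART and batch them to Influx."""
    rows = []
    last_bit = 0
    while True:
        # One-byte length prefix, as in the loop above.
        packet_size = int.from_bytes(ser.read(1), byteorder="little")
        if packet_size in INVALID_PACKET_SIZES:
            continue
        if last_bit == 0 and packet_size != 0:
            # Parse the protobuf payload of the advertised size.
            msg = telem_pb2.TelemMessage()
            msg.ParseFromString(ser.read(packet_size))
            # Rebuild the 8-byte CAN payload (same as make_bytes()).
            payload = bytearray([msg.message_0, msg.message_1, msg.message_2,
                                 msg.message_3, msg.message_4, msg.message_5,
                                 msg.message_6, msg.message_7])
            # Unpack into named signals and buffer one row per signal.
            for sig in can_db.unpack(msg.can_id, payload):
                rows.append({
                    "time": pd.Timestamp.now(tz=get_localzone()),
                    "signal": sig["name"],
                    "value": int(sig["value"]),
                    "unit": sig["unit"],
                })
            # Flush the batch to the "live" measurement once it is large enough.
            if len(rows) >= max_rows:
                influx_write(pd.DataFrame(rows), measurement="live")
                rows = []
            time.sleep(1)  # throttle, as in the original loop
        else:
            last_bit = packet_size
```

A hypothetical call matching the names used in the diff would be `pump_live_signals(cls.ser, telem_pb2, cls.can_db, InfluxHandler.write, max_rows=cls.max_df_size)`.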
