emit v2 mlg (#4979)

* emit v2 mlg

* missed a spot

* s

* s

* binary log unit test

* sd generator test

* oop

* getcategory

* write zeroes instead of null pointer in case of no category
Matthew Kennedy 2023-01-20 18:01:40 -08:00 committed by GitHub
parent 37612cfec8
commit d92b90c0a5
11 changed files with 76 additions and 44 deletions

View File

@@ -54,9 +54,9 @@ void writeFileHeader(Writer& outBuffer) {
 // File format: MLVLG\0
 strncpy(buffer, "MLVLG", 6);
-// Format version = 01
+// Format version = 02
 buffer[6] = 0;
-buffer[7] = 1;
+buffer[7] = 2;
 // Timestamp
 buffer[8] = 0;

@@ -67,23 +67,25 @@ void writeFileHeader(Writer& outBuffer) {
 // Info data start
 buffer[12] = 0;
 buffer[13] = 0;
-size_t headerSize = MLQ_HEADER_SIZE + efi::size(fields) * 55;
-// Data begin index: begins immediately after the header
 buffer[14] = 0;
 buffer[15] = 0;
-buffer[16] = (headerSize >> 8) & 0xFF;
-buffer[17] = headerSize & 0xFF;
+size_t headerSize = MLQ_HEADER_SIZE + efi::size(fields) * MLQ_FIELD_HEADER_SIZE;
+// Data begin index: begins immediately after the header
+buffer[16] = 0;
+buffer[17] = 0;
+buffer[18] = (headerSize >> 8) & 0xFF;
+buffer[19] = headerSize & 0xFF;
 // Record length - length of a single data record: sum size of all fields
-buffer[18] = recordLength >> 8;
-buffer[19] = recordLength & 0xFF;
+buffer[20] = recordLength >> 8;
+buffer[21] = recordLength & 0xFF;
 // Number of logger fields
 int fieldsCount = efi::size(fields);
-buffer[20] = fieldsCount >> 8;
-buffer[21] = fieldsCount;
+buffer[22] = fieldsCount >> 8;
+buffer[23] = fieldsCount;
 outBuffer.write(buffer, MLQ_HEADER_SIZE);
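
As a reading aid (not part of the change itself), here is a standalone sketch of the 24-byte format-02 file header this hunk produces. The function name is made up and the timestamp is zero-filled for simplicity; the two constants mirror the #define updates further down.

#include <cstdint>
#include <cstring>
#include <cstddef>

static constexpr size_t MLQ_HEADER_SIZE = 24;       // 22 in format 01
static constexpr size_t MLQ_FIELD_HEADER_SIZE = 89; // 55 in format 01

// Illustrative only: fills a 24-byte buffer with the format-02 header,
// following the byte layout shown in the hunk above.
static void sketchFileHeaderV2(uint8_t* buffer, size_t fieldCount, uint16_t recordLength) {
    // 0..5: file format marker "MLVLG\0"
    memcpy(buffer, "MLVLG", 6);
    // 6..7: format version = 02
    buffer[6] = 0;
    buffer[7] = 2;
    // 8..11: timestamp (left zeroed in this sketch)
    memset(&buffer[8], 0, 4);
    // 12..15: info data start
    memset(&buffer[12], 0, 4);
    // 16..19: data begin index - immediately after all field headers
    size_t headerSize = MLQ_HEADER_SIZE + fieldCount * MLQ_FIELD_HEADER_SIZE;
    buffer[16] = 0;
    buffer[17] = 0;
    buffer[18] = (headerSize >> 8) & 0xFF;
    buffer[19] = headerSize & 0xFF;
    // 20..21: record length - sum of the sizes of all fields
    buffer[20] = recordLength >> 8;
    buffer[21] = recordLength & 0xFF;
    // 22..23: number of logger fields
    buffer[22] = (fieldCount >> 8) & 0xFF;
    buffer[23] = fieldCount & 0xFF;
}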

View File

@@ -42,7 +42,14 @@ void LogField::writeHeader(Writer& outBuffer) const {
 // Offset 54, size 1 = digits to display (signed int)
 buffer[54] = m_digits;
-// Total size = 55
+// Offset 55, (optional) category string
+if (m_category) {
+strncpy(&buffer[55], m_category, 34);
+} else {
+memset(&buffer[55], 0, 34);
+}
+// Total size = 89
 outBuffer.write(buffer, MLQ_FIELD_HEADER_SIZE);
 }
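
Worth noting (not part of the diff): the old 55-byte field header plus the fixed 34-byte category slot gives 55 + 34 = 89, matching the new MLQ_FIELD_HEADER_SIZE. Since strncpy zero-pads the destination when the source is shorter than 34 bytes, a short category leaves the rest of the slot cleanly zeroed, the same result the memset branch produces when no category is set at all.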

View File

@@ -10,7 +10,7 @@ public:
 // Scaled channels, memcpys data directly and describes format in header
 template <typename TValue, int TMult, int TDiv>
 constexpr LogField(const scaled_channel<TValue, TMult, TDiv>& toRead,
-const char* name, const char* units, int8_t digits)
+const char* name, const char* units, int8_t digits, const char* category = "none")
 : m_multiplier(float(TDiv) / TMult)
 , m_addr(toRead.getFirstByteAddr())
 , m_type(resolveType<TValue>())

@@ -18,13 +18,14 @@ public:
 , m_size(sizeForType(resolveType<TValue>()))
 , m_name(name)
 , m_units(units)
+, m_category(category)
 {
 }
 // Non-scaled channel, works for plain arithmetic types (int, float, uint8_t, etc)
 template <typename TValue, typename = typename std::enable_if<std::is_arithmetic_v<TValue>>::type>
 constexpr LogField(TValue& toRead,
-const char* name, const char* units, int8_t digits)
+const char* name, const char* units, int8_t digits, const char* category = "none")
 : m_multiplier(1)
 , m_addr(&toRead)
 , m_type(resolveType<TValue>())

@@ -32,6 +33,7 @@ public:
 , m_size(sizeForType(resolveType<TValue>()))
 , m_name(name)
 , m_units(units)
+, m_category(category)
 {
 }

@@ -83,6 +85,7 @@ private:
 const char* const m_name;
 const char* const m_units;
+const char* const m_category;
 };
 template<>
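
A hedged usage sketch of the widened constructors (illustrative names only, assuming the log_field.h declarations above are in scope; nothing below comes from the change itself):

// Illustrative only - the variable, labels and category are made up.
static float exampleCoolantTemp = 0;

static const LogField exampleFields[] = {
    { exampleCoolantTemp, "CLT", "deg C", 1, "Temperatures" }, // explicit category
    { exampleCoolantTemp, "CLT copy", "deg C", 1 },            // category defaults to "none"
};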

View File

@@ -2038,8 +2038,8 @@ end_struct
 ! dual-bank devices can go with 300 for better TS reconnect experience
 #define TS_BLOCK_READ_TIMEOUT 3000
-#define MLQ_HEADER_SIZE 22
-#define MLQ_FIELD_HEADER_SIZE 55
+#define MLQ_HEADER_SIZE 24
+#define MLQ_FIELD_HEADER_SIZE 89
 #define PROTOCOL_OUTPIN "outpin"
 #define PROTOCOL_ANALOG_CHART "analog_chart"
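
(A quick sanity check on the new constants: a log with, say, 100 fields now spends 24 + 100 * 89 = 8,924 bytes on the file header plus field headers, versus 22 + 100 * 55 = 5,522 bytes under format 01; the per-record data layout is untouched by this change.)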

View File

@@ -297,6 +297,7 @@ public enum Sensor implements BinaryLogEntry {
 }
 }
+@Override
 public String getName() {
 return name;
 }

@@ -326,6 +327,8 @@ public enum Sensor implements BinaryLogEntry {
 }
 }
+// TODO: this should be a string
+@Override
 public SensorCategory getCategory() {
 return category;
 }

View File

@@ -1,11 +1,16 @@
 package com.rusefi.sensor_logs;
+import com.rusefi.core.SensorCategory;
 import java.io.DataOutputStream;
 import java.io.IOException;
 public interface BinaryLogEntry {
 String getName();
+// TODO: getCategory() should return String
+SensorCategory getCategory();
 String getUnit();
 int getByteSize();

View File

@@ -106,25 +106,31 @@ public class BinarySensorLog<T extends BinaryLogEntry> implements SensorLog {
 fieldsDataSize += entry.getByteSize();
 }
-// 0006h Format version = 01
+// 0006h Format version = 02
 stream.write(0);
-stream.write(1);
+stream.write(2);
 // 0008h Timestamp
 stream.writeInt((int) (System.currentTimeMillis() / 1000));
-// 000ch
-int offsetToText = Fields.MLQ_HEADER_SIZE + Fields.MLQ_FIELD_HEADER_SIZE * entries.size();
-stream.writeShort(offsetToText);
-stream.writeShort(0); // reserved?
-// 0010h = offset_to_data
-stream.writeShort(offsetToText + headerText.length());
-// 0012h
+int headerSize = Fields.MLQ_HEADER_SIZE + Fields.MLQ_FIELD_HEADER_SIZE * entries.size();
+// 000ch Info data start - immediately after header
+stream.writeInt(headerSize);
+// 0010h Data begin index - begins immediately after the header text
+int headerWithTextSize = headerSize + headerText.length();
+stream.writeInt(headerSize);
+// 0014h Record length
 stream.writeShort(fieldsDataSize);
-// 0014h number of fields
+// 0016h Number of fields
 stream.writeShort(entries.size());
 for (BinaryLogEntry sensor : entries) {
 String name = sensor.getName();
 String unit = sensor.getUnit();
+String category = sensor.getCategory().getName();
 // 0000h
 stream.write(sensor.getByteSize());

@@ -138,8 +144,11 @@ public class BinarySensorLog<T extends BinaryLogEntry> implements SensorLog {
 stream.writeInt(0);
 // 0036h precision
 stream.write(2);
+// 0037h category string
+writeLine(stream, category, 34);
 }
-if (stream.size() != offsetToText)
+if (stream.size() != headerWithTextSize)
 throw new IllegalStateException("We are doing something wrong :( stream.size=" + stream.size());
 writeLine(stream, headerText, headerText.length());
 }
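
(For the Java writer, the byte accounting behind MLQ_HEADER_SIZE = 24 works out as 6 for the MLVLG marker, presumably written just above this excerpt, plus 2 for the version, 4 for the timestamp, 4 for the info data start, 4 for the data begin index, 2 for the record length and 2 for the field count.)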

View File

@@ -46,6 +46,8 @@ public class SdCardFieldsContent {
 quote(configField.getUnits()) +
 ", " +
 configField.getDigits() +
+", " +
+configField.getCategory() +
 "},\n";
 }

View File

@@ -31,10 +31,10 @@ public class SdCardFieldsGeneratorTest {
 "\tuint16_t autoscale speedToRpmRatio;@@GAUGE_NAME_GEAR_RATIO@@;\"value\",{1/@@PACK_MULT_PERCENT@@}, 0, 0, 0, 0\n" +
 "end_struct";
-processAndAssert(test, "\t{engine->outputChannels.internalMcuTemperature, \"internalMcuTemperature\", \"\", 0},\n" +
-"\t{engine->outputChannels.RPMValue, \"hello\", \"RPM\", 2},\n" +
-"\t{engine->outputChannels.rpmAcceleration, \"dRPM\", \"RPM/s\", 2},\n" +
-"\t{engine->outputChannels.speedToRpmRatio, \"ra\", \"value\", 0},\n" +
+processAndAssert(test, "\t{engine->outputChannels.internalMcuTemperature, \"internalMcuTemperature\", \"\", 0, null},\n" +
+"\t{engine->outputChannels.RPMValue, \"hello\", \"RPM\", 2, null},\n" +
+"\t{engine->outputChannels.rpmAcceleration, \"dRPM\", \"RPM/s\", 2, null},\n" +
+"\t{engine->outputChannels.speedToRpmRatio, \"ra\", \"value\", 0, null},\n" +
 "", actor);
 }

@@ -43,7 +43,7 @@ public class SdCardFieldsGeneratorTest {
 processAndAssert("struct_no_prefix output_channels_s\n" +
 "uint16_t autoscale RPMValue;feee;\"RPM\",1, 0, 0, 8000, 2\n" +
 "bit sd_logging_internal\n" +
-"end_struct", "\t{engine->outputChannels.RPMValue, \"feee\", \"RPM\", 2},\n", readerState -> {
+"end_struct", "\t{engine->outputChannels.RPMValue, \"feee\", \"RPM\", 2, null},\n", readerState -> {
 });
 }

@@ -52,10 +52,10 @@ public class SdCardFieldsGeneratorTest {
 public void array() {
 processAndAssert("struct_no_prefix output_channels_s\n" +
 "uint16_t[4 iterate] recentErrorCode;;\"error\", 1, 0, 0, 0, 0\n" +
-"end_struct", "\t{engine->outputChannels.recentErrorCode[0], \"recentErrorCode 1\", \"error\", 0},\n" +
-"\t{engine->outputChannels.recentErrorCode[1], \"recentErrorCode 2\", \"error\", 0},\n" +
-"\t{engine->outputChannels.recentErrorCode[2], \"recentErrorCode 3\", \"error\", 0},\n" +
-"\t{engine->outputChannels.recentErrorCode[3], \"recentErrorCode 4\", \"error\", 0},\n", readerState -> {
+"end_struct", "\t{engine->outputChannels.recentErrorCode[0], \"recentErrorCode 1\", \"error\", 0, null},\n" +
+"\t{engine->outputChannels.recentErrorCode[1], \"recentErrorCode 2\", \"error\", 0, null},\n" +
+"\t{engine->outputChannels.recentErrorCode[2], \"recentErrorCode 3\", \"error\", 0, null},\n" +
+"\t{engine->outputChannels.recentErrorCode[3], \"recentErrorCode 4\", \"error\", 0, null},\n", readerState -> {
 });
 }

@@ -68,7 +68,7 @@ public class SdCardFieldsGeneratorTest {
 " end_struct\n" +
 "\tpid_status_s alternatorStatus\n" +
 "end_struct",
-"\t{engine->outputChannels.alternatorStatus.pTerm, \"alternatorStatus.pTerm\", \"\", 2},\n",
+"\t{engine->outputChannels.alternatorStatus.pTerm, \"alternatorStatus.pTerm\", \"\", 2, null},\n",
 readerState -> {
 });

View File

@@ -127,7 +127,7 @@ public interface ConfigField {
 @Override
 public String getCategory() {
-return null;
+return "";
 }
 @Override

View File

@@ -15,17 +15,17 @@ public:
 TEST(BinaryLogField, FieldHeader) {
 scaled_channel<int8_t, 10> channel;
-LogField field(channel, "name", "units", 2);
-char buffer[55];
+LogField field(channel, "name", "units", 2, "category");
+char buffer[89];
 StrictMock<MockWriter> bufWriter;
-EXPECT_CALL(bufWriter, write(_, 55))
+EXPECT_CALL(bufWriter, write(_, 89))
 .WillOnce([&] (const char* buf, size_t count) {
 memcpy(buffer, buf, count);
 return 0;
 });
-// Should write 55 bytes
+// Should write 89 bytes
 field.writeHeader(bufWriter);
 // Expect correctly written header

@@ -42,7 +42,8 @@ TEST(BinaryLogField, FieldHeader) {
 // Transform - we always use 0
 0, 0, 0, 0,
 // Digits - 2, as configured
-2
+2,
+'c', 'a', 't', 'e', 'g', 'o', 'r', 'y', 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
 ));
 }