rusefi/firmware/controllers/trigger/spark_logic.cpp

/*
* @file spark_logic.cpp
*
* @date Sep 15, 2016
* @author Andrey Belomutskiy, (c) 2012-2019
*/
#include "global.h"
#include "os_access.h"
#include "engine_math.h"
#include "utlist.h"
#include "event_queue.h"
#if EFI_TUNER_STUDIO
#include "tunerstudio_configuration.h"
#endif /* EFI_TUNER_STUDIO */
EXTERN_ENGINE;
#if EFI_UNIT_TEST
extern bool verboseMode;
#endif /* EFI_UNIT_TEST */
static cyclic_buffer<int> ignitionErrorDetection;
static Logging *logger;
static const char *prevSparkName = nullptr;
int isInjectionEnabled(DECLARE_ENGINE_PARAMETER_SIGNATURE) {
// todo: is this worth a method? should this be inlined?
return CONFIG(isInjectionEnabled);
}
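/**
* ignitionErrorDetection collects a '1' for every spark event with a negative charge delay and a '0'
* otherwise (see handleSparkEvent() below), so "sum(6) > 4" reads as "more than four of the last six
* spark events could not be scheduled in time".
*/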
int isIgnitionTimingError(void) {
return ignitionErrorDetection.sum(6) > 4;
}
static void fireSparkBySettingPinLow(IgnitionEvent *event, IgnitionOutputPin *output) {
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "spark goes low %d %s %d current=%d cnt=%d id=%d", getRevolutionCounter(), output->name, (int)getTimeNowUs(),
output->currentLogicValue, output->outOfOrder, event->sparkId);
#endif /* SPARK_EXTREME_LOGGING */
/**
* there are two kinds of 'out-of-order':
* 1) low goes before high, everything is fine afterwards
*
* 2) we have an un-matched low followed by legit pairs
*/
output->signalFallSparkId = event->sparkId;
if (!output->currentLogicValue) {
warning(CUSTOM_OUT_OF_ORDER_COIL, "out-of-order coil off %s", output->getName());
output->outOfOrder = true;
}
output->setLow();
#if EFI_PROD_CODE
if (CONFIG(dizzySparkOutputPin) != GPIO_UNASSIGNED) {
enginePins.dizzyOutput.setLow();
}
#endif /* EFI_PROD_CODE */
}
// todo: make this a class method?
#define assertPinAssigned(output) { \
if (!output->isInitialized()) { \
warning(CUSTOM_OBD_COIL_PIN_NOT_ASSIGNED, "no_pin_cl #%s", (output)->getName()); \
} \
}
static void prepareCylinderIgnitionSchedule(angle_t dwellAngle, floatms_t sparkDwell, IgnitionEvent *event DECLARE_ENGINE_PARAMETER_SUFFIX) {
// todo: clean up this implementation? does not look too nice as is.
// let's save planned duration so that we can later compare it with reality
event->sparkDwell = sparkDwell;
// change of sign here from 'before TDC' to 'after TDC'
angle_t ignitionPositionWithinEngineCycle = ENGINE(ignitionPositionWithinEngineCycle[event->cylinderIndex]);
assertAngleRange(ignitionPositionWithinEngineCycle, "aPWEC", CUSTOM_ERR_6566);
cfg_float_t_1f timing_offset_cylinder = CONFIG(timing_offset_cylinder[event->cylinderIndex]);
const angle_t localAdvance = -ENGINE(engineState.timingAdvance) + ignitionPositionWithinEngineCycle + timing_offset_cylinder;
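// A worked example of the sign change (illustrative numbers): with 10 degrees of timing advance (10 BTDC),
// a cylinder whose TDC sits 180 degrees into the engine cycle and a zero per-cylinder trim,
// localAdvance = -10 + 180 + 0 = 170 degrees after the start of the cycle.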
efiAssertVoid(CUSTOM_ERR_6689, !cisnan(localAdvance), "findAngle#9");
efiAssertVoid(CUSTOM_ERR_6589, !cisnan(localAdvance), "localAdvance#1");
const int index = ENGINE(ignitionPin[event->cylinderIndex]);
const int coilIndex = ID2INDEX(getCylinderId(index PASS_ENGINE_PARAMETER_SUFFIX));
IgnitionOutputPin *output = &enginePins.coils[coilIndex];
IgnitionOutputPin *secondOutput;
if (getCurrentIgnitionMode(PASS_ENGINE_PARAMETER_SIGNATURE) == IM_WASTED_SPARK && CONFIG(twoWireBatchIgnition)) {
int secondIndex = index + CONFIG(specs.cylindersCount) / 2;
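// e.g. with 4 cylinders, firing-order index 0 is paired with index 2: the companion coil is the one
// half the firing order (360 degrees of a four-stroke cycle) away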
int secondCoilIndex = ID2INDEX(getCylinderId(secondIndex PASS_ENGINE_PARAMETER_SUFFIX));
secondOutput = &enginePins.coils[secondCoilIndex];
assertPinAssigned(secondOutput);
} else {
secondOutput = nullptr;
}
assertPinAssigned(output);
event->outputs[0] = output;
event->outputs[1] = secondOutput;
event->advance = localAdvance;
angle_t a = localAdvance - dwellAngle;
efiAssertVoid(CUSTOM_ERR_6590, !cisnan(a), "findAngle#5");
assertAngleRange(a, "findAngle#a6", CUSTOM_ERR_6550);
TRIGGER_SHAPE(findTriggerPosition(&event->dwellPosition, a PASS_CONFIG_PARAM(engineConfiguration->globalTriggerAngleOffset)));
#if FUEL_MATH_EXTREME_LOGGING
printf("addIgnitionEvent %s ind=%d\n", output->name, event->dwellPosition.triggerEventIndex);
// scheduleMsg(logger, "addIgnitionEvent %s ind=%d", output->name, event->dwellPosition->eventIndex);
#endif /* FUEL_MATH_EXTREME_LOGGING */
}
void fireSparkAndPrepareNextSchedule(IgnitionEvent *event) {
for (int i = 0; i < MAX_OUTPUTS_FOR_IGNITION; i++) {
IgnitionOutputPin *output = event->outputs[i];
if (output != NULL) {
fireSparkBySettingPinLow(event, output);
}
}
#if !EFI_UNIT_TEST
if (engineConfiguration->debugMode == DBG_DWELL_METRIC) {
uint32_t actualDwellDurationNt = getTimeNowLowerNt() - event->actualStartOfDwellNt;
/**
* the ratio of the actual dwell duration to the planned dwell duration gives us some idea of how
* bad the input trigger jitter is
*/
float ratio = NT2US(actualDwellDurationNt) / 1000.0 / event->sparkDwell;
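// Worked example with illustrative numbers: a planned dwell of 4 ms and a measured duration of 4.2 ms
// give ratio = 1.05, i.e. dwell ran about 5% long; values far from 1.0 point at trigger jitter or
// scheduling latency.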
#if EFI_TUNER_STUDIO
// todo: smarter solution for index to field mapping
if (event->cylinderIndex == 0) {
tsOutputChannels.debugFloatField1 = ratio;
} else if (event->cylinderIndex == 1) {
tsOutputChannels.debugFloatField2 = ratio;
} else if (event->cylinderIndex == 2) {
tsOutputChannels.debugFloatField3 = ratio;
} else if (event->cylinderIndex == 3) {
tsOutputChannels.debugFloatField4 = ratio;
}
#endif
}
#endif /* EFI_UNIT_TEST */
#if EFI_UNIT_TEST
Engine *engine = event->engine;
EXPAND_Engine;
#endif /* EFI_UNIT_TEST */
// now that we've just fired a coil let's prepare the new schedule for the next engine revolution
angle_t dwellAngle = ENGINE(engineState.dwellAngle);
floatms_t sparkDwell = ENGINE(engineState.sparkDwell);
if (cisnan(dwellAngle) || cisnan(sparkDwell)) {
// we are here if engine has just stopped
return;
}
prepareCylinderIgnitionSchedule(dwellAngle, sparkDwell, event PASS_ENGINE_PARAMETER_SUFFIX);
}
static void startDwellByTurningSparkPinHigh(IgnitionEvent *event, IgnitionOutputPin *output) {
#if ! EFI_UNIT_TEST
if (GET_RPM_VALUE > 2 * engineConfiguration->cranking.rpm) {
const char *outputName = output->getName();
if (prevSparkName == outputName && getCurrentIgnitionMode(PASS_ENGINE_PARAMETER_SIGNATURE) != IM_ONE_COIL) {
warning(CUSTOM_OBD_SKIPPED_SPARK, "looks like skipped spark event %d %s", getRevolutionCounter(), outputName);
}
prevSparkName = outputName;
}
#endif /* EFI_UNIT_TEST */
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "spark goes high %d %s %d current=%d cnt=%d id=%d", getRevolutionCounter(), output->name, (int)getTimeNowUs(),
output->currentLogicValue, output->outOfOrder, event->sparkId);
#endif /* SPARK_EXTREME_LOGGING */
if (output->outOfOrder) {
output->outOfOrder = false;
if (output->signalFallSparkId == event->sparkId) {
// let's save this coil if things do not look right
return;
}
}
output->setHigh();
#if EFI_PROD_CODE
if (CONFIG(dizzySparkOutputPin) != GPIO_UNASSIGNED) {
enginePins.dizzyOutput.setHigh();
}
#endif /* EFI_PROD_CODE */
}
void turnSparkPinHigh(IgnitionEvent *event) {
event->actualStartOfDwellNt = getTimeNowLowerNt();
for (int i = 0; i < MAX_OUTPUTS_FOR_IGNITION; i++) {
IgnitionOutputPin *output = event->outputs[i];
if (output != NULL) {
startDwellByTurningSparkPinHigh(event, output);
}
}
}
static ALWAYS_INLINE void handleSparkEvent(bool limitedSpark, uint32_t trgEventIndex, IgnitionEvent *iEvent,
int rpm DECLARE_ENGINE_PARAMETER_SUFFIX) {
angle_t advance = iEvent->advance;
const floatms_t dwellMs = ENGINE(engineState.sparkDwell);
if (cisnan(dwellMs) || dwellMs <= 0) {
warning(CUSTOM_DWELL, "invalid dwell to handle: %.2f at %d", dwellMs, rpm);
return;
}
if (cisnan(advance)) {
warning(CUSTOM_ERR_6688, "NaN advance");
return;
}
floatus_t chargeDelayUs = ENGINE(rpmCalculator.oneDegreeUs) * iEvent->dwellPosition.angleOffsetFromTriggerEvent;
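// Worked example with illustrative numbers: at 6000 RPM one degree takes about 27.8 us
// (60e6 us / 6000 rpm / 360 deg), so an angleOffsetFromTriggerEvent of 30 degrees gives a
// chargeDelayUs of roughly 833 us; a negative value means the dwell start is already in the past.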
int isIgnitionError = chargeDelayUs < 0;
ignitionErrorDetection.add(isIgnitionError);
if (isIgnitionError) {
#if EFI_PROD_CODE
scheduleMsg(logger, "Negative spark delay=%.2f", chargeDelayUs);
#endif /* EFI_PROD_CODE */
chargeDelayUs = 0;
return;
}
iEvent->sparkId = engine->globalSparkIdCounter++;
/**
* We are alternating two event lists in order to avoid a potential issue around the revolution boundary
* when an event is scheduled within the next revolution.
*/
scheduling_s * sUp = &iEvent->dwellStartTimer;
scheduling_s * sDown = &iEvent->signalTimerDown;
/**
* The start of charge is always within the current trigger event range, so just plain time-based scheduling
*/
if (!limitedSpark) {
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "scheduling sparkUp ind=%d %d %s now=%d %d later id=%d", trgEventIndex, getRevolutionCounter(), iEvent->getOutputForLoggins()->name, (int)getTimeNowUs(), (int)chargeDelayUs,
iEvent->sparkId);
#endif /* SPARK_EXTREME_LOGGING */
/**
* Note how we do not check whether the spark is limited or not while scheduling 'spark down'.
* This way we make sure that a coil whose dwell was started while spark was still enabled will
* fire and not burn the coil.
*/
engine->executor.scheduleForLater(sUp, chargeDelayUs, (schfunc_t) &turnSparkPinHigh, iEvent);
}
/**
* The spark event often happens during a later trigger event's timeframe
* TODO: improve precision
*/
efiAssertVoid(CUSTOM_ERR_6591, !cisnan(advance), "findAngle#4");
assertAngleRange(advance, "findAngle#a5", CUSTOM_ERR_6549);
TRIGGER_SHAPE(findTriggerPosition(&iEvent->sparkPosition, advance PASS_CONFIG_PARAM(engineConfiguration->globalTriggerAngleOffset)));
#if EFI_UNIT_TEST
if (verboseMode) {
printf("spark dwell@ %d/%d spark@ %d/%d id=%d\r\n", iEvent->dwellPosition.triggerEventIndex, (int)iEvent->dwellPosition.angleOffsetFromTriggerEvent,
iEvent->sparkPosition.triggerEventAngle, (int)iEvent->sparkPosition.angleOffsetFromTriggerEvent,
iEvent->sparkId);
}
#endif
/**
* todo: extract a "scheduleForAngle" method with the best implementation into a separate utility method
*
* Here's the status as of Nov 2018:
* "scheduleForLater" uses time only, and for best precision it's best to use "scheduleForLater" only
* once we hit the last trigger tooth prior to the needed event. That way we use as much of the trigger
* position angle as possible and only use the less precise RPM-based time calculation for the last portion
* of the angle, the one between the two teeth closest to the desired angle moment.
*
* At the moment we only have a time-based scheduler. I believe what needs to be added is a trigger-event
* based scheduler on top of the time-based scheduler. That way we would be firing events with the best
* possible angle precision.
*/
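/*
* A minimal sketch of what such a "scheduleForAngle" helper could look like, built only from calls already
* used in this file. This is an illustration of the idea above, not part of the firmware, and the helper
* name and signature are assumptions:
*
*   static void scheduleForAngle(scheduling_s *timer, angle_t angleOffsetFromTriggerEvent,
*           schfunc_t callback, void *param DECLARE_ENGINE_PARAMETER_SUFFIX) {
*       // convert the angle remaining after the last known tooth into time using the current RPM
*       float delayUs = ENGINE(rpmCalculator.oneDegreeUs) * angleOffsetFromTriggerEvent;
*       engine->executor.scheduleForLater(timer, (int) delayUs, callback, param);
*   }
*
* The trigger-event part (waiting for the right tooth before converting the remaining angle to time) would
* still live in scheduleAllSparkEventsUntilNextTriggerTooth() below.
*/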
if (iEvent->sparkPosition.triggerEventIndex == trgEventIndex) {
/**
* Spark should be fired before the next trigger event - time-based delay is best precision possible
*/
float timeTillIgnitionUs = ENGINE(rpmCalculator.oneDegreeUs) * iEvent->sparkPosition.angleOffsetFromTriggerEvent;
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "scheduling sparkDown ind=%d %d %s now=%d %d later id=%d", trgEventIndex, getRevolutionCounter(), iEvent->getOutputForLoggins()->name, (int)getTimeNowUs(), (int)timeTillIgnitionUs, iEvent->sparkId);
#endif /* SPARK_EXTREME_LOGGING */
engine->executor.scheduleForLater(sDown, (int) timeTillIgnitionUs, (schfunc_t) &fireSparkAndPrepareNextSchedule, iEvent);
} else {
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "to queue sparkDown ind=%d %d %s %d for %d", trgEventIndex, getRevolutionCounter(), iEvent->getOutputForLoggins()->name, (int)getTimeNowUs(), iEvent->sparkPosition.triggerEventIndex);
#endif /* SPARK_EXTREME_LOGGING */
/**
* Spark should be scheduled in relation to some future trigger event, this way we get better firing precision
*/
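// The event is parked in the 'ignitionEventsHead' list and picked up later by
// scheduleAllSparkEventsUntilNextTriggerTooth() once its trigger tooth comes around; assertNotInList()
// returns true if this event is still pending from a previous pass, in which case it must not be
// appended twice.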
bool isPending = assertNotInList<IgnitionEvent>(ENGINE(ignitionEventsHead), iEvent);
if (isPending) {
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "not adding to queue sparkDown ind=%d %d %s %d", trgEventIndex, getRevolutionCounter(), iEvent->getOutputForLoggins()->name, (int)getTimeNowUs());
#endif /* SPARK_EXTREME_LOGGING */
return;
}
LL_APPEND(ENGINE(ignitionEventsHead), iEvent);
}
}
static void initializeIgnitionActions(IgnitionEventList *list DECLARE_ENGINE_PARAMETER_SUFFIX) {
angle_t dwellAngle = ENGINE(engineState.dwellAngle);
floatms_t sparkDwell = ENGINE(engineState.sparkDwell);
if (cisnan(ENGINE(engineState.timingAdvance)) || cisnan(dwellAngle)) {
// error should already be reported
// need to invalidate previous ignition schedule
list->isReady = false;
return;
}
efiAssertVoid(CUSTOM_ERR_6592, engineConfiguration->specs.cylindersCount > 0, "cylindersCount");
for (int cylinderIndex = 0; cylinderIndex < CONFIG(specs.cylindersCount); cylinderIndex++) {
list->elements[cylinderIndex].cylinderIndex = cylinderIndex;
#if EFI_UNIT_TEST
list->elements[cylinderIndex].engine = engine;
#endif /* EFI_UNIT_TEST */
prepareCylinderIgnitionSchedule(dwellAngle, sparkDwell, &list->elements[cylinderIndex] PASS_ENGINE_PARAMETER_SUFFIX);
}
list->isReady = true;
}
static ALWAYS_INLINE void prepareIgnitionSchedule(DECLARE_ENGINE_PARAMETER_SIGNATURE) {
engine->m.beforeIgnitionSch = getTimeNowLowerNt();
/**
* TODO: warning. there is a bit of a hack here, todo: improve.
* Currently the output signals/times (dwellStartTimer) from previous revolutions could still be in use
* because they have crossed the revolution boundary, while we are already re-purposing the output
* signals; everything works only because we are not affecting that space in memory.
* todo: use two instances of 'ignitionSignals'
*/
operation_mode_e operationMode = engine->getOperationMode(PASS_ENGINE_PARAMETER_SIGNATURE);
float maxAllowedDwellAngle = (int) (getEngineCycle(operationMode) / 2); // the cast is about making Coverity happy
if (getCurrentIgnitionMode(PASS_ENGINE_PARAMETER_SIGNATURE) == IM_ONE_COIL) {
maxAllowedDwellAngle = getEngineCycle(operationMode) / engineConfiguration->specs.cylindersCount / 1.1;
}
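// Worked example with illustrative numbers: on a four-stroke engine the cycle is 720 degrees, so the
// general limit above is 360 degrees of dwell; with a single coil (distributor) and 4 cylinders the
// limit becomes 720 / 4 / 1.1, roughly 164 degrees, so one charge cannot run into the next cylinder's
// firing window.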
if (engine->engineState.dwellAngle == 0) {
warning(CUSTOM_ZERO_DWELL, "dwell is zero?");
}
if (engine->engineState.dwellAngle > maxAllowedDwellAngle) {
warning(CUSTOM_DWELL_TOO_LONG, "dwell angle too long: %.2f", engine->engineState.dwellAngle);
}
// todo: add some check for dwell overflow? like 4 times 6 ms while the engine cycle is less than that
IgnitionEventList *list = &engine->ignitionEvents;
initializeIgnitionActions(list PASS_ENGINE_PARAMETER_SUFFIX);
engine->m.ignitionSchTime = getTimeNowLowerNt() - engine->m.beforeIgnitionSch;
}
static void scheduleAllSparkEventsUntilNextTriggerTooth(uint32_t trgEventIndex DECLARE_ENGINE_PARAMETER_SUFFIX) {
IgnitionEvent *current, *tmp;
LL_FOREACH_SAFE(ENGINE(ignitionEventsHead), current, tmp)
{
if (current->sparkPosition.triggerEventIndex == trgEventIndex) {
// time to fire a spark which was scheduled previously
LL_DELETE(ENGINE(ignitionEventsHead), current);
scheduling_s * sDown = &current->signalTimerDown;
#if SPARK_EXTREME_LOGGING
scheduleMsg(logger, "time to sparkDown ind=%d %d %s %d", trgEventIndex, getRevolutionCounter(), current->getOutputForLoggins()->name, (int)getTimeNowUs());
#endif /* SPARK_EXTREME_LOGGING */
float timeTillIgnitionUs = ENGINE(rpmCalculator.oneDegreeUs) * current->sparkPosition.angleOffsetFromTriggerEvent;
engine->executor.scheduleForLater(sDown, (int) timeTillIgnitionUs, (schfunc_t) &fireSparkAndPrepareNextSchedule, current);
}
}
}
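/**
* Called on every trigger tooth. High-level flow as implemented below: sparks previously queued for this
* particular tooth are scheduled time-wise via scheduleAllSparkEventsUntilNextTriggerTooth(); then, for
* every ignition event whose dwell position lands on this tooth, handleSparkEvent() starts the dwell and
* schedules (or queues) the corresponding spark.
*/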
void onTriggerEventSparkLogic(bool limitedSpark, uint32_t trgEventIndex, int rpm
DECLARE_ENGINE_PARAMETER_SUFFIX) {
if (!isValidRpm(rpm) || !CONFIG(isIgnitionEnabled)) {
// this might happen for instance in case of a single trigger event after a pause
return;
}
if (!ENGINE(ignitionEvents.isReady)) {
prepareIgnitionSchedule(PASS_ENGINE_PARAMETER_SIGNATURE);
}
/**
* Ignition schedule is defined once per revolution
* See initializeIgnitionActions()
*/
scheduleAllSparkEventsUntilNextTriggerTooth(trgEventIndex PASS_ENGINE_PARAMETER_SUFFIX);
// scheduleSimpleMsg(&logger, "eventId spark ", eventIndex);
if (ENGINE(ignitionEvents.isReady)) {
for (int i = 0; i < CONFIG(specs.cylindersCount); i++) {
IgnitionEvent *event = &ENGINE(ignitionEvents.elements[i]);
if (event->dwellPosition.triggerEventIndex != trgEventIndex)
continue;
handleSparkEvent(limitedSpark, trgEventIndex, event, rpm PASS_ENGINE_PARAMETER_SUFFIX);
}
}
}
void initSparkLogic(Logging *sharedLogger) {
logger = sharedLogger;
}
/**
* Number of sparks per physical coil
* @see getNumberOfInjections
*/
int getNumberOfSparks(ignition_mode_e mode DECLARE_ENGINE_PARAMETER_SUFFIX) {
switch (mode) {
case IM_ONE_COIL:
return engineConfiguration->specs.cylindersCount;
case IM_INDIVIDUAL_COILS:
return 1;
case IM_WASTED_SPARK:
return 2;
default:
firmwareError(CUSTOM_ERR_IGNITION_MODE, "Unexpected ignition_mode_e %d", mode);
return 1;
}
}
/**
* @see getInjectorDutyCycle
*/
percent_t getCoilDutyCycle(int rpm DECLARE_ENGINE_PARAMETER_SUFFIX) {
floatms_t totalPerCycle = 1/**getInjectionDuration(rpm PASS_ENGINE_PARAMETER_SUFFIX)*/ * getNumberOfSparks(getCurrentIgnitionMode(PASS_ENGINE_PARAMETER_SIGNATURE) PASS_ENGINE_PARAMETER_SUFFIX);
floatms_t engineCycleDuration = getCrankshaftRevolutionTimeMs(rpm) * (engine->getOperationMode(PASS_ENGINE_PARAMETER_SIGNATURE) == TWO_STROKE ? 1 : 2);
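// Worked example with illustrative numbers (note the commented-out duration factor above currently acts
// as a 1 ms placeholder): at 6000 RPM on a four-stroke, engineCycleDuration = 10 ms * 2 = 20 ms; with
// wasted spark (2 sparks per coil per cycle) totalPerCycle = 1 * 2 = 2 ms, so the reported duty cycle
// is 100 * 2 / 20 = 10%.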
return 100 * totalPerCycle / engineCycleDuration;
}