2016-09-15 15:02:36 -07:00
|
|
|
/*
 * @file spark_logic.cpp
 *
 * @date Sep 15, 2016
 * @author Andrey Belomutskiy, (c) 2012-2016
 */
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
#include "engine_math.h"
|
|
|
|
#include "utlist.h"
|
|
|
|
#include "event_queue.h"
|
2016-09-21 21:03:00 -07:00
|
|
|
#include "efilib2.h"
|
2016-09-15 15:02:36 -07:00
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
EXTERN_ENGINE;
|
2016-09-15 15:02:36 -07:00
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
static cyclic_buffer<int> ignitionErrorDetection;
|
|
|
|
static Logging *logger;
|
2016-09-15 15:02:36 -07:00
|
|
|
|
2016-11-02 20:01:48 -07:00
|
|
|
static const char *prevSparkName = NULL;
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
int isInjectionEnabled(engine_configuration_s *engineConfiguration) {
|
|
|
|
// todo: is this worth a method? should this be inlined?
|
|
|
|
return engineConfiguration->isInjectionEnabled;
|
|
|
|
}
|
|
|
|
|
|
|
|
int isIgnitionTimingError(void) {
|
|
|
|
return ignitionErrorDetection.sum(6) > 4;
|
|
|
|
}
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
void turnSparkPinLow(IgnitionEvent *event) {
|
|
|
|
IgnitionOutputPin *output = event->output;
|
2016-10-31 16:02:32 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleMsg(logger, "spark goes low %d %s %d current=%d cnt=%d id=%d", getRevolutionCounter(), output->name, (int)getTimeNowUs(),
|
2016-11-01 20:01:54 -07:00
|
|
|
output->currentLogicValue, output->outOfOrder, event->sparkId);
|
2016-10-31 16:02:32 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
2016-10-31 19:02:12 -07:00
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
/**
|
|
|
|
* there are two kinds of 'out-of-order'
|
|
|
|
* 1) low goes before high, everything is fine after words
|
|
|
|
*
|
|
|
|
* 2) we have an un-matched low followed by legit pairs
|
|
|
|
*
|
|
|
|
*/
|
|
|
|
|
2016-11-01 20:01:54 -07:00
|
|
|
output->signalFallSparkId = event->sparkId;
|
2016-11-01 18:03:07 -07:00
|
|
|
|
2016-10-31 19:02:12 -07:00
|
|
|
if (!output->currentLogicValue) {
|
2016-11-02 20:01:48 -07:00
|
|
|
warning(CUSTOM_OUT_OF_ORDER_COIL, "out-of-order coil off %s", output->name);
|
2016-11-01 20:01:54 -07:00
|
|
|
output->outOfOrder = true;
|
2016-10-31 19:02:12 -07:00
|
|
|
}
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
turnPinLow(output);
|
|
|
|
#if EFI_PROD_CODE || defined(__DOXYGEN__)
|
|
|
|
if (CONFIG(dizzySparkOutputPin) != GPIO_UNASSIGNED) {
|
|
|
|
turnPinLow(&enginePins.dizzyOutput);
|
|
|
|
}
|
|
|
|
#endif /* EFI_PROD_CODE */
|
|
|
|
}
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
void turnSparkPinHigh(IgnitionEvent *event) {
|
|
|
|
IgnitionOutputPin *output = event->output;
|
2016-10-31 16:02:32 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleMsg(logger, "spark goes high %d %s %d current=%d cnt=%d id=%d", getRevolutionCounter(), output->name, (int)getTimeNowUs(),
|
2016-11-01 20:01:54 -07:00
|
|
|
output->currentLogicValue, output->outOfOrder, event->sparkId);
|
2016-10-31 16:02:32 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
2016-10-31 19:02:12 -07:00
|
|
|
|
2016-11-01 20:01:54 -07:00
|
|
|
if (output->outOfOrder) {
|
|
|
|
output->outOfOrder = false;
|
|
|
|
if (output->signalFallSparkId == event->sparkId) {
|
2016-11-01 18:03:07 -07:00
|
|
|
// let's save this coil if things do not look right
|
|
|
|
return;
|
|
|
|
}
|
2016-10-31 19:02:12 -07:00
|
|
|
}
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
turnPinHigh(output);
|
|
|
|
#if EFI_PROD_CODE || defined(__DOXYGEN__)
|
|
|
|
if (CONFIG(dizzySparkOutputPin) != GPIO_UNASSIGNED) {
|
|
|
|
turnPinHigh(&enginePins.dizzyOutput);
|
|
|
|
}
|
|
|
|
#endif /* EFI_PROD_CODE */
|
|
|
|
}
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
static int globalSparkIdCoutner = 0;
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
static ALWAYS_INLINE void handleSparkEvent(bool limitedSpark, uint32_t trgEventIndex, IgnitionEvent *iEvent,
|
|
|
|
int rpm DECLARE_ENGINE_PARAMETER_S) {
|
|
|
|
|
2016-10-09 16:03:51 -07:00
|
|
|
const floatms_t dwellMs = ENGINE(engineState.sparkDwell);
|
|
|
|
if (cisnan(dwellMs) || dwellMs <= 0) {
|
|
|
|
warning(CUSTOM_DWELL, "invalid dwell: %f at %d", dwellMs, rpm);
|
2016-09-21 20:03:22 -07:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
floatus_t chargeDelayUs = ENGINE(rpmCalculator.oneDegreeUs) * iEvent->dwellPosition.angleOffset;
|
|
|
|
int isIgnitionError = chargeDelayUs < 0;
|
|
|
|
ignitionErrorDetection.add(isIgnitionError);
|
|
|
|
if (isIgnitionError) {
|
|
|
|
#if EFI_PROD_CODE || defined(__DOXYGEN__)
|
|
|
|
scheduleMsg(logger, "Negative spark delay=%f", chargeDelayUs);
|
|
|
|
#endif
|
|
|
|
chargeDelayUs = 0;
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
iEvent->sparkId = globalSparkIdCoutner++;
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
/**
|
|
|
|
* We are alternating two event lists in order to avoid a potential issue around revolution boundary
|
|
|
|
* when an event is scheduled within the next revolution.
|
|
|
|
*/
|
|
|
|
scheduling_s * sUp = &iEvent->signalTimerUp;
|
|
|
|
scheduling_s * sDown = &iEvent->signalTimerDown;
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
/**
|
|
|
|
* The start of charge is always within the current trigger event range, so just plain time-based scheduling
|
|
|
|
*/
|
|
|
|
if (!limitedSpark) {
|
2016-10-29 14:03:45 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleMsg(logger, "scheduling sparkUp ind=%d %d %s now=%d %d later", trgEventIndex, getRevolutionCounter(), iEvent->output->name, (int)getTimeNowUs(), (int)chargeDelayUs);
|
2016-10-29 14:03:45 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
|
|
|
|
2016-11-02 20:01:48 -07:00
|
|
|
|
|
|
|
if (rpm > 2 * engineConfiguration->cranking.rpm) {
|
|
|
|
const char *outputName = iEvent->output->name;
|
|
|
|
if (prevSparkName == outputName) {
|
|
|
|
warning(CUSTOM_OBD_SKIPPED_SPARK, "looks like skipped spark event %d %s", getRevolutionCounter(), outputName);
|
|
|
|
}
|
|
|
|
prevSparkName = outputName;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2016-10-29 14:03:45 -07:00
|
|
|
/**
|
2016-09-21 20:03:22 -07:00
|
|
|
* Note how we do not check if spark is limited or not while scheduling 'spark down'
|
|
|
|
* This way we make sure that coil dwell started while spark was enabled would fire and not burn
|
|
|
|
* the coil.
|
|
|
|
*/
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleTask(true, "spark up", sUp, chargeDelayUs, (schfunc_t) &turnSparkPinHigh, iEvent);
|
2016-09-21 20:03:22 -07:00
|
|
|
}
|
|
|
|
/**
|
|
|
|
* Spark event is often happening during a later trigger event timeframe
|
|
|
|
* TODO: improve precision
|
|
|
|
*/
|
|
|
|
findTriggerPosition(&iEvent->sparkPosition, iEvent->advance PASS_ENGINE_PARAMETER);
|
|
|
|
|
|
|
|
if (iEvent->sparkPosition.eventIndex == trgEventIndex) {
|
|
|
|
/**
|
|
|
|
* Spark should be fired before the next trigger event - time-based delay is best precision possible
|
|
|
|
*/
|
|
|
|
float timeTillIgnitionUs = ENGINE(rpmCalculator.oneDegreeUs) * iEvent->sparkPosition.angleOffset;
|
|
|
|
|
|
|
|
#if EFI_UNIT_TEST || defined(__DOXYGEN__)
|
2016-10-31 17:02:09 -07:00
|
|
|
printf("spark delay=%f angle=%f\r\n", timeTillIgnitionUs, iEvent->sparkPosition.angleOffset);
|
2016-09-21 20:03:22 -07:00
|
|
|
#endif
|
|
|
|
|
2016-10-29 14:03:45 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-10-31 18:02:36 -07:00
|
|
|
scheduleMsg(logger, "scheduling sparkDown ind=%d %d %s now=%d %d later", trgEventIndex, getRevolutionCounter(), iEvent->output->name, (int)getTimeNowUs(), (int)timeTillIgnitionUs);
|
2016-10-29 14:03:45 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
|
|
|
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleTask(true, "spark1 down", sDown, (int) timeTillIgnitionUs, (schfunc_t) &turnSparkPinLow, iEvent);
|
2016-09-21 20:03:22 -07:00
|
|
|
} else {
|
2016-10-29 14:03:45 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-10-31 17:02:09 -07:00
|
|
|
scheduleMsg(logger, "to queue sparkDown ind=%d %d %s %d for %d", trgEventIndex, getRevolutionCounter(), iEvent->output->name, (int)getTimeNowUs(), iEvent->sparkPosition.eventIndex);
|
2016-10-29 14:03:45 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
2016-09-21 20:03:22 -07:00
|
|
|
/**
|
|
|
|
* Spark should be scheduled in relation to some future trigger event, this way we get better firing precision
|
|
|
|
*/
|
|
|
|
bool isPending = assertNotInList<IgnitionEvent>(ENGINE(iHead), iEvent);
|
2016-10-31 16:02:32 -07:00
|
|
|
if (isPending) {
|
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-10-31 17:02:09 -07:00
|
|
|
scheduleMsg(logger, "not adding to queue sparkDown ind=%d %d %s %d", trgEventIndex, getRevolutionCounter(), iEvent->output->name, (int)getTimeNowUs());
|
2016-10-31 16:02:32 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
2016-09-21 20:03:22 -07:00
|
|
|
return;
|
2016-10-31 16:02:32 -07:00
|
|
|
}
|
2016-09-21 20:03:22 -07:00
|
|
|
|
|
|
|
LL_APPEND(ENGINE(iHead), iEvent);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-09-21 21:03:00 -07:00
|
|
|
static ALWAYS_INLINE void prepareIgnitionSchedule(int rpm, int revolutionIndex DECLARE_ENGINE_PARAMETER_S) {
|
|
|
|
|
|
|
|
engine->m.beforeIgnitionSch = GET_TIMESTAMP();
|
|
|
|
/**
|
|
|
|
* TODO: warning. there is a bit of a hack here, todo: improve.
|
|
|
|
* currently output signals/times signalTimerUp from the previous revolutions could be
|
|
|
|
* still used because they have crossed the revolution boundary
|
|
|
|
* but we are already re-purposing the output signals, but everything works because we
|
|
|
|
* are not affecting that space in memory. todo: use two instances of 'ignitionSignals'
|
|
|
|
*/
|
|
|
|
float maxAllowedDwellAngle = (int) (getEngineCycle(engineConfiguration->operationMode) / 2); // the cast is about making Coverity happy
|
|
|
|
|
|
|
|
if (engineConfiguration->ignitionMode == IM_ONE_COIL) {
|
|
|
|
maxAllowedDwellAngle = getEngineCycle(engineConfiguration->operationMode) / engineConfiguration->specs.cylindersCount / 1.1;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (engine->engineState.dwellAngle == 0) {
|
|
|
|
warning(CUSTOM_OBD_32, "dwell is zero?");
|
|
|
|
}
|
|
|
|
if (engine->engineState.dwellAngle > maxAllowedDwellAngle) {
|
|
|
|
warning(CUSTOM_OBD_33, "dwell angle too long: %f", engine->engineState.dwellAngle);
|
|
|
|
}
|
|
|
|
|
|
|
|
// todo: add some check for dwell overflow? like 4 times 6 ms while engine cycle is less then that
|
|
|
|
|
|
|
|
IgnitionEventList *list = &engine->engineConfiguration2->ignitionEvents[revolutionIndex];
|
|
|
|
|
|
|
|
if (cisnan(ENGINE(engineState.timingAdvance))) {
|
|
|
|
// error should already be reported
|
|
|
|
list->reset(); // reset is needed to clear previous ignition schedule
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
initializeIgnitionActions(ENGINE(engineState.timingAdvance), ENGINE(engineState.dwellAngle), list PASS_ENGINE_PARAMETER);
|
|
|
|
engine->m.ignitionSchTime = GET_TIMESTAMP() - engine->m.beforeIgnitionSch;
|
|
|
|
}
|
|
|
|
|
|
|
|
void handleSpark(int revolutionIndex, bool limitedSpark, uint32_t trgEventIndex, int rpm,
|
2016-09-21 20:03:22 -07:00
|
|
|
IgnitionEventList *list DECLARE_ENGINE_PARAMETER_S) {
|
2016-09-21 21:03:00 -07:00
|
|
|
if (trgEventIndex == 0) {
|
|
|
|
prepareIgnitionSchedule(rpm, revolutionIndex PASS_ENGINE_PARAMETER);
|
|
|
|
}
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
if (!isValidRpm(rpm) || !CONFIG(isIgnitionEnabled)) {
|
|
|
|
// this might happen for instance in case of a single trigger event after a pause
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
/**
|
|
|
|
* Ignition schedule is defined once per revolution
|
|
|
|
* See initializeIgnitionActions()
|
|
|
|
*/
|
|
|
|
|
|
|
|
IgnitionEvent *current, *tmp;
|
|
|
|
|
|
|
|
LL_FOREACH_SAFE(ENGINE(iHead), current, tmp)
|
|
|
|
{
|
|
|
|
if (current->sparkPosition.eventIndex == trgEventIndex) {
|
|
|
|
// time to fire a spark which was scheduled previously
|
|
|
|
LL_DELETE(ENGINE(iHead), current);
|
|
|
|
|
|
|
|
scheduling_s * sDown = ¤t->signalTimerDown;
|
|
|
|
|
2016-10-29 14:03:45 -07:00
|
|
|
#if SPARK_EXTREME_LOGGING || defined(__DOXYGEN__)
|
2016-10-31 16:02:32 -07:00
|
|
|
scheduleMsg(logger, "time to sparkDown ind=%d %d %s %d", trgEventIndex, getRevolutionCounter(), current->output->name, (int)getTimeNowUs());
|
2016-10-29 14:03:45 -07:00
|
|
|
#endif /* FUEL_MATH_EXTREME_LOGGING */
|
|
|
|
|
|
|
|
|
2016-09-21 20:03:22 -07:00
|
|
|
float timeTillIgnitionUs = ENGINE(rpmCalculator.oneDegreeUs) * current->sparkPosition.angleOffset;
|
2016-11-01 18:03:07 -07:00
|
|
|
scheduleTask(true, "spark 2down", sDown, (int) timeTillIgnitionUs, (schfunc_t) &turnSparkPinLow, current);
|
2016-09-21 20:03:22 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// scheduleSimpleMsg(&logger, "eventId spark ", eventIndex);
|
|
|
|
for (int i = 0; i < list->size; i++) {
|
|
|
|
IgnitionEvent *event = &list->elements[i];
|
|
|
|
if (event->dwellPosition.eventIndex != trgEventIndex)
|
|
|
|
continue;
|
|
|
|
handleSparkEvent(limitedSpark, trgEventIndex, event, rpm PASS_ENGINE_PARAMETER);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void initSparkLogic(Logging *sharedLogger) {
|
|
|
|
logger = sharedLogger;
|
|
|
|
}
|