ADC/DMA Fails at sampling rates above 40kHz

cancel
Showing results for 
Search instead for 
Did you mean: 

ADC/DMA Fails at sampling rates above 40kHz

Jump to solution
516 Views
whata
Contributor III

Hi, 

I'm attempting to sample signals at a rate of 80kHz, but for some reason I'm able to achieve only 40kHz. My set-up is as follows:

- MKL17Z128VFM4

- Running on LIRC

- Core Clock : 8MHz

- Bus Clock : 4 MHz

- ADC Dma code is taken from \demo_apps\adc16_low_power_async_dma and adapted to use PIT instead of LPTMR

I've calculated that with my current ADC setup the conversion time is about 9.25us, while sampling time is between 1.5-2.5us, which gives a maximum rate of around 88kHz. Setting the PIT interval to 100 ticks (4M/100 = 40kHz) works like a charm every time, while setting the tick count to 50 (4M/50 = 80kHz) yields interesting results:

1. noise_dma_read is executed

2. DMA0_IRQHandler is hit where the contents of DMA_DSR_BCRn are equal to  (1<<24) which means that transfer is complete and no errors are detected.

3. Immediately after DMA_ClearChannelStatusFlags(DMA0, 0, kDMA_TransactionsDoneFlag); is executed  the contents of register DMA_DSR_BCRn are equal to  (1<<24) | (1<<30) 

4. The code then gets stuck in DMA0_IRQHandler 

I'm not quite sure what is causing this behavior and how to handle it. Prior to clearing the channel status flag, DMA_DSR_BCRn clearly states that the transfer is complete and there are no errors and no bytes left to transmit.

static uint32_t                 noiseDmaBuf[1024];  /* DMA destination buffer: raw ADC0 results */
static adc16_config_t           nDmaCfg;            /* ADC16 module configuration */
static adc16_channel_config_t   nDmaChCfg;          /* ADC16 channel (input mux) configuration */
static dma_handle_t             nDmaHandle;         /* DMA driver handle, channel 0 */
static dma_transfer_config_t    nDmaXferCfg;        /* DMA descriptor: ADC0->R[0] -> noiseDmaBuf */
static volatile bool            noiseDmaDone;       /* Set by DMA0_IRQHandler when the buffer is full */

static void noise_dma_adc_cfg       (void);     /* ADC16: 16-bit, hw-triggered, DMA request on COCO */
static void noise_dma_cfg           (void);     /* DMAMUX + DMA channel 0 setup */
static void noise_dma_pit_cfg       (void);     /* PIT period = sampling interval */
static void noise_dma_sim_cfg       (void);     /* Route timer trigger to ADC0 via SIM_SOPT7 */
static void noise_dma_get_freq      (void);     /* Not defined in this file - presumably reports the ADC clock; verify */

/*!
 * @brief   One-time bring-up of the noise sampling chain:
 *          ADC -> DMA -> timer trigger -> SIM trigger routing.
 * @note    Order matters: ADC and DMA must be configured before the
 *          hardware trigger path is armed.
 */
void noise_dma_init (void)
{
    /* Configure ADC */
    noise_dma_adc_cfg ();

    /* Configure DMA */
    noise_dma_cfg ();

    /* Configure Timer Source */
    noise_dma_pit_cfg ();

    /* Link Timer Source to ADC Hw Trigger */
    noise_dma_sim_cfg ();

    /* Initially transfer is complete (noise_dma_read has not been called yet) */
    noiseDmaDone = true;
}

uint32_t *noise_dma_read (void)
{

    noiseDmaDone = false;
    PIT_StartTimer(PIT, kPIT_Chnl_0);
    while (!noiseDmaDone);
    return (uint32_t*)&noiseDmaBuf[0];
}

static void noise_dma_adc_cfg   (void)
{
    nDmaChCfg.channelNumber = 1;
    nDmaChCfg.enableDifferentialConversion = false;
    nDmaChCfg.enableInterruptOnConversionCompleted = false;

    ADC16_GetDefaultConfig(&nDmaCfg);

    nDmaCfg.resolution = kADC16_Resolution16Bit;
    nDmaCfg.enableContinuousConversion = false;
    nDmaCfg.clockSource = kADC16_ClockSourceAlt0;           /** Run directly on bus-clock */
    nDmaCfg.clockDivider = kADC16_ClockDivider1;            /** No divider, raw bus-clock in */
    nDmaCfg.enableHighSpeed = true;
    nDmaCfg.longSampleMode = kADC16_LongSampleDisabled;
    nDmaCfg.enableLowPower = false;
    nDmaCfg.referenceVoltageSource = kADC16_ReferenceVoltageSourceValt;

    ADC16_Init(ADC0, &nDmaCfg);
    noise_dma_get_freq ();
    ADC16_DoAutoCalibration (ADC0);
    ADC16_SetChannelConfig(ADC0, 0, &nDmaChCfg);
    ADC16_SetHardwareAverage(ADC0, kADC16_HardwareAverageDisabled);
    ADC16_EnableHardwareTrigger(ADC0, true);
    ADC16_EnableDMA(ADC0, true);
}

/*!
 * @brief   Configures DMAMUX and DMA channel 0 to move ADC0 results into
 *          noiseDmaBuf, one 32-bit word per ADC conversion request.
 */
static void noise_dma_cfg       (void)
{
    /* Configure DMAMUX */
    DMAMUX_Init(DMAMUX0);
    DMAMUX_SetSource(DMAMUX0, 0U, kDmaRequestMux0ADC0); /* Map ADC source to channel 0 */
    DMAMUX_EnableChannel(DMAMUX0, 0U);

    DMA_Init(DMA0);
    DMA_CreateHandle(&nDmaHandle, DMA0, 0U);
    /* Fixed source (ADC0 result register) -> incrementing destination buffer */
    DMA_PrepareTransfer(&nDmaXferCfg, (void *)(uint32_t)(&ADC0->R[0]), sizeof(uint32_t),
                        (void *)noiseDmaBuf, sizeof(uint32_t), sizeof(noiseDmaBuf),
                        kDMA_PeripheralToMemory);
    /* Setup transfer */
    DMA_SetTransferConfig(DMA0, 0U, &nDmaXferCfg);
    /* Enable interrupt when transfer is done. */
    DMA_EnableInterrupts(DMA0, 0U);
    /* Enable async DMA request. */
    DMA_EnableAsyncRequest(DMA0, 0U, true);
    /* Forces a single read/write transfer per request. */
    DMA_EnableCycleSteal(DMA0, 0U, true);
    /*
     * NOTE(review): DCR[D_REQ] is not set here, so DCR[ERQ] stays
     * asserted after BCR reaches zero; a trigger that arrives after
     * completion then raises DSR_BCR[CE].  Consider setting
     * DMA_DCR_D_REQ_MASK so hardware disables the peripheral request
     * automatically at end of transfer.
     */
    /* Enable transfer. */
    DMA_StartTransfer(&nDmaHandle);
    /* Enable IRQ. */
    NVIC_EnableIRQ(DMA0_IRQn);
}

/*!
 * @brief           Configures the PIT as the ADC sampling trigger.
 * @note            Sampling frequency is set to 80kHz
 *                  while bus clock is at 4MHz, yielding
 *                  a period of 4MHz/80kHz = 50 ticks.
 *                  (The original comment said 40kHz / 100 ticks, which
 *                  contradicted the code below.)
 * @param   none    None
 * @retval  none    None
 */
static void noise_dma_pit_cfg   (void)
{
    pit_config_t pitCfg;

    PIT_GetDefaultConfig(&pitCfg);
    PIT_Init(PIT, &pitCfg);
    /* 50 bus-clock ticks per trigger: 4 MHz / 50 = 80 kHz */
    PIT_SetTimerPeriod(PIT, kPIT_Chnl_0, 50);
}

static void noise_dma_sim_cfg   (void)
{
    SIM->SOPT7 |= SIM_SOPT7_ADC0TRGSEL(4) | SIM_SOPT7_ADC0ALTTRGEN(1);
}

/*!
 * @brief   DMA channel 0 transfer-complete interrupt handler.
 *          Stops the trigger timer, flags completion, and re-arms the
 *          channel for the next acquisition into the same buffer.
 */
void DMA0_IRQHandler (void)
{
    /* Stop trigger so no further ADC conversions are requested */
    PIT_StopTimer(PIT, kPIT_Chnl_0);
    /* Indicate that transfer is complete */
    /* NOTE(review): the flag is set before the channel is re-armed;
     * safe on bare metal (the ISR runs to completion before the
     * polling loop in noise_dma_read resumes) - verify under an RTOS. */
    noiseDmaDone = true;
    /* Clear transaction done interrupt flag */
    DMA_ClearChannelStatusFlags(DMA0, 0, kDMA_TransactionsDoneFlag);
    /* Setup transfer */
    DMA_PrepareTransfer(&nDmaXferCfg, (void *)(uint32_t)(&ADC0->R[0]), sizeof(uint32_t),
                        (void *)noiseDmaBuf, sizeof(uint32_t), sizeof(noiseDmaBuf),
                        kDMA_PeripheralToMemory);
    DMA_SetTransferConfig(DMA0, 0U, &nDmaXferCfg);
}
Tags (2)
1 Solution
235 Views
Hui_Ma
NXP TechSupport
NXP TechSupport

Hi,

Thank you for the test.

Could you double check whether the DMA Control Register (DMA_DCRn) [D_REQ] bit was set before enabling the DMA transfer?

The hardware can automatically clear DCRn[ERQ], disabling the peripheral request, when BCRn reaches zero by setting DCRn[D_REQ].

If DCRn[D_REQ] wasn't set, please set this bit and check if there is any change. Thanks.

best regards,

Mike

View solution in original post

4 Replies
235 Views
Hui_Ma
NXP TechSupport
NXP TechSupport

Hi,

First of all, sorry for the later reply.

From the step 3 DMA_DSR_BCR register value, bit 30 [CE] being set means BCR equaled 0 when the DMA received a start condition. I rather suspect the PIT has an issue as the hardware trigger source.

There is an erratum about a PIT-triggered DMA issue; please check here for detailed info (e5746).

Could you try use LPTMR instead of PIT to trigger ADC?

Please let us know the result. Thanks.


Have a great day,
Mike

-----------------------------------------------------------------------------------------------------------------------
Note: If this post answers your question, please click the Correct Answer button. Thank you!
-----------------------------------------------------------------------------------------------------------------------

0 Kudos
235 Views
whata
Contributor III

Mike, 

I'm running in an RTOS environment and LPTMR is used as a clock source, so it was easier for me to try using TPM0 as a trigger source instead. The behavior and result are similar to PIT, i.e. 100 ticks at 4MHz works, but if I decrease it the same issue surfaces (DMA_DSR_BCRn equals (1<<24) | (1<<30)). The code above was modified as follows:

/*!
 * @brief   Configures TPM0 as the ADC sampling trigger.
 *          Divide-by-2 prescaler, period = NOISE_SAMPLING_PERIOD ticks.
 */
static void noise_dma_tpm_cfg (void)
{
    tpm_config_t cfg;

    TPM_GetDefaultConfig(&cfg);
    cfg.prescale = kTPM_Prescale_Divide_2;      /* halve the TPM input clock */
    TPM_Init(TPM0, &cfg);
    TPM_SetTimerPeriod(TPM0, NOISE_SAMPLING_PERIOD);
}

static void noise_dma_sim_cfg   (void)
{

    /* Allow TPM Channel 0 to trigger ADC Conversion */
    SIM->SOPT7 |= SIM_SOPT7_ADC0TRGSEL(8) | SIM_SOPT7_ADC0ALTTRGEN(1);
}

/*!
 * @brief   DMA channel 0 transfer-complete interrupt handler (TPM
 *          variant).  Stops the trigger timer, flags completion, and
 *          re-arms the channel for the next acquisition.
 */
void DMA0_IRQHandler (void)
{
    /* Stop trigger so no further ADC conversions are requested */
    TPM_StopTimer(TPM0);
    /* Indicate that transfer is complete */
    noiseDmaDone = true;
    /* Clear transaction done interrupt flag */
    DMA_ClearChannelStatusFlags(DMA0, 0, kDMA_TransactionsDoneFlag);
    /* Setup transfer */
    DMA_PrepareTransfer(&nDmaXferCfg, (void *)(uint32_t)(&ADC0->R[0]), sizeof(uint32_t),
                        (void *)noiseDmaBuf, sizeof(uint32_t), sizeof(noiseDmaBuf),
                        kDMA_PeripheralToMemory);
    DMA_SetTransferConfig(DMA0, 0U, &nDmaXferCfg);
}

I'll try now to use LPTMR instead

0 Kudos
236 Views
Hui_Ma
NXP TechSupport
NXP TechSupport

Hi,

Thank you for the test.

Could you double check whether the DMA Control Register (DMA_DCRn) [D_REQ] bit was set before enabling the DMA transfer?

The hardware can automatically clear DCRn[ERQ], disabling the peripheral request, when BCRn reaches zero by setting DCRn[D_REQ].

If DCRn[D_REQ] wasn't set, please set this bit and check if there is any change. Thanks.

best regards,

Mike

View solution in original post

235 Views
whata
Contributor III

Mike, 

Very nice! Thank you! This solved the issue. 

I've extended my noise_dma_cfg () function per your suggestion:

DMA0->DMA[0].DCR |= DMA_DCR_D_REQ_MASK; 

And noise_dma_read () with:

    /*
     * Re-Enable DCR[ERQ] because it will 
     * automatically clear after completion of previous transfer
     */
    DMA_EnableChannelRequest (DMA0, 0);

And everything seems to work as expected with 80kHz sampling rate. I think I'll stick with using TPM for now instead of PIT due to possible errata that you've pointed out.

Thanks again!

0 Kudos