1/*
2 * Copyright (c) 2015, Freescale Semiconductor, Inc.
3 * Copyright 2016-2021 NXP
4 * All rights reserved.
5 *
6 * SPDX-License-Identifier: BSD-3-Clause
7 */
8
9#include "fsl_edma.h"
10#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
11#include "fsl_memory.h"
12#endif
13/*******************************************************************************
14 * Definitions
15 ******************************************************************************/
16
17/* Component ID definition, used by tools. */
18#ifndef FSL_COMPONENT_ID
19#define FSL_COMPONENT_ID "platform.drivers.edma"
20#endif
21
22#define EDMA_TRANSFER_ENABLED_MASK 0x80U
23
24/*******************************************************************************
25 * Prototypes
26 ******************************************************************************/
27
28/*!
29 * @brief Get instance offset.
30 *
31 * @param instance EDMA peripheral instance number.
32 */
33static uint32_t EDMA_GetInstanceOffset(uint32_t instance);
34
35/*!
36 * @brief Map transfer width.
37 *
38 * @param width transfer width.
39 */
40static edma_transfer_size_t EDMA_TransferWidthMapping(uint32_t width);
41/*******************************************************************************
42 * Variables
43 ******************************************************************************/
44
45/*! @brief Array to map EDMA instance number to base pointer. */
46static DMA_Type *const s_edmaBases[] = DMA_BASE_PTRS;
47
48#if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
49/*! @brief Array to map EDMA instance number to clock name. */
50static const clock_ip_name_t s_edmaClockName[] = EDMA_CLOCKS;
51#endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
52
53/*! @brief Array to map EDMA instance number to IRQ number. */
54static const IRQn_Type s_edmaIRQNumber[][FSL_FEATURE_EDMA_MODULE_CHANNEL] = DMA_CHN_IRQS;
55
56/*! @brief Pointers to transfer handle for each EDMA channel. */
57static edma_handle_t *s_EDMAHandle[FSL_FEATURE_EDMA_MODULE_CHANNEL * FSL_FEATURE_SOC_EDMA_COUNT];
58
59/*******************************************************************************
60 * Code
61 ******************************************************************************/
62
63static uint32_t EDMA_GetInstance(DMA_Type *base)
64{
65 uint32_t instance;
66
67 /* Find the instance index from base address mappings. */
68 for (instance = 0; instance < ARRAY_SIZE(s_edmaBases); instance++)
69 {
70 if (s_edmaBases[instance] == base)
71 {
72 break;
73 }
74 }
75
76 assert(instance < ARRAY_SIZE(s_edmaBases));
77
78 return instance;
79}
80
81/*!
82 * brief Push content of TCD structure into hardware TCD register.
83 *
84 * param base EDMA peripheral base address.
85 * param channel EDMA channel number.
 * param tcd Pointer to the TCD structure.
87 */
88void EDMA_InstallTCD(DMA_Type *base, uint32_t channel, edma_tcd_t *tcd)
89{
90 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
91 assert(tcd != NULL);
92 assert(((uint32_t)tcd & 0x1FU) == 0U);
93
94 /* Push tcd into hardware TCD register */
95 base->TCD[channel].SADDR = tcd->SADDR;
96 base->TCD[channel].SOFF = tcd->SOFF;
97 base->TCD[channel].ATTR = tcd->ATTR;
98 base->TCD[channel].NBYTES_MLNO = tcd->NBYTES;
99 base->TCD[channel].SLAST = (int32_t)tcd->SLAST;
100 base->TCD[channel].DADDR = tcd->DADDR;
101 base->TCD[channel].DOFF = tcd->DOFF;
102 base->TCD[channel].CITER_ELINKNO = tcd->CITER;
103 base->TCD[channel].DLAST_SGA = (int32_t)tcd->DLAST_SGA;
104 /* Clear DONE bit first, otherwise ESG cannot be set */
105 base->TCD[channel].CSR = 0;
106 base->TCD[channel].CSR = tcd->CSR;
107 base->TCD[channel].BITER_ELINKNO = tcd->BITER;
108}
109
110/*!
111 * brief Initializes the eDMA peripheral.
112 *
113 * This function ungates the eDMA clock and configures the eDMA peripheral according
114 * to the configuration structure.
115 *
116 * param base eDMA peripheral base address.
117 * param config A pointer to the configuration structure, see "edma_config_t".
118 * note This function enables the minor loop map feature.
119 */
120void EDMA_Init(DMA_Type *base, const edma_config_t *config)
121{
122 assert(config != NULL);
123
124 uint32_t tmpreg;
125
126#if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
127 /* Ungate EDMA peripheral clock */
128 CLOCK_EnableClock(s_edmaClockName[EDMA_GetInstance(base)]);
129#endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
130
    /* Clear all enabled requests and status flags to make sure the eDMA is in a normal condition */
132 base->ERQ = 0U;
133 base->INT = 0xFFFFFFFFU;
134 base->ERR = 0xFFFFFFFFU;
135 /* Configure EDMA peripheral according to the configuration structure. */
136 tmpreg = base->CR;
137 tmpreg &= ~(DMA_CR_ERCA_MASK | DMA_CR_HOE_MASK | DMA_CR_CLM_MASK | DMA_CR_EDBG_MASK);
138 tmpreg |= (DMA_CR_ERCA(config->enableRoundRobinArbitration) | DMA_CR_HOE(config->enableHaltOnError) |
139 DMA_CR_CLM(config->enableContinuousLinkMode) | DMA_CR_EDBG(config->enableDebugMode) | DMA_CR_EMLM(1U));
140 base->CR = tmpreg;
141}
142
143/*!
144 * brief Deinitializes the eDMA peripheral.
145 *
146 * This function gates the eDMA clock.
147 *
148 * param base eDMA peripheral base address.
149 */
150void EDMA_Deinit(DMA_Type *base)
151{
152#if !(defined(FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL) && FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL)
153 /* Gate EDMA peripheral clock */
154 CLOCK_DisableClock(s_edmaClockName[EDMA_GetInstance(base)]);
155#endif /* FSL_SDK_DISABLE_DRIVER_CLOCK_CONTROL */
156}
157
158/*!
159 * brief Gets the eDMA default configuration structure.
160 *
161 * This function sets the configuration structure to default values.
162 * The default configuration is set to the following values.
163 * code
164 * config.enableContinuousLinkMode = false;
165 * config.enableHaltOnError = true;
166 * config.enableRoundRobinArbitration = false;
167 * config.enableDebugMode = false;
168 * endcode
169 *
170 * param config A pointer to the eDMA configuration structure.
171 */
172void EDMA_GetDefaultConfig(edma_config_t *config)
173{
174 assert(config != NULL);
175
176 /* Initializes the configure structure to zero. */
177 (void)memset(config, 0, sizeof(*config));
178
179 config->enableRoundRobinArbitration = false;
180 config->enableHaltOnError = true;
181 config->enableContinuousLinkMode = false;
182 config->enableDebugMode = false;
183}
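
/*
 * A minimal initialization sketch for the two functions above. It is illustrative
 * only and assumes a device that provides a DMA0 instance; it is not part of the
 * driver itself.
 * code
 *   edma_config_t userConfig;
 *
 *   EDMA_GetDefaultConfig(&userConfig);
 *   EDMA_Init(DMA0, &userConfig);
 *   ...
 *   EDMA_Deinit(DMA0);
 * endcode
 */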
184
185/*!
186 * brief Sets all TCD registers to default values.
187 *
188 * This function sets TCD registers for this channel to default values.
189 *
190 * param base eDMA peripheral base address.
191 * param channel eDMA channel number.
192 * note This function must not be called while the channel transfer is ongoing
193 * or it causes unpredictable results.
194 * note This function enables the auto stop request feature.
195 */
196void EDMA_ResetChannel(DMA_Type *base, uint32_t channel)
197{
198 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
199
200 EDMA_TcdReset((edma_tcd_t *)(uint32_t)&base->TCD[channel]);
201}
202
203/*!
204 * brief Configures the eDMA transfer attribute.
205 *
206 * This function configures the transfer attribute, including source address, destination address,
207 * transfer size, address offset, and so on. It also configures the scatter gather feature if the
208 * user supplies the TCD address.
209 * Example:
210 * code
 * edma_transfer_config_t config;
212 * edma_tcd_t tcd;
213 * config.srcAddr = ..;
214 * config.destAddr = ..;
215 * ...
 * EDMA_SetTransferConfig(DMA0, channel, &config, &tcd);
217 * endcode
218 *
219 * param base eDMA peripheral base address.
220 * param channel eDMA channel number.
221 * param config Pointer to eDMA transfer configuration structure.
 * param nextTcd Pointer to the next TCD structure. It can be NULL if users
223 * do not want to enable scatter/gather feature.
224 * note If nextTcd is not NULL, it means scatter gather feature is enabled
225 * and DREQ bit is cleared in the previous transfer configuration, which
226 * is set in the eDMA_ResetChannel.
227 */
228void EDMA_SetTransferConfig(DMA_Type *base, uint32_t channel, const edma_transfer_config_t *config, edma_tcd_t *nextTcd)
229{
230 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
231 assert(config != NULL);
232 assert(((uint32_t)nextTcd & 0x1FU) == 0U);
233
234/* If there is address offset, convert the address */
235#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
236 nextTcd = (edma_tcd_t *)(MEMORY_ConvertMemoryMapAddress((uint32_t)nextTcd, kMEMORY_Local2DMA));
237#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
238 EDMA_TcdSetTransferConfig((edma_tcd_t *)(uint32_t)&base->TCD[channel], config, nextTcd);
239}
240
241/*!
242 * brief Configures the eDMA minor offset feature.
243 *
 * The minor offset is a sign-extended value added to the source address or destination
 * address after each minor loop.
246 *
247 * param base eDMA peripheral base address.
248 * param channel eDMA channel number.
249 * param config A pointer to the minor offset configuration structure.
250 */
251void EDMA_SetMinorOffsetConfig(DMA_Type *base, uint32_t channel, const edma_minor_offset_config_t *config)
252{
253 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
254 assert(config != NULL);
255
256 uint32_t tmpreg;
257
258 tmpreg = base->TCD[channel].NBYTES_MLOFFYES;
259 tmpreg &= ~(DMA_NBYTES_MLOFFYES_SMLOE_MASK | DMA_NBYTES_MLOFFYES_DMLOE_MASK | DMA_NBYTES_MLOFFYES_MLOFF_MASK);
260 tmpreg |=
261 (DMA_NBYTES_MLOFFYES_SMLOE(config->enableSrcMinorOffset) |
262 DMA_NBYTES_MLOFFYES_DMLOE(config->enableDestMinorOffset) | DMA_NBYTES_MLOFFYES_MLOFF(config->minorOffset));
263 base->TCD[channel].NBYTES_MLOFFYES = tmpreg;
264}
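
/*
 * An illustrative sketch for the minor offset configuration above; the instance,
 * channel, and offset values are placeholders. It adds 4 bytes to the destination
 * address after each minor loop while leaving the source address untouched.
 * code
 *   edma_minor_offset_config_t minorOffset;
 *
 *   minorOffset.enableSrcMinorOffset  = false;
 *   minorOffset.enableDestMinorOffset = true;
 *   minorOffset.minorOffset           = 4U;
 *   EDMA_SetMinorOffsetConfig(DMA0, 0U, &minorOffset);
 * endcode
 */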
265
266/*!
267 * brief Configures the eDMA channel TCD major offset feature.
268 *
 * Adjustment values added to the source and destination addresses at the completion of the major iteration count.
270 *
271 * param base eDMA peripheral base address.
272 * param channel edma channel number.
273 * param sourceOffset source address offset.
274 * param destOffset destination address offset.
275 */
276void EDMA_SetMajorOffsetConfig(DMA_Type *base, uint32_t channel, int32_t sourceOffset, int32_t destOffset)
277{
278 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
279
280 base->TCD[channel].SLAST = sourceOffset;
281 base->TCD[channel].DLAST_SGA = destOffset;
282}
283
284/*!
285 * brief Configures the eDMA channel preemption feature.
286 *
287 * This function configures the channel preemption attribute and the priority of the channel.
288 *
289 * param base eDMA peripheral base address.
290 * param channel eDMA channel number
291 * param config A pointer to the channel preemption configuration structure.
292 */
293void EDMA_SetChannelPreemptionConfig(DMA_Type *base, uint32_t channel, const edma_channel_Preemption_config_t *config)
294{
295 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
296 assert(config != NULL);
297
298 bool tmpEnablePreemptAbility = config->enablePreemptAbility;
299 bool tmpEnableChannelPreemption = config->enableChannelPreemption;
300 uint8_t tmpChannelPriority = config->channelPriority;
301 volatile uint8_t *tmpReg = &base->DCHPRI3;
302
303 ((volatile uint8_t *)tmpReg)[DMA_DCHPRI_INDEX(channel)] =
304 (DMA_DCHPRI0_DPA((true == tmpEnablePreemptAbility ? 0U : 1U)) |
305 DMA_DCHPRI0_ECP((true == tmpEnableChannelPreemption ? 1U : 0U)) | DMA_DCHPRI0_CHPRI(tmpChannelPriority));
306}
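
/*
 * An illustrative sketch for the preemption configuration above; the instance,
 * channel, and priority values are placeholders. Channel 0 gets priority 3, can be
 * suspended by a higher priority channel, and is not allowed to suspend others.
 * code
 *   edma_channel_Preemption_config_t preemption;
 *
 *   preemption.enableChannelPreemption = true;
 *   preemption.enablePreemptAbility    = false;
 *   preemption.channelPriority         = 3U;
 *   EDMA_SetChannelPreemptionConfig(DMA0, 0U, &preemption);
 * endcode
 */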
307
308/*!
309 * brief Sets the channel link for the eDMA transfer.
310 *
311 * This function configures either the minor link or the major link mode. The minor link means that the channel link is
312 * triggered every time CITER decreases by 1. The major link means that the channel link is triggered when the CITER is
313 * exhausted.
314 *
315 * param base eDMA peripheral base address.
316 * param channel eDMA channel number.
317 * param type A channel link type, which can be one of the following:
318 * arg kEDMA_LinkNone
319 * arg kEDMA_MinorLink
320 * arg kEDMA_MajorLink
321 * param linkedChannel The linked channel number.
322 * note Users should ensure that DONE flag is cleared before calling this interface, or the configuration is invalid.
323 */
324void EDMA_SetChannelLink(DMA_Type *base, uint32_t channel, edma_channel_link_type_t type, uint32_t linkedChannel)
325{
326 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
327 assert(linkedChannel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
328
329 EDMA_TcdSetChannelLink((edma_tcd_t *)(uint32_t)&base->TCD[channel], type, linkedChannel);
330}
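
/*
 * An illustrative sketch for the channel link configuration above; the instance and
 * channel numbers are placeholders. Channel 1 is triggered each time channel 0
 * completes a minor loop; kEDMA_MajorLink would trigger it only when the major
 * loop count of channel 0 is exhausted.
 * code
 *   EDMA_SetChannelLink(DMA0, 0U, kEDMA_MinorLink, 1U);
 * endcode
 */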
331
332/*!
333 * brief Sets the bandwidth for the eDMA transfer.
334 *
335 * Because the eDMA processes the minor loop, it continuously generates read/write sequences
336 * until the minor count is exhausted. The bandwidth forces the eDMA to stall after the completion of
337 * each read/write access to control the bus request bandwidth seen by the crossbar switch.
338 *
339 * param base eDMA peripheral base address.
340 * param channel eDMA channel number.
341 * param bandWidth A bandwidth setting, which can be one of the following:
342 * arg kEDMABandwidthStallNone
343 * arg kEDMABandwidthStall4Cycle
344 * arg kEDMABandwidthStall8Cycle
345 */
346void EDMA_SetBandWidth(DMA_Type *base, uint32_t channel, edma_bandwidth_t bandWidth)
347{
348 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
349
350 base->TCD[channel].CSR = (uint16_t)((base->TCD[channel].CSR & (~DMA_CSR_BWC_MASK)) | DMA_CSR_BWC(bandWidth));
351}
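
/*
 * An illustrative sketch for the bandwidth control above; the instance and channel
 * are placeholders. The channel stalls 4 cycles after each read/write access.
 * code
 *   EDMA_SetBandWidth(DMA0, 0U, kEDMABandwidthStall4Cycle);
 * endcode
 */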
352
353/*!
354 * brief Sets the source modulo and the destination modulo for the eDMA transfer.
355 *
 * This function defines a specific address range within which the result of the (SADDR + SOFF)/(DADDR + DOFF)
 * calculation wraps. It provides the ability to implement a circular data
 * queue easily.
359 *
360 * param base eDMA peripheral base address.
361 * param channel eDMA channel number.
362 * param srcModulo A source modulo value.
363 * param destModulo A destination modulo value.
364 */
365void EDMA_SetModulo(DMA_Type *base, uint32_t channel, edma_modulo_t srcModulo, edma_modulo_t destModulo)
366{
367 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
368
369 uint16_t tmpreg;
370
371 tmpreg = base->TCD[channel].ATTR & (~(uint16_t)(DMA_ATTR_SMOD_MASK | DMA_ATTR_DMOD_MASK));
372 base->TCD[channel].ATTR = tmpreg | DMA_ATTR_DMOD(destModulo) | DMA_ATTR_SMOD(srcModulo);
373}
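
/*
 * An illustrative sketch for the modulo configuration above; the instance, channel,
 * and enumerator names are assumptions based on edma_modulo_t. The destination
 * address wraps within a 16-byte circular buffer (which must be 16-byte aligned),
 * while the source address is not restricted.
 * code
 *   EDMA_SetModulo(DMA0, 0U, kEDMA_ModuloDisable, kEDMA_Modulo16bytes);
 * endcode
 */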
374
375/*!
376 * brief Enables the interrupt source for the eDMA transfer.
377 *
378 * param base eDMA peripheral base address.
379 * param channel eDMA channel number.
380 * param mask The mask of interrupt source to be set. Users need to use
381 * the defined edma_interrupt_enable_t type.
382 */
383void EDMA_EnableChannelInterrupts(DMA_Type *base, uint32_t channel, uint32_t mask)
384{
385 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
386
387 /* Enable error interrupt */
388 if (0U != (mask & (uint32_t)kEDMA_ErrorInterruptEnable))
389 {
390 base->EEI |= ((uint32_t)0x1U << channel);
391 }
392
393 /* Enable Major interrupt */
394 if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
395 {
396 base->TCD[channel].CSR |= DMA_CSR_INTMAJOR_MASK;
397 }
398
399 /* Enable Half major interrupt */
400 if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
401 {
402 base->TCD[channel].CSR |= DMA_CSR_INTHALF_MASK;
403 }
404}
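
/*
 * An illustrative sketch for the interrupt enabling above; the instance and channel
 * are placeholders. Both the major loop complete interrupt and the error interrupt
 * are enabled for the channel.
 * code
 *   EDMA_EnableChannelInterrupts(DMA0, 0U,
 *       (uint32_t)kEDMA_MajorInterruptEnable | (uint32_t)kEDMA_ErrorInterruptEnable);
 * endcode
 */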
405
406/*!
407 * brief Disables the interrupt source for the eDMA transfer.
408 *
409 * param base eDMA peripheral base address.
410 * param channel eDMA channel number.
411 * param mask The mask of the interrupt source to be set. Use
412 * the defined edma_interrupt_enable_t type.
413 */
414void EDMA_DisableChannelInterrupts(DMA_Type *base, uint32_t channel, uint32_t mask)
415{
416 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
417
418 /* Disable error interrupt */
419 if (0U != (mask & (uint32_t)kEDMA_ErrorInterruptEnable))
420 {
421 base->EEI &= (~((uint32_t)0x1U << channel));
422 }
423
424 /* Disable Major interrupt */
425 if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
426 {
427 base->TCD[channel].CSR &= ~(uint16_t)DMA_CSR_INTMAJOR_MASK;
428 }
429
430 /* Disable Half major interrupt */
431 if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
432 {
433 base->TCD[channel].CSR &= ~(uint16_t)DMA_CSR_INTHALF_MASK;
434 }
435}
436
437/*!
438 * brief Sets all fields to default values for the TCD structure.
439 *
 * This function sets all fields for this TCD structure to default values.
441 *
442 * param tcd Pointer to the TCD structure.
443 * note This function enables the auto stop request feature.
444 */
445void EDMA_TcdReset(edma_tcd_t *tcd)
446{
447 assert(tcd != NULL);
448 assert(((uint32_t)tcd & 0x1FU) == 0U);
449
450 /* Reset channel TCD */
451 tcd->SADDR = 0U;
452 tcd->SOFF = 0U;
453 tcd->ATTR = 0U;
454 tcd->NBYTES = 0U;
455 tcd->SLAST = 0U;
456 tcd->DADDR = 0U;
457 tcd->DOFF = 0U;
458 tcd->CITER = 0U;
459 tcd->DLAST_SGA = 0U;
460 /* Enable auto disable request feature */
461 tcd->CSR = DMA_CSR_DREQ(true);
462 tcd->BITER = 0U;
463}
464
465/*!
466 * brief Configures the eDMA TCD transfer attribute.
467 *
468 * The TCD is a transfer control descriptor. The content of the TCD is the same as the hardware TCD registers.
469 * The STCD is used in the scatter-gather mode.
470 * This function configures the TCD transfer attribute, including source address, destination address,
471 * transfer size, address offset, and so on. It also configures the scatter gather feature if the
472 * user supplies the next TCD address.
473 * Example:
474 * code
 * edma_transfer_config_t config = {
 * ...
 * };
478 * edma_tcd_t tcd __aligned(32);
479 * edma_tcd_t nextTcd __aligned(32);
480 * EDMA_TcdSetTransferConfig(&tcd, &config, &nextTcd);
481 * endcode
482 *
483 * param tcd Pointer to the TCD structure.
484 * param config Pointer to eDMA transfer configuration structure.
485 * param nextTcd Pointer to the next TCD structure. It can be NULL if users
486 * do not want to enable scatter/gather feature.
487 * note TCD address should be 32 bytes aligned or it causes an eDMA error.
488 * note If the nextTcd is not NULL, the scatter gather feature is enabled
489 * and DREQ bit is cleared in the previous transfer configuration, which
490 * is set in the EDMA_TcdReset.
491 */
492void EDMA_TcdSetTransferConfig(edma_tcd_t *tcd, const edma_transfer_config_t *config, edma_tcd_t *nextTcd)
493{
494 assert(tcd != NULL);
495 assert(((uint32_t)tcd & 0x1FU) == 0U);
496 assert(config != NULL);
497 assert(((uint32_t)nextTcd & 0x1FU) == 0U);
498 assert((config->srcAddr % (1UL << (uint32_t)config->srcTransferSize)) == 0U);
499 assert((config->destAddr % (1UL << (uint32_t)config->destTransferSize)) == 0U);
500
501 /* source address */
502#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
503 tcd->SADDR = MEMORY_ConvertMemoryMapAddress(config->srcAddr, kMEMORY_Local2DMA);
504 /* destination address */
505 tcd->DADDR = MEMORY_ConvertMemoryMapAddress(config->destAddr, kMEMORY_Local2DMA);
506#else
507 tcd->SADDR = config->srcAddr;
508 /* destination address */
509 tcd->DADDR = config->destAddr;
510#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
511 /* Source data and destination data transfer size */
512 tcd->ATTR = DMA_ATTR_SSIZE(config->srcTransferSize) | DMA_ATTR_DSIZE(config->destTransferSize);
513 /* Source address signed offset */
514 tcd->SOFF = (uint16_t)config->srcOffset;
515 /* Destination address signed offset */
516 tcd->DOFF = (uint16_t)config->destOffset;
517 /* Minor byte transfer count */
518 tcd->NBYTES = config->minorLoopBytes;
519 /* Current major iteration count */
520 tcd->CITER = (uint16_t)config->majorLoopCounts;
521 /* Starting major iteration count */
522 tcd->BITER = (uint16_t)config->majorLoopCounts;
523 /* Enable scatter/gather processing */
524 if (nextTcd != NULL)
525 {
526#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
527 tcd->DLAST_SGA = MEMORY_ConvertMemoryMapAddress((uint32_t)nextTcd, kMEMORY_Local2DMA);
528#else
529 tcd->DLAST_SGA = (uint32_t)nextTcd;
530#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
531 /*
           Before calling EDMA_TcdSetTransferConfig or EDMA_SetTransferConfig, the
           user must call EDMA_TcdReset or EDMA_ResetChannel, which sets
           DREQ, so "|" or "&" must be used here rather than "=".

           Clear the DREQ bit because scatter/gather has been enabled, so the
           previous transfer is not the last transfer, and the channel request should
           remain enabled for the next transfer (the next TCD).
539 */
540 tcd->CSR = (tcd->CSR | (uint16_t)DMA_CSR_ESG_MASK) & ~(uint16_t)DMA_CSR_DREQ_MASK;
541 }
542}
543
544/*!
545 * brief Configures the eDMA TCD minor offset feature.
546 *
 * A minor offset is a sign-extended value added to the source address or a destination
548 * address after each minor loop.
549 *
 * param tcd A pointer to the TCD structure.
551 * param config A pointer to the minor offset configuration structure.
552 */
553void EDMA_TcdSetMinorOffsetConfig(edma_tcd_t *tcd, const edma_minor_offset_config_t *config)
554{
555 assert(tcd != NULL);
556 assert(((uint32_t)tcd & 0x1FU) == 0U);
557
558 uint32_t tmpreg;
559
560 tmpreg = tcd->NBYTES &
561 ~(DMA_NBYTES_MLOFFYES_SMLOE_MASK | DMA_NBYTES_MLOFFYES_DMLOE_MASK | DMA_NBYTES_MLOFFYES_MLOFF_MASK);
562 tmpreg |=
563 (DMA_NBYTES_MLOFFYES_SMLOE(config->enableSrcMinorOffset) |
564 DMA_NBYTES_MLOFFYES_DMLOE(config->enableDestMinorOffset) | DMA_NBYTES_MLOFFYES_MLOFF(config->minorOffset));
565 tcd->NBYTES = tmpreg;
566}
567
568/*!
569 * brief Configures the eDMA TCD major offset feature.
570 *
 * Adjustment values added to the source and destination addresses at the completion of the major iteration count.
572 *
 * param tcd A pointer to the TCD structure.
574 * param sourceOffset source address offset.
575 * param destOffset destination address offset.
576 */
577void EDMA_TcdSetMajorOffsetConfig(edma_tcd_t *tcd, int32_t sourceOffset, int32_t destOffset)
578{
579 assert(tcd != NULL);
580 assert(((uint32_t)tcd & 0x1FU) == 0U);
581
582 tcd->SLAST = (uint32_t)sourceOffset;
583 tcd->DLAST_SGA = (uint32_t)destOffset;
584}
585
586/*!
587 * brief Sets the channel link for the eDMA TCD.
588 *
589 * This function configures either a minor link or a major link. The minor link means the channel link is
590 * triggered every time CITER decreases by 1. The major link means that the channel link is triggered when the CITER is
591 * exhausted.
592 *
593 * note Users should ensure that DONE flag is cleared before calling this interface, or the configuration is invalid.
 * param tcd Pointer to the TCD structure.
595 * param type Channel link type, it can be one of:
596 * arg kEDMA_LinkNone
597 * arg kEDMA_MinorLink
598 * arg kEDMA_MajorLink
599 * param linkedChannel The linked channel number.
600 */
601void EDMA_TcdSetChannelLink(edma_tcd_t *tcd, edma_channel_link_type_t type, uint32_t linkedChannel)
602{
603 assert(tcd != NULL);
604 assert(((uint32_t)tcd & 0x1FU) == 0U);
605 assert(linkedChannel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
606
607 if (type == kEDMA_MinorLink) /* Minor link config */
608 {
609 uint16_t tmpreg;
610
611 /* Enable minor link */
612 tcd->CITER |= DMA_CITER_ELINKYES_ELINK_MASK;
613 tcd->BITER |= DMA_BITER_ELINKYES_ELINK_MASK;
614 /* Set linked channel */
615 tmpreg = tcd->CITER & (~(uint16_t)DMA_CITER_ELINKYES_LINKCH_MASK);
616 tmpreg |= DMA_CITER_ELINKYES_LINKCH(linkedChannel);
617 tcd->CITER = tmpreg;
618 tmpreg = tcd->BITER & (~(uint16_t)DMA_BITER_ELINKYES_LINKCH_MASK);
619 tmpreg |= DMA_BITER_ELINKYES_LINKCH(linkedChannel);
620 tcd->BITER = tmpreg;
621 }
622 else if (type == kEDMA_MajorLink) /* Major link config */
623 {
624 uint16_t tmpreg;
625
626 /* Enable major link */
627 tcd->CSR |= DMA_CSR_MAJORELINK_MASK;
628 /* Set major linked channel */
629 tmpreg = tcd->CSR & (~(uint16_t)DMA_CSR_MAJORLINKCH_MASK);
630 tcd->CSR = tmpreg | DMA_CSR_MAJORLINKCH(linkedChannel);
631 }
632 else /* Link none */
633 {
634 tcd->CITER &= ~(uint16_t)DMA_CITER_ELINKYES_ELINK_MASK;
635 tcd->BITER &= ~(uint16_t)DMA_BITER_ELINKYES_ELINK_MASK;
636 tcd->CSR &= ~(uint16_t)DMA_CSR_MAJORELINK_MASK;
637 }
638}
639
640/*!
641 * brief Sets the source modulo and the destination modulo for the eDMA TCD.
642 *
 * This function defines a specific address range within which the result of the (SADDR + SOFF)/(DADDR + DOFF)
 * calculation wraps. It provides the ability to implement a circular data
 * queue easily.
646 *
647 * param tcd A pointer to the TCD structure.
648 * param srcModulo A source modulo value.
649 * param destModulo A destination modulo value.
650 */
651void EDMA_TcdSetModulo(edma_tcd_t *tcd, edma_modulo_t srcModulo, edma_modulo_t destModulo)
652{
653 assert(tcd != NULL);
654 assert(((uint32_t)tcd & 0x1FU) == 0U);
655
656 uint16_t tmpreg;
657
658 tmpreg = tcd->ATTR & (~(uint16_t)(DMA_ATTR_SMOD_MASK | DMA_ATTR_DMOD_MASK));
659 tcd->ATTR = tmpreg | DMA_ATTR_DMOD(destModulo) | DMA_ATTR_SMOD(srcModulo);
660}
661
662/*!
663 * brief Enables the interrupt source for the eDMA TCD.
664 *
 * param tcd Pointer to the TCD structure.
666 * param mask The mask of interrupt source to be set. Users need to use
667 * the defined edma_interrupt_enable_t type.
668 */
669void EDMA_TcdEnableInterrupts(edma_tcd_t *tcd, uint32_t mask)
670{
671 assert(tcd != NULL);
672
673 /* Enable Major interrupt */
674 if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
675 {
676 tcd->CSR |= DMA_CSR_INTMAJOR_MASK;
677 }
678
679 /* Enable Half major interrupt */
680 if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
681 {
682 tcd->CSR |= DMA_CSR_INTHALF_MASK;
683 }
684}
685
686/*!
687 * brief Disables the interrupt source for the eDMA TCD.
688 *
 * param tcd Pointer to the TCD structure.
690 * param mask The mask of interrupt source to be set. Users need to use
691 * the defined edma_interrupt_enable_t type.
692 */
693void EDMA_TcdDisableInterrupts(edma_tcd_t *tcd, uint32_t mask)
694{
695 assert(tcd != NULL);
696
697 /* Disable Major interrupt */
698 if (0U != (mask & (uint32_t)kEDMA_MajorInterruptEnable))
699 {
700 tcd->CSR &= ~(uint16_t)DMA_CSR_INTMAJOR_MASK;
701 }
702
703 /* Disable Half major interrupt */
704 if (0U != (mask & (uint32_t)kEDMA_HalfInterruptEnable))
705 {
706 tcd->CSR &= ~(uint16_t)DMA_CSR_INTHALF_MASK;
707 }
708}
709
710/*!
711 * brief Gets the remaining major loop count from the eDMA current channel TCD.
712 *
 * This function checks the TCD (Transfer Control Descriptor) status for a specified
 * eDMA channel and returns the major loop count that has not finished yet.
715 *
716 * param base eDMA peripheral base address.
717 * param channel eDMA channel number.
718 * return Major loop count which has not been transferred yet for the current TCD.
 * note 1. This function can only be used to get the unfinished major loop count of a transfer without
 * a next TCD, otherwise the result might be inaccurate.
 * 2. The unfinished/remaining transfer bytes cannot be obtained directly from the registers while
 * the channel is running.
 * To calculate the remaining bytes, the initial NBYTES value configured in the DMA_TCDn_NBYTES_MLNO
 * register is needed, but the eDMA IP does not support reading it while a channel is active.
 * In other words, while a channel is running, reading NBYTES always returns the actual (decrementing)
 * value the eDMA engine is working with.
 * Consequently, to get the remaining transfer bytes, a software-saved initial NBYTES value (for example,
 * copied before enabling the channel) is needed. The formula to calculate it is shown below:
 * RemainingBytes = RemainingMajorLoopCount * NBYTES (initially configured)
730 */
731uint32_t EDMA_GetRemainingMajorLoopCount(DMA_Type *base, uint32_t channel)
732{
733 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
734
735 uint32_t remainingCount = 0;
736
737 if (0U != (DMA_CSR_DONE_MASK & base->TCD[channel].CSR))
738 {
739 remainingCount = 0;
740 }
741 else
742 {
743 /* Calculate the unfinished bytes */
744 if (0U != (base->TCD[channel].CITER_ELINKNO & DMA_CITER_ELINKNO_ELINK_MASK))
745 {
746 remainingCount = (((uint32_t)base->TCD[channel].CITER_ELINKYES & DMA_CITER_ELINKYES_CITER_MASK) >>
747 DMA_CITER_ELINKYES_CITER_SHIFT);
748 }
749 else
750 {
751 remainingCount = (((uint32_t)base->TCD[channel].CITER_ELINKNO & DMA_CITER_ELINKNO_CITER_MASK) >>
752 DMA_CITER_ELINKNO_CITER_SHIFT);
753 }
754 }
755
756 return remainingCount;
757}
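
/*
 * A sketch of the remaining-bytes calculation described in the note above. The
 * nbytesPerRequest variable is a placeholder for the minor loop byte count that the
 * application saved when it configured the transfer; it is not read back from the
 * hardware while the channel is running.
 * code
 *   uint32_t remainingLoops = EDMA_GetRemainingMajorLoopCount(DMA0, 0U);
 *   uint32_t remainingBytes = remainingLoops * nbytesPerRequest;
 * endcode
 */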
758
759/*!
760 * brief Gets the eDMA channel status flags.
761 *
762 * param base eDMA peripheral base address.
763 * param channel eDMA channel number.
764 * return The mask of channel status flags. Users need to use the
 * _edma_channel_status_flags type to decode the return value.
766 */
767uint32_t EDMA_GetChannelStatusFlags(DMA_Type *base, uint32_t channel)
768{
769 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
770
771 uint32_t retval = 0;
772
773 /* Get DONE bit flag */
774 retval |= (((uint32_t)base->TCD[channel].CSR & DMA_CSR_DONE_MASK) >> DMA_CSR_DONE_SHIFT);
775 /* Get ERROR bit flag */
776 retval |= ((((uint32_t)base->ERR >> channel) & 0x1U) << 1U);
777 /* Get INT bit flag */
778 retval |= ((((uint32_t)base->INT >> channel) & 0x1U) << 2U);
779
780 return retval;
781}
782
783/*!
784 * brief Clears the eDMA channel status flags.
785 *
786 * param base eDMA peripheral base address.
787 * param channel eDMA channel number.
788 * param mask The mask of channel status to be cleared. Users need to use
789 * the defined _edma_channel_status_flags type.
790 */
791void EDMA_ClearChannelStatusFlags(DMA_Type *base, uint32_t channel, uint32_t mask)
792{
793 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
794
795 /* Clear DONE bit flag */
796 if (0U != (mask & (uint32_t)kEDMA_DoneFlag))
797 {
798 base->CDNE = (uint8_t)channel;
799 }
800 /* Clear ERROR bit flag */
801 if (0U != (mask & (uint32_t)kEDMA_ErrorFlag))
802 {
803 base->CERR = (uint8_t)channel;
804 }
805 /* Clear INT bit flag */
806 if (0U != (mask & (uint32_t)kEDMA_InterruptFlag))
807 {
808 base->CINT = (uint8_t)channel;
809 }
810}
811
812static uint32_t EDMA_GetInstanceOffset(uint32_t instance)
813{
814 static uint8_t startInstanceNum;
815
816#if defined(DMA0)
817 startInstanceNum = (uint8_t)EDMA_GetInstance(DMA0);
818#elif defined(DMA1)
819 startInstanceNum = (uint8_t)EDMA_GetInstance(DMA1);
820#elif defined(DMA2)
821 startInstanceNum = (uint8_t)EDMA_GetInstance(DMA2);
822#elif defined(DMA3)
823 startInstanceNum = (uint8_t)EDMA_GetInstance(DMA3);
824#endif
825
826 assert(startInstanceNum <= instance);
827
828 return instance - startInstanceNum;
829}
830
831/*!
832 * brief Creates the eDMA handle.
833 *
834 * This function is called if using the transactional API for eDMA. This function
835 * initializes the internal state of the eDMA handle.
836 *
837 * param handle eDMA handle pointer. The eDMA handle stores callback function and
838 * parameters.
839 * param base eDMA peripheral base address.
840 * param channel eDMA channel number.
841 */
842void EDMA_CreateHandle(edma_handle_t *handle, DMA_Type *base, uint32_t channel)
843{
844 assert(handle != NULL);
845 assert(channel < (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL);
846
847 uint32_t edmaInstance;
848 uint32_t channelIndex;
849 edma_tcd_t *tcdRegs;
850
851 /* Zero the handle */
852 (void)memset(handle, 0, sizeof(*handle));
853
854 handle->base = base;
855 handle->channel = (uint8_t)channel;
856
857 /* Get the DMA instance number */
858 edmaInstance = EDMA_GetInstance(base);
859 channelIndex = (EDMA_GetInstanceOffset(edmaInstance) * (uint32_t)FSL_FEATURE_EDMA_MODULE_CHANNEL) + channel;
860 s_EDMAHandle[channelIndex] = handle;
861
862 /* Enable NVIC interrupt */
863 (void)EnableIRQ(s_edmaIRQNumber[edmaInstance][channel]);
864
865 /*
      Reset TCD registers to zero. Unlike EDMA_TcdReset (which sets DREQ),
      CSR is left 0 here because the eDMA busy check mechanism in
      EDMA_SubmitTransfer requires CSR to be 0.
869 */
870 tcdRegs = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
871 tcdRegs->SADDR = 0;
872 tcdRegs->SOFF = 0;
873 tcdRegs->ATTR = 0;
874 tcdRegs->NBYTES = 0;
875 tcdRegs->SLAST = 0;
876 tcdRegs->DADDR = 0;
877 tcdRegs->DOFF = 0;
878 tcdRegs->CITER = 0;
879 tcdRegs->DLAST_SGA = 0;
880 tcdRegs->CSR = 0;
881 tcdRegs->BITER = 0;
882}
883
884/*!
885 * brief Installs the TCDs memory pool into the eDMA handle.
886 *
 * This function is called after EDMA_CreateHandle when the scatter/gather feature is needed. It shall only be used
 * when scatter/gather mode is required. Scatter/gather mode enables the eDMA to load a new transfer control
 * descriptor (TCD) in hardware and automatically reconfigure that DMA channel for a new transfer.
 * Users need to prepare the TCD memory and configure the TCDs using the EDMA_SubmitTransfer interface.
891 *
892 * param handle eDMA handle pointer.
893 * param tcdPool A memory pool to store TCDs. It must be 32 bytes aligned.
894 * param tcdSize The number of TCD slots.
895 */
896void EDMA_InstallTCDMemory(edma_handle_t *handle, edma_tcd_t *tcdPool, uint32_t tcdSize)
897{
898 assert(handle != NULL);
899 assert(((uint32_t)tcdPool & 0x1FU) == 0U);
900
901 /* Initialize tcd queue attribute. */
    /* header should be initialized to 1, since it is used to point to the next TCD to be loaded into TCD memory.
     * In the EDMA driver IRQ handler, header is used to calculate how many TCDs have completed. For example,
     * if the application submits 4 transfer requests, A->B->C->D,
     * when A finishes with header = 0, C is the next TCD to be loaded (B is already loaded), so
     * according to the IRQ handler, tcdDone = C - A - header = 2 - header = 2, while actually only 1 TCD is done,
     * so the wrong TCD done count would be passed to the application in the first TCD interrupt.
     * On the first submit, header should be assigned 1, since 0 is the current TCD and 1 is the next TCD to be loaded,
     * but software cannot know which submission is the first one, so 1 is assigned to header here.
910 */
911 handle->header = 1;
912 handle->tcdUsed = 0;
913 handle->tcdSize = (int8_t)tcdSize;
914 handle->flags = 0;
915 handle->tcdPool = tcdPool;
916}
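
/*
 * A sketch of installing a TCD memory pool; the handle name, pool size, and the
 * AT_NONCACHEABLE_SECTION_ALIGN helper (assumed to be available from the SDK common
 * files) are placeholders/assumptions. The pool must be 32 bytes aligned and must
 * stay valid while the handle is in use.
 * code
 *   AT_NONCACHEABLE_SECTION_ALIGN(static edma_tcd_t s_tcdPool[4], 32U);
 *
 *   EDMA_CreateHandle(&g_edmaHandle, DMA0, 0U);
 *   EDMA_InstallTCDMemory(&g_edmaHandle, s_tcdPool, 4U);
 * endcode
 */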
917
918/*!
919 * brief Installs a callback function for the eDMA transfer.
920 *
921 * This callback is called in the eDMA IRQ handler. Use the callback to do something after
 * the current major loop transfer completes. The callback is invoked every time one TCD finishes its transfer.
923 *
924 * param handle eDMA handle pointer.
925 * param callback eDMA callback function pointer.
926 * param userData A parameter for the callback function.
927 */
928void EDMA_SetCallback(edma_handle_t *handle, edma_callback callback, void *userData)
929{
930 assert(handle != NULL);
931
932 handle->callback = callback;
933 handle->userData = userData;
934}
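
/*
 * A sketch of a transfer callback and its registration; the names are placeholders
 * and the callback prototype is assumed to follow the edma_callback type declared in
 * fsl_edma.h (handle, user data, transfer-done flag, number of finished TCDs).
 * code
 *   static volatile bool g_transferDone = false;
 *
 *   static void DMA_Callback(edma_handle_t *handle, void *userData, bool transferDone, uint32_t tcds)
 *   {
 *       if (transferDone)
 *       {
 *           g_transferDone = true;
 *       }
 *   }
 *
 *   EDMA_SetCallback(&g_edmaHandle, DMA_Callback, NULL);
 * endcode
 */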
935
936static edma_transfer_size_t EDMA_TransferWidthMapping(uint32_t width)
937{
938 edma_transfer_size_t transferSize = kEDMA_TransferSize1Bytes;
939
940 /* map width to register value */
941 switch (width)
942 {
943 /* width 8bit */
944 case 1U:
945 transferSize = kEDMA_TransferSize1Bytes;
946 break;
947 /* width 16bit */
948 case 2U:
949 transferSize = kEDMA_TransferSize2Bytes;
950 break;
951 /* width 32bit */
952 case 4U:
953 transferSize = kEDMA_TransferSize4Bytes;
954 break;
955#if (defined(FSL_FEATURE_EDMA_SUPPORT_8_BYTES_TRANSFER) && FSL_FEATURE_EDMA_SUPPORT_8_BYTES_TRANSFER)
956 /* width 64bit */
957 case 8U:
958 transferSize = kEDMA_TransferSize8Bytes;
959 break;
960#endif
961#if (defined(FSL_FEATURE_EDMA_SUPPORT_16_BYTES_TRANSFER) && FSL_FEATURE_EDMA_SUPPORT_16_BYTES_TRANSFER)
962 /* width 128bit */
963 case 16U:
964 transferSize = kEDMA_TransferSize16Bytes;
965 break;
966#endif
967 /* width 256bit */
968 case 32U:
969 transferSize = kEDMA_TransferSize32Bytes;
970 break;
971 default:
972 /* All the cases have been listed above, the default clause should not be reached. */
973 assert(false);
974 break;
975 }
976
977 return transferSize;
978}
979
980/*!
981 * brief Prepares the eDMA transfer structure configurations.
982 *
983 * This function prepares the transfer configuration structure according to the user input.
984 *
985 * param config The user configuration structure of type edma_transfer_t.
986 * param srcAddr eDMA transfer source address.
987 * param srcWidth eDMA transfer source address width(bytes).
988 * param srcOffset source address offset.
989 * param destAddr eDMA transfer destination address.
990 * param destWidth eDMA transfer destination address width(bytes).
991 * param destOffset destination address offset.
992 * param bytesEachRequest eDMA transfer bytes per channel request.
993 * param transferBytes eDMA transfer bytes to be transferred.
994 * note The data address and the data width must be consistent. For example, if the SRC
995 * is 4 bytes, the source address must be 4 bytes aligned, or it results in
996 * source address error (SAE).
997 */
998void EDMA_PrepareTransferConfig(edma_transfer_config_t *config,
999 void *srcAddr,
1000 uint32_t srcWidth,
1001 int16_t srcOffset,
1002 void *destAddr,
1003 uint32_t destWidth,
1004 int16_t destOffset,
1005 uint32_t bytesEachRequest,
1006 uint32_t transferBytes)
1007{
1008 assert(config != NULL);
1009 assert(srcAddr != NULL);
1010 assert(destAddr != NULL);
1011 assert((srcWidth != 0U) && (srcWidth <= 32U) && ((srcWidth & (srcWidth - 1U)) == 0U));
1012 assert((destWidth != 0U) && (destWidth <= 32U) && ((destWidth & (destWidth - 1U)) == 0U));
1013 assert((transferBytes % bytesEachRequest) == 0U);
1014 assert((((uint32_t)(uint32_t *)srcAddr) % srcWidth) == 0U);
1015 assert((((uint32_t)(uint32_t *)destAddr) % destWidth) == 0U);
1016
1017 /* Initializes the configure structure to zero. */
1018 (void)memset(config, 0, sizeof(*config));
1019
1020#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1021 config->srcAddr = MEMORY_ConvertMemoryMapAddress((uint32_t)(uint32_t *)srcAddr, kMEMORY_Local2DMA);
1022 config->destAddr = MEMORY_ConvertMemoryMapAddress((uint32_t)(uint32_t *)destAddr, kMEMORY_Local2DMA);
1023#else
1024 config->destAddr = (uint32_t)(uint32_t *)destAddr;
1025 config->srcAddr = (uint32_t)(uint32_t *)srcAddr;
1026#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1027 config->minorLoopBytes = bytesEachRequest;
1028 config->majorLoopCounts = transferBytes / bytesEachRequest;
1029 config->srcTransferSize = EDMA_TransferWidthMapping(srcWidth);
1030 config->destTransferSize = EDMA_TransferWidthMapping(destWidth);
1031 config->destOffset = destOffset;
1032 config->srcOffset = srcOffset;
1033}
1034
1035/*!
1036 * brief Prepares the eDMA transfer structure.
1037 *
1038 * This function prepares the transfer configuration structure according to the user input.
1039 *
1040 * param config The user configuration structure of type edma_transfer_t.
1041 * param srcAddr eDMA transfer source address.
1042 * param srcWidth eDMA transfer source address width(bytes).
1043 * param destAddr eDMA transfer destination address.
1044 * param destWidth eDMA transfer destination address width(bytes).
1045 * param bytesEachRequest eDMA transfer bytes per channel request.
1046 * param transferBytes eDMA transfer bytes to be transferred.
1047 * param type eDMA transfer type.
1048 * note The data address and the data width must be consistent. For example, if the SRC
1049 * is 4 bytes, the source address must be 4 bytes aligned, or it results in
1050 * source address error (SAE).
1051 */
1052void EDMA_PrepareTransfer(edma_transfer_config_t *config,
1053 void *srcAddr,
1054 uint32_t srcWidth,
1055 void *destAddr,
1056 uint32_t destWidth,
1057 uint32_t bytesEachRequest,
1058 uint32_t transferBytes,
1059 edma_transfer_type_t type)
1060{
1061 assert(config != NULL);
1062
1063 int16_t srcOffset = 0, destOffset = 0;
1064
1065 switch (type)
1066 {
1067 case kEDMA_MemoryToMemory:
1068 destOffset = (int16_t)destWidth;
1069 srcOffset = (int16_t)srcWidth;
1070 break;
1071 case kEDMA_MemoryToPeripheral:
1072 destOffset = 0;
1073 srcOffset = (int16_t)srcWidth;
1074 break;
1075 case kEDMA_PeripheralToMemory:
1076 destOffset = (int16_t)destWidth;
1077 srcOffset = 0;
1078 break;
1079 case kEDMA_PeripheralToPeripheral:
1080 destOffset = 0;
1081 srcOffset = 0;
1082 break;
1083 default:
1084 /* All the cases have been listed above, the default clause should not be reached. */
1085 assert(false);
1086 break;
1087 }
1088
1089 EDMA_PrepareTransferConfig(config, srcAddr, srcWidth, srcOffset, destAddr, destWidth, destOffset, bytesEachRequest,
1090 transferBytes);
1091}
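
/*
 * An end-to-end sketch of the transactional flow built on the function above; the
 * buffers, channel number, and widths are placeholders, and any DMAMUX routing the
 * device may require is assumed to be configured elsewhere.
 * code
 *   edma_transfer_config_t transferConfig;
 *
 *   EDMA_CreateHandle(&g_edmaHandle, DMA0, 0U);
 *   EDMA_SetCallback(&g_edmaHandle, DMA_Callback, NULL);
 *   EDMA_PrepareTransfer(&transferConfig, srcBuffer, 4U, destBuffer, 4U,
 *                        16U, sizeof(srcBuffer), kEDMA_MemoryToMemory);
 *   if (kStatus_Success == EDMA_SubmitTransfer(&g_edmaHandle, &transferConfig))
 *   {
 *       EDMA_StartTransfer(&g_edmaHandle);
 *   }
 * endcode
 */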
1092
1093/*!
1094 * brief Submits the eDMA transfer request.
1095 *
1096 * This function submits the eDMA transfer request according to the transfer configuration structure.
 * In scatter/gather mode, calling this function adds a configured TCD to the circular list of the TCD pool.
 * The TCD pool is set up beforehand by calling EDMA_InstallTCDMemory.
1099 *
1100 * param handle eDMA handle pointer.
1101 * param config Pointer to eDMA transfer configuration structure.
 * retval kStatus_Success The transfer request was submitted successfully.
 * retval kStatus_EDMA_QueueFull The TCD queue is full; submitting another transfer request is not allowed.
 * retval kStatus_EDMA_Busy The given channel is busy; submit the request later.
1105 */
1106status_t EDMA_SubmitTransfer(edma_handle_t *handle, const edma_transfer_config_t *config)
1107{
1108 assert(handle != NULL);
1109 assert(config != NULL);
1110
1111 edma_tcd_t *tcdRegs = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
1112
1113 if (handle->tcdPool == NULL)
1114 {
1115 /*
1116 * Check if EDMA channel is busy:
1117 * 1. if channel active bit is set, it implies that minor loop is executing, then channel is busy
1118 * 2. if channel active bit is not set and BITER not equal to CITER, it implies that major loop is executing,
1119 * then channel is busy
1120 *
         * There is one case that cannot be covered by the condition below:
         * when a transfer request has been submitted but no request has come from the peripheral yet (channel
         * service has not begun), and the application would like to submit another transfer, the TCD will be
         * overwritten, since ACTIVE is 0 and BITER = CITER. Such a case is actually a scatter/gather (linked TCD)
         * case, so the application should enable a TCD pool for dynamic scatter/gather mode by calling
         * EDMA_InstallTCDMemory.
1126 */
1127 if (((handle->base->TCD[handle->channel].CSR & DMA_CSR_ACTIVE_MASK) != 0U) ||
1128 (((handle->base->TCD[handle->channel].CITER_ELINKNO & DMA_CITER_ELINKNO_CITER_MASK) !=
1129 (handle->base->TCD[handle->channel].BITER_ELINKNO & DMA_BITER_ELINKNO_BITER_MASK))))
1130 {
1131 return kStatus_EDMA_Busy;
1132 }
1133 else
1134 {
1135 EDMA_SetTransferConfig(handle->base, handle->channel, config, NULL);
1136 /* Enable auto disable request feature */
1137 handle->base->TCD[handle->channel].CSR |= DMA_CSR_DREQ_MASK;
1138 /* Enable major interrupt */
1139 handle->base->TCD[handle->channel].CSR |= DMA_CSR_INTMAJOR_MASK;
1140
1141 return kStatus_Success;
1142 }
1143 }
1144 else /* Use the TCD queue. */
1145 {
1146 uint32_t primask;
1147 uint16_t csr;
1148 int8_t currentTcd;
1149 int8_t previousTcd;
1150 int8_t nextTcd;
1151 int8_t tmpTcdUsed;
1152 int8_t tmpTcdSize;
1153
1154 /* Check if tcd pool is full. */
1155 primask = DisableGlobalIRQ();
1156 tmpTcdUsed = handle->tcdUsed;
1157 tmpTcdSize = handle->tcdSize;
1158 if (tmpTcdUsed >= tmpTcdSize)
1159 {
1160 EnableGlobalIRQ(primask);
1161
1162 return kStatus_EDMA_QueueFull;
1163 }
1164 currentTcd = handle->tail;
1165 handle->tcdUsed++;
1166 /* Calculate index of next TCD */
1167 nextTcd = currentTcd + 1;
1168 if (nextTcd == handle->tcdSize)
1169 {
1170 nextTcd = 0;
1171 }
1172 /* Advance queue tail index */
1173 handle->tail = nextTcd;
1174 EnableGlobalIRQ(primask);
1175 /* Calculate index of previous TCD */
1176 previousTcd = currentTcd != 0 ? currentTcd - 1 : (handle->tcdSize - 1);
1177 /* Configure current TCD block. */
1178 EDMA_TcdReset(&handle->tcdPool[currentTcd]);
1179 EDMA_TcdSetTransferConfig(&handle->tcdPool[currentTcd], config, NULL);
1180 /* Enable major interrupt */
1181 handle->tcdPool[currentTcd].CSR |= DMA_CSR_INTMAJOR_MASK;
1182 /* Link current TCD with next TCD for identification of current TCD */
1183#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1184 handle->tcdPool[currentTcd].DLAST_SGA =
1185 MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[nextTcd], kMEMORY_Local2DMA);
1186#else
1187 handle->tcdPool[currentTcd].DLAST_SGA = (uint32_t)&handle->tcdPool[nextTcd];
1188#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1189 /* Chain from previous descriptor unless tcd pool size is 1(this descriptor is its own predecessor). */
1190 if (currentTcd != previousTcd)
1191 {
1192 /* Enable scatter/gather feature in the previous TCD block. */
1193 csr = handle->tcdPool[previousTcd].CSR | ((uint16_t)DMA_CSR_ESG_MASK);
1194 csr &= ~((uint16_t)DMA_CSR_DREQ_MASK);
1195 handle->tcdPool[previousTcd].CSR = csr;
1196 /*
                Check if the TCD block in the registers is the previous one (it points to the current TCD block).
                This is used to check whether the previously linked TCD has already been loaded into the TCD
                registers. If so, the TCD registers must be linked as well, otherwise the current TCD could be
                linked into a dead chain if the TCD loading occurs before the previous TCD block is linked.
1201 */
1202#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1203 if (tcdRegs->DLAST_SGA ==
1204 MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[currentTcd], kMEMORY_Local2DMA))
1205#else
1206 if (tcdRegs->DLAST_SGA == (uint32_t)&handle->tcdPool[currentTcd])
1207#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1208 {
1209 /* Clear the DREQ bits for the dynamic scatter gather */
1210 tcdRegs->CSR |= DMA_CSR_DREQ_MASK;
1211 /* Enable scatter/gather also in the TCD registers. */
1212 csr = tcdRegs->CSR | DMA_CSR_ESG_MASK;
            /* The CSR register must be written in a single access, because the transfer may finish at any time. */
1214 tcdRegs->CSR = csr;
1215 /*
                It is very important to check the ESG bit!
                By hardware design, if the DONE bit is set, the ESG bit cannot be set, so ESG can
                be used to check whether the dynamic TCD link operation was successful. If the ESG bit is not set
                and DLAST_SGA is not the next TCD address (which would mean the dynamic TCD link succeeded and
                the current TCD block has been loaded into the TCD registers), the transfer has finished
                and the TCD link operation failed, so the TCD content must be installed into the TCD registers
                and the transfer enabled again. If ESG is set, the transfer has not finished, so the dynamic
                TCD link succeeded.
1224 */
1225 if (0U != (tcdRegs->CSR & DMA_CSR_ESG_MASK))
1226 {
1227 tcdRegs->CSR &= ~(uint16_t)DMA_CSR_DREQ_MASK;
1228 return kStatus_Success;
1229 }
1230 /*
1231 Check whether the current TCD block is already loaded in the TCD registers. It is another
1232 condition when ESG bit is not set: it means the dynamic TCD link succeed and the current
1233 TCD block has been loaded into TCD registers.
1234 */
1235#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1236 if (tcdRegs->DLAST_SGA ==
1237 MEMORY_ConvertMemoryMapAddress((uint32_t)&handle->tcdPool[nextTcd], kMEMORY_Local2DMA))
1238#else
1239 if (tcdRegs->DLAST_SGA == (uint32_t)&handle->tcdPool[nextTcd])
1240#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1241 {
1242 return kStatus_Success;
1243 }
1244 /*
                Reaching this point means the previous transfer finished and the DONE bit is set,
                so the TCD registers shall be configured.
1247 */
1248 }
1249 else if (tcdRegs->DLAST_SGA != 0UL)
1250 {
1251 /* The current TCD block has been linked successfully. */
1252 return kStatus_Success;
1253 }
1254 else
1255 {
1256 /*
                    DLAST_SGA is 0, which means this is the first submitted transfer, so the
                    TCD registers shall be configured.
1259 */
1260 }
1261 }
        /* There is no live chain; the TCD block needs to be installed into the TCD registers. */
1263 EDMA_InstallTCD(handle->base, handle->channel, &handle->tcdPool[currentTcd]);
1264 /* Enable channel request again. */
1265 if (0U != (handle->flags & EDMA_TRANSFER_ENABLED_MASK))
1266 {
1267 handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1268 }
1269
1270 return kStatus_Success;
1271 }
1272}
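
/*
 * A sketch of queuing several descriptors when a TCD pool has been installed with
 * EDMA_InstallTCDMemory; transferConfig[] and transferCount are placeholders. A
 * kStatus_EDMA_QueueFull return tells the caller to retry after queued descriptors
 * have completed.
 * code
 *   for (uint32_t i = 0U; i < transferCount; i++)
 *   {
 *       if (EDMA_SubmitTransfer(&g_edmaHandle, &transferConfig[i]) != kStatus_Success)
 *       {
 *           break;
 *       }
 *   }
 *   EDMA_StartTransfer(&g_edmaHandle);
 * endcode
 */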
1273
1274/*!
1275 * brief eDMA starts transfer.
1276 *
1277 * This function enables the channel request. Users can call this function after submitting the transfer request
1278 * or before submitting the transfer request.
1279 *
1280 * param handle eDMA handle pointer.
1281 */
1282void EDMA_StartTransfer(edma_handle_t *handle)
1283{
1284 assert(handle != NULL);
1285 uint32_t tmpCSR = 0;
1286
1287 if (handle->tcdPool == NULL)
1288 {
1289 handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1290 }
1291 else /* Use the TCD queue. */
1292 {
1293 uint32_t primask;
1294 edma_tcd_t *tcdRegs = (edma_tcd_t *)(uint32_t)&handle->base->TCD[handle->channel];
1295
1296 handle->flags |= EDMA_TRANSFER_ENABLED_MASK;
1297
1298 /* Check if there was at least one descriptor submitted since reset (TCD in registers is valid) */
1299 if (tcdRegs->DLAST_SGA != 0U)
1300 {
1301 primask = DisableGlobalIRQ();
            /* Check if the channel request is actually disabled. */
1303 if ((handle->base->ERQ & ((uint32_t)1U << handle->channel)) == 0U)
1304 {
1305 /* Check if transfer is paused. */
1306 tmpCSR = tcdRegs->CSR;
1307 if ((0U == (tmpCSR & DMA_CSR_DONE_MASK)) || (0U != (tmpCSR & DMA_CSR_ESG_MASK)))
1308 {
1309 /*
                        The channel request must be re-enabled as soon as possible, so this is done inside a
                        critical section to avoid delays from task switching or an interrupt service routine.
1312 */
1313 handle->base->SERQ = DMA_SERQ_SERQ(handle->channel);
1314 }
1315 }
1316 EnableGlobalIRQ(primask);
1317 }
1318 }
1319}
1320
1321/*!
1322 * brief eDMA stops transfer.
1323 *
1324 * This function disables the channel request to pause the transfer. Users can call EDMA_StartTransfer()
1325 * again to resume the transfer.
1326 *
1327 * param handle eDMA handle pointer.
1328 */
1329void EDMA_StopTransfer(edma_handle_t *handle)
1330{
1331 assert(handle != NULL);
1332
1333 handle->flags &= (~(uint8_t)EDMA_TRANSFER_ENABLED_MASK);
1334 handle->base->CERQ = DMA_CERQ_CERQ(handle->channel);
1335}
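
/*
 * A sketch of pausing and resuming a transfer with EDMA_StopTransfer and
 * EDMA_StartTransfer; the handle name is a placeholder. EDMA_AbortTransfer (below)
 * is used instead when the current transfer should be discarded so that a new one
 * can be submitted from scratch.
 * code
 *   EDMA_StopTransfer(&g_edmaHandle);
 *   ...
 *   EDMA_StartTransfer(&g_edmaHandle);
 * endcode
 */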
1336
1337/*!
1338 * brief eDMA aborts transfer.
1339 *
 * This function disables the channel request and clears the transfer status bits.
1341 * Users can submit another transfer after calling this API.
1342 *
1343 * param handle DMA handle pointer.
1344 */
1345void EDMA_AbortTransfer(edma_handle_t *handle)
1346{
1347 handle->base->CERQ = DMA_CERQ_CERQ(handle->channel);
1348 /*
    Clear CSR to release the channel, because if the given channel has started a transfer,
    CSR will not be zero: if it is the last transfer, DREQ is set; if not, ESG is set.
1352 */
1353 handle->base->TCD[handle->channel].CSR = 0;
1354 /* Cancel all next TCD transfer. */
1355 handle->base->TCD[handle->channel].DLAST_SGA = 0;
    /* Clear CITER and BITER to make sure the TCD registers are in a correct state for the next call to
     * EDMA_SubmitTransfer */
1358 handle->base->TCD[handle->channel].CITER_ELINKNO = 0;
1359 handle->base->TCD[handle->channel].BITER_ELINKNO = 0;
1360
1361 /* Handle the tcd */
1362 if (handle->tcdPool != NULL)
1363 {
1364 handle->header = 1;
1365 handle->tail = 0;
1366 handle->tcdUsed = 0;
1367 }
1368}
1369
1370/*!
1371 * brief eDMA IRQ handler for the current major loop transfer completion.
1372 *
1373 * This function clears the channel major interrupt flag and calls
1374 * the callback function if it is not NULL.
1375 *
1376 * Note:
1377 * For the case using TCD queue, when the major iteration count is exhausted, additional operations are performed.
1378 * These include the final address adjustments and reloading of the BITER field into the CITER.
1379 * Assertion of an optional interrupt request also occurs at this time, as does a possible fetch of a new TCD from
1380 * memory using the scatter/gather address pointer included in the descriptor (if scatter/gather is enabled).
1381 *
 * For instance, when the interrupt of TCD[0] is handled, TCD[1] has already been loaded into the eDMA engine.
 * As sga and sga_index are calculated based on the DLAST_SGA field of the TCD currently in the eDMA engine, the
 * sga_index in this case is 2 (DLAST_SGA of TCD[1] stores the address of TCD[2]). Thus, the "tcdUsed" update should be
 * (tcdUsed - 2U), which indicates the number of TCDs that can be loaded in the memory pool (because TCD[0] and TCD[1]
 * have already been loaded into the eDMA engine at this point).
1387 *
 * For the last two consecutive ISRs in a scatter/gather process, both see the last TCD loaded from the memory pool
 * into the eDMA engine when the major loop completes (the last ISR does not load a new TCD).
 * Therefore, ensure that the header and tcdUsed updates are identical for them;
 * tcdUsed is 0 in both cases as there is no TCD left to be loaded.
1392 *
1393 * See the "eDMA basic data flow" in the eDMA Functional description section of the Reference Manual for
1394 * further details.
1395 *
1396 * param handle eDMA handle pointer.
1397 */
1398void EDMA_HandleIRQ(edma_handle_t *handle)
1399{
1400 assert(handle != NULL);
1401
1402 bool transfer_done;
1403
1404 /* Clear EDMA interrupt flag */
1405 handle->base->CINT = handle->channel;
1406 /* Check if transfer is already finished. */
1407 transfer_done = ((handle->base->TCD[handle->channel].CSR & DMA_CSR_DONE_MASK) != 0U);
1408
1409 if (handle->tcdPool == NULL)
1410 {
1411 if (handle->callback != NULL)
1412 {
1413 (handle->callback)(handle, handle->userData, transfer_done, 0);
1414 }
1415 }
1416 else /* Use the TCD queue. Please refer to the API descriptions in the eDMA header file for detailed information. */
1417 {
1418 uint32_t sga = (uint32_t)handle->base->TCD[handle->channel].DLAST_SGA;
1419 uint32_t sga_index;
1420 int32_t tcds_done;
1421 uint8_t new_header;
1422 bool esg = ((handle->base->TCD[handle->channel].CSR & DMA_CSR_ESG_MASK) != 0U);
1423
1424 /* Get the offset of the next transfer TCD blocks to be loaded into the eDMA engine. */
1425#if defined FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET && FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET
1426 sga -= MEMORY_ConvertMemoryMapAddress((uint32_t)handle->tcdPool, kMEMORY_Local2DMA);
1427#else
1428 sga -= (uint32_t)handle->tcdPool;
1429#endif /* FSL_FEATURE_MEMORY_HAS_ADDRESS_OFFSET */
1430 /* Get the index of the next transfer TCD blocks to be loaded into the eDMA engine. */
1431 sga_index = sga / sizeof(edma_tcd_t);
1432 /* Adjust header positions. */
1433 if (transfer_done)
1434 {
1435 /* New header shall point to the next TCD to be loaded (current one is already finished) */
1436 new_header = (uint8_t)sga_index;
1437 }
1438 else
1439 {
1440 /* New header shall point to this descriptor currently loaded (not finished yet) */
1441 new_header = sga_index != 0U ? (uint8_t)sga_index - 1U : (uint8_t)handle->tcdSize - 1U;
1442 }
1443 /* Calculate the number of finished TCDs */
1444 if (new_header == (uint8_t)handle->header)
1445 {
1446 int8_t tmpTcdUsed = handle->tcdUsed;
1447 int8_t tmpTcdSize = handle->tcdSize;
1448
            /* Check esg here for the case where the application submits only one request; once that request completes:
             * new_header(1) = handle->header(1)
             * tcdUsed(1) != tcdSize(>1)
             * As the application submitted only once, scatter/gather cannot be enabled, so tcds_done should be 1.
1453 */
1454 if ((tmpTcdUsed == tmpTcdSize) || (!esg))
1455 {
1456 tcds_done = handle->tcdUsed;
1457 }
1458 else
1459 {
                /* No TCDs in the memory are going to be loaded, or an internal error occurred. */
1461 tcds_done = 0;
1462 }
1463 }
1464 else
1465 {
1466 tcds_done = (int32_t)new_header - (int32_t)handle->header;
1467 if (tcds_done < 0)
1468 {
1469 tcds_done += handle->tcdSize;
1470 }
1471 }
1472 /* Advance header which points to the TCD to be loaded into the eDMA engine from memory. */
1473 handle->header = (int8_t)new_header;
1474 /* Release TCD blocks. tcdUsed is the TCD number which can be used/loaded in the memory pool. */
1475 handle->tcdUsed -= (int8_t)tcds_done;
1476 /* Invoke callback function. */
1477 if (NULL != handle->callback)
1478 {
1479 (handle->callback)(handle, handle->userData, transfer_done, tcds_done);
1480 }
1481
1482 /*
         * 1. Clearing the DONE bit here is meaningful for the following case:
         *    a new TCD has already been loaded into the eDMA engine, so the DONE bit needs to be cleared
         *    in the IRQ handler to avoid the TCD in the eDMA being overwritten
         *    if the peripheral request does not come before the next transfer request.
         * 2. Do not clear the DONE bit for the following case:
         *    a transfer request submitted in the previous eDMA callback. This is a case that does not
         *    need scatter/gather, so keep the DONE bit; the next transfer request submission will re-install the TCD
         *    and the DONE bit will be cleared together with the TCD re-installation.
1491 */
1492 if (transfer_done)
1493 {
1494 if ((handle->base->TCD[handle->channel].CSR & DMA_CSR_ESG_MASK) != 0U)
1495 {
1496 handle->base->CDNE = handle->channel;
1497 }
1498 }
1499 }
1500}
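
/*
 * Illustrative sketch only (not part of the driver, compiled out by default): it shows how an application
 * could forward its DMA channel interrupt into EDMA_HandleIRQ and consume the transferDone/tcds arguments
 * in the callback. The guard macro EDMA_DRIVER_USAGE_EXAMPLE, the vector name APP_DMA_CH0_IRQHandler and the
 * APP_* helpers are hypothetical names used for this sketch; DMA0 is assumed to exist on the device, and
 * EDMA_CreateHandle/EDMA_SetCallback are the public APIs declared in fsl_edma.h.
 */
#if defined(EDMA_DRIVER_USAGE_EXAMPLE) && EDMA_DRIVER_USAGE_EXAMPLE
static edma_handle_t g_exampleHandle;
static volatile bool g_exampleTransferDone = false;

/* Application callback: transferDone mirrors the hardware DONE flag, tcds is the number of finished queue TCDs. */
static void APP_EdmaCallback(edma_handle_t *handle, void *userData, bool transferDone, uint32_t tcds)
{
    (void)handle;
    (void)tcds;
    if (transferDone)
    {
        *(volatile bool *)userData = true; /* Signal the application that the major loop has completed. */
    }
}

/* Application-provided vector for DMA0 channel 0; it simply dispatches to the driver's common IRQ handler. */
void APP_DMA_CH0_IRQHandler(void)
{
    EDMA_HandleIRQ(&g_exampleHandle);
    SDK_ISR_EXIT_BARRIER;
}

/* One-time setup: bind the handle to DMA0 channel 0 and register the callback invoked by EDMA_HandleIRQ. */
static void APP_EdmaExampleSetup(void)
{
    EDMA_CreateHandle(&g_exampleHandle, DMA0, 0U);
    EDMA_SetCallback(&g_exampleHandle, APP_EdmaCallback, (void *)&g_exampleTransferDone);
}
#endif /* EDMA_DRIVER_USAGE_EXAMPLE */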
1501
1502#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1503 (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 4)
1504/* 8 channels (Shared): kl28 */
1505#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 8U)
1506
1507#if defined(DMA0)
1508void DMA0_04_DriverIRQHandler(void);
1509void DMA0_04_DriverIRQHandler(void)
1510{
1511 if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1512 {
1513 EDMA_HandleIRQ(s_EDMAHandle[0]);
1514 }
1515 if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1516 {
1517 EDMA_HandleIRQ(s_EDMAHandle[4]);
1518 }
1519 SDK_ISR_EXIT_BARRIER;
1520}
1521
1522void DMA0_15_DriverIRQHandler(void);
1523void DMA0_15_DriverIRQHandler(void)
1524{
1525 if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1526 {
1527 EDMA_HandleIRQ(s_EDMAHandle[1]);
1528 }
1529 if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1530 {
1531 EDMA_HandleIRQ(s_EDMAHandle[5]);
1532 }
1533 SDK_ISR_EXIT_BARRIER;
1534}
1535
1536void DMA0_26_DriverIRQHandler(void);
1537void DMA0_26_DriverIRQHandler(void)
1538{
1539 if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1540 {
1541 EDMA_HandleIRQ(s_EDMAHandle[2]);
1542 }
1543 if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1544 {
1545 EDMA_HandleIRQ(s_EDMAHandle[6]);
1546 }
1547 SDK_ISR_EXIT_BARRIER;
1548}
1549
1550void DMA0_37_DriverIRQHandler(void);
1551void DMA0_37_DriverIRQHandler(void)
1552{
1553 if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1554 {
1555 EDMA_HandleIRQ(s_EDMAHandle[3]);
1556 }
1557 if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1558 {
1559 EDMA_HandleIRQ(s_EDMAHandle[7]);
1560 }
1561 SDK_ISR_EXIT_BARRIER;
1562}
1563#endif
1564
1565#if defined(DMA1)
1566
1567#if defined(DMA0)
1568void DMA1_04_DriverIRQHandler(void);
1569void DMA1_04_DriverIRQHandler(void)
1570{
1571 if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1572 {
1573 EDMA_HandleIRQ(s_EDMAHandle[8]);
1574 }
1575 if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1576 {
1577 EDMA_HandleIRQ(s_EDMAHandle[12]);
1578 }
1579 SDK_ISR_EXIT_BARRIER;
1580}
1581
1582void DMA1_15_DriverIRQHandler(void);
1583void DMA1_15_DriverIRQHandler(void)
1584{
1585 if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1586 {
1587 EDMA_HandleIRQ(s_EDMAHandle[9]);
1588 }
1589 if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1590 {
1591 EDMA_HandleIRQ(s_EDMAHandle[13]);
1592 }
1593 SDK_ISR_EXIT_BARRIER;
1594}
1595
1596void DMA1_26_DriverIRQHandler(void);
1597void DMA1_26_DriverIRQHandler(void)
1598{
1599 if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1600 {
1601 EDMA_HandleIRQ(s_EDMAHandle[10]);
1602 }
1603 if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1604 {
1605 EDMA_HandleIRQ(s_EDMAHandle[14]);
1606 }
1607 SDK_ISR_EXIT_BARRIER;
1608}
1609
1610void DMA1_37_DriverIRQHandler(void);
1611void DMA1_37_DriverIRQHandler(void)
1612{
1613 if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1614 {
1615 EDMA_HandleIRQ(s_EDMAHandle[11]);
1616 }
1617 if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1618 {
1619 EDMA_HandleIRQ(s_EDMAHandle[15]);
1620 }
1621 SDK_ISR_EXIT_BARRIER;
1622}
1623
1624#else
1625void DMA1_04_DriverIRQHandler(void);
1626void DMA1_04_DriverIRQHandler(void)
1627{
1628 if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1629 {
1630 EDMA_HandleIRQ(s_EDMAHandle[0]);
1631 }
1632 if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1633 {
1634 EDMA_HandleIRQ(s_EDMAHandle[4]);
1635 }
1636 SDK_ISR_EXIT_BARRIER;
1637}
1638
1639void DMA1_15_DriverIRQHandler(void);
1640void DMA1_15_DriverIRQHandler(void)
1641{
1642 if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1643 {
1644 EDMA_HandleIRQ(s_EDMAHandle[1]);
1645 }
1646 if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1647 {
1648 EDMA_HandleIRQ(s_EDMAHandle[5]);
1649 }
1650 SDK_ISR_EXIT_BARRIER;
1651}
1652
1653void DMA1_26_DriverIRQHandler(void);
1654void DMA1_26_DriverIRQHandler(void)
1655{
1656 if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1657 {
1658 EDMA_HandleIRQ(s_EDMAHandle[2]);
1659 }
1660 if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1661 {
1662 EDMA_HandleIRQ(s_EDMAHandle[6]);
1663 }
1664 SDK_ISR_EXIT_BARRIER;
1665}
1666
1667void DMA1_37_DriverIRQHandler(void);
1668void DMA1_37_DriverIRQHandler(void)
1669{
1670 if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1671 {
1672 EDMA_HandleIRQ(s_EDMAHandle[3]);
1673 }
1674 if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1675 {
1676 EDMA_HandleIRQ(s_EDMAHandle[7]);
1677 }
1678 SDK_ISR_EXIT_BARRIER;
1679}
1680#endif
1681#endif
1682#endif /* 8 channels (Shared) */
1683#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
1684
1685#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1686 (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 8)
1687/* 16 channels (Shared): K32H844P */
1688#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 16U)
1689
1690void DMA0_08_DriverIRQHandler(void);
1691void DMA0_08_DriverIRQHandler(void)
1692{
1693 if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1694 {
1695 EDMA_HandleIRQ(s_EDMAHandle[0]);
1696 }
1697 if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1698 {
1699 EDMA_HandleIRQ(s_EDMAHandle[8]);
1700 }
1701 SDK_ISR_EXIT_BARRIER;
1702}
1703
1704void DMA0_19_DriverIRQHandler(void);
1705void DMA0_19_DriverIRQHandler(void)
1706{
1707 if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1708 {
1709 EDMA_HandleIRQ(s_EDMAHandle[1]);
1710 }
1711 if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1712 {
1713 EDMA_HandleIRQ(s_EDMAHandle[9]);
1714 }
1715 SDK_ISR_EXIT_BARRIER;
1716}
1717
1718void DMA0_210_DriverIRQHandler(void);
1719void DMA0_210_DriverIRQHandler(void)
1720{
1721 if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1722 {
1723 EDMA_HandleIRQ(s_EDMAHandle[2]);
1724 }
1725 if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1726 {
1727 EDMA_HandleIRQ(s_EDMAHandle[10]);
1728 }
1729 SDK_ISR_EXIT_BARRIER;
1730}
1731
1732void DMA0_311_DriverIRQHandler(void);
1733void DMA0_311_DriverIRQHandler(void)
1734{
1735 if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1736 {
1737 EDMA_HandleIRQ(s_EDMAHandle[3]);
1738 }
1739 if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1740 {
1741 EDMA_HandleIRQ(s_EDMAHandle[11]);
1742 }
1743 SDK_ISR_EXIT_BARRIER;
1744}
1745
1746void DMA0_412_DriverIRQHandler(void);
1747void DMA0_412_DriverIRQHandler(void)
1748{
1749 if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1750 {
1751 EDMA_HandleIRQ(s_EDMAHandle[4]);
1752 }
1753 if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1754 {
1755 EDMA_HandleIRQ(s_EDMAHandle[12]);
1756 }
1757 SDK_ISR_EXIT_BARRIER;
1758}
1759
1760void DMA0_513_DriverIRQHandler(void);
1761void DMA0_513_DriverIRQHandler(void)
1762{
1763 if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1764 {
1765 EDMA_HandleIRQ(s_EDMAHandle[5]);
1766 }
1767 if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1768 {
1769 EDMA_HandleIRQ(s_EDMAHandle[13]);
1770 }
1771 SDK_ISR_EXIT_BARRIER;
1772}
1773
1774void DMA0_614_DriverIRQHandler(void);
1775void DMA0_614_DriverIRQHandler(void)
1776{
1777 if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1778 {
1779 EDMA_HandleIRQ(s_EDMAHandle[6]);
1780 }
1781 if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1782 {
1783 EDMA_HandleIRQ(s_EDMAHandle[14]);
1784 }
1785 SDK_ISR_EXIT_BARRIER;
1786}
1787
1788void DMA0_715_DriverIRQHandler(void);
1789void DMA0_715_DriverIRQHandler(void)
1790{
1791 if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1792 {
1793 EDMA_HandleIRQ(s_EDMAHandle[7]);
1794 }
1795 if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1796 {
1797 EDMA_HandleIRQ(s_EDMAHandle[15]);
1798 }
1799 SDK_ISR_EXIT_BARRIER;
1800}
1801
1802#if defined(DMA1)
1803void DMA1_08_DriverIRQHandler(void);
1804void DMA1_08_DriverIRQHandler(void)
1805{
1806 if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1807 {
1808 EDMA_HandleIRQ(s_EDMAHandle[16]);
1809 }
1810 if ((EDMA_GetChannelStatusFlags(DMA1, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1811 {
1812 EDMA_HandleIRQ(s_EDMAHandle[24]);
1813 }
1814 SDK_ISR_EXIT_BARRIER;
1815}
1816
1817void DMA1_19_DriverIRQHandler(void);
1818void DMA1_19_DriverIRQHandler(void)
1819{
1820 if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1821 {
1822 EDMA_HandleIRQ(s_EDMAHandle[17]);
1823 }
1824 if ((EDMA_GetChannelStatusFlags(DMA1, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1825 {
1826 EDMA_HandleIRQ(s_EDMAHandle[25]);
1827 }
1828 SDK_ISR_EXIT_BARRIER;
1829}
1830
1831void DMA1_210_DriverIRQHandler(void);
1832void DMA1_210_DriverIRQHandler(void)
1833{
1834 if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1835 {
1836 EDMA_HandleIRQ(s_EDMAHandle[18]);
1837 }
1838 if ((EDMA_GetChannelStatusFlags(DMA1, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1839 {
1840 EDMA_HandleIRQ(s_EDMAHandle[26]);
1841 }
1842 SDK_ISR_EXIT_BARRIER;
1843}
1844
1845void DMA1_311_DriverIRQHandler(void);
1846void DMA1_311_DriverIRQHandler(void)
1847{
1848 if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1849 {
1850 EDMA_HandleIRQ(s_EDMAHandle[19]);
1851 }
1852 if ((EDMA_GetChannelStatusFlags(DMA1, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1853 {
1854 EDMA_HandleIRQ(s_EDMAHandle[27]);
1855 }
1856 SDK_ISR_EXIT_BARRIER;
1857}
1858
1859void DMA1_412_DriverIRQHandler(void);
1860void DMA1_412_DriverIRQHandler(void)
1861{
1862 if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1863 {
1864 EDMA_HandleIRQ(s_EDMAHandle[20]);
1865 }
1866 if ((EDMA_GetChannelStatusFlags(DMA1, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1867 {
1868 EDMA_HandleIRQ(s_EDMAHandle[28]);
1869 }
1870 SDK_ISR_EXIT_BARRIER;
1871}
1872
1873void DMA1_513_DriverIRQHandler(void);
1874void DMA1_513_DriverIRQHandler(void)
1875{
1876 if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1877 {
1878 EDMA_HandleIRQ(s_EDMAHandle[21]);
1879 }
1880 if ((EDMA_GetChannelStatusFlags(DMA1, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1881 {
1882 EDMA_HandleIRQ(s_EDMAHandle[29]);
1883 }
1884 SDK_ISR_EXIT_BARRIER;
1885}
1886
1887void DMA1_614_DriverIRQHandler(void);
1888void DMA1_614_DriverIRQHandler(void)
1889{
1890 if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1891 {
1892 EDMA_HandleIRQ(s_EDMAHandle[22]);
1893 }
1894 if ((EDMA_GetChannelStatusFlags(DMA1, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1895 {
1896 EDMA_HandleIRQ(s_EDMAHandle[30]);
1897 }
1898 SDK_ISR_EXIT_BARRIER;
1899}
1900
1901void DMA1_715_DriverIRQHandler(void);
1902void DMA1_715_DriverIRQHandler(void)
1903{
1904 if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1905 {
1906 EDMA_HandleIRQ(s_EDMAHandle[23]);
1907 }
1908 if ((EDMA_GetChannelStatusFlags(DMA1, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1909 {
1910 EDMA_HandleIRQ(s_EDMAHandle[31]);
1911 }
1912 SDK_ISR_EXIT_BARRIER;
1913}
1914#endif
1915#endif /* 16 channels (Shared) */
1916#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
1917
1918#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
1919 (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 16)
1920/* 32 channels (Shared): k80 */
1921#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && FSL_FEATURE_EDMA_MODULE_CHANNEL == 32U
1922#if defined(DMA0)
1923void DMA0_DMA16_DriverIRQHandler(void);
1924void DMA0_DMA16_DriverIRQHandler(void)
1925{
1926 if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1927 {
1928 EDMA_HandleIRQ(s_EDMAHandle[0]);
1929 }
1930 if ((EDMA_GetChannelStatusFlags(DMA0, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1931 {
1932 EDMA_HandleIRQ(s_EDMAHandle[16]);
1933 }
1934 SDK_ISR_EXIT_BARRIER;
1935}
1936
1937void DMA1_DMA17_DriverIRQHandler(void);
1938void DMA1_DMA17_DriverIRQHandler(void)
1939{
1940 if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1941 {
1942 EDMA_HandleIRQ(s_EDMAHandle[1]);
1943 }
1944 if ((EDMA_GetChannelStatusFlags(DMA0, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1945 {
1946 EDMA_HandleIRQ(s_EDMAHandle[17]);
1947 }
1948 SDK_ISR_EXIT_BARRIER;
1949}
1950
1951void DMA2_DMA18_DriverIRQHandler(void);
1952void DMA2_DMA18_DriverIRQHandler(void)
1953{
1954 if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1955 {
1956 EDMA_HandleIRQ(s_EDMAHandle[2]);
1957 }
1958 if ((EDMA_GetChannelStatusFlags(DMA0, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1959 {
1960 EDMA_HandleIRQ(s_EDMAHandle[18]);
1961 }
1962 SDK_ISR_EXIT_BARRIER;
1963}
1964
1965void DMA3_DMA19_DriverIRQHandler(void);
1966void DMA3_DMA19_DriverIRQHandler(void)
1967{
1968 if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1969 {
1970 EDMA_HandleIRQ(s_EDMAHandle[3]);
1971 }
1972 if ((EDMA_GetChannelStatusFlags(DMA0, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1973 {
1974 EDMA_HandleIRQ(s_EDMAHandle[19]);
1975 }
1976 SDK_ISR_EXIT_BARRIER;
1977}
1978
1979void DMA4_DMA20_DriverIRQHandler(void);
1980void DMA4_DMA20_DriverIRQHandler(void)
1981{
1982 if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1983 {
1984 EDMA_HandleIRQ(s_EDMAHandle[4]);
1985 }
1986 if ((EDMA_GetChannelStatusFlags(DMA0, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1987 {
1988 EDMA_HandleIRQ(s_EDMAHandle[20]);
1989 }
1990 SDK_ISR_EXIT_BARRIER;
1991}
1992
1993void DMA5_DMA21_DriverIRQHandler(void);
1994void DMA5_DMA21_DriverIRQHandler(void)
1995{
1996 if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
1997 {
1998 EDMA_HandleIRQ(s_EDMAHandle[5]);
1999 }
2000 if ((EDMA_GetChannelStatusFlags(DMA0, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2001 {
2002 EDMA_HandleIRQ(s_EDMAHandle[21]);
2003 }
2004 SDK_ISR_EXIT_BARRIER;
2005}
2006
2007void DMA6_DMA22_DriverIRQHandler(void);
2008void DMA6_DMA22_DriverIRQHandler(void)
2009{
2010 if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2011 {
2012 EDMA_HandleIRQ(s_EDMAHandle[6]);
2013 }
2014 if ((EDMA_GetChannelStatusFlags(DMA0, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2015 {
2016 EDMA_HandleIRQ(s_EDMAHandle[22]);
2017 }
2018 SDK_ISR_EXIT_BARRIER;
2019}
2020
2021void DMA7_DMA23_DriverIRQHandler(void);
2022void DMA7_DMA23_DriverIRQHandler(void)
2023{
2024 if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2025 {
2026 EDMA_HandleIRQ(s_EDMAHandle[7]);
2027 }
2028 if ((EDMA_GetChannelStatusFlags(DMA0, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2029 {
2030 EDMA_HandleIRQ(s_EDMAHandle[23]);
2031 }
2032 SDK_ISR_EXIT_BARRIER;
2033}
2034
2035void DMA8_DMA24_DriverIRQHandler(void);
2036void DMA8_DMA24_DriverIRQHandler(void)
2037{
2038 if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2039 {
2040 EDMA_HandleIRQ(s_EDMAHandle[8]);
2041 }
2042 if ((EDMA_GetChannelStatusFlags(DMA0, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2043 {
2044 EDMA_HandleIRQ(s_EDMAHandle[24]);
2045 }
2046 SDK_ISR_EXIT_BARRIER;
2047}
2048
2049void DMA9_DMA25_DriverIRQHandler(void);
2050void DMA9_DMA25_DriverIRQHandler(void)
2051{
2052 if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2053 {
2054 EDMA_HandleIRQ(s_EDMAHandle[9]);
2055 }
2056 if ((EDMA_GetChannelStatusFlags(DMA0, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2057 {
2058 EDMA_HandleIRQ(s_EDMAHandle[25]);
2059 }
2060 SDK_ISR_EXIT_BARRIER;
2061}
2062
2063void DMA10_DMA26_DriverIRQHandler(void);
2064void DMA10_DMA26_DriverIRQHandler(void)
2065{
2066 if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2067 {
2068 EDMA_HandleIRQ(s_EDMAHandle[10]);
2069 }
2070 if ((EDMA_GetChannelStatusFlags(DMA0, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2071 {
2072 EDMA_HandleIRQ(s_EDMAHandle[26]);
2073 }
2074 SDK_ISR_EXIT_BARRIER;
2075}
2076
2077void DMA11_DMA27_DriverIRQHandler(void);
2078void DMA11_DMA27_DriverIRQHandler(void)
2079{
2080 if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2081 {
2082 EDMA_HandleIRQ(s_EDMAHandle[11]);
2083 }
2084 if ((EDMA_GetChannelStatusFlags(DMA0, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2085 {
2086 EDMA_HandleIRQ(s_EDMAHandle[27]);
2087 }
2088 SDK_ISR_EXIT_BARRIER;
2089}
2090
2091void DMA12_DMA28_DriverIRQHandler(void);
2092void DMA12_DMA28_DriverIRQHandler(void)
2093{
2094 if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2095 {
2096 EDMA_HandleIRQ(s_EDMAHandle[12]);
2097 }
2098 if ((EDMA_GetChannelStatusFlags(DMA0, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2099 {
2100 EDMA_HandleIRQ(s_EDMAHandle[28]);
2101 }
2102 SDK_ISR_EXIT_BARRIER;
2103}
2104
2105void DMA13_DMA29_DriverIRQHandler(void);
2106void DMA13_DMA29_DriverIRQHandler(void)
2107{
2108 if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2109 {
2110 EDMA_HandleIRQ(s_EDMAHandle[13]);
2111 }
2112 if ((EDMA_GetChannelStatusFlags(DMA0, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2113 {
2114 EDMA_HandleIRQ(s_EDMAHandle[29]);
2115 }
2116 SDK_ISR_EXIT_BARRIER;
2117}
2118
2119void DMA14_DMA30_DriverIRQHandler(void);
2120void DMA14_DMA30_DriverIRQHandler(void)
2121{
2122 if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2123 {
2124 EDMA_HandleIRQ(s_EDMAHandle[14]);
2125 }
2126 if ((EDMA_GetChannelStatusFlags(DMA0, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2127 {
2128 EDMA_HandleIRQ(s_EDMAHandle[30]);
2129 }
2130 SDK_ISR_EXIT_BARRIER;
2131}
2132
2133void DMA15_DMA31_DriverIRQHandler(void);
2134void DMA15_DMA31_DriverIRQHandler(void)
2135{
2136 if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2137 {
2138 EDMA_HandleIRQ(s_EDMAHandle[15]);
2139 }
2140 if ((EDMA_GetChannelStatusFlags(DMA0, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2141 {
2142 EDMA_HandleIRQ(s_EDMAHandle[31]);
2143 }
2144 SDK_ISR_EXIT_BARRIER;
2145}
2146
2147#else
2148void DMA0_DMA16_DriverIRQHandler(void);
2149void DMA0_DMA16_DriverIRQHandler(void)
2150{
2151 if ((EDMA_GetChannelStatusFlags(DMA1, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2152 {
2153 EDMA_HandleIRQ(s_EDMAHandle[0]);
2154 }
2155 if ((EDMA_GetChannelStatusFlags(DMA1, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2156 {
2157 EDMA_HandleIRQ(s_EDMAHandle[16]);
2158 }
2159 SDK_ISR_EXIT_BARRIER;
2160}
2161
2162void DMA1_DMA17_DriverIRQHandler(void);
2163void DMA1_DMA17_DriverIRQHandler(void)
2164{
2165 if ((EDMA_GetChannelStatusFlags(DMA1, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2166 {
2167 EDMA_HandleIRQ(s_EDMAHandle[1]);
2168 }
2169 if ((EDMA_GetChannelStatusFlags(DMA1, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2170 {
2171 EDMA_HandleIRQ(s_EDMAHandle[17]);
2172 }
2173 SDK_ISR_EXIT_BARRIER;
2174}
2175
2176void DMA2_DMA18_DriverIRQHandler(void);
2177void DMA2_DMA18_DriverIRQHandler(void)
2178{
2179 if ((EDMA_GetChannelStatusFlags(DMA1, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2180 {
2181 EDMA_HandleIRQ(s_EDMAHandle[2]);
2182 }
2183 if ((EDMA_GetChannelStatusFlags(DMA1, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2184 {
2185 EDMA_HandleIRQ(s_EDMAHandle[18]);
2186 }
2187 SDK_ISR_EXIT_BARRIER;
2188}
2189
2190void DMA3_DMA19_DriverIRQHandler(void);
2191void DMA3_DMA19_DriverIRQHandler(void)
2192{
2193 if ((EDMA_GetChannelStatusFlags(DMA1, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2194 {
2195 EDMA_HandleIRQ(s_EDMAHandle[3]);
2196 }
2197 if ((EDMA_GetChannelStatusFlags(DMA1, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2198 {
2199 EDMA_HandleIRQ(s_EDMAHandle[19]);
2200 }
2201 SDK_ISR_EXIT_BARRIER;
2202}
2203
2204void DMA4_DMA20_DriverIRQHandler(void);
2205void DMA4_DMA20_DriverIRQHandler(void)
2206{
2207 if ((EDMA_GetChannelStatusFlags(DMA1, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2208 {
2209 EDMA_HandleIRQ(s_EDMAHandle[4]);
2210 }
2211 if ((EDMA_GetChannelStatusFlags(DMA1, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2212 {
2213 EDMA_HandleIRQ(s_EDMAHandle[20]);
2214 }
2215 SDK_ISR_EXIT_BARRIER;
2216}
2217
2218void DMA5_DMA21_DriverIRQHandler(void);
2219void DMA5_DMA21_DriverIRQHandler(void)
2220{
2221 if ((EDMA_GetChannelStatusFlags(DMA1, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2222 {
2223 EDMA_HandleIRQ(s_EDMAHandle[5]);
2224 }
2225 if ((EDMA_GetChannelStatusFlags(DMA1, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2226 {
2227 EDMA_HandleIRQ(s_EDMAHandle[21]);
2228 }
2229 SDK_ISR_EXIT_BARRIER;
2230}
2231
2232void DMA6_DMA22_DriverIRQHandler(void);
2233void DMA6_DMA22_DriverIRQHandler(void)
2234{
2235 if ((EDMA_GetChannelStatusFlags(DMA1, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2236 {
2237 EDMA_HandleIRQ(s_EDMAHandle[6]);
2238 }
2239 if ((EDMA_GetChannelStatusFlags(DMA1, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2240 {
2241 EDMA_HandleIRQ(s_EDMAHandle[22]);
2242 }
2243 SDK_ISR_EXIT_BARRIER;
2244}
2245
2246void DMA7_DMA23_DriverIRQHandler(void);
2247void DMA7_DMA23_DriverIRQHandler(void)
2248{
2249 if ((EDMA_GetChannelStatusFlags(DMA1, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2250 {
2251 EDMA_HandleIRQ(s_EDMAHandle[7]);
2252 }
2253 if ((EDMA_GetChannelStatusFlags(DMA1, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2254 {
2255 EDMA_HandleIRQ(s_EDMAHandle[23]);
2256 }
2257 SDK_ISR_EXIT_BARRIER;
2258}
2259
2260void DMA8_DMA24_DriverIRQHandler(void);
2261void DMA8_DMA24_DriverIRQHandler(void)
2262{
2263 if ((EDMA_GetChannelStatusFlags(DMA1, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2264 {
2265 EDMA_HandleIRQ(s_EDMAHandle[8]);
2266 }
2267 if ((EDMA_GetChannelStatusFlags(DMA1, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2268 {
2269 EDMA_HandleIRQ(s_EDMAHandle[24]);
2270 }
2271 SDK_ISR_EXIT_BARRIER;
2272}
2273
2274void DMA9_DMA25_DriverIRQHandler(void);
2275void DMA9_DMA25_DriverIRQHandler(void)
2276{
2277 if ((EDMA_GetChannelStatusFlags(DMA1, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2278 {
2279 EDMA_HandleIRQ(s_EDMAHandle[9]);
2280 }
2281 if ((EDMA_GetChannelStatusFlags(DMA1, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2282 {
2283 EDMA_HandleIRQ(s_EDMAHandle[25]);
2284 }
2285 SDK_ISR_EXIT_BARRIER;
2286}
2287
2288void DMA10_DMA26_DriverIRQHandler(void);
2289void DMA10_DMA26_DriverIRQHandler(void)
2290{
2291 if ((EDMA_GetChannelStatusFlags(DMA1, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2292 {
2293 EDMA_HandleIRQ(s_EDMAHandle[10]);
2294 }
2295 if ((EDMA_GetChannelStatusFlags(DMA1, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2296 {
2297 EDMA_HandleIRQ(s_EDMAHandle[26]);
2298 }
2299 SDK_ISR_EXIT_BARRIER;
2300}
2301
2302void DMA11_DMA27_DriverIRQHandler(void);
2303void DMA11_DMA27_DriverIRQHandler(void)
2304{
2305 if ((EDMA_GetChannelStatusFlags(DMA1, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2306 {
2307 EDMA_HandleIRQ(s_EDMAHandle[11]);
2308 }
2309 if ((EDMA_GetChannelStatusFlags(DMA1, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2310 {
2311 EDMA_HandleIRQ(s_EDMAHandle[27]);
2312 }
2313 SDK_ISR_EXIT_BARRIER;
2314}
2315
2316void DMA12_DMA28_DriverIRQHandler(void);
2317void DMA12_DMA28_DriverIRQHandler(void)
2318{
2319 if ((EDMA_GetChannelStatusFlags(DMA1, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2320 {
2321 EDMA_HandleIRQ(s_EDMAHandle[12]);
2322 }
2323 if ((EDMA_GetChannelStatusFlags(DMA1, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2324 {
2325 EDMA_HandleIRQ(s_EDMAHandle[28]);
2326 }
2327 SDK_ISR_EXIT_BARRIER;
2328}
2329
2330void DMA13_DMA29_DriverIRQHandler(void);
2331void DMA13_DMA29_DriverIRQHandler(void)
2332{
2333 if ((EDMA_GetChannelStatusFlags(DMA1, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2334 {
2335 EDMA_HandleIRQ(s_EDMAHandle[13]);
2336 }
2337 if ((EDMA_GetChannelStatusFlags(DMA1, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2338 {
2339 EDMA_HandleIRQ(s_EDMAHandle[29]);
2340 }
2341 SDK_ISR_EXIT_BARRIER;
2342}
2343
2344void DMA14_DMA30_DriverIRQHandler(void);
2345void DMA14_DMA30_DriverIRQHandler(void)
2346{
2347 if ((EDMA_GetChannelStatusFlags(DMA1, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2348 {
2349 EDMA_HandleIRQ(s_EDMAHandle[14]);
2350 }
2351 if ((EDMA_GetChannelStatusFlags(DMA1, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2352 {
2353 EDMA_HandleIRQ(s_EDMAHandle[30]);
2354 }
2355 SDK_ISR_EXIT_BARRIER;
2356}
2357
2358void DMA15_DMA31_DriverIRQHandler(void);
2359void DMA15_DMA31_DriverIRQHandler(void)
2360{
2361 if ((EDMA_GetChannelStatusFlags(DMA1, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2362 {
2363 EDMA_HandleIRQ(s_EDMAHandle[15]);
2364 }
2365 if ((EDMA_GetChannelStatusFlags(DMA1, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2366 {
2367 EDMA_HandleIRQ(s_EDMAHandle[31]);
2368 }
2369 SDK_ISR_EXIT_BARRIER;
2370}
2371
2372#endif
2373#endif /* 32 channels (Shared) */
2374#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2375
2376#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
2377 (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 4)
2378/* 32 channels (Shared): MCIMX7U5_M4 */
2379#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL == 32U)
2380
2381void DMA0_0_4_DriverIRQHandler(void);
2382void DMA0_0_4_DriverIRQHandler(void)
2383{
2384 if ((EDMA_GetChannelStatusFlags(DMA0, 0U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2385 {
2386 EDMA_HandleIRQ(s_EDMAHandle[0]);
2387 }
2388 if ((EDMA_GetChannelStatusFlags(DMA0, 4U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2389 {
2390 EDMA_HandleIRQ(s_EDMAHandle[4]);
2391 }
2392 SDK_ISR_EXIT_BARRIER;
2393}
2394
2395void DMA0_1_5_DriverIRQHandler(void);
2396void DMA0_1_5_DriverIRQHandler(void)
2397{
2398 if ((EDMA_GetChannelStatusFlags(DMA0, 1U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2399 {
2400 EDMA_HandleIRQ(s_EDMAHandle[1]);
2401 }
2402 if ((EDMA_GetChannelStatusFlags(DMA0, 5U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2403 {
2404 EDMA_HandleIRQ(s_EDMAHandle[5]);
2405 }
2406 SDK_ISR_EXIT_BARRIER;
2407}
2408
2409void DMA0_2_6_DriverIRQHandler(void);
2410void DMA0_2_6_DriverIRQHandler(void)
2411{
2412 if ((EDMA_GetChannelStatusFlags(DMA0, 2U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2413 {
2414 EDMA_HandleIRQ(s_EDMAHandle[2]);
2415 }
2416 if ((EDMA_GetChannelStatusFlags(DMA0, 6U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2417 {
2418 EDMA_HandleIRQ(s_EDMAHandle[6]);
2419 }
2420 SDK_ISR_EXIT_BARRIER;
2421}
2422
2423void DMA0_3_7_DriverIRQHandler(void);
2424void DMA0_3_7_DriverIRQHandler(void)
2425{
2426 if ((EDMA_GetChannelStatusFlags(DMA0, 3U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2427 {
2428 EDMA_HandleIRQ(s_EDMAHandle[3]);
2429 }
2430 if ((EDMA_GetChannelStatusFlags(DMA0, 7U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2431 {
2432 EDMA_HandleIRQ(s_EDMAHandle[7]);
2433 }
2434 SDK_ISR_EXIT_BARRIER;
2435}
2436
2437void DMA0_8_12_DriverIRQHandler(void);
2438void DMA0_8_12_DriverIRQHandler(void)
2439{
2440 if ((EDMA_GetChannelStatusFlags(DMA0, 8U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2441 {
2442 EDMA_HandleIRQ(s_EDMAHandle[8]);
2443 }
2444 if ((EDMA_GetChannelStatusFlags(DMA0, 12U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2445 {
2446 EDMA_HandleIRQ(s_EDMAHandle[12]);
2447 }
2448 SDK_ISR_EXIT_BARRIER;
2449}
2450
2451void DMA0_9_13_DriverIRQHandler(void);
2452void DMA0_9_13_DriverIRQHandler(void)
2453{
2454 if ((EDMA_GetChannelStatusFlags(DMA0, 9U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2455 {
2456 EDMA_HandleIRQ(s_EDMAHandle[9]);
2457 }
2458 if ((EDMA_GetChannelStatusFlags(DMA0, 13U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2459 {
2460 EDMA_HandleIRQ(s_EDMAHandle[13]);
2461 }
2462 SDK_ISR_EXIT_BARRIER;
2463}
2464
2465void DMA0_10_14_DriverIRQHandler(void);
2466void DMA0_10_14_DriverIRQHandler(void)
2467{
2468 if ((EDMA_GetChannelStatusFlags(DMA0, 10U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2469 {
2470 EDMA_HandleIRQ(s_EDMAHandle[10]);
2471 }
2472 if ((EDMA_GetChannelStatusFlags(DMA0, 14U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2473 {
2474 EDMA_HandleIRQ(s_EDMAHandle[14]);
2475 }
2476 SDK_ISR_EXIT_BARRIER;
2477}
2478
2479void DMA0_11_15_DriverIRQHandler(void);
2480void DMA0_11_15_DriverIRQHandler(void)
2481{
2482 if ((EDMA_GetChannelStatusFlags(DMA0, 11U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2483 {
2484 EDMA_HandleIRQ(s_EDMAHandle[11]);
2485 }
2486 if ((EDMA_GetChannelStatusFlags(DMA0, 15U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2487 {
2488 EDMA_HandleIRQ(s_EDMAHandle[15]);
2489 }
2490 SDK_ISR_EXIT_BARRIER;
2491}
2492
2493void DMA0_16_20_DriverIRQHandler(void);
2494void DMA0_16_20_DriverIRQHandler(void)
2495{
2496 if ((EDMA_GetChannelStatusFlags(DMA0, 16U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2497 {
2498 EDMA_HandleIRQ(s_EDMAHandle[16]);
2499 }
2500 if ((EDMA_GetChannelStatusFlags(DMA0, 20U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2501 {
2502 EDMA_HandleIRQ(s_EDMAHandle[20]);
2503 }
2504 SDK_ISR_EXIT_BARRIER;
2505}
2506
2507void DMA0_17_21_DriverIRQHandler(void);
2508void DMA0_17_21_DriverIRQHandler(void)
2509{
2510 if ((EDMA_GetChannelStatusFlags(DMA0, 17U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2511 {
2512 EDMA_HandleIRQ(s_EDMAHandle[17]);
2513 }
2514 if ((EDMA_GetChannelStatusFlags(DMA0, 21U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2515 {
2516 EDMA_HandleIRQ(s_EDMAHandle[21]);
2517 }
2518 SDK_ISR_EXIT_BARRIER;
2519}
2520
2521void DMA0_18_22_DriverIRQHandler(void);
2522void DMA0_18_22_DriverIRQHandler(void)
2523{
2524 if ((EDMA_GetChannelStatusFlags(DMA0, 18U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2525 {
2526 EDMA_HandleIRQ(s_EDMAHandle[18]);
2527 }
2528 if ((EDMA_GetChannelStatusFlags(DMA0, 22U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2529 {
2530 EDMA_HandleIRQ(s_EDMAHandle[22]);
2531 }
2532 SDK_ISR_EXIT_BARRIER;
2533}
2534
2535void DMA0_19_23_DriverIRQHandler(void);
2536void DMA0_19_23_DriverIRQHandler(void)
2537{
2538 if ((EDMA_GetChannelStatusFlags(DMA0, 19U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2539 {
2540 EDMA_HandleIRQ(s_EDMAHandle[19]);
2541 }
2542 if ((EDMA_GetChannelStatusFlags(DMA0, 23U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2543 {
2544 EDMA_HandleIRQ(s_EDMAHandle[23]);
2545 }
2546 SDK_ISR_EXIT_BARRIER;
2547}
2548
2549void DMA0_24_28_DriverIRQHandler(void);
2550void DMA0_24_28_DriverIRQHandler(void)
2551{
2552 if ((EDMA_GetChannelStatusFlags(DMA0, 24U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2553 {
2554 EDMA_HandleIRQ(s_EDMAHandle[24]);
2555 }
2556 if ((EDMA_GetChannelStatusFlags(DMA0, 28U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2557 {
2558 EDMA_HandleIRQ(s_EDMAHandle[28]);
2559 }
2560 SDK_ISR_EXIT_BARRIER;
2561}
2562
2563void DMA0_25_29_DriverIRQHandler(void);
2564void DMA0_25_29_DriverIRQHandler(void)
2565{
2566 if ((EDMA_GetChannelStatusFlags(DMA0, 25U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2567 {
2568 EDMA_HandleIRQ(s_EDMAHandle[25]);
2569 }
2570 if ((EDMA_GetChannelStatusFlags(DMA0, 29U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2571 {
2572 EDMA_HandleIRQ(s_EDMAHandle[29]);
2573 }
2574 SDK_ISR_EXIT_BARRIER;
2575}
2576
2577void DMA0_26_30_DriverIRQHandler(void);
2578void DMA0_26_30_DriverIRQHandler(void)
2579{
2580 if ((EDMA_GetChannelStatusFlags(DMA0, 26U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2581 {
2582 EDMA_HandleIRQ(s_EDMAHandle[26]);
2583 }
2584 if ((EDMA_GetChannelStatusFlags(DMA0, 30U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2585 {
2586 EDMA_HandleIRQ(s_EDMAHandle[30]);
2587 }
2588 SDK_ISR_EXIT_BARRIER;
2589}
2590
2591void DMA0_27_31_DriverIRQHandler(void);
2592void DMA0_27_31_DriverIRQHandler(void)
2593{
2594 if ((EDMA_GetChannelStatusFlags(DMA0, 27U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2595 {
2596 EDMA_HandleIRQ(s_EDMAHandle[27]);
2597 }
2598 if ((EDMA_GetChannelStatusFlags(DMA0, 31U) & (uint32_t)kEDMA_InterruptFlag) != 0U)
2599 {
2600 EDMA_HandleIRQ(s_EDMAHandle[31]);
2601 }
2602 SDK_ISR_EXIT_BARRIER;
2603}
2604#endif /* 32 channels (Shared): MCIMX7U5 */
2605#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2606
2607#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET) && \
2608 (FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET == 0)
2609/* 4 channels (No Shared): kv10 */
2610#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 0)
2611
2612void DMA0_DriverIRQHandler(void);
2613void DMA0_DriverIRQHandler(void)
2614{
2615 EDMA_HandleIRQ(s_EDMAHandle[0]);
2616 SDK_ISR_EXIT_BARRIER;
2617}
2618
2619void DMA1_DriverIRQHandler(void);
2620void DMA1_DriverIRQHandler(void)
2621{
2622 EDMA_HandleIRQ(s_EDMAHandle[1]);
2623 SDK_ISR_EXIT_BARRIER;
2624}
2625
2626void DMA2_DriverIRQHandler(void);
2627void DMA2_DriverIRQHandler(void)
2628{
2629 EDMA_HandleIRQ(s_EDMAHandle[2]);
2630 SDK_ISR_EXIT_BARRIER;
2631}
2632
2633void DMA3_DriverIRQHandler(void);
2634void DMA3_DriverIRQHandler(void)
2635{
2636 EDMA_HandleIRQ(s_EDMAHandle[3]);
2637 SDK_ISR_EXIT_BARRIER;
2638}
2639
2640/* 8 channels (No Shared) */
2641#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 4U)
2642
2643void DMA4_DriverIRQHandler(void);
2644void DMA4_DriverIRQHandler(void)
2645{
2646 EDMA_HandleIRQ(s_EDMAHandle[4]);
2647 SDK_ISR_EXIT_BARRIER;
2648}
2649
2650void DMA5_DriverIRQHandler(void);
2651void DMA5_DriverIRQHandler(void)
2652{
2653 EDMA_HandleIRQ(s_EDMAHandle[5]);
2654 SDK_ISR_EXIT_BARRIER;
2655}
2656
2657void DMA6_DriverIRQHandler(void);
2658void DMA6_DriverIRQHandler(void)
2659{
2660 EDMA_HandleIRQ(s_EDMAHandle[6]);
2661 SDK_ISR_EXIT_BARRIER;
2662}
2663
2664void DMA7_DriverIRQHandler(void);
2665void DMA7_DriverIRQHandler(void)
2666{
2667 EDMA_HandleIRQ(s_EDMAHandle[7]);
2668 SDK_ISR_EXIT_BARRIER;
2669}
#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL > 4 */
2671
2672/* 16 channels (No Shared) */
2673#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 8U)
2674
2675void DMA8_DriverIRQHandler(void);
2676void DMA8_DriverIRQHandler(void)
2677{
2678 EDMA_HandleIRQ(s_EDMAHandle[8]);
2679 SDK_ISR_EXIT_BARRIER;
2680}
2681
2682void DMA9_DriverIRQHandler(void);
2683void DMA9_DriverIRQHandler(void)
2684{
2685 EDMA_HandleIRQ(s_EDMAHandle[9]);
2686 SDK_ISR_EXIT_BARRIER;
2687}
2688
2689void DMA10_DriverIRQHandler(void);
2690void DMA10_DriverIRQHandler(void)
2691{
2692 EDMA_HandleIRQ(s_EDMAHandle[10]);
2693 SDK_ISR_EXIT_BARRIER;
2694}
2695
2696void DMA11_DriverIRQHandler(void);
2697void DMA11_DriverIRQHandler(void)
2698{
2699 EDMA_HandleIRQ(s_EDMAHandle[11]);
2700 SDK_ISR_EXIT_BARRIER;
2701}
2702
2703void DMA12_DriverIRQHandler(void);
2704void DMA12_DriverIRQHandler(void)
2705{
2706 EDMA_HandleIRQ(s_EDMAHandle[12]);
2707 SDK_ISR_EXIT_BARRIER;
2708}
2709
2710void DMA13_DriverIRQHandler(void);
2711void DMA13_DriverIRQHandler(void)
2712{
2713 EDMA_HandleIRQ(s_EDMAHandle[13]);
2714 SDK_ISR_EXIT_BARRIER;
2715}
2716
2717void DMA14_DriverIRQHandler(void);
2718void DMA14_DriverIRQHandler(void)
2719{
2720 EDMA_HandleIRQ(s_EDMAHandle[14]);
2721 SDK_ISR_EXIT_BARRIER;
2722}
2723
2724void DMA15_DriverIRQHandler(void);
2725void DMA15_DriverIRQHandler(void)
2726{
2727 EDMA_HandleIRQ(s_EDMAHandle[15]);
2728 SDK_ISR_EXIT_BARRIER;
2729}
#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL > 8 */
2731
2732/* 32 channels (No Shared) */
2733#if defined(FSL_FEATURE_EDMA_MODULE_CHANNEL) && (FSL_FEATURE_EDMA_MODULE_CHANNEL > 16U)
2734
2735void DMA16_DriverIRQHandler(void);
2736void DMA16_DriverIRQHandler(void)
2737{
2738 EDMA_HandleIRQ(s_EDMAHandle[16]);
2739 SDK_ISR_EXIT_BARRIER;
2740}
2741
2742void DMA17_DriverIRQHandler(void);
2743void DMA17_DriverIRQHandler(void)
2744{
2745 EDMA_HandleIRQ(s_EDMAHandle[17]);
2746 SDK_ISR_EXIT_BARRIER;
2747}
2748
2749void DMA18_DriverIRQHandler(void);
2750void DMA18_DriverIRQHandler(void)
2751{
2752 EDMA_HandleIRQ(s_EDMAHandle[18]);
2753 SDK_ISR_EXIT_BARRIER;
2754}
2755
2756void DMA19_DriverIRQHandler(void);
2757void DMA19_DriverIRQHandler(void)
2758{
2759 EDMA_HandleIRQ(s_EDMAHandle[19]);
2760 SDK_ISR_EXIT_BARRIER;
2761}
2762
2763void DMA20_DriverIRQHandler(void);
2764void DMA20_DriverIRQHandler(void)
2765{
2766 EDMA_HandleIRQ(s_EDMAHandle[20]);
2767 SDK_ISR_EXIT_BARRIER;
2768}
2769
2770void DMA21_DriverIRQHandler(void);
2771void DMA21_DriverIRQHandler(void)
2772{
2773 EDMA_HandleIRQ(s_EDMAHandle[21]);
2774 SDK_ISR_EXIT_BARRIER;
2775}
2776
2777void DMA22_DriverIRQHandler(void);
2778void DMA22_DriverIRQHandler(void)
2779{
2780 EDMA_HandleIRQ(s_EDMAHandle[22]);
2781 SDK_ISR_EXIT_BARRIER;
2782}
2783
2784void DMA23_DriverIRQHandler(void);
2785void DMA23_DriverIRQHandler(void)
2786{
2787 EDMA_HandleIRQ(s_EDMAHandle[23]);
2788 SDK_ISR_EXIT_BARRIER;
2789}
2790
2791void DMA24_DriverIRQHandler(void);
2792void DMA24_DriverIRQHandler(void)
2793{
2794 EDMA_HandleIRQ(s_EDMAHandle[24]);
2795 SDK_ISR_EXIT_BARRIER;
2796}
2797
2798void DMA25_DriverIRQHandler(void);
2799void DMA25_DriverIRQHandler(void)
2800{
2801 EDMA_HandleIRQ(s_EDMAHandle[25]);
2802 SDK_ISR_EXIT_BARRIER;
2803}
2804
2805void DMA26_DriverIRQHandler(void);
2806void DMA26_DriverIRQHandler(void)
2807{
2808 EDMA_HandleIRQ(s_EDMAHandle[26]);
2809 SDK_ISR_EXIT_BARRIER;
2810}
2811
2812void DMA27_DriverIRQHandler(void);
2813void DMA27_DriverIRQHandler(void)
2814{
2815 EDMA_HandleIRQ(s_EDMAHandle[27]);
2816 SDK_ISR_EXIT_BARRIER;
2817}
2818
2819void DMA28_DriverIRQHandler(void);
2820void DMA28_DriverIRQHandler(void)
2821{
2822 EDMA_HandleIRQ(s_EDMAHandle[28]);
2823 SDK_ISR_EXIT_BARRIER;
2824}
2825
2826void DMA29_DriverIRQHandler(void);
2827void DMA29_DriverIRQHandler(void)
2828{
2829 EDMA_HandleIRQ(s_EDMAHandle[29]);
2830 SDK_ISR_EXIT_BARRIER;
2831}
2832
2833void DMA30_DriverIRQHandler(void);
2834void DMA30_DriverIRQHandler(void)
2835{
2836 EDMA_HandleIRQ(s_EDMAHandle[30]);
2837 SDK_ISR_EXIT_BARRIER;
2838}
2839
2840void DMA31_DriverIRQHandler(void);
2841void DMA31_DriverIRQHandler(void)
2842{
2843 EDMA_HandleIRQ(s_EDMAHandle[31]);
2844 SDK_ISR_EXIT_BARRIER;
2845}
#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL > 16 */
2847
2848#endif /* 4/8/16/32 channels (No Shared) */
2849#endif /* FSL_FEATURE_EDMA_MODULE_CHANNEL_IRQ_ENTRY_SHARED_OFFSET */
2850