Hi Community,
I want to turn the user LED ON for 100 us and OFF for 1 s on the RT1064 evaluation kit. I modified the GPT example program, but the minimum timing I can get is 1 ms. When I set the compare value lower than 250000, something goes wrong. How can I solve this?
Here is the code:
#include "fsl_debug_console.h"
#include "pin_mux.h"
#include "clock_config.h"
#include "board.h"
#include "fsl_gpt.h"
/*******************************************************************************
* Definitions
******************************************************************************/
#define GPT_IRQ_ID GPT2_IRQn
#define EXAMPLE_GPT GPT2
#define EXAMPLE_GPT_IRQHandler GPT2_IRQHandler
#define EXAMPLE_LED_GPIO BOARD_USER_LED_GPIO
#define EXAMPLE_LED_GPIO_PIN BOARD_USER_LED_PIN
/* Get source clock for GPT driver (GPT prescaler = 0) */
#define EXAMPLE_GPT_CLK_FREQ CLOCK_GetFreq(kCLOCK_PerClk)
/*******************************************************************************
* Prototypes
******************************************************************************/
/*******************************************************************************
* Variables
******************************************************************************/
volatile bool gptIsrFlag = false;
volatile uint32_t g_systickCounter;
volatile uint32_t count=0;
/*******************************************************************************
* Code
******************************************************************************/
void EXAMPLE_GPT_IRQHandler(void)
{
    /* Clear interrupt flag. */
    GPT_ClearStatusFlags(EXAMPLE_GPT, kGPT_OutputCompare1Flag);
    gptIsrFlag = true;
    count += 1;
    /* Add for ARM errata 838869, affects Cortex-M4, Cortex-M4F, Cortex-M7, Cortex-M7F: Store immediate overlapping
       exception return operation might vector to incorrect interrupt */
    SDK_ISR_EXIT_BARRIER;
}
/*!
* @brief Main function
*/
int main(void)
{
    uint32_t gptFreq = 250000; // why can't I use 25?
    gpt_config_t gptConfig;

    /* Board pin, clock, debug console init */
    BOARD_ConfigMPU();
    BOARD_InitBootPins();
    BOARD_InitBootClocks();
    BOARD_InitDebugConsole();

    GPT_GetDefaultConfig(&gptConfig);
    /* Initialize GPT module */
    GPT_Init(EXAMPLE_GPT, &gptConfig);
    /* Divide GPT clock source frequency by 3 inside GPT module */
    GPT_SetClockDivider(EXAMPLE_GPT, 3);
    /* Get GPT clock frequency */
    //gptFreq = EXAMPLE_GPT_CLK_FREQ;
    /* GPT frequency is divided by 3 inside module */
    //gptFreq /= 3;
    /* Set the GPT output compare value */
    GPT_SetOutputCompareValue(EXAMPLE_GPT, kGPT_OutputCompare_Channel1, gptFreq);
    /* Enable GPT Output Compare1 interrupt */
    GPT_EnableInterrupts(EXAMPLE_GPT, kGPT_OutputCompare1InterruptEnable);
    /* Enable the interrupt in the NVIC */
    EnableIRQ(GPT_IRQ_ID);
    // PRINTF("\r\nPress any key to start the example");
    // GETCHAR();
    /* Start timer */
    PRINTF("\r\nStarting GPT timer ...");
    GPT_StartTimer(EXAMPLE_GPT);

    while (true)
    {
        /* Check whether an interrupt occurred and toggle the LED */
        if (true == gptIsrFlag)
        {
            if (count == 1)
            {
                GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 0U);
            }
            if (count == 100)
            {
                count = 0;
                GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 1U);
            }
            PRINTF("\r\nGPT interrupt occurred!");
            gptIsrFlag = false;
        }
        else
        {
            __WFI();
        }
    }
}
To achieve 100 microseconds (us) LED ON and 1 second OFF timing on the RT1064 evaluation kit, modify your code as follows:
#include "fsl_debug_console.h"
#include "pin_mux.h"
#include "clock_config.h"
#include "board.h"
#include "fsl_gpt.h"
volatile bool gptIsrFlag = false;
void EXAMPLE_GPT_IRQHandler(void)
{
GPT_ClearStatusFlags(EXAMPLE_GPT, kGPT_OutputCompare1Flag);
GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 1U); // Turn LED ON
for (uint32_t i = 0; i < 25; i++) {
__NOP(); // Introduce a 100us delay
}
GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 0U); // Turn LED OFF
gptIsrFlag = true;
}
int main(void)
{
gpt_config_t gptConfig;
uint32_t gptFreq = 1000; // 1ms
BOARD_ConfigMPU();
BOARD_InitBootPins();
BOARD_InitBootClocks();
BOARD_InitDebugConsole();
GPT_GetDefaultConfig(&gptConfig);
GPT_Init(EXAMPLE_GPT, &gptConfig);
GPT_SetOutputCompareValue(EXAMPLE_GPT, kGPT_OutputCompare_Channel1, gptFreq);
GPT_EnableInterrupts(EXAMPLE_GPT, kGPT_OutputCompare1InterruptEnable);
EnableIRQ(GPT_IRQ_ID);
PRINTF("\r\nStarting GPT timer ...");
GPT_StartTimer(EXAMPLE_GPT);
while (true)
{
__WFI();
}
}
This code turns the LED ON for about 100 us and OFF for the remainder of the timer period, using the GPT module for the period itself. The key modification is the __NOP() busy-wait loop inside the interrupt handler, whose iteration count must be calibrated to produce the ~100 us ON time.
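If you prefer not to hand-tune the loop count, the SDK provides SDK_DelayAtLeastUs() in fsl_common.h, which busy-waits for a given number of microseconds calibrated against the core clock. A minimal sketch of the handler using it (SystemCoreClock is the standard CMSIS core-clock variable, set up by the board clock init):

#include "fsl_common.h" /* SDK_DelayAtLeastUs() */

void EXAMPLE_GPT_IRQHandler(void)
{
    GPT_ClearStatusFlags(EXAMPLE_GPT, kGPT_OutputCompare1Flag);
    GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 1U); /* Turn LED ON */
    SDK_DelayAtLeastUs(100U, SystemCoreClock); /* Busy-wait at least 100 us */
    GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 0U); /* Turn LED OFF */
    SDK_ISR_EXIT_BARRIER;
}

Note this still blocks the CPU inside the ISR for the full 100 us, which is acceptable here only because the ON time is tiny compared with the period.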
Thanks a lot, it works! But I still have some confusion about the clock.
When I divide the GPT clock and set the compare value as below, an interrupt fires every 1 s:
uint32_t gptFreq = 25000000;
/* Divide GPT clock source frequency by 3 inside GPT module */
GPT_SetClockDivider(EXAMPLE_GPT, 3);
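The arithmetic behind this, assuming kCLOCK_PerClk runs at 75 MHz with the default board clock setup (an assumption; print EXAMPLE_GPT_CLK_FREQ to confirm): 75 MHz / 3 = 25 MHz GPT counter clock, so 25000000 ticks is exactly 1 s, and 100 us would be 2500 ticks. As a sketch, the compare values can be derived from the actual clock instead of hard-coded:

/* Sketch: derive compare values from the measured clock */
uint32_t srcClkHz = EXAMPLE_GPT_CLK_FREQ;   /* e.g. 75000000 for kCLOCK_PerClk */
uint32_t gptClkHz = srcClkHz / 3U;          /* divider set via GPT_SetClockDivider */
uint32_t compare1s    = gptClkHz;           /* 25000000 ticks -> 1 s    */
uint32_t compare100us = gptClkHz / 10000U;  /*     2500 ticks -> 100 us */
GPT_SetOutputCompareValue(EXAMPLE_GPT, kGPT_OutputCompare_Channel1, compare1s);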
But how can I determine the execution time of __NOP()? I found that about 20000 NOPs give a delay of roughly 100 us. Which clock does the NOP loop run on?
void EXAMPLE_GPT_IRQHandler(void)
{
    /* Clear interrupt flag. */
    GPT_ClearStatusFlags(EXAMPLE_GPT, kGPT_OutputCompare1Flag);
    GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 1U); // Turn LED ON
    for (uint32_t i = 0; i < 20000; i++)
    {
        __NOP(); // ~100us delay (measured)
    }
    GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 0U); // Turn LED OFF
}
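__NOP() itself executes at the core clock (600 MHz on the RT1064), but the loop counter, the branch, and the compiler optimization level dominate the per-iteration time, which is why the count can only be found empirically (20000 iterations ≈ 100 us suggests roughly 3 core cycles per iteration at 600 MHz). A way to avoid guessing is to measure in core cycles with the Cortex-M7 DWT cycle counter; a sketch, assuming SystemCoreClock reflects the 600 MHz core:

/* One-time setup: enable the DWT cycle counter */
CoreDebug->DEMCR |= CoreDebug_DEMCR_TRCENA_Msk;
DWT->CTRL |= DWT_CTRL_CYCCNTENA_Msk;

/* Delay ~100 us measured in core cycles (600 MHz -> 60000 cycles) */
uint32_t start = DWT->CYCCNT;
while ((DWT->CYCCNT - start) < (SystemCoreClock / 10000U))
{
}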
I use kCLOCK_IpgClk as the GPT clock and set the compare value to 150.
I count the interrupts and change the GPIO pin after 200 and 500000 counts; that works well, as the picture below shows.
But if I change the GPIO after 150 and 500000 counts instead, it does not work.
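For reference, a sketch of the working case (200 / 500000 counts) as described above, assuming kCLOCK_IpgClk runs at 150 MHz (the RT1064 default), so a compare value of 150 gives one interrupt per microsecond:

/* Sketch: count 1 us interrupts to get ~200 us ON and ~0.5 s OFF */
volatile uint32_t count = 0;

void EXAMPLE_GPT_IRQHandler(void)
{
    GPT_ClearStatusFlags(EXAMPLE_GPT, kGPT_OutputCompare1Flag);
    count++;
    if (count == 200U) /* ~200 us elapsed: LED OFF */
    {
        GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 0U);
    }
    else if (count == 500000U) /* ~0.5 s elapsed: LED ON, restart */
    {
        count = 0U;
        GPIO_PinWrite(EXAMPLE_LED_GPIO, EXAMPLE_LED_GPIO_PIN, 1U);
    }
    SDK_ISR_EXIT_BARRIER;
}

Note that at a 1 us interrupt period the whole handler must finish within about 600 core cycles at 600 MHz, so any extra work in the ISR (a PRINTF, for example) can cause interrupts to be missed.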