Author Topic: STM32 Timer running slow on first DMA transfer  (Read 12323 times)

0 Members and 1 Guest are viewing this topic.

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
STM32 Timer running slow on first DMA transfer
« on: March 07, 2014, 09:14:37 pm »
So I'm currently trying to interface an STM32F100 @ 24MHz to WS2812 addressable RGB LEDs using the same approach that the OctoWS2811 library uses.

The idea to create 16 individual data streams is the following:
1. Have a Timer (TIM2) run at 800kHz and configure the compare modules so that there is a match on the low bit time after ~375ns (CC1) and the high bit time after ~700ns (CC2).

2. Use three DMA streams to control the output data register of an IO bank and set the IOs appropriately. DMA channel 2 transfers all bits high to the GPIOA output data register on a TMR2 update event. DMA channel 5 then transfers the actual data bytes to the output data register on CC1 event, if the bit is supposed to represent a 0, then the pin goes low, otherwise it stays high. Finally DMA channel 7 sets all remaining IO pins to low on a CC2 event.

The code I have here works fine for the first frame (after reset) but when I restart the timer for transfer after reconfiguring the DMA, the first timer period seems to be only half the speed that I actually configured resulting in the first bit being about twice as long. The strange part is that after that the other bits get transferred without problems.

Currently I am handling the end of data transfer with an interrupt. When the DMA channel 7 transfer complete interrupt occurs, I disable the timer and DMA channels, then clear all relevant timer and DMA flags.

I have been trying out various things the whole day but I can't seem to find the solution to this, my assumption that some flag is still set is obviously wrong as resetting them is not helping.

The code currently looks like this:

Code: [Select]
#include <stm32f10x.h>

#define GPIOA_ODR_Address 0x4001080C

// Constant port patterns written to GPIOA->ODR by DMA:
// all 16 data lines high (start of every bit) / all low (end of every bit).
uint16_t WS2812_IO_High = 0xFFFF;
uint16_t WS2812_IO_Low = 0x0000;

// Test framebuffer: one 16-bit word per WS2812 bit slot; bit n of each word
// drives GPIOA pin n, so 48 words = 24 bits = one 24-bit GRB LED on each of
// the 16 parallel strips (pattern here: G=0x00, R=0xFF, B=0x00 per lane mix).
uint16_t WS2812_IO_framedata[48] =
{
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF,
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF, 0xFFFF,
0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
};

/* Crude software busy-wait: spins for roughly nCount loop iterations.
 * Not calibrated to wall-clock time; used only to make LED updates
 * visible to the eye, never for protocol timing. */
void Delay(__IO uint32_t nCount) {
  for (; nCount != 0; --nCount) {
    /* spin */
  }
}

/* Configure GPIO:
 *  - all 16 pins of GPIOA as 50 MHz push-pull outputs: the 16 parallel
 *    WS2812 data lines driven via DMA writes to GPIOA->ODR
 *  - PB0 as push-pull output: toggled in DMA1_Channel7_IRQHandler as a
 *    scope/debug marker
 *  - PB10 as alternate-function push-pull (purpose not visible in this
 *    listing -- TODO confirm which peripheral owns PB10)
 */
void GPIO_init(void)
{
GPIO_InitTypeDef GPIO_InitStructure;
// GPIOA Periph clock enable
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOA, ENABLE);
// GPIOA pins WS2812 data outputs
GPIO_InitStructure.GPIO_Pin = 0xFFFF; // every pin of the port
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_PP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOA, &GPIO_InitStructure);

// PB0: plain output used as an end-of-frame debug toggle
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOB, ENABLE);
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_0;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_PP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOB, &GPIO_InitStructure);

// PB10: alternate function output (not referenced elsewhere here -- verify)
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_10;
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF_PP;
GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz;
GPIO_Init(GPIOB, &GPIO_InitStructure);
}

/* Set up TIM2 as the 800 kHz WS2812 bit-time base.
 * Timer clock is prescaled to 24 MHz; with ARR = 29 one period is
 * 30 ticks = 1.25 us (800 kHz). The compare channels mark the two
 * fall-times inside a bit: CC1 = 9 ticks = 375 ns ("0" bit goes low),
 * CC2 = 17 ticks = ~708 ns ("1" bit goes low).
 * Update/CC1/CC2 each raise a DMA request (channels 2/5/7 of DMA1). */
void TIM2_init(void)
{
TIM_TimeBaseInitTypeDef TIM_TimeBaseStructure;
TIM_OCInitTypeDef TIM_OCInitStructure;
uint16_t PrescalerValue = 0;

// TIM2 Periph clock enable
RCC_APB1PeriphClockCmd(RCC_APB1Periph_TIM2, ENABLE);

// divide the core clock down to a 24 MHz timer tick
PrescalerValue = (uint16_t) (SystemCoreClock / 24000000) - 1;
/* Time base configuration */
TIM_TimeBaseStructure.TIM_Period = 29; // 800kHz
TIM_TimeBaseStructure.TIM_Prescaler = PrescalerValue;
TIM_TimeBaseStructure.TIM_ClockDivision = 0;
TIM_TimeBaseStructure.TIM_CounterMode = TIM_CounterMode_Up;
TIM_TimeBaseInit(TIM2, &TIM_TimeBaseStructure);

// ARR written directly, no shadow/buffer register
TIM_ARRPreloadConfig(TIM2, DISABLE);

/* Timing Mode configuration: Channel 1 */
TIM_OCInitStructure.TIM_OCMode = TIM_OCMode_Timing;
TIM_OCInitStructure.TIM_OutputState = TIM_OutputState_Enable;
TIM_OCInitStructure.TIM_Pulse = 9; // 375 ns: low-point of a "0" bit
TIM_OCInitStructure.TIM_OCPolarity = TIM_OCPolarity_High;
TIM_OC1Init(TIM2, &TIM_OCInitStructure);
TIM_OC1PreloadConfig(TIM2, TIM_OCPreload_Disable);

/* Timing Mode configuration: Channel 2 */
TIM_OCInitStructure.TIM_OCMode = TIM_OCMode_Timing;
TIM_OCInitStructure.TIM_OutputState = TIM_OutputState_Enable;
TIM_OCInitStructure.TIM_Pulse = 17; // ~708 ns: low-point of a "1" bit
TIM_OCInitStructure.TIM_OCPolarity = TIM_OCPolarity_High;
TIM_OC2Init(TIM2, &TIM_OCInitStructure);
TIM_OC2PreloadConfig(TIM2, TIM_OCPreload_Disable);

/* TIM2 CC1 DMA Request enable */
TIM_DMACmd(TIM2, TIM_DMA_CC1, ENABLE);

/* TIM2 CC2 DMA Request enable */
TIM_DMACmd(TIM2, TIM_DMA_CC2, ENABLE);

/* TIM2 Update DMA Request enable */
TIM_DMACmd(TIM2, TIM_DMA_Update, ENABLE);
}

void DMA_init(void)
{
DMA_InitTypeDef DMA_InitStructure;
NVIC_InitTypeDef NVIC_InitStructure;

RCC_AHBPeriphClockCmd(RCC_AHBPeriph_DMA1, ENABLE);

// TIM2 Update event
/* DMA1 Channel2 configuration ----------------------------------------------*/
DMA_DeInit(DMA1_Channel2);
DMA_InitStructure.DMA_PeripheralBaseAddr = (uint32_t)GPIOA_ODR_Address;
DMA_InitStructure.DMA_MemoryBaseAddr = (uint32_t)WS2812_IO_High;
DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralDST;
DMA_InitStructure.DMA_BufferSize = 0;
DMA_InitStructure.DMA_PeripheralInc = DMA_PeripheralInc_Disable;
DMA_InitStructure.DMA_MemoryInc = DMA_MemoryInc_Disable;
DMA_InitStructure.DMA_PeripheralDataSize = DMA_PeripheralDataSize_HalfWord;
DMA_InitStructure.DMA_MemoryDataSize = DMA_MemoryDataSize_HalfWord;
DMA_InitStructure.DMA_Mode = DMA_Mode_Normal;
DMA_InitStructure.DMA_Priority = DMA_Priority_High;
DMA_InitStructure.DMA_M2M = DMA_M2M_Disable;
DMA_Init(DMA1_Channel2, &DMA_InitStructure);

// TIM2 CH1 event
/* DMA1 Channel5 configuration ----------------------------------------------*/
DMA_DeInit(DMA1_Channel5);
DMA_InitStructure.DMA_PeripheralBaseAddr = (uint32_t)GPIOA_ODR_Address;
DMA_InitStructure.DMA_MemoryBaseAddr = (uint32_t)WS2812_IO_framedata;
DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralDST;
DMA_InitStructure.DMA_BufferSize = 0;
DMA_InitStructure.DMA_PeripheralInc = DMA_PeripheralInc_Disable;
DMA_InitStructure.DMA_MemoryInc = DMA_MemoryInc_Enable;
DMA_InitStructure.DMA_PeripheralDataSize = DMA_PeripheralDataSize_HalfWord;
DMA_InitStructure.DMA_MemoryDataSize = DMA_MemoryDataSize_HalfWord;
DMA_InitStructure.DMA_Mode = DMA_Mode_Normal;
DMA_InitStructure.DMA_Priority = DMA_Priority_High;
DMA_InitStructure.DMA_M2M = DMA_M2M_Disable;
DMA_Init(DMA1_Channel5, &DMA_InitStructure);

// TIM2 CH2 event
/* DMA1 Channel7 configuration ----------------------------------------------*/
DMA_DeInit(DMA1_Channel7);
DMA_InitStructure.DMA_PeripheralBaseAddr = (uint32_t)GPIOA_ODR_Address;
DMA_InitStructure.DMA_MemoryBaseAddr = (uint32_t)WS2812_IO_Low;
DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralDST;
DMA_InitStructure.DMA_BufferSize = 0;
DMA_InitStructure.DMA_PeripheralInc = DMA_PeripheralInc_Disable;
DMA_InitStructure.DMA_MemoryInc = DMA_MemoryInc_Disable;
DMA_InitStructure.DMA_PeripheralDataSize = DMA_PeripheralDataSize_HalfWord;
DMA_InitStructure.DMA_MemoryDataSize = DMA_MemoryDataSize_HalfWord;
DMA_InitStructure.DMA_Mode = DMA_Mode_Normal;
DMA_InitStructure.DMA_Priority = DMA_Priority_High;
DMA_InitStructure.DMA_M2M = DMA_M2M_Disable;
DMA_Init(DMA1_Channel7, &DMA_InitStructure);

/* configure DMA1 Channel7 interrupt */
NVIC_InitStructure.NVIC_IRQChannel = DMA1_Channel7_IRQn;
NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0;
NVIC_InitStructure.NVIC_IRQChannelSubPriority = 1;
NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE;
NVIC_Init(&NVIC_InitStructure);
/* enable DMA1 Channel7 transfer complete interrupt */
DMA_ITConfig(DMA1_Channel7, DMA_IT_TC, ENABLE);
}

/* Kick off one frame transmission.
 * buffersize = number of half-word DMA transfers per channel
 *            = 24 bit-slots per block of 16 LEDs.
 * NOTE(review): the thread shows this enable/start ordering is timing
 * critical -- the first bit's period depends on it -- so do not reorder. */
void WS2812_send(uint16_t buffersize)
{
DMA_SetCurrDataCounter(DMA1_Channel2, buffersize);
DMA_SetCurrDataCounter(DMA1_Channel5, buffersize);
DMA_SetCurrDataCounter(DMA1_Channel7, buffersize);

/* enable DMA1 Channels */
DMA_Cmd(DMA1_Channel2, ENABLE);
DMA_Cmd(DMA1_Channel5, ENABLE);
DMA_Cmd(DMA1_Channel7, ENABLE);

// preload CNT to ARR (29) so the very first tick overflows, generating the
// update event that fires the first "all lines high" DMA transfer
TIM_SetCounter(TIM2, 29);
TIM_Cmd(TIM2, ENABLE);
}

/* Transfer-complete ISR for DMA1 Channel7 -- fires after the last CC2
 * transfer of a frame. Shuts down the timer and all three DMA channels and
 * clears leftover status so the next WS2812_send() starts from a clean state. */
void DMA1_Channel7_IRQHandler(void)
{
if (DMA_GetITStatus(DMA1_IT_TC7))
{
/* BUG FIX: clear the pending bit FIRST. Clearing it as the very last
 * statement can leave the flag still set when the handler returns (write
 * buffering between core and peripheral), re-entering the ISR once. */
DMA_ClearITPendingBit(DMA1_IT_TC7);
GPIOB->ODR ^= 0x0001; // debug: toggle PB0 at end of frame
TIM_Cmd(TIM2, DISABLE);
DMA_Cmd(DMA1_Channel2, DISABLE);
DMA_Cmd(DMA1_Channel5, DISABLE);
DMA_Cmd(DMA1_Channel7, DISABLE);
/* clear all timer and DMA status flags left over from this frame */
TIM_ClearFlag(TIM2, TIM_FLAG_Update | TIM_FLAG_CC1 | TIM_FLAG_CC2);
DMA_ClearFlag(DMA1_FLAG_TC2 | DMA1_FLAG_HT2 | DMA1_FLAG_GL2 | DMA1_FLAG_TE2);
DMA_ClearFlag(DMA1_FLAG_TC5 | DMA1_FLAG_HT5 | DMA1_FLAG_GL5 | DMA1_FLAG_TE5);
DMA_ClearFlag(DMA1_FLAG_HT7 | DMA1_FLAG_GL7 | DMA1_FLAG_TE7);
}
}

/* Bring up GPIO, DMA and TIM2, then retransmit the 24-slot test frame
 * forever with a visible pause between frames. */
int main(void) {
GPIO_init();
DMA_init();
TIM2_init();

for (;;) {
WS2812_send(24);
Delay(5000000L);
}
}

The first attachment shows the waveform directly after reset (first transfer) and the second looks like all frames after the first transfer.

Can anyone spot an obvious mistake here?

Cheers,
Elia
 

Offline philpem

  • Frequent Contributor
  • **
  • Posts: 338
  • Country: gb
  • That Sneaky British Bloke
Re: STM32 Timer running slow on first DMA transfer
« Reply #1 on: March 11, 2014, 01:14:29 pm »
I think you might have to have Preload enabled for PWM mode -- check the reference manual for the part you're using. This is certainly true for the STM32F407VG on the F4 Discovery board.

I was actually playing with WS2811s last night - you might find this useful: https://bitbucket.org/philpem/stm32f4_ws2811

There's some other STM32 stuff on my Bitbucket account too -- a toolchain, and a merged version of the Standard Peripheral Library ("build for all chips with a change in make command line parameters"): https://bitbucket.org/philpem/
Phil / M0OFX -- Electronics/Software Engineer
"Why do I have a room full of test gear? Why, it saves on the heating bill!"
 

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
Re: STM32 Timer running slow on first DMA transfer
« Reply #2 on: March 11, 2014, 06:37:29 pm »
Quote
I was actually playing with WS2811s last night - you might find this useful: https://bitbucket.org/philpem/stm32f4_ws2811

That's my rather inefficient code I wrote a couple of months back. The approach here is much more memory efficient (24 bytes for 16 LEDs).

Investigating this further has shown that none of the relevant flags are set when I end the transfer or start a new one.

It's really confusing I would think that the timer should start running one clock cycle after setting the enable bit and because no changes to the ARR or the compare registers are made the timing should be spot on, and it is on subsequent transfers.

Quote
I think you might have to have Preload enabled for PWM mode

I've checked and I'm using output compare mode without preloaded compare registers.

And now for the weirdest part:
The order in which I reload the counter register, enable the DMA channels and start the timer seems to be affecting the period of the first bit. My current approach gets the first bit just into spec (see attachment):

Code: [Select]
/* !!! DO NOT CHANGE !!!
* this sequence gets the timing of the first bit
* into spec, haven't figured out why yet */
TIM2->CNT = 20; // preload counter register --> UEV generated at next overflow (starts DMA transfer)
TIM2->SR = 0; // clear all status flags just to be sure
DMA1_Channel2->CCR |= 0x0001; // enable the DMA channels
DMA1_Channel5->CCR |= 0x0001;
DMA1_Channel7->CCR |= 0x0001;
TIM2->CR1 |= 0x0001; // start TIM2

Complete code is here for reference: https://github.com/Torrentula/0xWS2812

Cheers,
Elia

 

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
Re: STM32 Timer running slow on first DMA transfer
« Reply #3 on: March 15, 2014, 09:40:55 am »
Okay I'm replying to myself here for completeness as I found the issue.

The solution to this problem was the obvious one that I thought I had tried right at the start and I could've sworn it didn't work the first time, probably because of some other misconfiguration further upstream.  :wtf:

Code: [Select]
TIM2->SR = 0; // clear all status flags just to be sure
DMA1_Channel2->CCR |= 0x0001; // enable the DMA channels
DMA1_Channel5->CCR |= 0x0001;
DMA1_Channel7->CCR |= 0x0001;
TIM2->CNT = 29; // set CNT to 29 so that TIM2 immediately overflows and generates UEV to start DMA transfer
TIM2->CR1 |= 0x0001; // start TIM2

Elia
 

Offline mrflibble

  • Super Contributor
  • ***
  • Posts: 2051
  • Country: nl
Re: STM32 Timer running slow on first DMA transfer
« Reply #4 on: March 16, 2014, 05:12:30 pm »
After lurking this thread to see what the issue was, part question + part hint/tip.

Did you find out what the problem was? Because the 2 code snippets you provided look to be functionally equivalent. Where your last post is probably the one I'd pick. First clear status, then setup everything timer related, then everything DMA related, and then kick off the timer.

As for the hint, you possibly already know this one, but just in case... This problem looks like the registers not being set up properly for whatever random reason. For this kind of thing I find the EmbSys Registers plugin damn handy. Set a breakpoint at the location where you think everything should be configured properly, and then use the EmbSys register view to check if those pesky registers are actually set up correctly. Helped me find a silly mistake on several occasions.
 

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
Re: STM32 Timer running slow on first DMA transfer
« Reply #5 on: March 17, 2014, 01:12:17 pm »
Thanks for pointing me to EmbSys, I didn't know that one as I don't work with Eclipse (or any IDE for that matter).

Code: [Select]
Did you find out what the problem was? Because the 2 code snippets you provided look to be functionally equivalent.

No unfortunately I haven't been able to find the problem, I didn't fix the problem either. Once I played around with the code a bit more the problem appears again. The point why I suspect that it must be some kind of status flag and/or other register setting problem is that it works the first time.

My suspicion is that the prescaler register is not 0 when starting the counter therefore the first timer period takes a bit longer or something with the ARR shadow registers not being loaded with the correct value, even though the buffer register for ARR is disabled.

I have tried updating the registers using an update event like so but without any luck:
Code: [Select]
TIM2->SR = 0; // clear all status flags just to be sure
TIM2->EGR |= 0x0001;                    // generate UEV
DMA1_Channel2->CCR |= 0x0001; // enable the DMA channels
DMA1_Channel5->CCR |= 0x0001;
DMA1_Channel7->CCR |= 0x0001;
TIM2->CNT = 29; // set CNT to 29 so that TIM2 immediately overflows and generates UEV to start DMA transfer
TIM2->CR1 |= 0x0001; // start TIM2

So if anyone has got an idea how to fix this I'd really appreciate it.

Cheers,
Elia
 

Offline Jope

  • Regular Contributor
  • *
  • Posts: 111
  • Country: de
Re: STM32 Timer running slow on first DMA transfer
« Reply #6 on: March 17, 2014, 02:24:55 pm »
Regardless of your problem, you should always clear interrupt flags at the beginning of an IRQ handler:
   
Code: [Select]
if (DMA_GetITStatus(DMA1_IT_TC7))
{
  DMA_ClearITPendingBit(DMA1_IT_TC7);
  [...]
}

Otherwise, it is possible - due to the pipelining nature of the core and the peripherals - that the flag is not
yet set to zero after the end of the IRQ handler, which leads to the IRQ handler being called again:
STM32 FAQ: Interrupt re-enters after peripheral's flag clear


Regarding resetting the prescaler to zero by generating a timer update event:
you also have to set the UDIS bit in the TIM2->CR1 register before setting the UG bit:

Code: [Select]
TIM2->CR1 |= TIM_CR1_UDIS;
TIM2->EGR |= TIM_EGR_UG;
TIM2->CR1 &= ~TIM_CR1_UDIS;

Otherwise, the generated update event will also generate interrupt and DMA requests.
Not sure if this solves your problem.

I don't understand your obscure Delay() function. What's the point of that?
If you want to wait for something to finish, the way to do it is to use a flag variable.
And if you really need a defined time delay, use a timer (I use the SysTick timer).


 

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
Re: STM32 Timer running slow on first DMA transfer
« Reply #7 on: March 17, 2014, 02:35:37 pm »
Quote
I don't understand your obscure Delay() function.

Yes the delay function is not good practice and used simply to waste some clock cycles and generate a delay. I promise not to use it for anything timing critical :P

Quote
If you want to wait for something to finish, the way to do it is to use a flag variable

Eventually that's what I'm going to do; the point here is that if I simply waited for a flag variable I wouldn't be able to tell when new data is sent, as the controller executes the code so quickly that it's not visible to the human eye when the LED changes color.

Quote
Regarding resetting the prescaler to zero by generating a timer update event:
you also have to set the UDIS bit in the TIM2->CR1 register before setting the UG bit:

I thought that might not be needed as the DMA channels aren't enabled at that point and I could reset the timer status register afterwards. Anyway thanks for pointing me at this!
Maybe the DMA request gets served anyway when I set the DMA channel enable bits.

Quote
Otherwise, it is possible - due to the pipelining nature of the core and the peripherals - that the flag is not
yet set to zero after the end of the IRQ handler, which leads to the IRQ handler being called again:

Interesting thanks for pointing that out I wasn't aware of that.
 

Offline TorrentulaTopic starter

  • Regular Contributor
  • *
  • Posts: 91
  • Country: de
    • My blog
Re: STM32 Timer running slow on first DMA transfer
« Reply #8 on: March 20, 2014, 05:27:13 pm »
Okay replying once again as I finally have found the issue (it was a dog to find let me tell ya!).

Apparently, besides disabling the DMA channels after the transfer, reloading the data counter and then reenabling the DMA channels is NOT enough. I had to disable the actual DMA requests too and reenable them AFTER the DMA channels get reenabled.

I figured this out when using the timer in PWM mode and looking at the output waveforms. It turns out that the first DMA transfer was not triggered by the timer itself; the timer actually started counting when the second bit got transferred, which explains the second bit being bang on. The first DMA transfer was simply triggered by some left-over request signal that got serviced immediately after the DMA channels were enabled, which is why the number of instructions between enabling the DMA and starting the timer actually influenced the timing of the first bit.

TL;DR this is the working sequence (the whole code can be found on my GitHub):
Code: [Select]
/* Transmit the framebuffer with buffersize number of half-word transfers to the LEDs
 * buffersize = (#LEDs / 16) * 24 */
void WS2812_sendbuf(uint32_t buffersize)
{
/* transmission complete flag, indicate that transmission is taking place */
tfin = 0;

// clear all relevant DMA flags
DMA_ClearFlag(DMA1_FLAG_TC2 | DMA1_FLAG_HT2 | DMA1_FLAG_GL2 | DMA1_FLAG_TE2);
DMA_ClearFlag(DMA1_FLAG_TC5 | DMA1_FLAG_HT5 | DMA1_FLAG_GL5 | DMA1_FLAG_TE5);
DMA_ClearFlag(DMA1_FLAG_HT7 | DMA1_FLAG_GL7 | DMA1_FLAG_TE7);

// configure the number of half-words to be transferred by the DMA controller
DMA_SetCurrDataCounter(DMA1_Channel2, buffersize);
DMA_SetCurrDataCounter(DMA1_Channel5, buffersize);
DMA_SetCurrDataCounter(DMA1_Channel7, buffersize);

// clear all TIM2 flags
TIM2->SR = 0;

// enable the corresponding DMA channels
DMA_Cmd(DMA1_Channel2, ENABLE);
DMA_Cmd(DMA1_Channel5, ENABLE);
DMA_Cmd(DMA1_Channel7, ENABLE);

// IMPORTANT: enable the TIM2 DMA requests AFTER enabling the DMA channels!
// Otherwise a stale request left over from the previous frame is serviced
// immediately, producing the stretched first bit this thread is about.
TIM_DMACmd(TIM2, TIM_DMA_CC1, ENABLE);
TIM_DMACmd(TIM2, TIM_DMA_CC2, ENABLE);
TIM_DMACmd(TIM2, TIM_DMA_Update, ENABLE);

// preload counter with 29 so TIM2 generates UEV directly to start DMA transfer
TIM_SetCounter(TIM2, 29);

// start TIM2
TIM_Cmd(TIM2, ENABLE);
}

/* DMA1 Channel7 Interrupt Handler gets executed once the complete framebuffer has been transmitted to the LEDs */
void DMA1_Channel7_IRQHandler(void)
{
// cleared first so write buffering cannot re-enter the handler
DMA_ClearITPendingBit(DMA1_IT_TC7); // clear DMA7 transfer complete interrupt flag
TIM_Cmd(TIM2, DISABLE); // stop TIM2
// disable the DMA channels
DMA_Cmd(DMA1_Channel2, DISABLE);
DMA_Cmd(DMA1_Channel5, DISABLE);
DMA_Cmd(DMA1_Channel7, DISABLE);
// IMPORTANT: disable the DMA requests, too! Leaving them enabled while the
// channels are off latches a pending request that fires as soon as the
// channels are re-enabled for the next frame.
TIM_DMACmd(TIM2, TIM_DMA_CC1, DISABLE);
TIM_DMACmd(TIM2, TIM_DMA_CC2, DISABLE);
TIM_DMACmd(TIM2, TIM_DMA_Update, DISABLE);
tfin = 1; // indicate that transfer has finished
}

Thanks again for your help!

Elia
 


Share me

Digg  Facebook  SlashDot  Delicious  Technorati  Twitter  Google  Yahoo
Smf