Longer Time Delay When More Than One "while" Polling Instruction - C

Microcontroller : ATmega328P in Arduino Uno
Clock Frequency : 16MHz
void timeDelay_CTC(float sec, unsigned char times) //0.1 <= sec <= 4
{
    OCR1A = (sec / 0.000064f) - 1;  //number of 64us ticks (16MHz / 1024) minus 1
    TCCR1A = 0b00000000;
    TCCR1B = 0b00001101;            //CTC mode, prescaler 1024
    for( unsigned char i = 1; i <= times; i++ )
    {
        while( (TIFR1 & (1<<OCF1A)) == 0 ); //wait for the compare match
        TIFR1 |= (1<<OCF1A);                //clear the flag for the next round
    }
    TCCR1A = 0;                     //stop the timer
    TCCR1B = 0;
}
The above function calculates the number of time-delay cycles and then implements the delay in CTC mode. It works well. Now, I want to write a similar function in normal mode. The following is the code.
void timeDelay_NORM(float sec, unsigned char times)
{
    unsigned int cycle = (sec / 0.000064f);
    TCNT1 = 65534 - cycle;
    TCNT1 = 49910;
    TCCR1A = 0b00000000;
    TCCR1B = 0b00000101;
    for( unsigned char x = 1; x <= 2; x++ )
    {
        while( (TIFR1 & (1<<TOV1)) == 0 );
        TIFR1 |= (1<<TOV1);
    }
    TCCR1A = 0;
    TCCR1B = 0;
}
However, when the normal mode function is called with the argument "times" > 1, the time delay is much longer than expected. So, I tried the following code.
void timeDelay_NORM(float sec, unsigned char times)
{
    //unsigned int cycle = (sec / 0.000064f);
    //TCNT1 = 65534 - cycle;
    TCNT1 = 49910; //Cycles for 0.5sec
    TCCR1A = 0b00000000;
    TCCR1B = 0b00000101;
    //for( unsigned char x = 1; x <= 2; x++ )
    //{
    while( (TIFR1 & (1<<TOV1)) == 0 ); //Run 0.5sec two times to delay 1sec
    TIFR1 |= (1<<TOV1);
    while( (TIFR1 & (1<<TOV1)) == 0 );
    TIFR1 |= (1<<TOV1);
    //}
    TCCR1A = 0;
    TCCR1B = 0;
}
I found that when it runs the following two instructions twice, the time delay is much longer than expected. It delays around 5 s instead of 1 s.
while( (TIFR1 & (1<<TOV1)) == 0 );
TIFR1 |= (1<<TOV1);
Can you teach me how to make it work? Or give me some hints.
Thank you for your help!

You do not reset TCNT1 between the loop iterations.
On the first pass it counts (65536 - 49910) ticks before TOV1 is set. When the timer overflows, TCNT1 rolls over to 0, so the second pass has to count all the way through the full 65536 ticks, which at 64 µs per tick is about 4.2 s and explains the roughly 5 s you observe.
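A minimal sketch of the corrected function, assuming the same 16 MHz clock and /1024 prescaler as above (one tick = 64 µs); the key change is reloading TCNT1 (and clearing TOV1) inside the loop, before each wait:
void timeDelay_NORM(float sec, unsigned char times)
{
    unsigned int cycle = (unsigned int)(sec / 0.000064f); //ticks needed, 64us each
    TCCR1A = 0b00000000;                        //normal mode
    TCCR1B = 0b00000101;                        //prescaler 1024, timer running
    for( unsigned char i = 1; i <= times; i++ )
    {
        TCNT1 = (unsigned int)(65536UL - cycle); //reload the start value every pass
        TIFR1 |= (1<<TOV1);                      //clear any pending overflow flag
        while( (TIFR1 & (1<<TOV1)) == 0 );       //wait for the overflow
    }
    TCCR1A = 0;                                 //stop the timer
    TCCR1B = 0;
}
Writing TIFR1 = (1<<TOV1) instead of |= would avoid accidentally clearing other Timer 1 flags in the same write, but the |= form matches the rest of the question.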

Related

Programming of the built-in timer of Arduino

I am new to programming in the IDE. I am using the built-in timer of the Arduino. I am using the TDR method to capture the reflection from a fault location in a cable, and for that I am using the timer. The timer should start as soon as the output is sent and stop at the reflection. I have a reference code for it, but I am not able to understand it, so if anyone knows about it, that would be great.
void setup()
{
pinMode(stepPin, OUTPUT);
pinMode(refPin, OUTPUT);
pinMode(shutdownPin, OUTPUT);
TCCR1A = 0;
TCCR1B = (1 << ICNC1); // input capture noise canceller enabled, capture on falling edge
TIMSK1 = 0; // timer 1 interrupts disabled
ACSR = 0; // input capture from ICP1 pin
TCCR2B = (1 << CS20); // change timer 2 PWM frequency to 31.25kHz because we're using pin 11 as a DAC
Serial.begin(19200);
}
struct Step
{
unsigned int time;
unsigned int amplitude;
};
// Take a single measurement, using either a positive or negative edge from the comparator.
// The comparator reference voltage must have been set up and allowed to stabilise before calling this.
unsigned int takeMeasurement(bool posEdge)
{
byte reg1b = (posEdge) ? 0 : (1 << ICES1); // input capture noise canceller disabled, set up input capture polarity, stop timer
reg1b |= (1 << CS10);
TCCR1B = reg1b;
TCNT1H = 0;
TCNT1L = 0; // clear timer
unsigned int capture = 0;
unsigned long start = micros(); // get the time
cli();
TCNT1H = 0;
TCNT1L = 0; // clear timer
TIFR1 = (1 << ICF1); // clear timer 1 input capture bit
PORTD |= (1 << 4); // set output high
sei();
do
{
if ((TIFR1 & (1 << ICF1)) && capture == 0)
{
byte temp = ICR1L;
capture = (ICR1H << 8) | temp;
}
} while (micros() - start < 100);
PORTD &= ~(1 << 4); // set output low
return capture;
}
size_t findSteps(bool positive, struct Step *results, size_t maxResults)
{
byte amplitude = (positive) ? 5 : 250;
analogWrite(refPin, amplitude);
delay(100); // wait 100ms for the output to stabilise
unsigned int lastReading = 0;
size_t numResults = 0;
unsigned int stepSize = 0; // 0 means not in a step
#ifdef DEBUG
Serial.print((positive) ? "pos " : "neg ");
#endif
for (int i = 0; i < 50; ++i)
{
analogWrite(refPin, amplitude);
delay(10);
unsigned int currentReading = takeMeasurement(positive);
unsigned int currentDiff = currentReading - lastReading; // diff since start of possible step
if (stepSize == 0)
{
// Not currently in a step
if (i != 0 && currentReading != 0 && currentDiff == 0)
{
// Found the start of a possible step
++stepSize;
}
lastReading = currentReading;
}
else
{
if (currentDiff > 2 || i + 1 == 50)
{
// Step has ended, so record it if it is big enough
if (stepSize >= 2)
{
results->time = lastReading;
results->amplitude = amplitude - 5;
++results;
++numResults;
if (numResults == maxResults) break;
}
stepSize = 0;
lastReading = currentReading;
}
else if (currentDiff == 0)
{
++stepSize;
}
}
#ifdef DEBUG
if (i != 0) Serial.write(',');
Serial.print(currentReading);
#endif
if (positive)
{
amplitude += 5;
}
else
{
amplitude -= 5;
}
}
#ifdef DEBUG
Serial.println();
#endif
return numResults;
}

Custom delay function using the Arduino IDE

I'm in a microprocessors class and we're writing our own delay functions that are actually accurate. Our professor gave us what I assume is a 4 ms delay function. I don't really understand how to transfer that to a 0.25 s or a 1 s delay, which are both needed for my homework.
The given function is as follows (assume _BV(x) is defined as (1 << (x))):
DDRB |= _BV(1);
TCCR1A |= _BV(COM1A0);
TCNT1 = 0;
OCR1A = 100;
TIFR1 = _BV(OCF1A);
TCCR1B |= _BV(CS10);
while((TIFR1 & _BV(OCF1A)) == 0);
TIFR1 = _BV(OCF1A);
OCR1A = 100 + 64000;
while((TIFR1 & _BV(OCF1A)) == 0);
TCCR1B = 0;
TCCR1A = 0;
I've written the code needed to complete the homework except the two delay functions.
Here is what I have so far:
#include <avr/io.h>
uint8_t numIN;
void setup() {
Serial.begin(9600);
DDRB |= _BV(5);
}
void loop() {
int i;
numIN = 10;
Serial.println("Enter a number between 0 and 9.");
do {
while (Serial.available() > 0)
{
numIN = Serial.read() - '0';
if (numIN < 0 || numIN > 9)
{
Serial.println("Input Error!");
}
}
} while (numIN < 0 || numIN > 9);
Serial.print("You entered ");
Serial.println(numIN);
if (isEven(numIN))
{
for (i = 0; i < 5; i++)
{
PORTB |= _BV(5);
delay(1000); //temporary
//delay_Sec();
PORTB &= ~_BV(5);
delay(1000); //temporary
//delay_Sec();
}
}
else
{
for (i = 0; i < 5; i++)
{
PORTB |= _BV(5);
delay(250); //temporary
//delay_quarterSec();
PORTB &= ~_BV(5);
delay(250); //temporary
//delay_quarterSec();
}
}
}
void delay_quarterSec(void)
{
//need to finish
}
void delay_Sec(void)
{
//need to finish
}
boolean isEven(int num)
{
if (num & _BV(0))
return false;
else
return true;
}
I'm just confused how I take my professor's code and transfer it to what I need to do. Any help is greatly appreciated!
I can give you a quick overview of what the provided code does.
(This is from memory, so don't take my word for it. And you don't mention your controller type. You will have to look the registers up in detail.)
DDRB |= _BV(1); // set the compare match output pin to output
TCCR1A |= _BV(COM1A0); // enable output compare PIN toggle
TCNT1 = 0; // set counter start value to 0
OCR1A = 100; // set compare match value to 100 (the actual delay)
TIFR1 = _BV(OCF1A); // clear the output compare flag
TCCR1B |= _BV(CS10); // enable the timer by setting the pre-scaler
while((TIFR1 & _BV(OCF1A)) == 0); // wait until the timer counted to 100 (compare flag is set again)
So the actual delay depends on:
the setting of the prescaler
the clock speed of your controller
the value of OCR1A
Example:
If the prescaler is set to 1 and you run at 10 MHz, you get
t = 100 * (1 / 10 000 000 Hz) = 10 µs
This should get you started.
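Assuming the asker's board is a 16 MHz Uno, the professor's second wait (OCR1A = 100 + 64000) spans 64000 ticks with no prescaling, i.e. 64000 / 16 MHz = 4 ms, which matches the assumed 4 ms delay. For the homework delays it is simpler to use a larger prescaler. A minimal sketch of delay_quarterSec(), assuming Timer 1 in CTC mode with a /1024 prescaler (one tick = 64 µs, so 0.25 s is roughly 3906 ticks):
void delay_quarterSec(void)
{
    TCCR1A = 0;                                  // no waveform output on the compare pins
    TCNT1 = 0;                                   // start counting from zero
    OCR1A = 3905;                                // 3906 ticks * 64 us ~ 0.25 s
    TIFR1 = _BV(OCF1A);                          // clear any stale compare flag
    TCCR1B = _BV(WGM12) | _BV(CS12) | _BV(CS10); // CTC mode, prescaler 1024, start timer
    while ((TIFR1 & _BV(OCF1A)) == 0);           // busy-wait for the compare match
    TCCR1B = 0;                                  // stop the timer
}
delay_Sec() works the same way with OCR1A = 15624 (15625 ticks * 64 µs = 1 s exactly).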

Unexpected float to unsigned char conversion in AVR programming

Microcontroller : ATmega328P in Arduino Uno
Clock Frequency : 16MHz
void timeDelay_CTC(float sec, unsigned char times)
{
unsigned char cycles = (unsigned char)(sec / 0.000064f);
OCR1A = cycles - 1;
TCCR1A = 0b00000000;
TCCR1B = 0b00001101;
for( unsigned char i = 1; i <= times; i++ )
{
while( (TIFR1 & (1<<OCF1A)) == 0 );
TIFR1 |= (1<<OCF1A);
}
TCCR1A = 0;
TCCR1B = 0;
}
The function is used for calculating the number of time-delay cycles and then implementing the delay.
int main(void)
{
//Initialization
LED1_DDR |= (1<<LED1_BIT);
LED1_PORT |= (1<<LED1_BIT);
//Start
while(1)
{
LED1_PORT ^= (1<<LED1_BIT);
timeDelay_CTC(1, 1);
}
}
However, when running the code above, the LED does not toggle. If I just type OCR1A = 15624; (the number of cycles for 1 s), it works well. Therefore, the problem should come from the calculation unsigned char cycles = (unsigned char)(sec / 0.000064f);. I think that the data type conversion may be wrong. Can you teach me how to make it work? Or give me some hints.
Please check the range of unsigned char: it is [0, 255], which is not big enough for your application. Try unsigned int instead.
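A minimal sketch of the fix, assuming the same 64 µs tick (16 MHz clock with the /1024 prescaler already set in TCCR1B); for sec = 1 the result is 15625, which does not fit in an unsigned char:
unsigned int cycles = (unsigned int)(sec / 0.000064f); // 1 s -> 15625, needs 16 bits
OCR1A = cycles - 1;                                    // 15624, the compare value for 1 s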

Blinking an LED continuously within 1 second in C (ATmega328P)

I am using AVR Studio 5 to program the Arduino Uno Rev3 with the Atmel ATmega328P. Now, I am trying to blink an LED continuously within 1 second.
The code is:
PORTD = 0b10001010;
TCNT1H = 0xBB;
TCNT1L = 0xBB;
TCCR1A = 0;
TCCR1B = 5; // prescaler is 1024
while((TIFR1 & (1<<TOV1)) == 0)
{
temp = TCNT1H;
while ((TCNT1H - temp) >= 11);
PORTD ^= 1<<7; // blinking as expected
}
TIFR1 = 1<<TOV1;
TCCR1A = 0;
TCCR1B = 0;
The above code shows that I use Timer 1 lasting for 1 second, in which I attempt to blink the PORTD.7 LED every 0.032768 s.
But now the problem is that the timer works for delaying 1 second, but the LED keeps lighting without blinking. Please help.
(P.S. the circuit works fine)
Complement:
If I use the following code, it shows the LED blinking.
for ( a = 0;a<2;a++)
{
PORTD = 0b00001010;
TCNT1H = 0xEE;
TCNT1L = 0xEE;
TCCR1A = 0;
TCCR1B = 5; // prescaler is 1024
while((TIFR1 & (1<<TOV1)) == 0);
TIFR1 = 1<<TOV1;
TCCR1A = 0;
TCCR1B = 0;
PORTD = 0b10001010;
TCNT1H = 0xEE;
TCNT1L = 0xEE;
TCCR1A = 0;
TCCR1B = 5; // prescaler is 1024
while((TIFR1 & (1<<TOV1)) == 0);
TIFR1 = 1<<TOV1;
TCCR1A = 0;
TCCR1B = 0;
}
But, for simplicity, I prefer the first method if it can be made to work.
while ((TCNT1H - temp) >= 10)
{
PORTD ^= 1<<7; // blinking as expected
}
You are blinking too fast, so fast that what you are actually seeing is an LED at half luminosity. You need to add some delay between two invocations of PORTD ^= 1<<7.
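For example, a minimal polled sketch under the asker's settings (prescaler 1024, same 0xBBBB starting value) that inserts that delay between toggles; the 512-tick threshold (512 * 64 us = 0.0328 s) and the overflow check inside the inner loop are assumptions for illustration:
TCNT1 = 0xBBBB;                       // same ~1 second window as in the question
TCCR1A = 0;
TCCR1B = 5;                           // prescaler 1024
while ((TIFR1 & (1 << TOV1)) == 0)
{
    unsigned int start = TCNT1;
    while ((unsigned int)(TCNT1 - start) < 512)   // wait ~512 ticks between toggles
    {
        if (TIFR1 & (1 << TOV1)) break;           // stop waiting once the 1 s window ends
    }
    PORTD ^= 1 << 7;                  // toggle the LED
}
TIFR1 = 1 << TOV1;
TCCR1B = 0;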
This is something that should be done with an interrupt.
void TMR_init(void)
{
    DDRD |= _BV(PD7);       //bit 7 of port D to output
    TCNT1 = 0;              //reset the timer counter
    OCR1AH = 0x2D;          //depends on your osc. These values are for 12MHz; the high byte must be written before the low byte
    OCR1AL = 0xC6;          //with 12 000 000Hz / 1024 it takes 11718 ticks for 1 sec -> 0x2DC6
    TIMSK1 |= _BV(OCIE1A);  //enable interrupt on output compare A (when timer value == value in the OCR1AL/H registers)
    TCCR1A = 0;             //normal port operation
    TCCR1B = _BV(WGM12) | _BV(CS12) | _BV(CS10); //CTC mode (clear on compare match), prescaler 1024, starts the timer
    sei();                  //enable interrupts
}
//isr
SIGNAL(TIMER1_COMPA_vect)
{
    PORTD ^= _BV(PD7);      //toggle
}
This code should work but is untested. Don't forget to include avr/interrupt.h. Some macros may differ due to version differences of the compiler.
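For completeness, a minimal usage sketch under the same assumptions (12 MHz clock, LED on PD7); the main loop is empty because the toggling happens entirely in the ISR:
#include <avr/io.h>
#include <avr/interrupt.h>
int main(void)
{
    TMR_init();   // configure Timer 1 and enable the compare-match interrupt
    while (1)
    {
        // nothing to do here; PD7 is toggled in the ISR once per second
    }
}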

AVR Butterfly - Timer reuse after internal oscillator calibration

I am calibrating the AVR Butterfly internal oscillator so that I can use the USART, based on the sample code provided by AVR (see code below). As I also want to use two timer-controlled servo motors, I am wondering whether it is possible to reuse 16-bit Timer 1 after the calibration process. I tried resetting TCCR1A/B, but it did not work out (code also below). I hope you can help me out with this.
void OSCCAL_Calibrate(void){
unsigned char calibrate = 0;
int temp;
unsigned char tempL;
CLKPR = (1<<CLKPCE); // set Clock Prescaler Change Enable
// set prescaler = 8, Inter RC 8Mhz / 8 = 1Mhz
CLKPR = (1<<CLKPS1) | (1<<CLKPS0);
TIMSK2 = 0; //disable OCIE2A and TOIE2
ASSR = (1<<AS2); //select asynchronous operation of timer2 (32,768kHz)
OCR2A = 200; // set timer2 compare value
TIMSK0 = 0; // delete any interrupt sources
TCCR1B = (1<<CS10); // start timer1 with no prescaling
TCCR2A = (1<<CS20); // start timer2 with no prescaling
while((ASSR & 0x01) | (ASSR & 0x04)); //wait for TCN2UB and TCR2UB to be cleared
delayMs(1000); // wait for external crystal to stabilise
while(!calibrate)
{
cli(); // disable global interrupt
TIFR1 = 0xFF; // delete TIFR1 flags
TIFR2 = 0xFF; // delete TIFR2 flags
TCNT1H = 0; // clear timer1 counter
TCNT1L = 0;
TCNT2 = 0; // clear timer2 counter
while ( !(TIFR2 & (1<<OCF2A)) ); // wait for timer2 compareflag
TCCR1B = 0; // stop timer1
sei(); // enable global interrupt
if ( (TIFR1 & (1<<TOV1)) )
{
temp = 0xFFFF; // if timer1 overflows, set the temp to 0xFFFF
}else
{ // read out the timer1 counter value
tempL = TCNT1L;
temp = TCNT1H;
temp = (temp << 8);
temp += tempL;
}
if (temp > 6250)
OSCCAL--; // the internal RC oscillator runs too fast, decrease OSCCAL
else if (temp < 6120)
OSCCAL++; // the internal RC oscillator runs too slow, increase OSCCAL
else
calibrate = 1; // the internal RC is correct
TCCR1B = (1<<CS10); // start timer1
}
}
void motorInit(){
// reset timer 1
TCCR1A = 0;
TCCR1B = 0;
// initialize Servo Pins
DDRB |= (1<<PB5) | (1<<PB6);
ICR1H = ICR_VALUE >> 8;
ICR1L = ICR_VALUE & (TOP_VALUE);
// reset OCRs
setServoSpeed(0, 0);
setServoSpeed(1, 0);
// Set Timer mode (PWM Phase & Freq. correct, clear on compare match)
// and prescaler (8)
TCCR1A = ((1<<COM1A1) | (1<<COM1B1));
TCCR1B = ((1<<WGM13) | (0<<CS12) | (1<<CS11) | (0<<CS10));
}
Maybe you can check the code which I used for a project a while ago, but you should take care that I decreased the system frequency to 7.3768 MHz for a 56700 baud rate, which you may need to adjust.
#include <util/atomic.h> // required for ATOMIC_BLOCK
void OSCCAL_Calibrate(void)
{
uint8_t LoopCount = (0x7F / 2);
ATOMIC_BLOCK(ATOMIC_RESTORESTATE)
{
// Make sure all clock division is turned off (8MHz RC clock)
CLKPR = (1 << CLKPCE);
CLKPR = 0x00;
// Initial OSCCAL of half its maximum
OSCCAL = (0x7F / 2);
// Disable timer interrupts
TIMSK1 = 0;
TIMSK2 = 0;
// Set timer 2 to asynchronous mode (32.768KHz crystal)
ASSR = (1 << AS2);
// Ensure timer 1 control register A is cleared
TCCR1A = 0;
// Start both counters with no prescaling
TCCR1B = (1 << CS10);
TCCR2A = (1 << CS20);
// Wait until timer 2's external 32.768KHz crystal is stable
while (ASSR & ((1 << TCN2UB) | (1 << TCR2UB) | (1 << OCR2UB)));
// Clear the timer values
TCNT1 = 0;
TCNT2 = 0;
while (LoopCount--)
{
// Wait until timer 2 overflows
while (!(TIFR2 & (1 << TOV2)));
// Stop timer 1 so it can be read
TCCR1B = 0x00;
// Check timer value against ideal constant
if (TCNT1 > OSCCAL_TARGETCOUNT) // Clock is running too fast
OSCCAL--;
else if (TCNT1 < OSCCAL_TARGETCOUNT) // Clock is running too slow
OSCCAL++;
// Clear timer 2 overflow flag
TIFR2 |= (1 << TOV2);
// Restart timer 1 and clear both counters before the next pass
TCNT1 = 0;
TCNT2 = 0;
TCCR1B = (1 << CS10);
}
}
}
Check this out!
