diff --git a/CMakeLists.txt b/CMakeLists.txt index d5c0df8..85148f7 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -181,22 +181,25 @@ set(PERIPH_SRC ) set(PX4FLOW_HDRS + inc/camera.h inc/communication.h inc/dcmi.h inc/debug.h + inc/filter.h inc/flow.h inc/gyro.h inc/i2c_frame.h inc/i2c.h - inc/led.h inc/main.h inc/mavlink_bridge_header.h inc/mt9v034.h + inc/result_accumulator.h inc/settings.h inc/sonar.h inc/sonar_mode_filter.h inc/stm32f4xx_conf.h inc/stm32f4xx_it.h + inc/timer.h inc/usart.h inc/usb_conf.h inc/usbd_cdc_vcp.h @@ -206,29 +209,30 @@ set(PX4FLOW_HDRS ) set(PX4FLOW_SRC + inc/camera.c src/communication.c src/dcmi.c src/debug.c + src/filter.c src/flow.c src/gyro.c src/i2c.c - src/led.c src/main.c src/mt9v034.c src/reset.c + src/result_accumulator.c src/settings.c src/sonar.c src/sonar_mode_filter.c src/stm32f4xx_it.c src/system_stm32f4xx.c + src/timer.c src/usart.c src/usb_bsp.c src/usbd_cdc_vcp.c src/usbd_desc.c src/usbd_usr.c src/utils.c - src/system_stm32f4xx.c - src/stm32f4xx_it.c ) # building diff --git a/Makefile b/Makefile index d26ead5..d8cbd10 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,3 @@ -# # Copyright (c) 2012-2015 PX4 Development Team. All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -29,6 +28,7 @@ # POSSIBILITY OF SUCH DAMAGE. # + # # Top-level Makefile for building PX4 firmware images. # diff --git a/inc/no_warnings.h b/inc/no_warnings.h deleted file mode 100644 index 77f89ca..0000000 --- a/inc/no_warnings.h +++ /dev/null @@ -1,67 +0,0 @@ -/**************************************************************************** - * - * Copyright (c) 2015 PX4 Development Team. All rights reserved. - * Author: David Sidrane - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * - ****************************************************************************/ -#ifndef NO_WARNINGS_H_ -#define NO_WARNINGS_H_ - -#include - -inline bool FLOAT_AS_BOOL(float f); -inline int FLOAT_EQ_INT(float f , int i); -inline int FLOAT_EQ_FLOAT(float f1 , float f2); - -inline bool FLOAT_AS_BOOL(float f) -{ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wfloat-equal" - return (f != 0.0f); -#pragma GCC diagnostic pop -} - -inline int FLOAT_EQ_INT(float f , int i) -{ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wfloat-equal" - return (f == i); -#pragma GCC diagnostic pop -} - -inline int FLOAT_EQ_FLOAT(float f1 , float f2) -{ -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wfloat-equal" - return (f1 == f2); -#pragma GCC diagnostic pop -} - -#endif /* NO_WARNINGS_H_ */ diff --git a/makefiles/baremetal/config_px4flow-v1_default.mk b/makefiles/baremetal/config_px4flow-v1_default.mk index f2e0bbc..eeb5dd9 100644 --- a/makefiles/baremetal/config_px4flow-v1_default.mk +++ b/makefiles/baremetal/config_px4flow-v1_default.mk @@ -30,7 +30,7 @@ MODULES += drivers/boards/px4flow-v1 # # General system control # -##TODO EXPECT I2C - MODULES += modules/uavcannode +MODULES += modules/i2c_slave # C support MODULES += modules/libc @@ -38,6 +38,7 @@ MODULES += modules/libc # Flow MODULES += modules/flow +MODULES += modules/sonar # Generate parameter XML file diff --git a/makefiles/baremetal/config_px4flow-v2_default.mk b/makefiles/baremetal/config_px4flow-v2_default.mk index 78c20b4..0aec58d 100644 --- a/makefiles/baremetal/config_px4flow-v2_default.mk +++ b/makefiles/baremetal/config_px4flow-v2_default.mk @@ -41,7 +41,7 @@ MODULES += modules/libc # Flow MODULES += modules/flow - +MODULES += modules/lidar-sf10 # Generate parameter XML file #GEN_PARAM_XML = 1 diff --git a/src/drivers/boards/px4flow-v1/module.mk b/src/drivers/boards/px4flow-v1/module.mk index 2714935..fdbed3c 100644 --- a/src/drivers/boards/px4flow-v1/module.mk +++ b/src/drivers/boards/px4flow-v1/module.mk @@ -8,6 +8,7 @@ ABS_BOOTLOADER_SRC := $(PX4_BOOTLOADER_BASE)src/ SRCS = \ px4flow_init.c \ + px4flow_led.c \ BOOTLOADER_SRC = \ $(ABS_BOOTLOADER_SRC)common/boot_app_shared.c \ diff --git a/src/drivers/boards/px4flow-v1/px4flow_init.c b/src/drivers/boards/px4flow-v1/px4flow_init.c index 143855f..fe41e33 100644 --- a/src/drivers/boards/px4flow-v1/px4flow_init.c +++ b/src/drivers/boards/px4flow-v1/px4flow_init.c @@ -134,131 +134,4 @@ __EXPORT int board_reset(int status) } -/**************************************************************************** - * Name: board_led_initialize - * - * Description: - * This functions is called very early in initialization to perform board- - * specific initialization of LED-related resources. This includes such - * things as, for example, configure GPIO pins to drive the LEDs and also - * putting the LEDs in their correct initial state. - * - * NOTE: In most architectures, board_led_initialize() is called from - * board-specific initialization logic. But there are a few architectures - * where this initialization function is still called from common chip - * architecture logic. This interface is not, however, a common board - * interface in any event and, hence, the usage of the name - * board_led_initialize is deprecated. - * - * WARNING: This interface name will eventually be removed; do not use it - * in new board ports. 
New implementations should use the naming - * conventions for "Microprocessor-Specific Interfaces" or the "Board- - * Specific Interfaces" as described above. - * - * Input Parameters: - * None - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_initialize(void) -{ - -} - -/**************************************************************************** - * Name: board_led_on - * - * Description: - * Set the LED configuration into the ON condition for the state provided - * by the led parameter. This may be one of: - * - * LED_STARTED NuttX has been started - * LED_HEAPALLOCATE Heap has been allocated - * LED_IRQSENABLED Interrupts enabled - * LED_STACKCREATED Idle stack created - * LED_INIRQ In an interrupt - * LED_SIGNAL In a signal handler - * LED_ASSERTION An assertion failed - * LED_PANIC The system has crashed - * LED_IDLE MCU is in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. - * - * Input Parameters: - * led - Identifies the LED state to put in the ON state (which may or may - * not equate to turning an LED on) - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_on(int led) -{ -} - -/**************************************************************************** - * Name: board_led_off - * - * Description: - * Set the LED configuration into the OFF condition for the state provided - * by the led parameter. This may be one of: - * - * LED_INIRQ Leaving an interrupt - * LED_SIGNAL Leaving a signal handler - * LED_ASSERTION Recovering from an assertion failure - * LED_PANIC The system has crashed (blinking). - * LED_IDLE MCU is not in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. - * - * Input Parameters: - * led - Identifies the LED state to put in the OFF state (which may or may - * not equate to turning an LED off) - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_off(int led) -{ - -} - -/**************************************************************************** - * Name: board_crashdump - * - * Description: - * If CONFIG_BOARD_CRASHDUMP is selected then up_asseert will call out to - * board_crashdump prior to calling exit in the case of an assertion failure. - * Or in the case of a hardfault looping indefinitely. board_crashdump then - * has a chance to save the state of the machine. The provided - * board_crashdump should save as much information as it can about the cause - * of the fault and then most likely reset the system. - * - * N.B. There are limited system resources that can be used by the provided - * board_crashdump function. The tems from the fact that most critical/fatal - * crashes are because of a hard fault or during interrupt processing. - * In these cases, up_assert is running from the context of an interrupt - * handlerand it is impossible to use any device driver in this context. - * - * Also consider the following: Who knows what state the system is in? Is - * memory trashed? Is the Heap intact? 
Therefore all we can expect to do in - * board_crashdump is save the "machine state" in a place where on the next - * reset we can write it to more sophisticated storage in a sane operating - * environment. - * - ****************************************************************************/ - -__EXPORT void board_crashdump(uint32_t currentsp, void *tcb, uint8_t *filename, - int lineno) -{ - -} diff --git a/src/modules/flow/led.c b/src/drivers/boards/px4flow-v1/px4flow_led.c similarity index 76% rename from src/modules/flow/led.c rename to src/drivers/boards/px4flow-v1/px4flow_led.c index b5cf372..2a3e774 100644 --- a/src/modules/flow/led.c +++ b/src/drivers/boards/px4flow-v1/px4flow_led.c @@ -31,8 +31,40 @@ * POSSIBILITY OF SUCH DAMAGE. * ****************************************************************************/ +#include -#include "led.h" +#include +#include + +#include +#include +#include "stm32f4xx_gpio.h" +#include "stm32f4xx_rcc.h" +#include "stm32f4xx_tim.h" +#include "stm32f4xx_conf.h" +#include "stm32f4xx.h" + +typedef enum +{ + LED_ACT = 0, // Blue + LED_COM = 1, // Amber + LED_ERR = 2, // Red +} Led_TypeDef; + + +#define LEDn 3 + +#define LED_ACTIVITY_PIN GPIO_Pin_3 +#define LED_ACTIVITY_GPIO_PORT GPIOE +#define LED_ACTIVITY_GPIO_CLK RCC_AHB1Periph_GPIOE + +#define LED_BOOTLOADER_PIN GPIO_Pin_2 +#define LED_BOOTLOADER_GPIO_PORT GPIOE +#define LED_BOOTLOADER_GPIO_CLK RCC_AHB1Periph_GPIOE + +#define LED_TEST_PIN GPIO_Pin_7 +#define LED_TEST_GPIO_PORT GPIOE +#define LED_TEST_GPIO_CLK RCC_AHB1Periph_GPIOE /* LED GPIOs */ GPIO_TypeDef* LED_GPIO_PORTS[LEDn] = {LED_ACTIVITY_GPIO_PORT, LED_BOOTLOADER_GPIO_PORT, LED_TEST_GPIO_PORT}; @@ -48,7 +80,7 @@ const uint32_t LED_GPIO_CLKS[LEDn] = {LED_ACTIVITY_GPIO_CLK, LED_BOOTLOADER_GPIO * @arg LED_ERR * @retval None */ -void LEDInit(Led_TypeDef Led) +static void LEDInit(Led_TypeDef Led) { GPIO_InitTypeDef GPIO_InitStructure; @@ -73,7 +105,7 @@ void LEDInit(Led_TypeDef Led) * @arg LED_ERR * @retval None */ -void LEDOn(Led_TypeDef Led) +static void LEDOn(Led_TypeDef Led) { GPIO_ResetBits(LED_GPIO_PORTS[Led],LED_GPIO_PINS[Led]); } @@ -87,7 +119,7 @@ void LEDOn(Led_TypeDef Led) * @arg LED_ERR * @retval None */ -void LEDOff(Led_TypeDef Led) +static void LEDOff(Led_TypeDef Led) { GPIO_SetBits(LED_GPIO_PORTS[Led],LED_GPIO_PINS[Led]); } @@ -101,7 +133,22 @@ void LEDOff(Led_TypeDef Led) * @arg LED_ERR * @retval None */ -void LEDToggle(Led_TypeDef Led) +static void LEDToggle(Led_TypeDef Led) { GPIO_ToggleBits(LED_GPIO_PORTS[Led],LED_GPIO_PINS[Led]); } + +__EXPORT void board_led_initialize(void) +{ + LEDInit(LED_ACT); + LEDInit(LED_COM); + LEDInit(LED_ERR); +} + +__EXPORT void board_led_status_update(float quality) +{ + UNUSED(quality); + LEDToggle(LED_ACT); + LEDOff(LED_COM); + LEDOff(LED_ERR); +} diff --git a/src/drivers/boards/px4flow-v2/px4flow_init.c b/src/drivers/boards/px4flow-v2/px4flow_init.c index 1d63f52..07a54e3 100644 --- a/src/drivers/boards/px4flow-v2/px4flow_init.c +++ b/src/drivers/boards/px4flow-v2/px4flow_init.c @@ -179,131 +179,3 @@ int board_get_serialnumber(uint8_t serial[BOARD_SERIALNUMBER_SIZE]) return BOARD_SERIALNUMBER_SIZE; } -/**************************************************************************** - * Name: board_led_initialize - * - * Description: - * This functions is called very early in initialization to perform board- - * specific initialization of LED-related resources. This includes such - * things as, for example, configure GPIO pins to drive the LEDs and also - * putting the LEDs in their correct initial state. 
- * - * NOTE: In most architectures, board_led_initialize() is called from - * board-specific initialization logic. But there are a few architectures - * where this initialization function is still called from common chip - * architecture logic. This interface is not, however, a common board - * interface in any event and, hence, the usage of the name - * board_led_initialize is deprecated. - * - * WARNING: This interface name will eventually be removed; do not use it - * in new board ports. New implementations should use the naming - * conventions for "Microprocessor-Specific Interfaces" or the "Board- - * Specific Interfaces" as described above. - * - * Input Parameters: - * None - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_initialize(void) -{ - -} - -/**************************************************************************** - * Name: board_led_on - * - * Description: - * Set the LED configuration into the ON condition for the state provided - * by the led parameter. This may be one of: - * - * LED_STARTED NuttX has been started - * LED_HEAPALLOCATE Heap has been allocated - * LED_IRQSENABLED Interrupts enabled - * LED_STACKCREATED Idle stack created - * LED_INIRQ In an interrupt - * LED_SIGNAL In a signal handler - * LED_ASSERTION An assertion failed - * LED_PANIC The system has crashed - * LED_IDLE MCU is in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. - * - * Input Parameters: - * led - Identifies the LED state to put in the ON state (which may or may - * not equate to turning an LED on) - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_on(int led) -{ - -} - -/**************************************************************************** - * Name: board_led_off - * - * Description: - * Set the LED configuration into the OFF condition for the state provided - * by the led parameter. This may be one of: - * - * LED_INIRQ Leaving an interrupt - * LED_SIGNAL Leaving a signal handler - * LED_ASSERTION Recovering from an assertion failure - * LED_PANIC The system has crashed (blinking). - * LED_IDLE MCU is not in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. - * - * Input Parameters: - * led - Identifies the LED state to put in the OFF state (which may or may - * not equate to turning an LED off) - * - * Returned Value: - * None - * - ****************************************************************************/ - -__EXPORT void board_led_off(int led) -{ - -} - -/**************************************************************************** - * Name: board_crashdump - * - * Description: - * If CONFIG_BOARD_CRASHDUMP is selected then up_asseert will call out to - * board_crashdump prior to calling exit in the case of an assertion failure. - * Or in the case of a hardfault looping indefinitely. board_crashdump then - * has a chance to save the state of the machine. The provided - * board_crashdump should save as much information as it can about the cause - * of the fault and then most likely reset the system. - * - * N.B. There are limited system resources that can be used by the provided - * board_crashdump function. 
The tems from the fact that most critical/fatal - * crashes are because of a hard fault or during interrupt processing. - * In these cases, up_assert is running from the context of an interrupt - * handlerand it is impossible to use any device driver in this context. - * - * Also consider the following: Who knows what state the system is in? Is - * memory trashed? Is the Heap intact? Therefore all we can expect to do in - * board_crashdump is save the "machine state" in a place where on the next - * reset we can write it to more sophisticated storage in a sane operating - * environment. - * - ****************************************************************************/ - -__EXPORT void board_crashdump(uint32_t currentsp, void *tcb, uint8_t *filename, - int lineno) -{ - -} diff --git a/src/drivers/boards/px4flow-v2/px4flow_led.c b/src/drivers/boards/px4flow-v2/px4flow_led.c index 269165a..872a57b 100644 --- a/src/drivers/boards/px4flow-v2/px4flow_led.c +++ b/src/drivers/boards/px4flow-v2/px4flow_led.c @@ -43,12 +43,45 @@ #include "stm32f4xx_tim.h" -#define TMR_FREQUENCY STM32_TIMCLK1 +const long fosc = STM32_TIMCLK1; +const long prescale = 128; + /* * PE[09] PE9/TIM1_CH1/FMC_D6 40 TIM1_CH1 R Miswired to B * PE[11] PE11/TIM1_CH2/8PI4_NSS/FMC_D8/LCD_G3 42 TIM1_CH2 G * PE[13] PE13/TIM1_CH3/SPI4_MISO/FMC_D10/LCD_DE 44 TIM1_CH2 B Miswired to R */ + + __EXPORT void board_led_initialize(void) { + RCC_APB2PeriphClockCmd(RCC_APB2Periph_TIM1, ENABLE); + RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOE, ENABLE); + GPIO_InitTypeDef GPIO_InitStructure; + + GPIO_InitStructure.GPIO_Pin = (GPIO_Pin_9 | GPIO_Pin_11 | GPIO_Pin_13); + GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF; + GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; + GPIO_InitStructure.GPIO_OType = GPIO_OType_PP; + GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL; + GPIO_Init(GPIOE, &GPIO_InitStructure); + + // Connect TIM1 pins to AF + GPIO_PinAFConfig(GPIOE, GPIO_PinSource9, GPIO_AF_TIM1); + GPIO_PinAFConfig(GPIOE, GPIO_PinSource11, GPIO_AF_TIM1); + GPIO_PinAFConfig(GPIOE, GPIO_PinSource13, GPIO_AF_TIM1); + + TIM1->EGR |= TIM_PSCReloadMode_Immediate; + TIM1->PSC = prescale; + TIM1->CR1 = TIM_CR1_ARPE | TIM_CR1_CEN; + + TIM1->CCMR1 = (TIM_OCMode_PWM1|TIM_CCMR1_OC1PE) | (TIM_OCMode_PWM1<<8|TIM_CCMR1_OC2PE); + TIM1->CCMR2 = (TIM_OCMode_PWM1|TIM_CCMR1_OC2PE); + + TIM1->CCER |= (TIM_CCER_CC1P|TIM_CCER_CC1E|TIM_CCER_CC2P|TIM_CCER_CC2E|TIM_CCER_CC3P|TIM_CCER_CC3E); + + TIM_CCPreloadControl(TIM1, ENABLE); + TIM_Cmd(TIM1, ENABLE); + TIM_CtrlPWMOutputs(TIM1, ENABLE); +} /**************************************************************************** * Name: board_led_rgb @@ -60,66 +93,33 @@ * red - intensity of the red led * green - intensity of the green led * blue - intensity of the blue led - * hz - number of flashes per second * * Returned Value: * None * ****************************************************************************/ - - -void board_led_rgb(uint16_t red, uint16_t green , uint16_t blue, - uint16_t hz) +static void board_led_rgb(uint16_t red, uint16_t green , uint16_t blue, uint16_t hz) { + const long p1s = fosc / prescale; + const long p0p5s = p1s / 2; - long fosc = TMR_FREQUENCY; - long prescale = 2048; - long p1s = fosc / prescale; - long p0p5s = p1s / 2; - static bool once = 0; - - if (!once) { - once = 1; - - - RCC_APB2PeriphClockCmd(RCC_APB2Periph_TIM1, ENABLE); - RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOE, ENABLE); - GPIO_InitTypeDef GPIO_InitStructure; - - GPIO_InitStructure.GPIO_Pin = (GPIO_Pin_9 | GPIO_Pin_11 | GPIO_Pin_13); - 
GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF; - GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; - GPIO_InitStructure.GPIO_OType = GPIO_OType_PP; - GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL; - GPIO_Init(GPIOE, &GPIO_InitStructure); + long p2 = hz == 0 ? p1s : p1s / hz; + TIM1->ARR = p2; + p2 = hz == 0 ? p1s + 1 : p0p5s / hz; + TIM1->CCR3 = (red * p2) / 255; + TIM1->CCR2 = (green * p2) / 255; + TIM1->CCR1 = (blue * p2) / 255; - // Connect TIM1 pins to AF - GPIO_PinAFConfig(GPIOE, GPIO_PinSource9, GPIO_AF_TIM1); - GPIO_PinAFConfig(GPIOE, GPIO_PinSource11, GPIO_AF_TIM1); - GPIO_PinAFConfig(GPIOE, GPIO_PinSource13, GPIO_AF_TIM1); - - TIM1->EGR |= TIM_PSCReloadMode_Immediate; - TIM1->PSC = prescale; - TIM1->CR1 = TIM_CR1_ARPE | TIM_CR1_CEN; - - TIM1->CCMR1 = (TIM_OCMode_PWM1|TIM_CCMR1_OC1PE) | (TIM_OCMode_PWM1<<8|TIM_CCMR1_OC2PE); - TIM1->CCMR2 = (TIM_OCMode_PWM1|TIM_CCMR1_OC2PE); - - TIM1->CCER |= (TIM_CCER_CC1P|TIM_CCER_CC1E|TIM_CCER_CC2P|TIM_CCER_CC2E|TIM_CCER_CC3P|TIM_CCER_CC3E); - - TIM_CCPreloadControl(TIM1, ENABLE); - TIM_Cmd(TIM1, ENABLE); - TIM_CtrlPWMOutputs(TIM1, ENABLE); - } - - long p2 = hz == 0 ? p1s : p1s / hz; - TIM1->ARR = p2; - p2 = hz == 0 ? p1s + 1 : p0p5s / hz; - TIM1->CCR3 = (red * p2) / 255; - TIM1->CCR2 = (green * p2) / 255; - TIM1->CCR1 = (blue * p2) / 255; - - TIM_Cmd(TIM1, hz == 0 ? DISABLE : ENABLE); + TIM_Cmd(TIM1, hz == 0 ? DISABLE : ENABLE); +} +__EXPORT void board_led_status_update(float quality) { + float alpha = 0.3; + static float filter = 0; + filter = (1-alpha)*filter + alpha*quality; + uint8_t r = 255 * (1-filter); + uint8_t g = 255 * filter; + uint8_t b = 0; + board_led_rgb(r, g, b, 10000); } diff --git a/src/include/bsp/board.h b/src/include/bsp/board.h index 89a4bd9..2c5c410 100644 --- a/src/include/bsp/board.h +++ b/src/include/bsp/board.h @@ -210,114 +210,8 @@ int board_get_serialnumber(uint8_t serial[BOARD_SERIALNUMBER_SIZE]); void board_led_initialize(void); -/**************************************************************************** - * Name: board_led_on - * - * Description: - * Set the LED configuration into the ON condition for the state provided - * by the led parameter. This may be one of: - * - * LED_STARTED NuttX has been started - * LED_HEAPALLOCATE Heap has been allocated - * LED_IRQSENABLED Interrupts enabled - * LED_STACKCREATED Idle stack created - * LED_INIRQ In an interrupt - * LED_SIGNAL In a signal handler - * LED_ASSERTION An assertion failed - * LED_PANIC The system has crashed - * LED_IDLE MCU is in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. - * - * Input Parameters: - * led - Identifies the LED state to put in the ON state (which may or may - * not equate to turning an LED on) - * - * Returned Value: - * None - * - ****************************************************************************/ - -void board_led_on(int led); - -/**************************************************************************** - * Name: board_led_off - * - * Description: - * Set the LED configuration into the OFF condition for the state provided - * by the led parameter. This may be one of: - * - * LED_INIRQ Leaving an interrupt - * LED_SIGNAL Leaving a signal handler - * LED_ASSERTION Recovering from an assertion failure - * LED_PANIC The system has crashed (blinking). - * LED_IDLE MCU is not in sleep mode - * - * Where these values are defined in a board-specific way in the standard - * board.h header file exported by every architecture. 
- * - * Input Parameters: - * led - Identifies the LED state to put in the OFF state (which may or may - * not equate to turning an LED off) - * - * Returned Value: - * None - * - ****************************************************************************/ - -void board_led_off(int led); - -/**************************************************************************** - * Name: board_led_rgb - * - * Description: - * - * Input Parameters: - * - * red - intensity of the red led - * green - intensity of the green led - * blue - intensity of the blue led - * hz - number of flashes per second - * - * Returned Value: - * None - * - ****************************************************************************/ -#if defined(CONFIG_ARCH_BOARD_PX4FLOW_V1) -#define board_led_rgb(red, green , blue, hz) -#endif -#if defined(CONFIG_ARCH_BOARD_PX4FLOW_V2) -weak_function void board_led_rgb(uint16_t red, uint16_t green , uint16_t blue, - uint16_t hz); -#endif - -/**************************************************************************** - * Name: board_crashdump - * - * Description: - * If CONFIG_BOARD_CRASHDUMP is selected then up_asseert will call out to - * board_crashdump prior to calling exit in the case of an assertion failure. - * Or in the case of a hardfault looping indefinitely. board_crashdump then - * has a chance to save the state of the machine. The provided - * board_crashdump should save as much information as it can about the cause - * of the fault and then most likely reset the system. - * - * N.B. There are limited system resources that can be used by the provided - * board_crashdump function. The tems from the fact that most critical/fatal - * crashes are because of a hard fault or during interrupt processing. - * In these cases, up_assert is running from the context of an interrupt - * handlerand it is impossible to use any device driver in this context. - * - * Also consider the following: Who knows what state the system is in? Is - * memory trashed? Is the Heap intact? Therefore all we can expect to do in - * board_crashdump is save the "machine state" in a place where on the next - * reset we can write it to more sophisticated storage in a sane operating - * environment. - * - ****************************************************************************/ +/// Update the LED with the current status +void board_led_status_update(float quality); -void board_crashdump(uint32_t currentsp, void *tcb, uint8_t *filename, - int lineno); __END_DECLS #endif /* __INCLUDE_NUTTX_BOARD_H */ diff --git a/src/include/camera.h b/src/include/camera.h new file mode 100644 index 0000000..926d9a1 --- /dev/null +++ b/src/include/camera.h @@ -0,0 +1,435 @@ +/**************************************************************************** + * + * Copyright (C) 2013 PX4 Development Team. All rights reserved. + * Author: Simon Laube + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. 
Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * + ****************************************************************************/ + +#ifndef CAMERA_H_ +#define CAMERA_H_ + +#include +#include +#include + +/** + * The maximum number of buffers the camera driver supports. + */ +#define CONFIG_CAMERA_MAX_BUFFER_COUNT 5 + +/** + * Number of bits to use for the histogram bins. + * This affects the speed of the algorithm as well as the possible granularity of the other configuration options. + */ +#define CONFIG_CAMERA_EXPOSURE_BIN_BITS (5u) +#define CONFIG_CAMERA_EXPOSURE_BIN_COUNT (1u << CONFIG_CAMERA_EXPOSURE_BIN_BITS) +/** + * The ratio of pixels (in percent) that need to fall in the highest histogram bin to trigger + * the extreme overexpose condition. This is intended to quickly react to highly over-exposed images + * by decreasing the exposure time quickly. + */ +#define CONFIG_CAMERA_EXTREME_OVEREXPOSE_RATIO (20u) +/** + * The ratio of pixels (in percent) that are rejected as outliers when evaluating the histogram starting from + * the brightest pixel. + */ +#define CONFIG_CAMERA_OUTLIER_RATIO (5u) +/** + * The value that the algorithm uses as a target pixel intensity when adjusting the exposure time. + * The algorithm compares the extracted value from the histogram (after outlier rejection) to this value + * and adjusts the exposure time accordingly. + */ +#define CONFIG_CAMERA_DESIRED_EXPOSURE_8B (200u) +/** + * The size of the exposure tolerance band. + * If there is a certain amount of change in the image brightness the tolerance band will decide after + * how much brightness delta the exposure algorithm will start to re-adjust the exposure time. + */ +#define CONFIG_CAMERA_DESIRED_EXPOSURE_TOL_8B (24) +/** + * The smoothing factor of the exposure time. This is the constant of an exponential filter. + * + * new = old * K + (1 - K) * new + * + * This constant influences the speed with which the algorithm reacts to brightness changes. + */ +#define CONFIG_CAMERA_EXPOSURE_SMOOTHING_K (0.83f) +/** + * The interval between updating the exposure value in number of frames. Snapshot and invalid frames are skipped. + */ +#define CONFIG_CAMERA_EXPOSURE_UPDATE_INTERVAL (4) + +struct _camera_sensor_interface; +typedef struct _camera_sensor_interface camera_sensor_interface; + +struct _camera_transport_interface; +typedef struct _camera_transport_interface camera_transport_interface; + +struct _camera_ctx; +typedef struct _camera_ctx camera_ctx; + +typedef struct _camera_img_param { + struct _size { + uint16_t x; ///< Image size in x direction. 
+ uint16_t y; ///< Image size in y direction. + } size; /**< Image size. + * The image size is constrained by the transfer size of the camera_transport_interface. + * The image size in bytes must be an exact multiple of the transfer size + * as well as at least two times the transfer size. */ + uint8_t binning; ///< Column and row binning ratio. +} camera_img_param; + +/** + * Struct holding image parameters for the camera. + */ +typedef struct _camera_img_param_ex { + camera_img_param p; ///< User editable parameters. + uint32_t exposure; ///< Exposure time in master clock times. + float analog_gain; ///< Analog gain value. 1 - 4. +} camera_img_param_ex; + +/** + * Struct holding information about an image and a pointer to the buffer itself. + */ +typedef struct _camera_image_buffer { + camera_img_param_ex param; ///< The parameters of the image that is stored in the buffer. + uint32_t frame_number; ///< The frame number. This number increases with every frame that is captured from the camera module. + uint32_t timestamp; ///< Frame timestamp in microseconds. + void *buffer; ///< Pointer to the buffer itself. + size_t buffer_size; ///< Maximum size of the buffer. Note: this is not the actual size of the image in the buffer. + uint32_t meta; ///< Buffer meta data not used by the camera driver. This can be used to identify buffers. +} camera_image_buffer; + +#define BuildCameraImageBuffer(memory_variable) ((const camera_image_buffer){.buffer = memory_variable, .buffer_size = sizeof(memory_variable)}) + +/** + * Callback which is called when a snapshot capture has finished. + * @note This callback may be called from an interrupt handler. + * @param success True when capture was successful, false if it failed due to desynchronization. + * You should retry in the mainloop after calling camera_snapshot_acknowledge. + */ +typedef void (*camera_snapshot_done_cb)(bool success); + +/** + * Initializes the camera driver. + * @param ctx The context to use. + * @param sensor The sensor interface to use. + * @param transport The sensor data transport interface to use. + * @param exposure_min_clks Minimum exposure in clocks. + * @param exposure_max_clks Maximum exposure in clocks. + * @param analog_gain_max Maximum analog gain. > 1 depending on what the sensor supports. (mt9v034: 1-4) + * @param img_param The initial image parameters to use for img_stream operation mode. + * @param buffers Array of initialized buffers to use for buffering the images in img_stream mode. + * In each camera_image_buffer the buffer and buffer_size members must be correctly set. + * The camera_img_stream_get_buffers function will return pointers to one of these (copied) buffers. + * The buffer can reside in the CCM of the microcontroller. + * The structs are copied to an internal data-structure. + * @param buffer_count Number of buffers that are passed in buffers. + * Must be lower than or equal the configured CAMERA_MAX_BUFFER_COUNT. + * There must be at least one more buffer than what is requested in camera_img_stream_get_buffers. + * More buffers will reduce the latency when frames are skipped. + * @return True when successful. + */ +bool camera_init(camera_ctx *ctx, const camera_sensor_interface *sensor, const camera_transport_interface *transport, + uint32_t exposure_min_clks, uint32_t exposure_max_clks, float analog_gain_max, + const camera_img_param *img_param, + camera_image_buffer buffers[], size_t buffer_count); + + +/** + * Reconfigures the general camera parameters. 
+ * Call this after some MAVLINK parameters have changed. + * @param ctx The context to use. + */ +void camera_reconfigure_general(camera_ctx *ctx); + +/** + * Schedules the new parameters to take effect as soon as possible. This function makes sure that + * parameter changes are commited to the sensor in a time window which will guarantee glitch free operation. + * @param ctx The context to use. + * @param img_param The new image parameters to use. Note that the image must still fit inside the buffers passed + * to camera_init. + * @return True on success. + */ +bool camera_img_stream_schedule_param_change(camera_ctx *ctx, const camera_img_param *img_param); + +/** + * Gets the most recent images. If no more recent image is available this function returns immediately without doing anything. + * @param ctx The context to use. + * @param buffers Pointer to an array of buffer pointers which are updated to point to the most recent buffers. + * The first buffer pointer will be updated to the most recent image followed by the next + * pointer which is updated to the next older image and so on. + * It is guaranteed to retrieve consecutive images. + * @param count Number of most recent images to retrieve. Must be lower than buffer_count - 1 which was + * passed to camera_init. + * @param check_for_new When true the function will return true only if a new frame is pending. It will then clear the + pending flag. + * @return true when a set of count consecutive most recent images have been retrieved. + false when two consecutive images are not ready + * @note When this function is successful (return value 0) the buffers need to be returned to the camera driver before + * requesting new buffers. (use camera_img_stream_return_buffers) + */ +bool camera_img_stream_get_buffers(camera_ctx *ctx, camera_image_buffer *buffers[], size_t count, bool wait_for_new); + +/** + * Returns the buffers that have been retrieved by camera_img_stream_get_buffers back to the camera driver. + * This function must be called when image processing has been finished. + * @param ctx The context to use. + * @param buffers Pointer to an array of buffer pointers which will be returned to the camera driver. + * @param count Number of buffer pointers in the array. + */ +void camera_img_stream_return_buffers(camera_ctx *ctx, camera_image_buffer *buffers[], size_t count); + +/** + * Schedules a snapshot to be taken with the camera. The snapshot will be scheduled as soon as possible. + * Only one snapshot can be scheduled at a time. + * @param ctx The context to use. + * @param img_param The image parameters for the snapshot image. + * @param dst The destination buffer which should receive the image. + * The buffer and buffer_size members must be correctly set. + * The pointed variable must remain valid until the snapshot capture is done. + * @param cb Callback function which is called when the snapshot has been taken. + * Note that the callback is called from the interrupt context and should only do minimal stuff + * like setting a flag to notify the main loop. Calling camera_* functions is not allowed. + * camera_snapshot_acknowledge must be called as soon as possible afterwards in the mainloop + * regardless of the success status of the snapshot. + * @return True when snapshot has been successfully scheduled. 
+ */ +bool camera_snapshot_schedule(camera_ctx *ctx, const camera_img_param *img_param, camera_image_buffer *dst, camera_snapshot_done_cb cb); + +/** + * This function must be called right after the snapshot callback function has been called. No other calls to the camera driver are allowed. + * @note This function must not be called directly from the callback! + * @param ctx The context to use. + */ +void camera_snapshot_acknowledge(camera_ctx *ctx); + +/** + * Camera sensor configuration interface. + * + * The functions inside this interface are divided into two sets of function classes: + * + * A) init, prepare_update_param, reconfigure_general + * These functions may only be called from the mainloop and have a possible long run-time. They may be interrupted by any of the class B functions. + * The sensor interface has to make sure that there will be no conflict when a class B function starts to run while a class A function is not finished yet. + * + * B) restore_previous_param, notify_readout_start, notify_readout_end, update_exposure_param, get_current_param + * These functions may be called from an interrupt context and may interrupt class A functions. They have to make sure to not access resources in use + * by class A functions. + * + * The camera driver will make sure that only one function of each class is entered at the same time and that class B functions are never interrupted by class A functions. + * + * + * The sequence of functions that are called is the following one: + * + * === First buffer of a new frame has arrived === + * notify_readout_start() + * get_current_param() -> returned parameters are used to calculate the number of buffers we need to receive for this frame. + * restore_previous_param() (optional if switching back to the previous parameters is desired) + * + * === ... receiving the buffers between the first and the last ... === + * + * === Last buffer of the current frame has arrived === + * notify_readout_end() + * update_exposure_param() (optional if the exposure parameters have been updated) + * + * ==> repeat + * + */ +struct _camera_sensor_interface { + void *usr; ///< User pointer that should be passed to the following interface functions. + /** + * Initializes the sensor and the hardware of the microcontroller. + * @param usr User pointer from this struct. + * @param img_param The image parameters to use for initialization. + * @return true on success. + */ + bool (*init)(void *usr, const camera_img_param_ex *img_param); + /** + * Prepares the sensor to switch to new parameters. + * This function should perform most of the work that is needed to update the sensor with new parameters. + * @param usr User pointer from this struct. + * @param img_param The new image parameters. + * @return true on success. + */ + bool (*prepare_update_param)(void *usr, const camera_img_param_ex *img_param); + /** + * Reconfigures the general sensor parameters. + */ + void (*reconfigure_general)(void *usr); + /** + * Immediately switches back to the previous parameters. + * @param usr User pointer from this struct. + */ + void (*restore_previous_param)(void *usr); + /** + * Called every frame just after readout has started (but not completed yet). + * This function may be used to switch the sensor to new parameters that have been previously prepared. + * @param usr User pointer from this struct. + * @note This function may be called from an interrupt vector and should do as little work as necessary. 
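For reference, a minimal usage sketch of the image-stream API declared above. It only exercises calls taken from this header (camera_init, BuildCameraImageBuffer, camera_img_stream_get_buffers, camera_img_stream_return_buffers); the frame size, binning, exposure limits and gain limit are placeholder assumptions, and obtaining the sensor/transport interfaces is left out of the sketch.

#include <stdbool.h>
#include <stdint.h>
#include "camera.h"

#define EX_FRAME_SIZE 64   /* placeholder image size; must respect the transfer-size constraint above */

static camera_ctx ex_cam;
static uint8_t ex_buf_mem[3][EX_FRAME_SIZE * EX_FRAME_SIZE];

void example_img_stream_loop(const camera_sensor_interface *sensor,
                             const camera_transport_interface *transport)
{
    camera_image_buffer bufs[3] = {
        BuildCameraImageBuffer(ex_buf_mem[0]),
        BuildCameraImageBuffer(ex_buf_mem[1]),
        BuildCameraImageBuffer(ex_buf_mem[2]),
    };
    camera_img_param p = {.size = {.x = EX_FRAME_SIZE, .y = EX_FRAME_SIZE}, .binning = 4};

    /* exposure limits (in clocks) and maximum analog gain are placeholder values */
    if (!camera_init(&ex_cam, sensor, transport, 100, 10000, 4.0f, &p, bufs, 3)) {
        return;
    }

    for (;;) {
        camera_image_buffer *frames[2];
        /* only succeeds when a new frame is pending; frames[0] is the newest image */
        if (camera_img_stream_get_buffers(&ex_cam, frames, 2, true)) {
            /* ... process frames[0] / frames[1] here ... */
            camera_img_stream_return_buffers(&ex_cam, frames, 2);
        }
    }
}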
+ */ + void (*notify_readout_start)(void *usr); + /** + * Called every frame just after readout has finished. + * @param usr User pointer from this struct. + * @note This function may be called from an interrupt vector and should do as little work as necessary. + */ + void (*notify_readout_end)(void *usr); + /** + * Called at the end of the readout after notify_readout_end to update the exposure parameters of the current context. + * @return Return true if it was possible to perform the update. + */ + bool (*update_exposure_param)(void *usr, uint32_t exposure, float gain); + /** + * Called to retrieve the image parameters of the current frame that is being output. + * This function is called after notify_readout_start to retrieve the + * parameters that were in effect at the time the image was taken. + * @param img_data_valid boolean which receives a flag whether the image data is valid or not. + */ + void (*get_current_param)(void *usr, camera_img_param_ex *img_param, bool *img_data_valid); +}; + +/** + * Callback for notifying the camera driver about a completed transfer. + * @param usr The user data pointer that has been specified in the init function. (cb_usr) + * @param buffer The buffer that contains the data of the transfer was completed. + * @param size The size of the transfer. + */ +typedef void (*camera_transport_transfer_done_cb)(void *usr, const void *buffer, size_t size); + +/** + * Callback for notifying the camera driver about a completed frame. + * This callback should be called AFTER the last camera_transport_transfer_done_cb call of the frame. + * @param usr The user data pointer that has been specified in the init function. (cb_usr) + * @param probably_infront_dma A hint to the camera driver that the frame done call might be just in front of the transfer done call. + * It is up to the camera driver to interpret it. + */ +typedef void (*camera_transport_frame_done_cb)(void *usr, bool probably_infront_dma); + +/** Camera image transport interface. + */ +struct _camera_transport_interface { + void *usr; ///< User pointer that should be passed to the following interface functions. + size_t transfer_size; ///< Transfer size of this transport. + /** + * Initializes the sensor and the hardware of the microcontroller. + * @param usr User pointer from this struct. + * @param transfer_done_cb Callback which should be called when a transfer has been completed. + * @param frame_done_cb Callback which should be called when a frame has been completed. + * @param cb_usr Callback user data pointer which should be passed to the transfer_done_cb + * and frame_done_cb callbacks. + * @return True on success. + */ + bool (*init)(void *usr, + camera_transport_transfer_done_cb transfer_done_cb, + camera_transport_frame_done_cb frame_done_cb, + void *cb_usr); + /** + * This function is called to reset the transport layer to align it with the frame boundaries. + * It is called in the frame_done_cb after a few frames when we can be sure that the camera module has the correct resolution. + */ + void (*reset)(void *usr); +}; + +/** + * The camera driver context struct. + */ +struct _camera_ctx { + /* startup control */ + volatile int resync_discard_frame_count; ///< Number of frames to discard at startup. + + /* assets of the camera driver */ + + const camera_sensor_interface *sensor; ///< Sensor interface. + const camera_transport_interface *transport; ///< Transport interface. + + uint32_t exposure_min_clks; ///< Minimum exposure in clocks. 
+ uint32_t exposure_max_clks; ///< Maximum exposure in clocks. + float analog_gain_max; ///< Maximum gain. + + /* exposure control */ + + float analog_gain; ///< Current analog gain. + uint32_t exposure; ///< Current exposure in clocks. + + float exposure_smoothing; ///< Exposure smoothing variable. + + int exposure_skip_frame_cnt; ///< Counter to skip frames between exposure calculations. + + uint16_t exposure_sampling_stride; ///< Image sampling stride used for calculating exposure. + uint16_t exposure_bins[CONFIG_CAMERA_EXPOSURE_BIN_COUNT];///< Histogram of the sampled pixels. + uint16_t exposure_hist_count; ///< Number of sampled pixels in the histogram. + int last_brightness; ///< The last brightness value that has been extracted from the histogram. + + /* image streaming buffer and parameters */ + + camera_img_param_ex img_stream_param; ///< The parameters of the image streaming mode. + camera_img_param img_stream_param_pend; ///< The pending image streaming mode parameters. + camera_image_buffer buffers[CONFIG_CAMERA_MAX_BUFFER_COUNT];///< The image buffers for image stream mode. + size_t buffer_count; ///< Total number of buffers. + volatile uint8_t buf_avail[CONFIG_CAMERA_MAX_BUFFER_COUNT];///< Indexes to the buffers that are available. Ordered in the MRU order. + volatile uint8_t buf_avail_count; ///< Number of buffer indexes in the avail_bufs array. + volatile uint8_t buf_put_back_pos; ///< Position where to put back the reserved buffers. + + volatile bool new_frame_arrived; ///< Flag which is set by the interrupt handler to notify that a new frame has arrived. + volatile bool buffers_are_reserved; ///< True when buffers have been reserved by the camera_img_stream_get_buffers function and need to be returned. + + /* image snapshot buffer and parameters */ + + camera_img_param_ex snapshot_param; ///< The parameters of the snapshot mode. + camera_image_buffer *snapshot_buffer; ///< Pointer to buffer which receives the snapshot. NULL when no snapshot is pending. + camera_snapshot_done_cb snapshot_cb; ///< Callback pointer which is called when the snapshot is complete. + + /* frame acquisition */ + + camera_img_param_ex cur_frame_param; ///< Parameters of the frame we are currently receiving. + uint32_t cur_frame_number; ///< Number of the frame we are currently receiving. + camera_image_buffer *cur_frame_target_buf; ///< Target buffer of the frame we are currently receiving. When NULL we are discarding the frame. + int cur_frame_target_buf_idx; ///< When the target buffer points to one of the image stream buffers this is set to the index of that buffer. -1 otherwise. + uint32_t cur_frame_size; ///< The number of bytes that need to be received until the frame is complete. + uint32_t cur_frame_pos; ///< The current byte position inside the frame that is beeing received. + + /* sequencing */ + + int frame_done_infront_count; + + volatile bool seq_frame_receiving; ///< True when we are currently receiving a frame. + volatile bool seq_updating_sensor; ///< True when the mainloop is currently calling sensor functions to update the sensor context. + volatile bool seq_repeat_updating_sensor; /**< This variable is set when the interrupt hanlder could not update the exposure settings because seq_updating_sensor was set. + * After the mainloop has finished updating the sensor context, it should repeat it immediatly with new exposure settings. */ + volatile bool seq_snapshot_active; /**< Flag that is asserted as long as a snapshot is not finished. 
+ * While asserted the camera_img_stream_schedule_param_change function + * should not update the sensor. It should write the new parameters to the img_stream_param_pend variable. */ + volatile bool seq_pend_reconfigure_general; /**< Flag signalling that a general reconfiguration is pending because seq_snapshot_active was active. */ + volatile bool seq_pend_img_stream_param; /**< Flag that is set when pending img stream parameter updates are in the img_stream_param_pend variable. + * These updates will be written to the sensor in the camera_snapshot_acknowledge function. */ +}; + +#endif /* CAMERA_H_ */ \ No newline at end of file diff --git a/src/include/dcmi.h b/src/include/dcmi.h index b127273..4e51d96 100644 --- a/src/include/dcmi.h +++ b/src/include/dcmi.h @@ -35,44 +35,16 @@ #define DCMI_H_ #include -#include "mt9v034.h" +#include "camera.h" -#define DCMI_DR_ADDRESS 0x50050028 - -/** - * @brief Copy image to fast RAM address - */ -void dma_copy_image_buffers(uint8_t ** current_image, uint8_t ** previous_image, uint16_t buffer_size, uint8_t image_step); - -/** - * @brief Send calibration image with MAVLINK over USB - */ -void send_calibration_image(uint8_t ** image_buffer_fast_1, uint8_t ** image_buffer_fast_2); +#define CONFIG_DCMI_DMA_BUFFER_SIZE (1024u) -/** - * @brief Initialize DCMI DMA and enable image capturing - */ -void enable_image_capture(void); - -/* Init Functions */ -void dcmi_clock_init(void); -void dcmi_hw_init(void); -void dcmi_dma_init(uint16_t buffer_size); -void dcmi_it_init(void); -void dma_it_init(void); +const camera_transport_interface *dcmi_get_transport_interface(void); /* Interrupt Handlers */ void DCMI_IRQHandler(void); void DMA2_Stream1_IRQHandler(void); -void dcmi_dma_enable(void); -void dcmi_dma_disable(void); -void dma_reconfigure(void); -void dcmi_restart_calibration_routine(void); -void dma_swap_buffers(void); - -uint32_t get_time_between_images(void); -uint32_t get_frame_counter(void); -void reset_frame_counter(void); +#define DCMI_DR_ADDRESS 0x50050028 #endif /* DCMI_H_ */ diff --git a/src/include/utils.h b/src/include/distance.h similarity index 82% rename from src/include/utils.h rename to src/include/distance.h index 92134b9..297e069 100644 --- a/src/include/utils.h +++ b/src/include/distance.h @@ -1,7 +1,6 @@ /**************************************************************************** * - * Copyright (C) 2013 PX4 Development Team. All rights reserved. - * -> Code from CodeForge.com + * Copyright (c) 2013-2015 PX4 Development Team. All rights reserved. 
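For reference, the exposure-control state above (exposure, exposure_smoothing, exposure_bins, last_brightness) feeds the auto-exposure loop whose constants sit at the top of camera.h, where the smoothing step is documented as new = old * K + (1 - K) * new. The driver code itself lives in camera.c and is not part of this hunk; the snippet below only illustrates that filter form, and the function name plus the clamping to the camera_init() limits are assumptions.

#include <stdint.h>
#include "camera.h"

/* Illustration only -- not the camera.c implementation. Applies the documented
 * smoothing  new = old * K + (1 - K) * new  to a target exposure and clamps the
 * result to the exposure range passed to camera_init(). */
static uint32_t example_smooth_exposure(float *smoothed, uint32_t target_clks,
                                        uint32_t min_clks, uint32_t max_clks)
{
    const float k = CONFIG_CAMERA_EXPOSURE_SMOOTHING_K;

    *smoothed = *smoothed * k + (1.0f - k) * (float)target_clks;

    if (*smoothed < (float)min_clks) { *smoothed = (float)min_clks; }
    if (*smoothed > (float)max_clks) { *smoothed = (float)max_clks; }

    return (uint32_t)*smoothed;
}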
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -32,16 +31,15 @@ * ****************************************************************************/ -#ifndef UTILS_H_ -#define UTILS_H_ - +#ifndef DISTANCE_H_ +#define DISTANCE_H_ #include +#include -/** -* @brief Float to Ascii string -*/ -char* ftoa(float f); -void ltoa(char *buf, unsigned long i, int base); -void itoa(char *buf, unsigned int i, int base); +void distance_init(void); +void distance_trigger(void); +void distance_readback(void); +bool distance_read(float* distance_filtered, float* distance_raw); +uint32_t get_distance_measure_time(void); -#endif /* UTILS_H_ */ +#endif //DISTANCE_H_ diff --git a/src/include/sonar_mode_filter.h b/src/include/distance_mode_filter.h similarity index 91% rename from src/include/sonar_mode_filter.h rename to src/include/distance_mode_filter.h index e22e972..0972ebf 100644 --- a/src/include/sonar_mode_filter.h +++ b/src/include/distance_mode_filter.h @@ -31,17 +31,17 @@ * ****************************************************************************/ -#ifndef SONAR_MODE_FILTER_H_ -#define SONAR_MODE_FILTER_H_ +#ifndef DISTANCE_MODE_FILTER_H_ +#define DISTANCE_MODE_FILTER_H_ #ifdef __cplusplus extern "C" { #endif -float insert_sonar_value_and_get_mode_value(float insert); +float insert_distance_value_and_get_mode_value(float insert); #ifdef __cplusplus } #endif -#endif /* SONAR_MODE_FILTER_H_ */ +#endif /* DISTANCE_MODE_FILTER_H_ */ diff --git a/src/include/flow.h b/src/include/flow.h index af5989b..dfb8f54 100644 --- a/src/include/flow.h +++ b/src/include/flow.h @@ -36,10 +36,82 @@ #include +#define FLOW_FRAME_SIZE 64 + +typedef struct _flow_raw_result { + float x; ///< The flow in x direction + float y; ///< The flow in y direction + float quality; ///< The quality of this result. 0 = bad + uint8_t at_x; ///< The mid-position of the patch that was used to calculate the flow. + uint8_t at_y; ///< The mid-position of the patch that was used to calculate the flow. +} flow_raw_result; + +typedef struct _flow_klt_image { + uint8_t *image; + uint8_t preprocessed[(FLOW_FRAME_SIZE * FLOW_FRAME_SIZE) / 2]; + uint32_t meta; +} flow_klt_image; + +/** + * @brief Computes pixel flow from image1 to image2 + * Searches the corresponding position in the new image (image2) of max. 64 pixels from the old image (image1). + * @param image1 The older image + * @param image2 The new image + * @param x_rate The gyro x-rate during the frame interval in pixels. (In the image x direction) + * @param y_rate The gyro y-rate during the frame interval in pixels. (In the image y direction) + * @param z_rate The gyro z-rate during the frame interval in radians. + * @param out Array which receives the raw result vectors computed for the blocks in the image. + * @param max_out The available space in the out buffer. + * @return The number of results written to the out buffer. + */ +uint16_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rate, float z_rate, + flow_raw_result *out, uint16_t max_out); + +/* + * Returns the capability of the flow algorithm. + */ +float get_flow_capability(void); + /** - * @brief Computes pixel flow from image1 to image2 + * Preprocesses the image for use with compute_klt. + * This will add the pyramid levels. + * The pointed memory needs to be in CCM. 
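For reference, the renamed distance.h above replaces the sonar-specific interface with a generic distance API. A minimal polling sketch follows; the ordering of distance_trigger()/distance_readback() and their exact semantics are assumptions based on the declarations, since the implementation is not part of this hunk.

#include <stdbool.h>
#include "distance.h"

void example_distance_poll(void)
{
    float dist_filtered = 0.0f;
    float dist_raw = 0.0f;

    distance_init();

    for (;;) {
        distance_trigger();   /* start a new measurement (assumed semantics) */
        distance_readback();  /* collect the reply from the sensor (assumed semantics) */

        /* returns true when a valid measurement is available */
        if (distance_read(&dist_filtered, &dist_raw)) {
            /* use dist_filtered / dist_raw, e.g. to scale pixel flow to metric flow */
        }
    }
}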
*/ -uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rate, float z_rate, - float *histflowx, float *histflowy); +void klt_preprocess_image(uint8_t *image, flow_klt_image *klt_image); + +/** + * @brief Computes pixel flow from image1 to image2 + * Searches the corresponding position in the new image (image2) of max. 64 pixels from the old image (image1) + * with the KLT method and outputs the value of all flow vectors. + * @NOTE call klt_preprocess_image on the images first! (No need to call it on the previous image again if it has been treated already) + * @param image1 The older image + * @param image2 The new image + * @param x_rate The gyro x-rate during the frame interval in pixels. (In the image x direction) + * @param y_rate The gyro y-rate during the frame interval in pixels. (In the image y direction) + * @param z_rate The gyro z-rate during the frame interval in radians. + * @param out Array which receives the raw result vectors computed for the blocks in the image. + * @param max_out The available space in the out buffer. + * @return The number of results written to the out buffer. + */ +uint16_t compute_klt(flow_klt_image *image1, flow_klt_image *image2, float x_rate, float y_rate, float z_rate, + flow_raw_result *out, uint16_t max_out); + +/* + * Returns the capability of the KLT algorithm. + */ +float get_flow_klt_capability(void); + +/** +* +* @brief Extracts pixel flow from the result vector +* @param in Raw result vector from flow calculation. +* @param result_count Number of results in flow_raw_result. +* @param px_flow_x Receives the pixel flow in x direction. +* @param px_flow_y Receives the pixel flow in y direction. +* @param accuracy_p Outlier detection threshold in percent. (0 - 1). +* @param accuracy_px Minimum outlier detection threshold in absolute pixel flow values. +*/ +uint8_t flow_extract_result(flow_raw_result *in, uint16_t result_count, float *px_flow_x, float *px_flow_y, + float accuracy_p, float accuracy_px); #endif /* FLOW_H_ */ diff --git a/src/include/legacy_i2c.h b/src/include/fmu_comm.h similarity index 77% rename from src/include/legacy_i2c.h rename to src/include/fmu_comm.h index d551912..9999f19 100644 --- a/src/include/legacy_i2c.h +++ b/src/include/fmu_comm.h @@ -1,6 +1,6 @@ /**************************************************************************** * - * Copyright (C) 2013 PX4 Development Team. All rights reserved. + * Copyright (c) 2013-2015 PX4 Development Team. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -30,13 +30,22 @@ * POSSIBILITY OF SUCH DAMAGE. 
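For reference, the reworked flow.h above now reports per-patch raw results and leaves the aggregation to flow_extract_result(). The sketch below shows that two-step call using only declarations from this header; the result-array capacity and the outlier thresholds are placeholder values, and the gyro rates are expected in pixels (x/y) and radians (z) as stated in the comments.

#include <stdint.h>
#include "flow.h"

#define EX_MAX_RAW_RESULTS 64   /* placeholder capacity for the raw result vectors */

void example_flow_step(uint8_t *prev_img, uint8_t *cur_img,
                       float gyro_x_px, float gyro_y_px, float gyro_z_rad)
{
    flow_raw_result raw[EX_MAX_RAW_RESULTS];
    float flow_x = 0.0f;
    float flow_y = 0.0f;

    /* per-patch flow vectors, compensated with the supplied gyro rates */
    uint16_t n = compute_flow(prev_img, cur_img,
                              gyro_x_px, gyro_y_px, gyro_z_rad,
                              raw, EX_MAX_RAW_RESULTS);

    /* reduce the raw vectors to a single pixel-flow estimate;
     * 0.5 (relative) and 1.0 px (absolute) are placeholder outlier thresholds */
    uint8_t quality = flow_extract_result(raw, n, &flow_x, &flow_y, 0.5f, 1.0f);

    (void)quality;   /* quality of the aggregate estimate (0 = bad, by the raw-result convention) */
}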
* ****************************************************************************/ +#ifndef FMU_COMM_H +#define FMU_COMM_H -#pragma once +/// Abstraction over the FMU communications interface -- I2C slave or UAVCAN -#include -#include -#include +#include +#include "result_accumulator.h" + +/// Call on boot to initialize +void fmu_comm_init(void); + +/// Call in the main loop periodically +void fmu_comm_run(void); + +/// Call every frame to update with new data +void fmu_comm_update(const result_accumulator_frame* frame); + +#endif -typedef struct legacy_12c_data_t { - uint64_t time_stamp_utc; -} legacy_12c_data_t; diff --git a/src/include/i2c.h b/src/include/i2c.h deleted file mode 100644 index 47d790b..0000000 --- a/src/include/i2c.h +++ /dev/null @@ -1,56 +0,0 @@ -/**************************************************************************** - * - * Copyright (c) 2013-2015 PX4 Development Team. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - ****************************************************************************/ - -/** - * @file i2c.h - * I2C communication functions. 
- * @author Thomas Boehm - */ - - -#ifndef I2C_H_ -#define I2C_H_ -#include -#include - -#define I2C1_OWNADDRESS_1_BASE 0x42 //7bit base address -/** - * @brief Configures I2C1 for communication as a slave (default behaviour for STM32F) - */ - -void i2c_init(void); -void update_TX_buffer(float pixel_flow_x, float pixel_flow_y, float flow_comp_m_x, float flow_comp_m_y, uint8_t qual, - float ground_distance, float x_rate, float y_rate, float z_rate, int16_t gyro_temp, legacy_12c_data_t *pd); -char i2c_get_ownaddress1(void); -#endif /* I2C_H_ */ - diff --git a/src/include/i2c_frame.h b/src/include/i2c_frame.h index b391b1f..343f4a7 100644 --- a/src/include/i2c_frame.h +++ b/src/include/i2c_frame.h @@ -55,7 +55,7 @@ typedef struct i2c_frame int16_t gyro_y_rate; int16_t gyro_z_rate; uint8_t gyro_range; - uint8_t sonar_timestamp; + uint8_t distance_timestamp; int16_t ground_distance; } i2c_frame; @@ -71,7 +71,7 @@ typedef struct i2c_integral_frame int16_t gyro_y_rate_integral; int16_t gyro_z_rate_integral; uint32_t integration_timespan; - uint32_t sonar_timestamp; + uint32_t distance_timestamp; uint16_t ground_distance; int16_t gyro_temperature; uint8_t qual; diff --git a/src/include/led.h b/src/include/led.h deleted file mode 100644 index e445cb1..0000000 --- a/src/include/led.h +++ /dev/null @@ -1,69 +0,0 @@ -/**************************************************************************** - * - * Copyright (C) 2013 PX4 Development Team. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * - ****************************************************************************/ - -#ifndef LED_H_ -#define LED_H_ - -#include "stm32f4xx_conf.h" -#include "stm32f4xx.h" - -typedef enum -{ - LED_ACT = 0, // Blue - LED_COM = 1, // Amber - LED_ERR = 2, // Red -} Led_TypeDef; - - -#define LEDn 3 - -#define LED_ACTIVITY_PIN GPIO_Pin_3 -#define LED_ACTIVITY_GPIO_PORT GPIOE -#define LED_ACTIVITY_GPIO_CLK RCC_AHB1Periph_GPIOE - -#define LED_BOOTLOADER_PIN GPIO_Pin_2 -#define LED_BOOTLOADER_GPIO_PORT GPIOE -#define LED_BOOTLOADER_GPIO_CLK RCC_AHB1Periph_GPIOE - -#define LED_TEST_PIN GPIO_Pin_7 -#define LED_TEST_GPIO_PORT GPIOE -#define LED_TEST_GPIO_CLK RCC_AHB1Periph_GPIOE - - -void LEDInit(Led_TypeDef Led); -void LEDOn(Led_TypeDef Led); -void LEDOff(Led_TypeDef Led); -void LEDToggle(Led_TypeDef Led); - - -#endif /* LED_H_ */ diff --git a/src/include/main.h b/src/include/main.h index 2abc9e7..b22024b 100644 --- a/src/include/main.h +++ b/src/include/main.h @@ -34,11 +34,6 @@ #ifndef __PX4_FLOWBOARD_H #define __PX4_FLOWBOARD_H -extern uint32_t get_time_between_images(void); - -void timer_update(void); -void timer_update_ms(void); -uint32_t get_boot_time_ms(void); -uint32_t get_boot_time_us(void); +void notify_changed_camera_parameters(void); #endif /* __PX4_FLOWBOARD_H */ diff --git a/src/include/mt9v034.h b/src/include/mt9v034.h index c3c16b5..54961f6 100644 --- a/src/include/mt9v034.h +++ b/src/include/mt9v034.h @@ -36,20 +36,45 @@ #include #include "settings.h" +#include "camera.h" + +/** + * Returns the sensor interface that can be passed to the camera driver. + * @return The initialized sensor interface struct. + */ +const camera_sensor_interface *mt9v034_get_sensor_interface(void); + +uint32_t mt9v034_get_clks_per_row(uint16_t width, int binning); + +/** + * Configures the maximum exposure time in number of image rows. + * Exposure should not affect frame time. + * Set to number of rows in the image. + */ +#define CONFIG_MAX_EXPOSURE_ROWS (64) +/** + * Configures the maximum analog gain. + * Range: 1x - 4x => 16 - 64. + * Higher gain means more noise. + */ +#define CONFIG_MAX_ANALOG_GAIN (64) +/** + * Configures the workaround to eliminate corrupted pixel data. + * According to the datasheet the image data should be sampled at the rising edge of the clock. + * In binning mode 2 and 4 however this occasionally causes corrupted pixel data. + * If we sample on the falling edge of the clock there will be corrupted pixels in no binning mode. + * + * This workaround inverts the pixel clock at the correct time when switching binning modes. + * + * Provide 3 booleans which tell whether to invert the pixel clock for 1x, 2x and 4x binning mode. 
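The CONFIG_* values above are expressed in sensor rows and gain register counts; a hedged sketch of how they might be turned into the pixel-clock exposure limits that the camera layer in camera.c expects, using the row-timing helper declared above (the 64-pixel width, the 4x binning and the choice of one row as the minimum exposure are assumptions):

    #include <stdint.h>
    #include "mt9v034.h"

    static void example_exposure_limits(uint32_t *min_clks, uint32_t *max_clks)
    {
        uint32_t row_clks = mt9v034_get_clks_per_row(64, 4);
        *min_clks = row_clks;                             /* one row of exposure    */
        *max_clks = row_clks * CONFIG_MAX_EXPOSURE_ROWS;  /* full frame of exposure */
    }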
+ */ +#define CONFIG_CLOCK_INVERSION_WORKAROUND {false, true, true} + +#define CONFIG_MTV_VERTICAL_BLANKING_INTROWS (64) /* Constants */ #define TIMEOUT_MAX 10000 -#define BINNING_ROW_A 4 -#define BINNING_COLUMN_A 4 -#define BINNING_ROW_B 2 -#define BINNING_COLUMN_B 2 -#define MINIMUM_HORIZONTAL_BLANKING 91 // see datasheet -#define MAX_IMAGE_HEIGHT 480 -#define MAX_IMAGE_WIDTH 752 -#define MINIMUM_COLUMN_START 1 -#define MINIMUM_ROW_START 4 - /* Camera I2C registers */ #define mt9v034_DEVICE_WRITE_ADDRESS 0xB8 #define mt9v034_DEVICE_READ_ADDRESS 0xB9 @@ -74,6 +99,7 @@ #define MTV_V3_CTRL_REG_A 0x33 #define MTV_V4_CTRL_REG_A 0x34 #define MTV_ANALOG_GAIN_CTRL_REG_A 0x35 +#define MTV_BLC_VALUE_REG_A 0x48 /* Context B */ #define MTV_COLUMN_START_REG_B 0xC9 @@ -101,48 +127,33 @@ #define MTV_CHIP_CONTROL_REG 0x07 #define MTV_SOFT_RESET_REG 0x0C -#define MTV_HDR_ENABLE_REG 0x0F +#define MTV_SENSOR_TYPE_CTRL_REG 0x0F +#define MTV_LED_OUT_CTRL_REG 0x1B #define MTV_ADC_RES_CTRL_REG 0x1C +#define MTV_VREF_ADC_CTRL_REG 0x2C +#define MTV_FRAME_DARK_AVG_REG 0x42 +#define MTV_FRAME_DARK_AVG_THR_REG 0x46 +#define MTV_BLC_CTRL_REG 0x47 +#define MTV_BLC_VALUE_STEP_SIZE_REG 0x4C #define MTV_ROW_NOISE_CORR_CTRL_REG 0x70 +#define MTV_ROW_NOISE_CONST_REG 0x71 +#define MTV_PIXEL_FRAME_LINE_CTRL_REG 0x72 #define MTV_TEST_PATTERN_REG 0x7F #define MTV_TILED_DIGITAL_GAIN_REG 0x80 #define MTV_AGC_AEC_DESIRED_BIN_REG 0xA5 -#define MTV_MAX_GAIN_REG 0xAB -#define MTV_MIN_EXPOSURE_REG 0xAC // datasheet min coarse shutter width -#define MTV_MAX_EXPOSURE_REG 0xAD // datasheet max coarse shutter width +#define MTV_AEC_UPDATE_FREQ_REG 0xA6 +#define MTV_AEC_LPF_REG 0xA8 +#define MTV_AGC_UPDATE_FREQ_REG 0xA9 +#define MTV_AGC_LPF_REG 0xAA +#define MTV_ANALOG_MAX_GAIN_REG 0xAB +#define MTV_MIN_COARSE_SW_REG 0xAC +#define MTV_MAX_COARSE_SW_REG 0xAD +#define MTV_AEC_AGC_BIN_DIFF_THR_REG 0xAE #define MTV_AEC_AGC_ENABLE_REG 0xAF #define MTV_AGC_AEC_PIXEL_COUNT_REG 0xB0 -#define MTV_AEC_UPDATE_REG 0xA6 -#define MTV_AEC_LOWPASS_REG 0xA8 -#define MTV_AGC_UPDATE_REG 0xA9 -#define MTV_AGC_LOWPASS_REG 0xAA -#define MTV_DIGITAL_TEST_REG 0x7F - -/* - * Resolution: - * ROW_SIZE * BINNING_ROW <= MAX_IMAGE_WIDTH - * COLUMN_SIZE * BINNING_COLUMN <= MAX_IMAGE_HEIGHT - */ - -#define FULL_IMAGE_SIZE (188*120) -#define FULL_IMAGE_ROW_SIZE (188) -#define FULL_IMAGE_COLUMN_SIZE (120) - -/* Functions */ - -/** - * @brief Configures the mt9v034 camera with two context (binning 4 and binning 2). - */ -void mt9v034_context_configuration(void); - -/** - * @brief Changes sensor context based on settings - */ -void mt9v034_set_context(void); -uint16_t mt9v034_ReadReg16(uint8_t address); -uint8_t mt9v034_WriteReg16(uint16_t address, uint16_t Data); -uint8_t mt9v034_ReadReg(uint16_t Addr); -uint8_t mt9v034_WriteReg(uint16_t Addr, uint8_t Data); +#define MTV_AGC_GAIN_OUT_REG 0xBA +#define MTV_AEC_EXPOSURE_REG 0xBB +#define MTV_AGC_AEC_CUR_BIN_REG 0xBC #endif /* MT9V34_H_ */ diff --git a/src/include/result_accumulator.h b/src/include/result_accumulator.h new file mode 100644 index 0000000..001e16b --- /dev/null +++ b/src/include/result_accumulator.h @@ -0,0 +1,138 @@ +/**************************************************************************** + * + * Copyright (C) 2013 PX4 Development Team. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * + ****************************************************************************/ + +#ifndef RESULT_ACCUMULATOR_H_ +#define RESULT_ACCUMULATOR_H_ + +#include +#include +#include "i2c_frame.h" + +typedef struct _result_accumulator_frame { + float dt; ///< The time delta of this sample. + float dropped_dt; ///< The time delta of samples that have been dropped before this sample + float x_rate; ///< The current x_rate of the gyro in rad / sec. (Image/Flow coordinates) + float y_rate; ///< The current y_rate of the gyro in rad / sec. (Image/Flow coordinates) + float z_rate; ///< The current z_rate of the gyro in rad / sec. (Image/Flow coordinates) + int16_t gyro_temp; ///< Temperature * 100 in centi-degrees Celsius + uint8_t qual; ///< The quality output of the flow algorithm. + float rad_per_pixel; ///< Pixel to radian conversion factor. + float pixel_flow_x; ///< The measured x-flow in the current image in pixel. Sensor linear motion along the positive X axis induces a negative flow. + float pixel_flow_y; ///< The measured y-flow in the current image in pixel. Sensor linear motion along the positive Y axis induces a negative flow. + float ground_distance; ///< The measured distance to the ground in meter. + uint32_t distance_age; ///< Age of the distance measurement in us. + float max_px_frame; ///< Max velocity, in pixels per frame. +} result_accumulator_frame; + +typedef struct _result_accumulator_ctx { + uint32_t frame_count; + float px_flow_x_accu; + float px_flow_y_accu; + float rad_flow_x_accu; + float rad_flow_y_accu; + float m_flow_x_accu; + float m_flow_y_accu; + uint8_t min_quality; + uint16_t data_count; + uint16_t valid_data_count; + float valid_dist_time; + float valid_time; + float full_time; + float gyro_x_accu; + float gyro_y_accu; + float gyro_z_accu; + int16_t last_gyro_temp; ///< Temperature * 100 in centi-degrees Celsius + float last_ground_distance; ///< The measured distance to the ground in meter. + uint32_t last_distance_age; ///< Age of the distance measurement in us. + float flow_cap_mv_rad; /**< The maximum velocity that could be measured by all datasets together in one accumulation period. + * This is the minimum of all max velocities. In rad / s. 
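Bridging the drivers and this accumulator, a minimal sketch of how a result_accumulator_frame might be filled once per camera frame; it assumes get_distance_measure_time() returns the boot-time timestamp of the latest distance sample, and all remaining inputs are handed in by the caller:

    #include <stdbool.h>
    #include "result_accumulator.h"
    #include "distance.h"
    #include "timer.h"

    static void fill_frame_example(result_accumulator_frame *f, float dt_s,
                                   float gx, float gy, float gz, int16_t gyro_temp_centi,
                                   uint8_t flow_quality, float rad_per_px,
                                   float flow_x_px, float flow_y_px, float max_px_frame)
    {
        float dist_filtered = 0.0f, dist_raw = 0.0f;
        bool have_distance = distance_read(&dist_filtered, &dist_raw);

        f->dt              = dt_s;
        f->dropped_dt      = 0.0f;
        f->x_rate          = gx;
        f->y_rate          = gy;
        f->z_rate          = gz;
        f->gyro_temp       = gyro_temp_centi;
        f->qual            = flow_quality;
        f->rad_per_pixel   = rad_per_px;
        f->pixel_flow_x    = flow_x_px;
        f->pixel_flow_y    = flow_y_px;
        /* a negative distance marks "unknown", mirroring the output structs below */
        f->ground_distance = have_distance ? dist_filtered : -1.0f;
        f->distance_age    = get_time_delta_us(get_distance_measure_time());
        f->max_px_frame    = max_px_frame;
    }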
*/ +} result_accumulator_ctx; + +typedef struct _result_accumulator_output_flow { + int16_t flow_x; ///< Flow in pixels * 10 in x-sensor direction (deci-pixels) + int16_t flow_y; ///< Flow in pixels * 10 in y-sensor direction (deci-pixels) + float flow_comp_m_x; ///< Flow in meters in x-sensor direction, angular-speed compensated + float flow_comp_m_y; ///< Flow in meters in y-sensor direction, angular-speed compensated + uint8_t quality; ///< Optical flow quality / confidence. 0: bad, 255: maximum quality + float ground_distance; ///< Ground distance in meters. Positive value: distance known. Negative value: Unknown distance +} result_accumulator_output_flow; + +typedef struct _result_accumulator_output_flow_rad { + uint32_t integration_time; ///< Integration time in microseconds. Divide integrated_x and integrated_y by the integration time to obtain average flow. + float integrated_x; ///< Flow in radians around X axis (Sensor RH rotation about the X axis induces a positive flow. Sensor linear motion along the positive Y axis induces a negative flow.) + float integrated_y; ///< Flow in radians around Y axis (Sensor RH rotation about the Y axis induces a positive flow. Sensor linear motion along the positive X axis induces a positive flow.) + float integrated_xgyro; ///< RH rotation around X axis (rad) + float integrated_ygyro; ///< RH rotation around Y axis (rad) + float integrated_zgyro; ///< RH rotation around Z axis (rad) + uint8_t quality; ///< Optical flow quality / confidence. 0: bad, 255: maximum quality + int16_t temperature; ///< Temperature * 100 in centi-degrees Celsius + uint32_t time_delta_distance_us; ///< Time in microseconds since the distance was sampled. + float ground_distance; ///< Distance to the center of the flow field in meters. Positive value (including zero): distance known. Negative value: Unknown distance. +} result_accumulator_output_flow_rad; + + +typedef struct _result_accumulator_output_flow_i2c { + uint32_t frame_count; ///< Frame counter. + uint16_t valid_frames; ///< Number of valid frames. + uint32_t integration_time; ///< Integration time in microseconds. + int16_t rad_flow_x; ///< Flow in 10 * mrad around X axis (Sensor RH rotation about the X axis induces a positive flow. Sensor linear motion along the positive Y axis induces a negative flow.) + int16_t rad_flow_y; ///< Flow in 10 * mrad around Y axis (Sensor RH rotation about the Y axis induces a positive flow. Sensor linear motion along the positive X axis induces a positive flow.) + int16_t integrated_gyro_x; ///< RH rotation around X axis (10 * mrad) + int16_t integrated_gyro_y; ///< RH rotation around Y axis (10 * mrad) + int16_t integrated_gyro_z; ///< RH rotation around Z axis (10 * mrad) + int16_t gyro_x; ///< RH rotation around X axis (10 * mrad) + int16_t gyro_y; ///< RH rotation around Y axis (10 * mrad) + int16_t gyro_z; ///< RH rotation around Z axis (10 * mrad) + uint8_t quality; ///< Optical flow quality / confidence. 0: bad, 255: maximum quality + int16_t temperature; ///< Temperature * 100 in centi-degrees Celsius + uint32_t time_delta_distance_us; ///< Time in microseconds since the distance was sampled. + uint16_t ground_distance; ///< Distance to the center of the flow field in millimeters. +} result_accumulator_output_flow_i2c; + +/** Initializes the result accumulator.
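A minimal sketch of the feed / calculate / reset cycle around the declarations that follow; the 50 percent validity threshold and the publish cadence are illustrative choices, not requirements of this interface:

    #include <stdbool.h>
    #include "result_accumulator.h"

    /* result_accumulator_init(&acc) is assumed to have been called once at startup */
    static result_accumulator_ctx acc;

    static void accumulate_example(const result_accumulator_frame *frame, bool publish_now)
    {
        result_accumulator_feed(&acc, frame);              /* once per camera frame */

        if (publish_now) {
            result_accumulator_output_flow_rad out;
            result_accumulator_calculate_output_flow_rad(&acc, 50, &out);
            /* ... hand "out" to the MAVLink / FMU communication code ... */
            result_accumulator_reset(&acc);                /* start the next period */
        }
    }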
+ */ +void result_accumulator_init(result_accumulator_ctx *ctx); +void result_accumulator_feed(result_accumulator_ctx *ctx, const result_accumulator_frame* frame); + +/** Recalculates the output values of the result_accumulator. Call this before using any of the output values. + */ +void result_accumulator_calculate_output_flow(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow *out); +void result_accumulator_calculate_output_flow_rad(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow_rad *out); +void result_accumulator_calculate_output_flow_i2c(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow_i2c *out); +void result_accumulator_fill_i2c_data(result_accumulator_ctx *ctx, i2c_frame* f, i2c_integral_frame* f_integral); + +/** Resets the accumulator to prepare it for the next accumulation phase. + */ +void result_accumulator_reset(result_accumulator_ctx *ctx); + +#endif /* RESULT_ACCUMULATOR_H_ */ diff --git a/src/include/settings.h b/src/include/settings.h index 0139530..be2d499 100644 --- a/src/include/settings.h +++ b/src/include/settings.h @@ -37,10 +37,10 @@ #include -#define ONBOARD_PARAM_NAME_LENGTH 15 -#define BOTTOM_FLOW_IMAGE_HEIGHT 64 -#define BOTTOM_FLOW_IMAGE_WIDTH 64 -#define BOTTOM_FLOW_SEARCH_WINDOW_SIZE 4 +#define ONBOARD_PARAM_NAME_LENGTH 16 ///< Parameter length including zero termination. +#define FLOW_IMAGE_HEIGHT 64 +#define FLOW_IMAGE_WIDTH 64 +#define FLOW_SEARCH_WINDOW_SIZE 4 /****************************************************************** * ALL TYPE DEFINITIONS @@ -55,18 +55,6 @@ typedef enum READ_WRITE = 1, } ParameterAccess_TypeDef; -/** - * @brief sensor position enumeration - */ -typedef enum -{ - BOTTOM = 0x00, /*!< at bottom position */ - FRONT = 0x01, /*!< at front position */ - TOP = 0x02, /*!< at top position */ - BACK = 0x03, /*!< at back position */ - RIGHT = 0x04, /*!< at right position */ - LEFT = 0x05 /*!< at left position */ -} SensorPosition_TypeDef; /** * @brief sensor position enumeration @@ -111,15 +99,31 @@ enum global_param_id_t PARAM_USART2_BAUD, PARAM_USART3_BAUD, + PARAM_FOCAL_LENGTH_MM, + PARAM_IMAGE_WIDTH, PARAM_IMAGE_HEIGHT, - PARAM_MAX_FLOW_PIXEL, + PARAM_IMAGE_LOW_LIGHT, PARAM_IMAGE_ROW_NOISE_CORR, PARAM_IMAGE_TEST_PATTERN, + PARAM_IMAGE_INTERVAL, + + PARAM_ALGORITHM_CHOICE, + PARAM_ALGORITHM_CORNER_KAPPA, + PARAM_ALGORITHM_OUTLIER_THR_BLOCK, + PARAM_ALGORITHM_OUTLIER_THR_KLT, + PARAM_ALGORITHM_OUTLIER_THR_RATIO, + PARAM_ALGORITHM_MIN_VALID_RATIO, + + PARAM_KLT_DET_VALUE_MIN, + PARAM_KLT_MAX_ITERS, + PARAM_KLT_GYRO_ASSIST, + PARAM_GYRO_SENSITIVITY_DPS, PARAM_GYRO_COMPENSATION_THRESHOLD, + PARAM_SONAR_FILTERED, PARAM_SONAR_KALMAN_L1, PARAM_SONAR_KALMAN_L2, @@ -129,21 +133,21 @@ enum global_param_id_t PARAM_USB_SEND_GYRO, PARAM_USB_SEND_FORWARD, PARAM_USB_SEND_DEBUG, + PARAM_USB_SEND_FLOW_OUTL, + PARAM_USB_SEND_QUAL_0, PARAM_VIDEO_ONLY, PARAM_VIDEO_RATE, - PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD, - PARAM_BOTTOM_FLOW_VALUE_THRESHOLD, - PARAM_BOTTOM_FLOW_HIST_FILTER, - PARAM_BOTTOM_FLOW_GYRO_COMPENSATION, - PARAM_BOTTOM_FLOW_LP_FILTERED, - PARAM_BOTTOM_FLOW_WEIGHT_NEW, - PARAM_BOTTOM_FLOW_SERIAL_THROTTLE_FACTOR, + PARAM_FLOW_MAX_PIXEL, + PARAM_FLOW_VALUE_THRESHOLD, + PARAM_FLOW_FEATURE_THRESHOLD, + PARAM_FLOW_GYRO_COMPENSATION, + PARAM_FLOW_SERIAL_THROTTLE_FACTOR, - PARAM_SENSOR_POSITION, DEBUG_VARIABLE, + ONBOARD_PARAM_COUNT }; diff --git a/src/include/sonar.h b/src/include/timer.h similarity index 
53% rename from src/include/sonar.h rename to src/include/timer.h index dbaba35..734ecb0 100644 --- a/src/include/sonar.h +++ b/src/include/timer.h @@ -31,43 +31,58 @@ * ****************************************************************************/ -#ifndef SONAR_H_ -#define SONAR_H_ +#ifndef TIMER_H_ +#define TIMER_H_ #include -#include -#include "settings.h" -/** - * @brief Configures the sonar sensor Peripheral. +#define NTIMERS 16 + +/** Initializes the timer module. */ -void sonar_config(void); +void timer_init(void); -/** - * @brief Sonar interrupt handler - */ -void UART4_IRQHandler(void); +/** Registers a new timer with a corresponding function. + * @note: The function will be called from within the timer_check function. + * @param timer_fn The timer function to call when the timer rolls over. + * This function is NOT called from the interrupt handler. + * @param period_ms The period of the timer in milliseconds. + */ +void timer_register(void (*timer_fn)(void), uint32_t period_ms); -/** - * @brief Triggers the sonar to measure the next value - */ -void sonar_trigger(void); +/** Updates the period of an timer that has already been registered. + * @param timer_fn The timer function that has been registered. + * @param period_ms The new period of the timer in milliseconds. + */ +int timer_update(void (*timer_fn)(void), uint32_t period_ms); -/** - * @brief Read out newest sonar data - * - * @return true if valid measurement values were obtained, false else - */ -bool sonar_read(float* sonar_value_filtered, float* sonar_value_raw); +/** Checks any pending timers and calls the respective timer functions. + */ +void timer_check(void); -/** - * @brief Get the timestamp of the new sonar value when available to the main code - */ -uint32_t get_sonar_measure_time(void); +void delay(uint16_t ms); + +/** Returns the number of milliseconds since booting. + */ +uint32_t get_boot_time_ms(void); + +/** Returns the number of microseconds since booting. + */ +uint32_t get_boot_time_us(void); + +/** Computes the time delta in microseconds while taking the roll-over into account. + */ +uint32_t calculate_time_delta_us(uint32_t end, uint32_t start); + +/** Computes the time delta in microseconds while taking the roll-over into account. + */ +uint32_t get_time_delta_us(uint32_t start); /** - * @brief Get the timestamp of the new sonar value when the interrupt is triggered + * @brief Triggered by systen timer interrupt every millisecond. + * @param None + * @retval None */ -uint32_t get_sonar_measure_time_interrupt(void); +void timer_interrupt_fn(void); -#endif /* SONAR_H_ */ +#endif /* TIMER_H_ */ diff --git a/src/include/uavcan_if.h b/src/include/uavcan_if.h deleted file mode 100644 index e142433..0000000 --- a/src/include/uavcan_if.h +++ /dev/null @@ -1,112 +0,0 @@ -/**************************************************************************** - * - * Copyright (C) 2013 PX4 Development Team. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. 
Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - ****************************************************************************/ - -#pragma once - -#include -#include -#include -#include -#include "i2c_frame.h" -#include "hrt.h" - - -typedef struct legacy_12c_data_t { - uint64_t time_stamp_utc; - i2c_frame frame; - i2c_integral_frame integral_frame; -} legacy_12c_data_t; - - -typedef enum range_sensor_type_t { - SENSOR_TYPE_UNDEFINED = 0, - SENSOR_TYPE_SONAR = 1, - SENSOR_TYPE_LIDAR = 2, - SENSOR_TYPE_RADAR = 3 -} range_sensor_type_t; - -typedef enum range_sensor_reding_type_t { - READING_TYPE_UNDEFINED = 0, // Range is unknown - READING_TYPE_VALID_RANGE = 1, // Range field contains valid distance - READING_TYPE_TOO_CLOSE = 2, // Range field contains min range for the sensor - READING_TYPE_TOO_FAR = 3, // Range field contains max range for the sensor -} range_sensor_reding_type_t; - -typedef struct range_data_t { - uint64_t time_stamp_utc; - int8_t roll; - int8_t pitch; - int8_t yaw; - bool orientation_defined; - uint8_t sensor_id; - float field_of_view; - range_sensor_type_t sensor_type; - range_sensor_reding_type_t reading_type; - float range; -} range_data_t; - -__BEGIN_DECLS -__EXPORT int uavcannode_run(void); -__EXPORT int uavcannode_publish_flow(legacy_12c_data_t *pdata); -__EXPORT int uavcannode_publish_range(range_data_t *pdata); -__EXPORT int uavcannode_main(bool start_not_stop); -__END_DECLS - - -#if defined(CONFIG_ARCH_BOARD_PX4FLOW_V2) -#define uavcan_start() uavcannode_main(true) -#define uavcan_stop() uavcannode_main(false) -#define uavcan_run() uavcannode_run() -#define uavcan_publish(what, rate, name) if (++name##_cnt >= rate) { \ - name##_cnt = 0; \ - (void) uavcannode_publish_##what(&name); \ - } -#define uavcan_export memcpy -#define uavcan_define_export(name, type, section) type name; \ - static uint8_t name##_cnt = 0; - -#define uavcan_use_export(name) &name -#define uavcan_timestamp_export(name) name.time_stamp_utc = hrt_absolute_time() -#define uavcan_assign(lhs, rhs) lhs = rhs - -#else -#define uavcan_start() -#define uavcan_stop() -#define uavcan_run() -#define uavcan_publish(what, rate, name) -#define uavcan_export(d,s,l) -#define uavcan_define_export(name, type, section) -#define uavcan_use_export(name) NULL -#define uavcan_timestamp_export(name) -#define uavcan_assign(lhs, rhs) -#endif diff --git a/src/lib/stm32/st/v1.0.2/core_cmInstr.h b/src/lib/stm32/st/v1.0.2/core_cmInstr.h index ceb4f87..17250fb 100644 --- a/src/lib/stm32/st/v1.0.2/core_cmInstr.h +++ b/src/lib/stm32/st/v1.0.2/core_cmInstr.h @@ -463,9 +463,11 @@ __attribute__( ( 
always_inline ) ) static __INLINE uint32_t __LDREXW(volatile ui */ __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t value, volatile uint8_t *addr) { - uint32_t result; + register uint32_t result asm ("r0"); + register uint8_t value_r asm ("r1") = value; + register volatile uint8_t *addr_r asm ("r2") = addr; - __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) ); + __ASM volatile ("strexb %0, %2, [%1]" : "=r" (result) : "r" (addr_r), "r" (value_r) ); return(result); } @@ -482,8 +484,10 @@ __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXB(uint8_t val __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t value, volatile uint16_t *addr) { uint32_t result; + register uint16_t value_r asm ("r1") = value; + register volatile uint16_t *addr_r asm ("r2") = addr; - __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) ); + __ASM volatile ("strexh %0, %2, [%1]" : "=r" (result) : "r" (addr_r), "r" (value_r) ); return(result); } @@ -500,8 +504,10 @@ __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXH(uint16_t va __attribute__( ( always_inline ) ) static __INLINE uint32_t __STREXW(uint32_t value, volatile uint32_t *addr) { uint32_t result; + register uint32_t value_r asm ("r1") = value; + register volatile uint32_t *addr_r asm ("r2") = addr; - __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr), "r" (value) ); + __ASM volatile ("strex %0, %2, [%1]" : "=r" (result) : "r" (addr_r), "r" (value_r) ); return(result); } diff --git a/src/modules/flow/camera.c b/src/modules/flow/camera.c new file mode 100644 index 0000000..719c4e9 --- /dev/null +++ b/src/modules/flow/camera.c @@ -0,0 +1,583 @@ +/**************************************************************************** + * + * Copyright (C) 2013 PX4 Development Team. All rights reserved. + * Author: Simon Laube + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
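Before the implementation, a hedged sketch of how this capture context might be brought up from board code, combining the MT9V034 sensor interface with the DCMI transport defined later in this patch. The 64x64 format, buffer count, exposure limits and gain limit are illustrative, dcmi.h is assumed to declare dcmi_get_transport_interface(), and the frame size is assumed to be compatible with the DMA transfer size:

    #include <stdbool.h>
    #include <stdint.h>
    #include "camera.h"
    #include "mt9v034.h"
    #include "dcmi.h"

    #define EXAMPLE_BUF_COUNT 3   /* must stay within CONFIG_CAMERA_MAX_BUFFER_COUNT */

    static camera_ctx cam;
    static uint8_t frame_mem[EXAMPLE_BUF_COUNT][64 * 64];
    static camera_image_buffer frame_bufs[EXAMPLE_BUF_COUNT];

    static bool camera_setup_example(void)
    {
        camera_img_param p = { .size = { .x = 64, .y = 64 }, .binning = 4 };
        uint32_t row_clks = mt9v034_get_clks_per_row(p.size.x, p.binning);

        for (int i = 0; i < EXAMPLE_BUF_COUNT; ++i) {
            frame_bufs[i].buffer      = frame_mem[i];
            frame_bufs[i].buffer_size = sizeof(frame_mem[i]);
        }

        return camera_init(&cam,
                           mt9v034_get_sensor_interface(),
                           dcmi_get_transport_interface(),
                           /* exposure_min_clks */ row_clks,
                           /* exposure_max_clks */ row_clks * CONFIG_MAX_EXPOSURE_ROWS,
                           /* analog_gain_max   */ 4.0f,
                           &p, frame_bufs, EXAMPLE_BUF_COUNT);
    }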
+ * + ****************************************************************************/ + +#include "camera.h" +#include "timer.h" +#include "fmu_comm.h" + +#include "stm32f4xx_conf.h" +#include "core_cmFunc.h" + +#include + +#define abs(x) ({ \ + typeof(x) __x = (x); \ + if (__x < 0) __x = -__x;\ + __x; \ +}) + + +void camera_transport_transfer_done_fn(void *usr, const void *buffer, size_t size); +void camera_transport_frame_done_fn(void *usr, bool probably_infront_dma); + +bool camera_init(camera_ctx *ctx, const camera_sensor_interface *sensor, const camera_transport_interface *transport, + uint32_t exposure_min_clks, uint32_t exposure_max_clks, float analog_gain_max, + const camera_img_param *img_param, + camera_image_buffer buffers[], size_t buffer_count) { + memset(ctx, 0, sizeof(camera_ctx)); + ctx->sensor = sensor; + ctx->transport = transport; + // check parameter: + if (buffer_count > CONFIG_CAMERA_MAX_BUFFER_COUNT || buffer_count < 2) { + return false; + } + uint32_t img_size = (uint32_t)img_param->size.x * (uint32_t)img_param->size.y; + if (img_size % ctx->transport->transfer_size != 0 || img_size / ctx->transport->transfer_size < 2) { + // invalid size parameter! + return false; + } + // initialize state: + ctx->resync_discard_frame_count = -1; + ctx->exposure_min_clks = exposure_min_clks; + ctx->exposure_max_clks = exposure_max_clks; + ctx->analog_gain_max = analog_gain_max; + ctx->analog_gain = 1; + ctx->exposure = ctx->exposure_min_clks; + ctx->exposure_smoothing = ctx->exposure_min_clks; + ctx->exposure_sampling_stride = 0; + ctx->exposure_skip_frame_cnt = 0; + memset(ctx->exposure_bins, 0, sizeof(ctx->exposure_bins)); + ctx->exposure_hist_count = 0; + ctx->img_stream_param.p = *img_param; + ctx->img_stream_param.analog_gain = ctx->analog_gain; + ctx->img_stream_param.exposure = ctx->exposure; + ctx->snapshot_param = ctx->img_stream_param; + for (unsigned i = 0; i < buffer_count; ++i) { + // check the buffer: + if (buffers[i].buffer_size < img_size || buffers[i].buffer == NULL) { + return false; + } + // init the buffer: + ctx->buffers[i] = buffers[i]; + ctx->buffers[i].frame_number = 0; + ctx->buffers[i].timestamp = get_boot_time_us(); + ctx->buffers[i].param = ctx->img_stream_param; + memset(ctx->buffers[i].buffer, 0, img_size); + // init the avail_bufs array: + ctx->buf_avail[i] = i; + } + ctx->buffer_count = buffer_count; + ctx->buf_avail_count = buffer_count; + ctx->new_frame_arrived = false; + + ctx->snapshot_buffer = NULL; + + ctx->frame_done_infront_count = 0; + + ctx->cur_frame_number = buffer_count + 1; + ctx->seq_frame_receiving = false; + ctx->cur_frame_target_buf = NULL; + + ctx->seq_snapshot_active = false; + ctx->seq_pend_img_stream_param = false; + // initialize hardware: + if (!ctx->transport->init(ctx->transport->usr, + camera_transport_transfer_done_fn, + camera_transport_frame_done_fn, + ctx)) { + return false; + } + if (!ctx->sensor->init(ctx->sensor->usr, &ctx->img_stream_param)) { + return false; + } + /* after initialization start the discard count down! 
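 * While this countdown is non-zero, camera_transport_transfer_done_fn ignores
 * incoming data and camera_transport_frame_done_fn decrements it once per frame,
 * resetting the transport during the last two counts, so the first frames after
 * initialization are dropped until the pipeline is in sync.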
*/ + ctx->resync_discard_frame_count = 16; + return true; +} + +static void uint32_t_memcpy(uint32_t *dst, const uint32_t *src, size_t count) { + for (size_t i = 0; i < count; ++i) { + dst[i] = src[i]; + } +} + +static bool camera_buffer_fifo_remove_front(camera_ctx *ctx, int *out, size_t count) { + size_t bc = ctx->buf_avail_count; + size_t i; + if (count > bc) return false; + // read out: + for (i = 0; i < count; ++i) { + *out++ = ctx->buf_avail[i]; + } + // close gap: + #pragma GCC diagnostic push + #pragma GCC diagnostic warning "-Warray-bounds" + for (i = count; i < bc && i < CONFIG_CAMERA_MAX_BUFFER_COUNT; ++i) { + ctx->buf_avail[i - count] = ctx->buf_avail[i]; + } + ctx->buf_avail_count = bc - count; + #pragma GCC diagnostic pop + return true; +} + +static bool camera_buffer_fifo_remove_back(camera_ctx *ctx, int *out, size_t count) { + size_t bc = ctx->buf_avail_count; + size_t i; + if (count > bc) return false; + // read out: + for (i = bc - count; i < bc; ++i) { + *out++ = ctx->buf_avail[i]; + } + // reduce count: + ctx->buf_avail_count = bc - count; + return true; +} + +static void camera_buffer_fifo_push_at(camera_ctx *ctx, size_t pos, const int *in, size_t count) { + size_t bc = ctx->buf_avail_count; + size_t i; + // move away: + for (i = bc; i > pos; --i) { + ctx->buf_avail[i - 1 + count] = ctx->buf_avail[i - 1]; + } + // fill in: + for (i = pos; i < pos + count; ++i) { + ctx->buf_avail[i] = *in++; + } + // update count: + ctx->buf_avail_count = bc + count; +} + +static void camera_update_exposure_hist(camera_ctx *ctx, const uint8_t *buffer, size_t size) { + if (ctx->exposure_sampling_stride > 0) { + size_t i; + int c = 0; + for (i = ctx->exposure_sampling_stride / 2; i < size; i += ctx->exposure_sampling_stride) { + int idx = (buffer[i] >> (8u - CONFIG_CAMERA_EXPOSURE_BIN_BITS)); + ctx->exposure_bins[idx]++; + c++; + } + ctx->exposure_hist_count += c; + } +} + +static int camera_exposure_hist_extract_brightness_8b(camera_ctx *ctx) { + /* test for over-exposed image: */ + int value = CONFIG_CAMERA_EXPOSURE_BIN_COUNT - 1; + if ((uint32_t)ctx->exposure_bins[value] * 100u > + ((uint32_t)ctx->exposure_hist_count) * CONFIG_CAMERA_EXTREME_OVEREXPOSE_RATIO) { + return 256; + } + /* find the bin where the outliers have been ignored: */ + uint32_t pix_rem = ((uint32_t)ctx->exposure_hist_count * (uint32_t)CONFIG_CAMERA_OUTLIER_RATIO) / 100u; + while (ctx->exposure_bins[value] < pix_rem && value > 0) { + pix_rem -= ctx->exposure_bins[value]; + value--; + } + /* expand to 8bits: */ + uint32_t bin_size = 1 << (8u - CONFIG_CAMERA_EXPOSURE_BIN_BITS); + value = value * bin_size; + return value; +} + +void camera_transport_transfer_done_fn(void *usr, const void *buffer, size_t size) { + camera_ctx *ctx = (camera_ctx *)usr; + if (ctx->resync_discard_frame_count != 0) { + return; + } + if (ctx->seq_frame_receiving) { + camera_update_exposure_hist(ctx, buffer, size); + // check if we have a target buffer: + if (ctx->cur_frame_target_buf != NULL) { + uint32_t_memcpy((uint32_t *)((uint8_t *)ctx->cur_frame_target_buf->buffer + ctx->cur_frame_pos), + (const uint32_t *)buffer, size / 4); + } + // update current position: + ctx->cur_frame_pos += size; + // check if we are done: + if (ctx->cur_frame_pos >= ctx->cur_frame_size) { + // frame done! 
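/* Completion path: the sensor is notified that readout has ended, a streaming
 * buffer is pushed back to the front of the available-buffer FIFO (or a pending
 * snapshot is confirmed through its callback), and the histogram gathered during
 * the transfers feeds the auto-exposure update that follows. */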
+ ctx->sensor->notify_readout_end(ctx->sensor->usr); + if (ctx->cur_frame_target_buf_idx >= 0) { + // put back into available buffers (in the front) + camera_buffer_fifo_push_at(ctx, 0, &ctx->cur_frame_target_buf_idx, 1); + if (ctx->buf_put_back_pos < ctx->buf_avail_count) { + ctx->buf_put_back_pos += 1; + } + // notify camera_img_stream_get_buffers function. + ctx->new_frame_arrived = true; + } else if (ctx->seq_snapshot_active && ctx->cur_frame_target_buf != NULL) { + // snapshot has been taken! + ctx->snapshot_cb(true); + ctx->seq_snapshot_active = false; + } + /* handle auto-exposure: */ + if (ctx->exposure_sampling_stride > 0) { + float exposure = (float)ctx->cur_frame_param.exposure * ctx->cur_frame_param.analog_gain; + // analyze histogram: + int brightness = camera_exposure_hist_extract_brightness_8b(ctx); + ctx->last_brightness = brightness; + if (brightness >= 256) { + // extremely over-exposed! halve the integration time: + exposure = exposure * 0.33f; + } else { + int desired_bright = CONFIG_CAMERA_DESIRED_EXPOSURE_8B; + desired_bright &= (0xFF << (8u - CONFIG_CAMERA_EXPOSURE_BIN_BITS)); + if (brightness < desired_bright / 4) { + // extremely under-exposed! double the integration time: + exposure = exposure * 4.f; + } else { + if (abs(desired_bright - brightness) > CONFIG_CAMERA_DESIRED_EXPOSURE_TOL_8B) { + // determine optimal exposure for next frame: + exposure = (exposure * desired_bright) / brightness; + } + } + } + /* clip the value within bounds: */ + if (exposure < (float)ctx->exposure_min_clks) { + exposure = ctx->exposure_min_clks; + } else if (exposure > (float)ctx->exposure_max_clks * ctx->analog_gain_max) { + exposure = (float)ctx->exposure_max_clks * ctx->analog_gain_max; + } + ctx->exposure_smoothing = CONFIG_CAMERA_EXPOSURE_SMOOTHING_K * ctx->exposure_smoothing + + (1.0f - CONFIG_CAMERA_EXPOSURE_SMOOTHING_K) * exposure; + /* update the exposure and analog gain values: */ + if (ctx->exposure_smoothing > ctx->exposure_max_clks) { + ctx->exposure = ctx->exposure_max_clks; + ctx->analog_gain = ctx->exposure_smoothing / (float)ctx->exposure_max_clks; + } else { + ctx->exposure = ctx->exposure_smoothing; + ctx->analog_gain = 1; + } + /* update the sensor! */ + if (!ctx->seq_updating_sensor) { + /* update it ourself! */ + ctx->sensor->update_exposure_param(ctx->sensor->usr, ctx->exposure, ctx->analog_gain); + } else { + ctx->seq_repeat_updating_sensor = true; + } + } + // reset state: + ctx->cur_frame_target_buf = NULL; + ctx->seq_frame_receiving = false; + } + } else { + // no frame currently as the target frame. it must be the beginning of a new frame then! 
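/* Start-of-frame path: notify the sensor, query the parameters that were active
 * for this readout, then pick a destination (the least recently used streaming
 * buffer, or the snapshot buffer when a snapshot is pending and the geometry
 * matches) and decide whether this frame is sub-sampled for the exposure
 * histogram. */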
+ // update the sensor: (this might trigger some I2C transfers) + ctx->sensor->notify_readout_start(ctx->sensor->usr); + // get current sensor parameter: + bool img_data_valid; + ctx->sensor->get_current_param(ctx->sensor->usr, &ctx->cur_frame_param, &img_data_valid); + // update the receiving variables: + ctx->cur_frame_number += 1; + ctx->seq_frame_receiving = true; + ctx->cur_frame_target_buf = NULL; + ctx->cur_frame_size = (uint32_t)ctx->cur_frame_param.p.size.x * (uint32_t)ctx->cur_frame_param.p.size.y; + ctx->cur_frame_pos = size; + ctx->cur_frame_target_buf_idx = -1; + if (!ctx->seq_snapshot_active) { + // check that the size parameters match to the img stream: + if (ctx->cur_frame_param.p.size.x == ctx->img_stream_param.p.size.x && + ctx->cur_frame_param.p.size.y == ctx->img_stream_param.p.size.y) { + if (img_data_valid) { + // get least recently used buffer from the available buffers: + if (camera_buffer_fifo_remove_back(ctx, &ctx->cur_frame_target_buf_idx, 1)) { + ctx->cur_frame_target_buf = &ctx->buffers[ctx->cur_frame_target_buf_idx]; + /* if there are no more buffers in the fifo reset the flag again: */ + if (ctx->buf_avail_count == 0) { + ctx->new_frame_arrived = false; + } + } else { + ctx->cur_frame_target_buf_idx = -1; + } + } + } + } else { + // check that the size parameters match to the snapshot parameters: + if (ctx->cur_frame_param.p.size.x == ctx->snapshot_param.p.size.x && + ctx->cur_frame_param.p.size.y == ctx->snapshot_param.p.size.y && + ctx->cur_frame_param.p.binning == ctx->snapshot_param.p.binning) { + if (img_data_valid) { + // get the buffer: + ctx->cur_frame_target_buf = ctx->snapshot_buffer; + // initiate switching back to img stream mode: + ctx->sensor->restore_previous_param(ctx->sensor->usr); + } + } + } + // initialize the target buffer: + if (ctx->cur_frame_target_buf != NULL) { + ctx->cur_frame_target_buf->timestamp = get_boot_time_us(); + ctx->cur_frame_target_buf->frame_number = ctx->cur_frame_number; + ctx->cur_frame_target_buf->param = ctx->cur_frame_param; + // write data to it: (at position 0) + uint32_t_memcpy((uint32_t *)ctx->cur_frame_target_buf->buffer, (uint32_t *)buffer, size / 4); + } + // initialize exposure measuring (do not process snapshot images) + if (img_data_valid && !ctx->seq_snapshot_active) { + ctx->exposure_skip_frame_cnt++; + if (ctx->exposure_skip_frame_cnt >= CONFIG_CAMERA_EXPOSURE_UPDATE_INTERVAL) { + ctx->exposure_skip_frame_cnt = 0; + /* make sure no more than 65535 pixels are sampled: */ + uint16_t skip = ctx->cur_frame_size / 65536u + 1; + if (skip < 6) { + skip = 6; + } + ctx->exposure_sampling_stride = skip; + // reset histogram: + ctx->exposure_hist_count = 0; + memset(ctx->exposure_bins, 0, sizeof(ctx->exposure_bins)); + } else { + ctx->exposure_sampling_stride = 0; + } + } else { + ctx->exposure_sampling_stride = 0; + } + camera_update_exposure_hist(ctx, (const uint8_t *)buffer, size); + } +} + +void camera_transport_frame_done_fn(void *usr, bool probably_infront_dma) { + camera_ctx *ctx = (camera_ctx *)usr; + /* we will only pay attention to the probably_infront_dma flag if we are in normal operating mode. */ + int fdc = ctx->resync_discard_frame_count; + if (fdc > 0) { + fdc--; + ctx->resync_discard_frame_count = fdc; + /* re-initialize the transport twice */ + if (fdc == 0 || fdc == 1) { + ctx->transport->reset(ctx->transport->usr); + } + } else { + /* we trust the probably infront DMA flag 3 times in a row. 
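 * If the flag stays set for longer than that, or a frame-done arrives while a
 * frame is still being received, the driver assumes it has lost synchronization
 * with the DMA stream: the current frame is aborted and a resynchronization is
 * scheduled, discarding two frames while the transport is reset.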
*/ + if (!probably_infront_dma || ctx->frame_done_infront_count > 3) { + ctx->frame_done_infront_count = 0; + /* if we don't trust it anymore we trigger a re-sync */ + if (ctx->seq_frame_receiving || probably_infront_dma) { + /* we are out of sync! abort current frame: */ + if (ctx->cur_frame_target_buf_idx >= 0) { + // put back into available buffers (in the back) + uint16_t ac_b = ctx->buf_avail_count; + camera_buffer_fifo_push_at(ctx, ac_b, &ctx->cur_frame_target_buf_idx, 1); + // set frame number something much older than the last frame: + if (ac_b > 0) { + int last_b = ctx->buf_avail[ac_b - 1]; + int this_b = ctx->buf_avail[ac_b]; + ctx->buffers[this_b].frame_number = ctx->buffers[last_b].frame_number - ctx->buffer_count - 1; + } else { + ctx->new_frame_arrived = false; + } + } else if (ctx->seq_snapshot_active && ctx->cur_frame_target_buf != NULL) { + // snapshot abort! + ctx->snapshot_cb(false); + ctx->seq_snapshot_active = false; + } + // reset state: + ctx->cur_frame_target_buf = NULL; + ctx->seq_frame_receiving = false; + /* initiate resynchronization: */ + ctx->resync_discard_frame_count = 2; + } + } else { + ctx->frame_done_infront_count++; + } + } +} + +static bool camera_update_sensor_param(camera_ctx *ctx, const camera_img_param *img_param, camera_img_param_ex *ptoup) { + camera_img_param_ex p = *ptoup; + p.p = *img_param; + bool result = false; + do { + ctx->seq_repeat_updating_sensor = false; + ctx->seq_updating_sensor = true; + /* update exposure and analog gain: */ + p.exposure = ctx->exposure; + p.analog_gain = ctx->analog_gain; + /* write: */ + if (ctx->sensor->prepare_update_param(ctx->sensor->usr, &p)) { + *ptoup = p; + result = true; + } else { + /* restore previous: */ + p = *ptoup; + } + ctx->seq_updating_sensor = false; + } while (ctx->seq_repeat_updating_sensor); + return result; +} + +void camera_reconfigure_general(camera_ctx *ctx) { + if (!ctx->seq_snapshot_active && !ctx->seq_pend_reconfigure_general) { + ctx->sensor->reconfigure_general(ctx->sensor->usr); + } else { + ctx->seq_pend_reconfigure_general = true; + } +} + +bool camera_img_stream_schedule_param_change(camera_ctx *ctx, const camera_img_param *img_param) { + uint32_t img_size = (uint32_t)img_param->size.x * (uint32_t)img_param->size.y; + if (img_size % ctx->transport->transfer_size != 0 || img_size / ctx->transport->transfer_size < 2) { + // invalid size parameter! + return false; + } + size_t i; + for (i = 0; i < ctx->buffer_count; ++i) { + // check image size against each buffer: + if (img_size > ctx->buffers[i].buffer_size) { + return false; + } + } + if (!ctx->seq_snapshot_active && !ctx->seq_pend_img_stream_param) { + return camera_update_sensor_param(ctx, img_param, &ctx->img_stream_param); + } else { + ctx->img_stream_param_pend = *img_param; + ctx->seq_pend_img_stream_param = true; + return true; + } + return false; +} + +bool camera_snapshot_schedule(camera_ctx *ctx, const camera_img_param *img_param, camera_image_buffer *dst, camera_snapshot_done_cb cb) { + uint32_t img_size = (uint32_t)img_param->size.x * (uint32_t)img_param->size.y; + if (img_size % ctx->transport->transfer_size != 0 || img_size / ctx->transport->transfer_size < 2) { + // invalid size parameter! 
+ return false; + } + // check image size against the buffer: + if (img_size > dst->buffer_size || dst->buffer == NULL) { + return false; + } + if (!ctx->seq_snapshot_active) { + if (camera_update_sensor_param(ctx, img_param, &ctx->snapshot_param)) { + ctx->snapshot_buffer = dst; + ctx->snapshot_cb = cb; + ctx->seq_snapshot_active = true; + return true; + } + } + return false; +} + +void camera_snapshot_acknowledge(camera_ctx *ctx) { + if (!ctx->seq_snapshot_active) { + if (ctx->seq_pend_reconfigure_general) { + ctx->seq_pend_reconfigure_general = false; + ctx->sensor->reconfigure_general(ctx->sensor->usr); + } + if (ctx->seq_pend_img_stream_param) { + ctx->seq_pend_img_stream_param = false; + /* write the pending changes to the sensor: */ + camera_update_sensor_param(ctx, &ctx->img_stream_param_pend, &ctx->img_stream_param); + } + } +} + +static bool camera_img_stream_get_buffers_idx(camera_ctx *ctx, int bidx[], size_t count, bool reset_new_frm) { + size_t i; + __disable_irq(); + if (!camera_buffer_fifo_remove_front(ctx, bidx, count)) { + if (reset_new_frm) { + ctx->new_frame_arrived = false; + } + __enable_irq(); + return false; + } + /* check that the buffers are in consecutive order: */ + bool consecutive = true; + uint32_t exp_frame = ctx->buffers[bidx[0]].frame_number - 1; + for (i = 1; i < count; ++i, --exp_frame) { + if (ctx->buffers[bidx[i]].frame_number != exp_frame) { + consecutive = false; + break; + } + } + if (consecutive) { + /* good! */ + ctx->buf_put_back_pos = 0; + ctx->buffers_are_reserved = true; + } else { + /* not good. put back the buffers: */ + camera_buffer_fifo_push_at(ctx, 0, bidx, count); + } + if (reset_new_frm) { + ctx->new_frame_arrived = false; + } + __enable_irq(); + return consecutive; +} + +bool camera_img_stream_get_buffers(camera_ctx *ctx, camera_image_buffer *buffers[], size_t count, bool wait_for_new) { + if (ctx->buffers_are_reserved) return -1; + if (count > ctx->buffer_count - 1 || count <= 0) return -1; + /* buffer management needs to be performed atomically: */ + int bidx[count]; + size_t i; + + if (wait_for_new) { + /* wait until a new frame is available: */ + if (!ctx->new_frame_arrived) { + return false; + } + } + if (camera_img_stream_get_buffers_idx(ctx, bidx, count, wait_for_new)) { + /* update the pointers: */ + for (i = 0; i < count; ++i) { + buffers[i] = &ctx->buffers[bidx[i]]; + } + return true; + } else { + /* not possible! 
*/ + return false; + } + +} + +void camera_img_stream_return_buffers(camera_ctx *ctx, camera_image_buffer *buffers[], size_t count) { + if (!ctx->buffers_are_reserved) return; + /* get the buffer indexes: */ + int bidx[count]; + for (size_t i = 0; i < count; ++i) { + int idx = buffers[i] - &ctx->buffers[0]; + if (idx < 0 || idx >= (int) ctx->buffer_count) return; + bidx[i] = idx; + } + /* buffer management needs to be performed atomically: */ + __disable_irq(); + size_t at_pos = ctx->buf_put_back_pos; + if (at_pos > ctx->buf_avail_count) at_pos = ctx->buf_avail_count; + camera_buffer_fifo_push_at(ctx, ctx->buf_put_back_pos, bidx, count); + ctx->buffers_are_reserved = false; + __enable_irq(); +} diff --git a/src/modules/flow/communication.c b/src/modules/flow/communication.c index 9dfac24..2bec201 100644 --- a/src/modules/flow/communication.c +++ b/src/modules/flow/communication.c @@ -45,13 +45,13 @@ #include "mt9v034.h" #include "dcmi.h" #include "gyro.h" -#include "debug.h" #include "communication.h" +#include "timer.h" -extern uint32_t get_boot_time_us(void); -extern void buffer_reset(void); extern void systemreset(bool to_bootloader); +extern void notify_changed_camera_parameters(void); + mavlink_system_t mavlink_system; static uint32_t m_parameter_i = 0; @@ -235,48 +235,16 @@ void handle_mavlink_message(mavlink_channel_t chan, && global_data.param_access[i]) { global_data.param[i] = set.param_value; - - /* handle sensor position */ - if(i == PARAM_SENSOR_POSITION) - { - set_sensor_position_settings((uint8_t) set.param_value); - mt9v034_context_configuration(); - dma_reconfigure(); - buffer_reset(); - } - /* handle low light mode and noise correction */ - else if(i == PARAM_IMAGE_LOW_LIGHT || i == PARAM_IMAGE_ROW_NOISE_CORR|| i == PARAM_IMAGE_TEST_PATTERN) - { - mt9v034_context_configuration(); - dma_reconfigure(); - buffer_reset(); - } - - /* handle calibration on/off */ - else if(i == PARAM_VIDEO_ONLY) + if(i == PARAM_IMAGE_LOW_LIGHT || i == PARAM_IMAGE_ROW_NOISE_CORR || i == PARAM_IMAGE_TEST_PATTERN) { - mt9v034_set_context(); - dma_reconfigure(); - buffer_reset(); - - if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - debug_string_message_buffer("Calibration Mode On"); - else - debug_string_message_buffer("Calibration Mode Off"); + notify_changed_camera_parameters(); } - - /* handle sensor position */ else if(i == PARAM_GYRO_SENSITIVITY_DPS) { l3gd20_config(); } - else - { - debug_int_message_buffer("Parameter received, param id =", i); - } - /* report back new value */ mavlink_msg_param_value_send(MAVLINK_COMM_0, global_data.param_name[i], diff --git a/src/modules/flow/dcmi.c b/src/modules/flow/dcmi.c index f66cc20..9fc71e3 100644 --- a/src/modules/flow/dcmi.c +++ b/src/modules/flow/dcmi.c @@ -38,10 +38,8 @@ #include #include #include "no_warnings.h" -#include "mavlink_bridge_header.h" -#include -#include "utils.h" #include "dcmi.h" +#include "timer.h" #include "stm32f4xx_gpio.h" #include "stm32f4xx_rcc.h" #include "stm32f4xx_i2c.h" @@ -51,389 +49,174 @@ #include "misc.h" #include "stm32f4xx.h" -#include - //#define CONFIG_USE_PROBES #include -/* counters */ -volatile uint8_t image_counter = 0; -volatile uint32_t frame_counter; -volatile uint32_t time_last_frame = 0; -volatile uint32_t cycle_time = 0; -volatile uint32_t time_between_next_images; -volatile uint8_t dcmi_calibration_counter = 0; - -/* state variables */ -volatile uint8_t dcmi_image_buffer_memory0 = 1; -volatile uint8_t dcmi_image_buffer_memory1 = 2; -volatile uint8_t dcmi_image_buffer_unused = 3; -volatile 
uint8_t calibration_used; -volatile uint8_t calibration_unused; -volatile uint8_t calibration_mem0; -volatile uint8_t calibration_mem1; - -/* image buffers */ -uint8_t dcmi_image_buffer_8bit_1[FULL_IMAGE_SIZE]; -uint8_t dcmi_image_buffer_8bit_2[FULL_IMAGE_SIZE]; -uint8_t dcmi_image_buffer_8bit_3[FULL_IMAGE_SIZE]; - -uint32_t time_between_images; - -/* extern functions */ -extern uint32_t get_boot_time_us(void); -extern void delay(unsigned msec); +struct _dcmi_transport_ctx; +typedef struct _dcmi_transport_ctx dcmi_transport_ctx; -/** - * @brief Initialize DCMI DMA and enable image capturing - */ -void enable_image_capture(void) -{ - dcmi_clock_init(); - dcmi_hw_init(); - dcmi_dma_init(global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]); - mt9v034_context_configuration(); - dcmi_dma_enable(); -} +/* DMA buffers */ +uint8_t dcmi_dma_buffer_1[CONFIG_DCMI_DMA_BUFFER_SIZE]; +uint8_t dcmi_dma_buffer_2[CONFIG_DCMI_DMA_BUFFER_SIZE]; -/** - * @brief DMA reconfiguration after changing image window - */ -void dma_reconfigure(void) -{ - dcmi_dma_disable(); - - if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - dcmi_dma_init(FULL_IMAGE_SIZE); - else - dcmi_dma_init(global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]); +bool dcmi_init(void *usr, + camera_transport_transfer_done_cb transfer_done_cb, + camera_transport_frame_done_cb frame_done_cb, + void *cb_usr); - dcmi_dma_enable(); -} +void dcmi_reset(void *usr); /** - * @brief Calibration image collection routine restart + * @brief HW initialization of DCMI clock */ -void dcmi_restart_calibration_routine(void) -{ - /* wait until we have all 4 parts of image */ - while(frame_counter < 4){} - frame_counter = 0; - dcmi_dma_enable(); -} - +static void dcmi_clock_init(void); /** - * @brief Interrupt handler of DCMI + * @brief HW initialization DCMI */ -void DCMI_IRQHandler(void) -{ - if (DCMI_GetITStatus(DCMI_IT_FRAME) != RESET) - { - DCMI_ClearITPendingBit(DCMI_IT_FRAME); - } - - return; -} - +static void dcmi_hw_init(void); /** - * @brief Interrupt handler of DCMI DMA stream + * @brief Configures DCMI and DMA to capture image from the camera. */ -void DMA2_Stream1_IRQHandler(void) -{ - /* transfer completed */ - if (DMA_GetITStatus(DMA2_Stream1, DMA_IT_TCIF1) != RESET) - { - DMA_ClearITPendingBit(DMA2_Stream1, DMA_IT_TCIF1); - frame_counter++; - - if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - { - if (frame_counter >= 4) - { - dcmi_dma_disable(); - calibration_used = DMA_GetCurrentMemoryTarget(DMA2_Stream1); - calibration_unused = dcmi_image_buffer_unused; - calibration_mem0 = dcmi_image_buffer_memory0; - calibration_mem1 = dcmi_image_buffer_memory1; - } - } - - return; - } - - /* transfer half completed - * - * We use three buffers and switch the buffers if dma transfer - * is in half state. 
- */ - if (DMA_GetITStatus(DMA2_Stream1, DMA_IT_HTIF1) != RESET) - { - DMA_ClearITPendingBit(DMA2_Stream1, DMA_IT_HTIF1); - } - - dma_swap_buffers(); -} - +static void dcmi_dma_init(void); /** - * @brief Swap DMA image buffer addresses + * @brief Enable DCMI and DMA stream */ -void dma_swap_buffers(void) -{ - /* check which buffer is in use */ - if (DMA_GetCurrentMemoryTarget(DMA2_Stream1)) - { - /* swap dcmi image buffer */ - if (dcmi_image_buffer_unused == 1) - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_1, DMA_Memory_0); - else if (dcmi_image_buffer_unused == 2) - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_2, DMA_Memory_0); - else - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_3, DMA_Memory_0); - - int tmp_buffer = dcmi_image_buffer_memory0; - dcmi_image_buffer_memory0 = dcmi_image_buffer_unused; - dcmi_image_buffer_unused = tmp_buffer; - } - else - { - /* swap dcmi image buffer */ - if (dcmi_image_buffer_unused == 1) - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_1, DMA_Memory_1); - else if (dcmi_image_buffer_unused == 2) - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_2, DMA_Memory_1); - else - DMA_MemoryTargetConfig(DMA2_Stream1, (uint32_t) dcmi_image_buffer_8bit_3, DMA_Memory_1); - - int tmp_buffer = dcmi_image_buffer_memory1; - dcmi_image_buffer_memory1 = dcmi_image_buffer_unused; - dcmi_image_buffer_unused = tmp_buffer; - } - - /* set next time_between_images */ - cycle_time = get_boot_time_us() - time_last_frame; - time_last_frame = get_boot_time_us(); - - if(image_counter) // image was not fetched jet - { - time_between_next_images = time_between_next_images + cycle_time; - } - else - { - time_between_next_images = cycle_time; - } - - /* set new image true and increment frame counter*/ - image_counter += 1; - - return; +static void dcmi_dma_enable(void); +/** + * @brief Initialize/Enable DCMI Interrupt + */ +static void dcmi_dma_it_init(void); + +struct _dcmi_transport_ctx { + /* assets */ + camera_transport_transfer_done_cb transfer_done_cb; + camera_transport_frame_done_cb frame_done_cb; + void *cb_usr; + + /* IRQ monitoring: */ + volatile uint32_t last_dma_irq_t; + uint32_t last_dma_irq_pause_t; +}; + +static dcmi_transport_ctx dcmi_ctx; + +const camera_transport_interface dcmi_transport_interface = { + .usr = &dcmi_ctx, + .transfer_size = CONFIG_DCMI_DMA_BUFFER_SIZE, + .init = dcmi_init, + .reset = dcmi_reset +}; + +const camera_transport_interface *dcmi_get_transport_interface() { + return &dcmi_transport_interface; } -uint32_t get_time_between_images(void){ - return time_between_images; +bool dcmi_init(void *usr, + camera_transport_transfer_done_cb transfer_done_cb, + camera_transport_frame_done_cb frame_done_cb, + void *cb_usr) { + dcmi_transport_ctx *ctx = (dcmi_transport_ctx *)usr; + memset(ctx, 0, sizeof(dcmi_transport_ctx)); + /* init assets: */ + ctx->transfer_done_cb = transfer_done_cb; + ctx->frame_done_cb = frame_done_cb; + ctx->cb_usr = cb_usr; + ctx->last_dma_irq_t = get_boot_time_us(); + ctx->last_dma_irq_pause_t = 1000000; + /* initialize hardware: */ + dcmi_clock_init(); + dcmi_hw_init(); + dcmi_dma_init(); + dcmi_dma_enable(); + dcmi_dma_it_init(); + return true; } -uint32_t get_frame_counter(void){ - return frame_counter; +void dcmi_reset(void *usr) { + /* stop the DMA: */ + DMA_Cmd(DMA2_Stream1, DISABLE); + /* clear pending interrupt: */ + if (DMA_GetITStatus(DMA2_Stream1, DMA_IT_TCIF1) != RESET) { + 
DMA_ClearITPendingBit(DMA2_Stream1, DMA_IT_TCIF1); + } + /* re-enable DMA */ + DMA_Cmd(DMA2_Stream1, ENABLE); } /** - * @brief Copy image to fast RAM address - * - * @param current_image Current image buffer - * @param previous_image Previous image buffer - * @param image_size Image size of the image to copy - * @param image_step Image to wait for (if 1 no waiting) + * @brief Interrupt handler of DCMI */ -void dma_copy_image_buffers(uint8_t ** current_image, uint8_t ** previous_image, uint16_t image_size, uint8_t image_step){ - - /* swap image buffers */ - uint8_t * tmp_image = *current_image; - *current_image = *previous_image; - *previous_image = tmp_image; - -TODO(NB dma_copy_image_buffers is calling uavcan_run()); - - /* wait for new image if needed */ - while(image_counter < image_step) { - PROBE_1(false); - uavcan_run(); - PROBE_1(true); - } - - image_counter = 0; - - /* time between images */ - time_between_images = time_between_next_images; - - /* copy image */ - if (dcmi_image_buffer_unused == 1) - { - for (uint16_t pixel = 0; pixel < image_size; pixel++) - (*current_image)[pixel] = (uint8_t)(dcmi_image_buffer_8bit_1[pixel]); - } - else if (dcmi_image_buffer_unused == 2) - { - for (uint16_t pixel = 0; pixel < image_size; pixel++) - (*current_image)[pixel] = (uint8_t)(dcmi_image_buffer_8bit_2[pixel]); - } - else - { - for (uint16_t pixel = 0; pixel < image_size; pixel++) - (*current_image)[pixel] = (uint8_t)(dcmi_image_buffer_8bit_3[pixel]); +void DCMI_IRQHandler(void) { + if (DCMI_GetITStatus(DCMI_IT_FRAME) != RESET) { + DCMI_ClearITPendingBit(DCMI_IT_FRAME); + /* get context: */ + dcmi_transport_ctx *ctx = &dcmi_ctx; + /* calculate time delta to the end of DMA2_Stream1_IRQHandler: + * this should be almost zero because normally the DCMI interrupt will happen after the DMA interrupt */ + uint32_t dt = get_time_delta_us(ctx->last_dma_irq_t); + /* take the last pause between DMA request as a basis to calculate the maximum allowed delta time: */ + uint32_t allowed_dt = ctx->last_dma_irq_pause_t / 4; + if (allowed_dt < 10) allowed_dt = 10; + /* typedef void (*camera_transport_frame_done_cb)(void *usr); */ + ctx->frame_done_cb(ctx->cb_usr, dt > allowed_dt); } } /** - * @brief Send calibration image with MAVLINK over USB - * - * @param image_buffer_fast_1 Image buffer in fast RAM - * @param image_buffer_fast_2 Image buffer in fast RAM + * @brief Interrupt handler of DCMI DMA stream */ -void send_calibration_image(uint8_t ** image_buffer_fast_1, uint8_t ** image_buffer_fast_2) { - - /* transmit raw 8-bit image */ - /* TODO image is too large for this transmission protocol (too much packets), but it works */ - mavlink_msg_data_transmission_handshake_send( - MAVLINK_COMM_2, - MAVLINK_DATA_STREAM_IMG_RAW8U, - FULL_IMAGE_SIZE * 4, - FULL_IMAGE_ROW_SIZE * 2, - FULL_IMAGE_COLUMN_SIZE * 2, - FULL_IMAGE_SIZE * 4 / MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN + 1, - MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN, - 100); - - uint16_t frame = 0; - uint8_t image = 0; - uint8_t frame_buffer[MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN]; - - for (int i = 0; i < FULL_IMAGE_SIZE * 4; i++) - { - - if (i % FULL_IMAGE_SIZE == 0 && i != 0) - { - image++; - } - - if (i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN == 0 && i != 0) - { - mavlink_msg_encapsulated_data_send(MAVLINK_COMM_2, frame, frame_buffer); - frame++; - delay(2); - } - - if (image == 0 ) - { - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = (uint8_t)(*image_buffer_fast_1)[i % FULL_IMAGE_SIZE]; - } - else if (image == 1 ) - { 
- frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = (uint8_t)(*image_buffer_fast_2)[i % FULL_IMAGE_SIZE]; - } - else if (image == 2) - { - if (calibration_unused == 1) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_1[i % FULL_IMAGE_SIZE]; - else if (calibration_unused == 2) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_2[i % FULL_IMAGE_SIZE]; - else - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_3[i % FULL_IMAGE_SIZE]; - } - else - { - if (calibration_used) - { - if (calibration_mem0 == 1) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_1[i % FULL_IMAGE_SIZE]; - else if (calibration_mem0 == 2) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_2[i % FULL_IMAGE_SIZE]; - else - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_3[i % FULL_IMAGE_SIZE]; - } - else - { - if (calibration_mem1 == 1) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_1[i % FULL_IMAGE_SIZE]; - else if (calibration_mem1 == 2) - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_2[i % FULL_IMAGE_SIZE]; - else - frame_buffer[i % MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN] = dcmi_image_buffer_8bit_3[i % FULL_IMAGE_SIZE]; - } +void DMA2_Stream1_IRQHandler(void) { + /* transfer completed */ + if (DMA_GetITStatus(DMA2_Stream1, DMA_IT_TCIF1) != RESET) { + DMA_ClearITPendingBit(DMA2_Stream1, DMA_IT_TCIF1); + /* get the buffer that has been completed: */ + void *buffer; + if (DMA_GetCurrentMemoryTarget(DMA2_Stream1) == 0) { + buffer = dcmi_dma_buffer_2; + } else { + buffer = dcmi_dma_buffer_1; } + /* get context: */ + dcmi_transport_ctx *ctx = &dcmi_ctx; + /* calculate the pause-time: */ + ctx->last_dma_irq_pause_t = get_time_delta_us(ctx->last_dma_irq_t); + /* typedef void (*camera_transport_transfer_done_cb)(void *usr, const void *buffer, size_t size); */ + ctx->transfer_done_cb(ctx->cb_usr, buffer, CONFIG_DCMI_DMA_BUFFER_SIZE); + /* store the time when the function finished. 
*/ + ctx->last_dma_irq_t = get_boot_time_us(); } - - mavlink_msg_encapsulated_data_send(MAVLINK_COMM_2, frame, frame_buffer); - } -/** - * @brief Initialize/Enable DCMI Interrupt - */ -void dcmi_it_init() -{ +static void dcmi_dma_it_init() { NVIC_InitTypeDef NVIC_InitStructure; /* Enable the DCMI global Interrupt */ NVIC_InitStructure.NVIC_IRQChannel = DCMI_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 5; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 2; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); - DCMI_ITConfig(DCMI_IT_FRAME,ENABLE); -} - -/** - * @brief Initialize/Enable DMA Interrupt - */ -void dma_it_init() -{ - NVIC_InitTypeDef NVIC_InitStructure; - + DCMI_ITConfig(DCMI_IT_FRAME, ENABLE); + /* Enable the DMA global Interrupt */ NVIC_InitStructure.NVIC_IRQChannel = DMA2_Stream1_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 5; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 2; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); - DMA_ITConfig(DMA2_Stream1, DMA_IT_HT, ENABLE); // half transfer interrupt DMA_ITConfig(DMA2_Stream1, DMA_IT_TC, ENABLE); // transfer complete interrupt } -/** - * @brief Enable DCMI DMA stream - */ -void dcmi_dma_enable() -{ +static void dcmi_dma_enable() { /* Enable DMA2 stream 1 and DCMI interface then start image capture */ DMA_Cmd(DMA2_Stream1, ENABLE); DCMI_Cmd(ENABLE); DCMI_CaptureCmd(ENABLE); - dma_it_init(); } -/** - * @brief Disable DCMI DMA stream - */ -void dcmi_dma_disable() -{ - /* Disable DMA2 stream 1 and DCMI interface then stop image capture */ - DMA_Cmd(DMA2_Stream1, DISABLE); - DCMI_Cmd(DISABLE); - DCMI_CaptureCmd(DISABLE); -} - -void reset_frame_counter() -{ - frame_counter = 0; -} - -/** - * @brief HW initialization of DCMI clock - */ -void dcmi_clock_init() -{ +static void dcmi_clock_init() { GPIO_InitTypeDef GPIO_InitStructure; TIM_TimeBaseInitTypeDef TIM_TimeBaseStructure; TIM_OCInitTypeDef TIM_OCInitStructure; @@ -479,23 +262,8 @@ void dcmi_clock_init() TIM_Cmd(TIM3, ENABLE); } -/** - * @brief HW initialization DCMI - */ -void dcmi_hw_init(void) -{ - uint16_t image_size = global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]; - - /* Reset image buffers */ - for (int i = 0; i < image_size; i++) { - dcmi_image_buffer_8bit_1 [i] = 0; - dcmi_image_buffer_8bit_2 [i] = 0; - dcmi_image_buffer_8bit_3 [i] = 0; - } - +static void dcmi_hw_init(void) { GPIO_InitTypeDef GPIO_InitStructure; - I2C_InitTypeDef I2C_InitStruct; - /*** Configures the DCMI GPIOs to interface with the OV2640 camera module ***/ /* Enable DCMI GPIOs clocks */ RCC_AHB1PeriphClockCmd( @@ -538,72 +306,20 @@ void dcmi_hw_init(void) GPIO_InitStructure.GPIO_Pin = GPIO_Pin_0 | GPIO_Pin_1 | GPIO_Pin_4 | GPIO_Pin_5 | GPIO_Pin_6; GPIO_Init(GPIOE, &GPIO_InitStructure); - - /* I2C2 clock enable */ - RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C2, ENABLE); - - /* GPIOB clock enable */ - RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOB, ENABLE); - - /* Connect I2C2 pins to AF4 */ - GPIO_PinAFConfig(GPIOB, GPIO_PinSource10, GPIO_AF_I2C2); - GPIO_PinAFConfig(GPIOB, GPIO_PinSource11, GPIO_AF_I2C2); - - /* Configure I2C2 GPIOs */ - GPIO_InitStructure.GPIO_Pin = GPIO_Pin_10 | GPIO_Pin_11; - GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF; - GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; - GPIO_InitStructure.GPIO_OType = GPIO_OType_OD; - GPIO_InitStructure.GPIO_PuPd = 
GPIO_PuPd_NOPULL; - GPIO_Init(GPIOB, &GPIO_InitStructure); - - /* I2C DeInit */ - I2C_DeInit(I2C2); - - /* Enable the I2C peripheral */ - I2C_Cmd(I2C2, ENABLE); - - /* Set the I2C structure parameters */ - I2C_InitStruct.I2C_Mode = I2C_Mode_I2C; - I2C_InitStruct.I2C_DutyCycle = I2C_DutyCycle_2; - I2C_InitStruct.I2C_OwnAddress1 = 0xFE; - I2C_InitStruct.I2C_Ack = I2C_Ack_Enable; - I2C_InitStruct.I2C_AcknowledgedAddress = I2C_AcknowledgedAddress_7bit; - I2C_InitStruct.I2C_ClockSpeed = 100000; - - /* Initialize the I2C peripheral w/ selected parameters */ - I2C_Init(I2C2, &I2C_InitStruct); - - /* Initialize GPIOs for EXPOSURE and STANDBY lines of the camera */ - GPIO_InitStructure.GPIO_Pin = GPIO_Pin_2 | GPIO_Pin_3; - GPIO_InitStructure.GPIO_Mode = GPIO_Mode_OUT; - GPIO_InitStructure.GPIO_OType = GPIO_OType_PP; - GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; - GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL; - GPIO_Init(GPIOA, &GPIO_InitStructure); - GPIO_ResetBits(GPIOA, GPIO_Pin_2 | GPIO_Pin_3); } -/** - * @brief Configures DCMI/DMA to capture image from the mt9v034 camera. - * - * @param buffer_size Buffer size in bytes - */ -void dcmi_dma_init(uint16_t buffer_size) -{ - reset_frame_counter(); - +static void dcmi_dma_init() { DCMI_InitTypeDef DCMI_InitStructure; DMA_InitTypeDef DMA_InitStructure; - /*** Configures the DCMI to interface with the mt9v034 camera module ***/ + /*** Configures the DCMI to interface with the camera module ***/ /* Enable DCMI clock */ RCC_AHB2PeriphClockCmd(RCC_AHB2Periph_DCMI, ENABLE); /* DCMI configuration */ DCMI_InitStructure.DCMI_CaptureMode = DCMI_CaptureMode_Continuous; DCMI_InitStructure.DCMI_SynchroMode = DCMI_SynchroMode_Hardware; - DCMI_InitStructure.DCMI_PCKPolarity = DCMI_PCKPolarity_Falling; + DCMI_InitStructure.DCMI_PCKPolarity = DCMI_PCKPolarity_Rising; DCMI_InitStructure.DCMI_VSPolarity = DCMI_VSPolarity_Low; DCMI_InitStructure.DCMI_HSPolarity = DCMI_HSPolarity_Low; DCMI_InitStructure.DCMI_CaptureRate = DCMI_CaptureRate_All_Frame; @@ -618,9 +334,9 @@ void dcmi_dma_init(uint16_t buffer_size) DMA_InitStructure.DMA_Channel = DMA_Channel_1; DMA_InitStructure.DMA_PeripheralBaseAddr = DCMI_DR_ADDRESS; - DMA_InitStructure.DMA_Memory0BaseAddr = (uint32_t) dcmi_image_buffer_8bit_1; + DMA_InitStructure.DMA_Memory0BaseAddr = (uint32_t)dcmi_dma_buffer_1; DMA_InitStructure.DMA_DIR = DMA_DIR_PeripheralToMemory; - DMA_InitStructure.DMA_BufferSize = buffer_size / 4; // buffer size in date unit (word) + DMA_InitStructure.DMA_BufferSize = CONFIG_DCMI_DMA_BUFFER_SIZE / 4; // buffer size in date unit (word) DMA_InitStructure.DMA_PeripheralInc = DMA_PeripheralInc_Disable; DMA_InitStructure.DMA_MemoryInc = DMA_MemoryInc_Enable; DMA_InitStructure.DMA_PeripheralDataSize = DMA_PeripheralDataSize_Word; @@ -632,8 +348,8 @@ void dcmi_dma_init(uint16_t buffer_size) DMA_InitStructure.DMA_MemoryBurst = DMA_MemoryBurst_Single; DMA_InitStructure.DMA_PeripheralBurst = DMA_PeripheralBurst_Single; - DMA_DoubleBufferModeConfig(DMA2_Stream1,(uint32_t) dcmi_image_buffer_8bit_2, DMA_Memory_0); - DMA_DoubleBufferModeCmd(DMA2_Stream1,ENABLE); + DMA_DoubleBufferModeConfig(DMA2_Stream1, (uint32_t)dcmi_dma_buffer_2, DMA_Memory_0); + DMA_DoubleBufferModeCmd(DMA2_Stream1, ENABLE); /* DCMI configuration */ DCMI_Init(&DCMI_InitStructure); diff --git a/src/modules/flow/debug.c b/src/modules/flow/debug.c deleted file mode 100644 index 03f3b79..0000000 --- a/src/modules/flow/debug.c +++ /dev/null @@ -1,201 +0,0 @@ -/**************************************************************************** - * 
- * Copyright (C) 2013 PX4 Development Team. All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * - ****************************************************************************/ - -#include -#include "no_warnings.h" -#include "mavlink_bridge_header.h" -#include -#include "utils.h" -#include "settings.h" -#include "debug.h" - -typedef enum -{ - DEBUG_STRING, - DEBUG_INT, - DEBUG_FLOAT -} DebugMsgType_TypeDef; - -/* send buffers */ -static DebugMsgType_TypeDef m_debug_buf_type[DEBUG_COUNT]; -static char const * m_debug_buf_pointer[DEBUG_COUNT]; //string pointer buffer -static int32_t m_debug_buf_int[DEBUG_COUNT]; -static float m_debug_buf_float[DEBUG_COUNT]; - -static uint8_t m_debug_index_write = 0; -static uint8_t m_debug_index_read = 0; -static uint16_t m_debug_count = 0; -static bool m_debug_was_full = false; - -/** - * @brief Add a debug message to the send buffer - * @note This function is interrupt service routine (ISR) SAFE, as it only access RAM. 
- * - * @param string Message - * @return 1 if successfully added, 0 else - */ -static uint8_t debug_message_buffer(const char* string) -{ - if (m_debug_index_read - m_debug_index_write == 1 || (m_debug_index_read - == 0 && m_debug_index_write == DEBUG_COUNT - 1)) - { - /* buffer full, can't send */ - m_debug_was_full = true; - return 0; - } - m_debug_index_write = (m_debug_index_write + 1) % DEBUG_COUNT; - m_debug_count++; - - /* buffer pointer to string in program code */ - m_debug_buf_pointer[m_debug_index_write] = string; - - return 1; -} - -/** - * @brief Add a string debug message to the send buffer - * - * @param string Message - * @return 1 if successfully added, 0 else - */ -uint8_t debug_string_message_buffer(const char* string) -{ - if (debug_message_buffer(string)) - { - /* Could write, save message to buffer */ - m_debug_buf_type[m_debug_index_write] = DEBUG_STRING; - return 1; - } - else - { - /* Could not write, do nothing */ - return 0; - } -} - -/** - * @brief Add a string-integer debug message to the send buffers - * - * @param string Message - * @return 1 if successfully added, 0 else - */ -uint8_t debug_int_message_buffer(const char* string, int32_t num) -{ - if (debug_message_buffer(string)) - { - /* Could write, save integer to buffer */ - m_debug_buf_int[m_debug_index_write] = num; - m_debug_buf_type[m_debug_index_write] = DEBUG_INT; - return 1; - } - else - { - /* Could not write, do nothing */ - return 0; - } -} - -/** - * @brief Add a string-float debug message to the send buffers - * - * @param string Message - * @return 1 if successfully added, 0 else - */ -uint8_t debug_float_message_buffer(const char* string, float num) -{ - if (debug_message_buffer(string)) - { - /* Could write, save float to buffer */ - m_debug_buf_float[m_debug_index_write] = num; - m_debug_buf_type[m_debug_index_write] = DEBUG_FLOAT; - return 1; - } - else - { - /* Could not write, do nothing */ - return 0; - } -} - -/** - * @brief Send one of the buffered messages - */ -void debug_message_send_one(void) -{ - if (m_debug_index_write == m_debug_index_read) - { - /* buffer empty */ - return; - } - m_debug_index_read = (m_debug_index_read + 1) % DEBUG_COUNT; - - char msg[DEBUG_MAX_LEN] = {}; - - switch(m_debug_buf_type[m_debug_index_read]) - { - case(DEBUG_STRING): - strncpy(msg, m_debug_buf_pointer[m_debug_index_read], DEBUG_MAX_LEN); - break; - - case(DEBUG_INT): - strncat(msg, m_debug_buf_pointer[m_debug_index_read], DEBUG_MAX_LEN); - strncat(msg, " ", DEBUG_MAX_LEN); - strncat(msg, ftoa(m_debug_buf_int[m_debug_index_read]), DEBUG_MAX_LEN); - msg[strlen(msg) - 2] = '\0'; // TODO workaround: cut ".0" of float - break; - - case(DEBUG_FLOAT): - strncat(msg, m_debug_buf_pointer[m_debug_index_read], DEBUG_MAX_LEN); - strncat(msg, " ", DEBUG_MAX_LEN); - strncat(msg, ftoa(m_debug_buf_float[m_debug_index_read]), DEBUG_MAX_LEN); - break; - - default: - return; - } - - - if(m_debug_was_full) - { - msg[0] = '+'; - m_debug_was_full = false; - } - - msg[DEBUG_MAX_LEN - 1] = '\0'; //enforce string termination - - mavlink_msg_statustext_send(MAVLINK_COMM_0, 0, msg); - - if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_DEBUG])) - mavlink_msg_statustext_send(MAVLINK_COMM_2, 0, msg); -} - diff --git a/src/modules/flow/sonar_mode_filter.c b/src/modules/flow/distance_mode_filter.c similarity index 72% rename from src/modules/flow/sonar_mode_filter.c rename to src/modules/flow/distance_mode_filter.c index 94205f3..c335be4 100644 --- a/src/modules/flow/sonar_mode_filter.c +++ 
b/src/modules/flow/distance_mode_filter.c
@@ -31,51 +31,51 @@
 *
 ****************************************************************************/
-#include "sonar_mode_filter.h"
+#include "distance_mode_filter.h"
 #include
 /**
- * insert-only ring buffer of sonar data, needs to be of uneven size
+ * insert-only ring buffer of distance data, needs to be of uneven size
  * the initialization to zero will make the filter respond zero for the
- * first three inserted readinds, which is a decent startup-logic.
+ * first N inserted readings, which is a decent startup-logic.
  */
-static float sonar_values[3] = { 0.0f };
+static float distance_values[5] = { 0.0f };
 static unsigned insert_index = 0;
-static void sonar_bubble_sort(float in_out_sonar_values[], unsigned n)
+static void distance_bubble_sort(float distances[], unsigned n)
 {
 	float t;
 	for (unsigned i = 0; i < (n - 1); i++) {
 		for (unsigned j = 0; j < (n - i - 1); j++) {
-			if (in_out_sonar_values[j] > in_out_sonar_values[j+1]) {
+			if (distances[j] > distances[j+1]) {
 				/* swap two values */
-				t = in_out_sonar_values[j];
-				in_out_sonar_values[j] = in_out_sonar_values[j + 1];
-				in_out_sonar_values[j + 1] = t;
+				t = distances[j];
+				distances[j] = distances[j + 1];
+				distances[j + 1] = t;
 			}
 		}
 	}
 }
-float insert_sonar_value_and_get_mode_value(float insert)
+float insert_distance_value_and_get_mode_value(float insert)
 {
-	const unsigned sonar_count = sizeof(sonar_values) / sizeof(sonar_values[0]);
+	const unsigned distance_count = sizeof(distance_values) / sizeof(distance_values[0]);
-	sonar_values[insert_index] = insert;
+	distance_values[insert_index] = insert;
 	insert_index++;
-	if (insert_index == sonar_count) {
+	if (insert_index == distance_count) {
 		insert_index = 0;
 	}
 	/* sort and return mode */
 	/* copy ring buffer */
-	float sonar_temp[sonar_count];
-	memcpy(sonar_temp, sonar_values, sizeof(sonar_values));
+	float distance_temp[distance_count];
+	memcpy(distance_temp, distance_values, sizeof(distance_values));
-	sonar_bubble_sort(sonar_temp, sonar_count);
+	distance_bubble_sort(distance_temp, distance_count);
 	/* the center element represents the mode after sorting */
-	return sonar_temp[sonar_count / 2];
+	return distance_temp[distance_count / 2];
 }
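For illustration only (this note and the sketch below are not part of the patch): the renamed filter keeps the last five readings in an insert-only ring buffer, sorts a copy, and returns the center element of the sorted window (its median), so a single glitched distance reading is rejected. A minimal usage sketch, assuming only the insert_distance_value_and_get_mode_value() function introduced above and a hypothetical demo_distance_filter() harness linked against distance_mode_filter.c:

/* sketch: feed five readings, one of them a glitch, through the filter */
#include <stdio.h>

extern float insert_distance_value_and_get_mode_value(float insert);

int demo_distance_filter(void)
{
	const float readings[] = { 1.02f, 1.03f, 9.99f /* glitch */, 1.04f, 1.05f };
	const unsigned n = sizeof(readings) / sizeof(readings[0]);

	for (unsigned i = 0; i < n; i++) {
		float filtered = insert_distance_value_and_get_mode_value(readings[i]);
		printf("raw %.2f m -> filtered %.2f m\n", readings[i], filtered);
	}
	/* the 9.99 m outlier is never returned; the output settles on the
	 * neighbouring readings instead */
	return 0;
}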
diff --git a/src/modules/flow/flow.c b/src/modules/flow/flow.c
index 9255f2a..f7c7c57 100644
--- a/src/modules/flow/flow.c
+++ b/src/modules/flow/flow.c
@@ -41,22 +41,30 @@
 #include "no_warnings.h"
 #include "mavlink_bridge_header.h"
+#include "settings.h"
 #include
+#include "flow.h"
 #include "dcmi.h"
-#include "debug.h"
+#include "timer.h"
 #define __INLINE inline
 #define __ASM asm
 #include "core_cm4_simd.h"
-#define FRAME_SIZE global_data.param[PARAM_IMAGE_WIDTH]
-#define SEARCH_SIZE global_data.param[PARAM_MAX_FLOW_PIXEL] // maximum offset to search: 4 + 1/2 pixels
+#define SEARCH_SIZE global_data.param[PARAM_FLOW_MAX_PIXEL] // maximum offset to search: 4 + 1/2 pixels
 #define TILE_SIZE 8 // x & y tile size
 #define NUM_BLOCKS 5 // x & y number of tiles to check
+#define NUM_BLOCK_KLT 4
-#define sign(x) (( x > 0 ) - ( x < 0 ))
+//these are the settings for KLT based flow
+#define PYR_LVLS 2
+#define HALF_PATCH_SIZE 3 //this is half the wanted patch size minus 1
+#define PATCH_SIZE (HALF_PATCH_SIZE*2+1)
+
+float Jx[PATCH_SIZE*PATCH_SIZE];
+float Jy[PATCH_SIZE*PATCH_SIZE];
-uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rate, float z_rate, float *pixel_flow_x, float *pixel_flow_y);
+#define sign(x) (( x > 0 ) - ( x < 0 ))
 // compliments of Adam Williams
 #define ABSDIFF(frame1, frame2) \
@@ -382,46 +390,29 @@ static inline uint32_t compute_sad_8x8(uint8_t *image1, uint8_t *image2, uint16_
 	return acc;
 }
-/**
- * @brief Computes pixel flow from image1 to image2
- *
- * Searches the corresponding position in the new image (image2) of max. 64 pixels from the old image (image1)
- * and calculates the average offset of all.
- *
- * @param image1 previous image buffer
- * @param image2 current image buffer (new)
- * @param x_rate gyro x rate
- * @param y_rate gyro y rate
- * @param z_rate gyro z rate
- *
- * @return quality of flow calculation
- */
-uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rate, float z_rate, float *pixel_flow_x, float *pixel_flow_y) {
+float get_flow_capability() {
+	const uint16_t search_size = SEARCH_SIZE;
+	return search_size + 0.5;
+}
+uint16_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rate, float z_rate,
+		flow_raw_result *out, uint16_t max_out)
+{
 	/* constants */
-	const int16_t winmin = -SEARCH_SIZE;
-	const int16_t winmax = SEARCH_SIZE;
-	const uint16_t hist_size = 2*(winmax-winmin+1)+1;
+	const uint16_t search_size = SEARCH_SIZE;
+	const uint16_t frame_size = FLOW_FRAME_SIZE;
+	const int16_t winmin = -search_size;
+	const int16_t winmax = search_size;
 	/* variables */
-	uint16_t pixLo = SEARCH_SIZE + 1;
-	uint16_t pixHi = FRAME_SIZE - (SEARCH_SIZE + 1) - TILE_SIZE;
-	uint16_t pixStep = (pixHi - pixLo) / NUM_BLOCKS + 1;
+	uint16_t pixLo = search_size + 1;
+	uint16_t pixHi = frame_size - (search_size + 1) - TILE_SIZE;
+	uint16_t pixStep = (pixHi - pixLo - 1) / NUM_BLOCKS;
+	pixHi = pixLo + pixStep * NUM_BLOCKS;
 	uint16_t i, j;
 	uint32_t acc[8]; // subpixels
-	uint16_t histx[hist_size]; // counter for x shift
-	uint16_t histy[hist_size]; // counter for y shift
-	int8_t dirsx[64]; // shift directions in x
-	int8_t dirsy[64]; // shift directions in y
-	uint8_t subdirs[64]; // shift directions of best subpixels
-	float meanflowx = 0.0f;
-	float meanflowy = 0.0f;
-	uint16_t meancount = 0;
-	float histflowx = 0.0f;
-	float histflowy = 0.0f;
-
-	/* initialize with 0 */
-	for (j = 0; j < hist_size; j++) { histx[j] = 0; histy[j] = 0; }
+
+	uint16_t result_count = 0;
 	/* iterate over all patterns */
@@ -429,9 +420,21 @@ uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rat
 	{
 		for (i = pixLo; i < pixHi; i += pixStep)
 		{
+			/* abort if the output buffer is full */
+			if (result_count >= max_out) break;
+			flow_raw_result *result = &out[result_count];
+			/* init the result */
+			result->x = 0;
+			result->y = 0;
+			result->quality = 0;
+			result->at_x = i + TILE_SIZE / 2;
+			result->at_y = j + TILE_SIZE / 2;
+
+			result_count++;
+
 			/* test pixel if it is suitable for flow tracking */
-			uint32_t diff = compute_diff(image1, i, j, (uint16_t) global_data.param[PARAM_IMAGE_WIDTH]);
-			if (diff < global_data.param[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD])
+			uint32_t diff = compute_diff(image1, i, j, frame_size);
+			if (diff < global_data.param[PARAM_FLOW_FEATURE_THRESHOLD])
 			{
 				continue;
 			}
@@ -441,16 +444,16 @@ uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rat
 			int8_t sumy = 0;
 			int8_t ii, jj;
-			uint8_t *base1 = image1 + j * (uint16_t) global_data.param[PARAM_IMAGE_WIDTH] + i;
+			//uint8_t *base1 = image1 + j * frame_size + i;
 			for (jj = winmin; jj <= winmax; jj++)
 			{
-				uint8_t *base2 = image2 + (j+jj) * (uint16_t) global_data.param[PARAM_IMAGE_WIDTH] + i;
+				//uint8_t *base2 = image2 + (j+jj) * frame_size + i;
 				for (ii = winmin; ii <= winmax; ii++)
 				{
-//					uint32_t temp_dist = 
compute_sad_8x8(image1, image2, i, j, i + ii, j + jj, (uint16_t) global_data.param[PARAM_IMAGE_WIDTH]); - uint32_t temp_dist = ABSDIFF(base1, base2 + ii); + uint32_t temp_dist = compute_sad_8x8(image1, image2, i, j, i + ii, j + jj, frame_size); +// uint32_t temp_dist = ABSDIFF(base1, base2 + ii); if (temp_dist < dist) { sumx = ii; @@ -461,12 +464,9 @@ uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rat } /* acceptance SAD distance threshhold */ - if (dist < global_data.param[PARAM_BOTTOM_FLOW_VALUE_THRESHOLD]) + if (dist < global_data.param[PARAM_FLOW_VALUE_THRESHOLD]) { - meanflowx += (float) sumx; - meanflowy += (float) sumy; - - compute_subpixel(image1, image2, i, j, i + sumx, j + sumy, acc, (uint16_t) global_data.param[PARAM_IMAGE_WIDTH]); + compute_subpixel(image1, image2, i, j, i + sumx, j + sumy, acc, frame_size); uint32_t mindist = dist; // best SAD until now uint8_t mindir = 8; // direction 8 for no direction for(uint8_t k = 0; k < 8; k++) @@ -478,272 +478,421 @@ uint8_t compute_flow(uint8_t *image1, uint8_t *image2, float x_rate, float y_rat mindir = k; } } - dirsx[meancount] = sumx; - dirsy[meancount] = sumy; - subdirs[meancount] = mindir; - meancount++; - - /* feed histogram filter*/ - uint8_t hist_index_x = 2*sumx + (winmax-winmin+1); - if (subdirs[i] == 0 || subdirs[i] == 1 || subdirs[i] == 7) hist_index_x += 1; - if (subdirs[i] == 3 || subdirs[i] == 4 || subdirs[i] == 5) hist_index_x += -1; - uint8_t hist_index_y = 2*sumy + (winmax-winmin+1); - if (subdirs[i] == 5 || subdirs[i] == 6 || subdirs[i] == 7) hist_index_y += -1; - if (subdirs[i] == 1 || subdirs[i] == 2 || subdirs[i] == 3) hist_index_y += 1; - - histx[hist_index_x]++; - histy[hist_index_y]++; - + /* store the flow value */ + result->x = sumx; + result->y = sumy; + if (mindir == 0 || mindir == 1 || mindir == 7) result->x += 0.5f; + if (mindir == 3 || mindir == 4 || mindir == 5) result->x -= 0.5f; + if (mindir == 5 || mindir == 6 || mindir == 7) result->y -= 0.5f; + if (mindir == 1 || mindir == 2 || mindir == 3) result->y += 0.5f; + if (FLOAT_AS_BOOL(global_data.param[PARAM_FLOW_GYRO_COMPENSATION])) { + /* gyro compensation */ + result->x -= x_rate; + result->y -= y_rate; + } + result->quality = 1.0; } } } - /* create flow image if needed (image1 is not needed anymore) - * -> can be used for debugging purpose + return result_count; +} + +void klt_preprocess_image(uint8_t *image, flow_klt_image *klt_image) { + uint16_t i, j; + + klt_image->image = image; + + /* + * compute image pyramid for current frame + * there is 188*120 bytes per buffer, we are only using 64*64 per buffer, + * so just add the pyramid levels after the image */ -// if (global_data.param[PARAM_USB_SEND_VIDEO] )//&& global_data.param[PARAM_VIDEO_USB_MODE] == FLOW_VIDEO) -// { -// -// for (j = pixLo; j < pixHi; j += pixStep) -// { -// for (i = pixLo; i < pixHi; i += pixStep) -// { -// -// uint32_t diff = compute_diff(image1, i, j, (uint16_t) global_data.param[PARAM_IMAGE_WIDTH]); -// if (diff > global_data.param[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD]) -// { -// image1[j * ((uint16_t) global_data.param[PARAM_IMAGE_WIDTH]) + i] = 255; -// } -// -// } -// } -// } - - /* evaluate flow calculation */ - if (meancount > 10) + //first compute the offsets in the memory for the pyramid levels + uint8_t *lvl_base[PYR_LVLS]; + uint16_t frame_size = (uint16_t)(FLOW_FRAME_SIZE+0.5); + uint16_t s = frame_size / 2; + uint16_t off = 0; + lvl_base[0] = image; + for (int l = 1; l < PYR_LVLS; l++) { - meanflowx /= meancount; - meanflowy /= 
meancount; - - int16_t maxpositionx = 0; - int16_t maxpositiony = 0; - uint16_t maxvaluex = 0; - uint16_t maxvaluey = 0; + lvl_base[l] = klt_image->preprocessed + off; + off += s*s; + s /= 2; + } - /* position of maximal histogram peek */ - for (j = 0; j < hist_size; j++) - { - if (histx[j] > maxvaluex) - { - maxvaluex = histx[j]; - maxpositionx = j; - } - if (histy[j] > maxvaluey) + //then subsample the images consecutively, no blurring is done before the subsampling (if someone volunteers, please go ahead...) + for (int l = 1; l < PYR_LVLS; l++) + { + uint16_t src_size = frame_size >> (l-1); + uint16_t tar_size = frame_size >> l; + uint8_t *source = lvl_base[l-1]; //pointer to the beginning of the previous level + uint8_t *target = lvl_base[l]; //pointer to the beginning of the current level + for (j = 0; j < tar_size; j++) { + for (i = 0; i < tar_size; i+=2) { - maxvaluey = histy[j]; - maxpositiony = j; + //subsample the image by 2, use the halving-add instruction to do so + uint32_t l1 = (__UHADD8(*((uint32_t*) &source[(j*2+0)*src_size + i*2]), *((uint32_t*) &source[(j*2+0)*src_size + i*2+1]))); + uint32_t l2 = (__UHADD8(*((uint32_t*) &source[(j*2+1)*src_size + i*2]), *((uint32_t*) &source[(j*2+1)*src_size + i*2+1]))); + uint32_t r = __UHADD8(l1, l2); + + //the first and the third byte are the values we want to have + target[j*tar_size + i+0] = (uint8_t) r; + target[j*tar_size + i+1] = (uint8_t) (r>>16); } } + } +} - /* check if there is a peak value in histogram */ - if (1) //(histx[maxpositionx] > meancount / 6 && histy[maxpositiony] > meancount / 6) - { - if (FLOAT_AS_BOOL(global_data.param[PARAM_BOTTOM_FLOW_HIST_FILTER])) - { - - /* use histogram filter peek value */ - uint16_t hist_x_min = maxpositionx; - uint16_t hist_x_max = maxpositionx; - uint16_t hist_y_min = maxpositiony; - uint16_t hist_y_max = maxpositiony; +float get_flow_klt_capability() { + uint16_t topPyrStep = 1 << (PYR_LVLS - 1); + return topPyrStep * 2; +} - /* x direction */ - if (maxpositionx > 1 && maxpositionx < hist_size-2) - { - hist_x_min = maxpositionx - 2; - hist_x_max = maxpositionx + 2; - } - else if (maxpositionx == 0) - { - hist_x_min = maxpositionx; - hist_x_max = maxpositionx + 2; - } - else if (maxpositionx == hist_size-1) - { - hist_x_min = maxpositionx - 2; - hist_x_max = maxpositionx; - } - else if (maxpositionx == 1) - { - hist_x_min = maxpositionx - 1; - hist_x_max = maxpositionx + 2; - } - else if (maxpositionx == hist_size-2) - { - hist_x_min = maxpositionx - 2; - hist_x_max = maxpositionx + 1; - } +uint16_t compute_klt(flow_klt_image *image1, flow_klt_image *image2, float x_rate, float y_rate, float z_rate, + flow_raw_result *out, uint16_t max_out) +{ + /* variables */ + uint16_t i, j; - /* y direction */ - if (maxpositiony > 1 && maxpositiony < hist_size-2) - { - hist_y_min = maxpositiony - 2; - hist_y_max = maxpositiony + 2; - } - else if (maxpositiony == 0) - { - hist_y_min = maxpositiony; - hist_y_max = maxpositiony + 2; - } - else if (maxpositiony == hist_size-1) - { - hist_y_min = maxpositiony - 2; - hist_y_max = maxpositiony; - } - else if (maxpositiony == 1) - { - hist_y_min = maxpositiony - 1; - hist_y_max = maxpositiony + 2; - } - else if (maxpositiony == hist_size-2) - { - hist_y_min = maxpositiony - 2; - hist_y_max = maxpositiony + 1; - } + float chi_sum = 0.0f; + uint8_t chicount = 0; - float hist_x_value = 0.0f; - float hist_x_weight = 0.0f; + uint16_t max_iters = global_data.param[PARAM_KLT_MAX_ITERS]; - float hist_y_value = 0.0f; - float hist_y_weight = 0.0f; + /* + * compute 
image pyramid for current frame + * there is 188*120 bytes per buffer, we are only using 64*64 per buffer, + * so just add the pyramid levels after the image + */ + //first compute the offsets in the memory for the pyramid levels + uint8_t *lvl_base1[PYR_LVLS]; + uint8_t *lvl_base2[PYR_LVLS]; + uint16_t frame_size = (uint16_t)(FLOW_FRAME_SIZE+0.5); + uint16_t s = frame_size / 2; + uint16_t off = 0; + lvl_base1[0] = image1->image; + lvl_base2[0] = image2->image; + for (int l = 1; l < PYR_LVLS; l++) + { + lvl_base1[l] = image1->preprocessed + off; + lvl_base2[l] = image2->preprocessed + off; + off += s*s; + s /= 2; + } - for (uint8_t h = hist_x_min; h < hist_x_max+1; h++) - { - hist_x_value += (float) (h*histx[h]); - hist_x_weight += (float) histx[h]; - } + //need to store the flow values between pyramid level changes + float us[NUM_BLOCK_KLT*NUM_BLOCK_KLT]; + float vs[NUM_BLOCK_KLT*NUM_BLOCK_KLT]; + uint16_t is[NUM_BLOCK_KLT*NUM_BLOCK_KLT]; + uint16_t js[NUM_BLOCK_KLT*NUM_BLOCK_KLT]; + + //initialize flow values with the pixel value of the previous image + uint16_t topPyrStep = 1 << (PYR_LVLS - 1); + + /* + * if the gyro change (x_rate & y_rate) is more than the maximum pixel + * difference that can be detected between two frames (depends on PYR_LVLS) + * we can't calculate the flow. So return empty flow. + */ + if(fabsf(x_rate) > (float)(2 * topPyrStep) || fabsf(y_rate) > (float)(2 * topPyrStep)){ + return 0; + } + + uint16_t pixStep = frame_size / (NUM_BLOCK_KLT + 1); + uint16_t pixLo = pixStep; + /* align with topPyrStep */ + pixStep = ((uint16_t)(pixStep )) & ~((uint16_t)(topPyrStep - 1)); // round down + pixLo = ((uint16_t)(pixLo + (topPyrStep - 1))) & ~((uint16_t)(topPyrStep - 1)); // round up + //uint16_t pixHi = pixLo + pixStep * (NUM_BLOCK_KLT - 1); + + j = pixLo; + for (int y = 0; y < NUM_BLOCK_KLT; y++, j += pixStep) + { + i = pixLo; + for (int x = 0; x < NUM_BLOCK_KLT; x++, i += pixStep) + { + uint16_t idx = y*NUM_BLOCK_KLT+x; + if (FLOAT_AS_BOOL(global_data.param[PARAM_KLT_GYRO_ASSIST])) { + /* use the gyro measurement to guess the initial position in the new image */ + us[idx] = i + x_rate; //position in new image at level 0 + vs[idx] = j + y_rate; + if ((int16_t)us[idx] < HALF_PATCH_SIZE) us[idx] = HALF_PATCH_SIZE; + if ((int16_t)us[idx] > frame_size - HALF_PATCH_SIZE - 1) us[idx] = frame_size - HALF_PATCH_SIZE - 1; + if ((int16_t)vs[idx] < HALF_PATCH_SIZE) vs[idx] = HALF_PATCH_SIZE; + if ((int16_t)vs[idx] > frame_size - HALF_PATCH_SIZE - 1) vs[idx] = frame_size - HALF_PATCH_SIZE - 1; + } else { + us[idx] = i; //position in new image at level 0 + vs[idx] = j; + } + is[idx] = i; //position in previous image at level 0 + js[idx] = j; + /* init output vector */ + if (idx < max_out) { + out[idx].x = 0; + out[idx].y = 0; + out[idx].quality = 0; + out[idx].at_x = i; + out[idx].at_y = j; + } + } + } - for (uint8_t h = hist_y_min; h= 0; l--) + { + //iterate over all patterns + for (int k = 0; k < NUM_BLOCK_KLT*NUM_BLOCK_KLT; k++) + { + i = is[k] >> l; //reference pixel for the current level + j = js[k] >> l; + + uint16_t iwidth = frame_size >> l; + uint8_t *base1 = lvl_base1[l] + j * iwidth + i; + + float JTJ[4]; //the 2x2 Hessian + JTJ[0] = 0; + JTJ[1] = 0; + JTJ[2] = 0; + JTJ[3] = 0; + int c = 0; + + //compute jacobians and the hessian for the patch at the current location + uint8_t min_val = 255; + uint8_t max_val = 0; + for (int8_t jj = -HALF_PATCH_SIZE; jj <= HALF_PATCH_SIZE; jj++) + { + uint8_t *left = base1 + jj*iwidth; + for (int8_t ii = -HALF_PATCH_SIZE; ii <= 
HALF_PATCH_SIZE; ii++) { - hist_y_value += (float) (h*histy[h]); - hist_y_weight += (float) histy[h]; + uint8_t val = left[ii]; + if (val > max_val) max_val = val; + if (val < min_val) min_val = val; + const float jx = ((uint16_t)left[ii+1] - (uint16_t)left[ii-1]) * 0.5f; + const float jy = ((uint16_t)left[ii+iwidth] - (uint16_t)left[ii-iwidth]) * 0.5f; + Jx[c] = jx; + Jy[c] = jy; + JTJ[0] += jx*jx; + JTJ[1] += jx*jy; + JTJ[2] += jx*jy; + JTJ[3] += jy*jy; + c++; } - - histflowx = (hist_x_value/hist_x_weight - (winmax-winmin+1)) / 2.0f ; - histflowy = (hist_y_value/hist_y_weight - (winmax-winmin+1)) / 2.0f; - } - else + + //compute inverse of hessian + float det = (JTJ[0]*JTJ[3]-JTJ[1]*JTJ[2]); + float dyn_range = (float)(max_val - min_val) + 1; + float trace = (JTJ[0] + JTJ[3]); + float M_c = det - global_data.param[PARAM_ALGORITHM_CORNER_KAPPA] * trace * trace; + if (fabsf(det) > global_data.param[PARAM_KLT_DET_VALUE_MIN] * dyn_range && M_c > 0.0f) { + float detinv = 1.f / det; + float JTJinv[4]; + JTJinv[0] = detinv * JTJ[3]; + JTJinv[1] = detinv * -JTJ[1]; + JTJinv[2] = detinv * -JTJ[2]; + JTJinv[3] = detinv * JTJ[0]; - /* use average of accepted flow values */ - uint32_t meancount_x = 0; - uint32_t meancount_y = 0; + // us and vs store the sample position in level 0 pixel coordinates + float u = (us[k] / (1< global_data.param[PARAM_GYRO_COMPENSATION_THRESHOLD]) - { - /* calc pixel of gyro */ - float y_rate_pixel = y_rate * (get_time_between_images() / 1000000.0f) * focal_length_px; - float comp_x = histflowx + y_rate_pixel; - - /* clamp value to maximum search window size plus half pixel from subpixel search */ - if (comp_x < (-SEARCH_SIZE - 0.5f)) - *pixel_flow_x = (-SEARCH_SIZE - 0.5f); - else if (comp_x > (SEARCH_SIZE + 0.5f)) - *pixel_flow_x = (SEARCH_SIZE + 0.5f); - else - *pixel_flow_x = comp_x; - } - else + //Now do some Gauss-Newton iterations for flow + for (int iters = 0; iters < max_iters; iters++) { - *pixel_flow_x = histflowx; - } + float JTe_x = 0; //accumulators for Jac transposed times error + float JTe_y = 0; + + uint8_t *base2 = lvl_base2[l] + (uint16_t)v * iwidth + (uint16_t)u; + + //extract bilinearly filtered pixel values for the current location in image2 + float dX = u - floorf(u); + float dY = v - floorf(v); + float fMixTL = (1.f - dX) * (1.f - dY); + float fMixTR = (dX) * (1.f - dY); + float fMixBL = (1.f - dX) * (dY); + float fMixBR = (dX) * (dY); + + float chi_sq = 0.f; + c = 0; + for (int8_t jj = -HALF_PATCH_SIZE; jj <= HALF_PATCH_SIZE; jj++) + { + uint8_t *left1 = base1 + jj*iwidth; + uint8_t *left2 = base2 + jj*iwidth; + + for (int8_t ii = -HALF_PATCH_SIZE; ii <= HALF_PATCH_SIZE; ii++) + { + float fPixel = fMixTL * left2[ii] + fMixTR * left2[ii+1] + fMixBL * left2[ii+iwidth] + fMixBR * left2[ii+iwidth+1]; + float fDiff = fPixel - left1[ii]; + JTe_x += fDiff * Jx[c]; + JTe_y += fDiff * Jy[c]; + chi_sq += fDiff*fDiff; + c++; + } + } - if(fabsf(x_rate) > global_data.param[PARAM_GYRO_COMPENSATION_THRESHOLD]) - { - /* calc pixel of gyro */ - float x_rate_pixel = x_rate * (get_time_between_images() / 1000000.0f) * focal_length_px; - float comp_y = histflowy - x_rate_pixel; - - /* clamp value to maximum search window size plus/minus half pixel from subpixel search */ - if (comp_y < (-SEARCH_SIZE - 0.5f)) - *pixel_flow_y = (-SEARCH_SIZE - 0.5f); - else if (comp_y > (SEARCH_SIZE + 0.5f)) - *pixel_flow_y = (SEARCH_SIZE + 0.5f); + //only update if the error got smaller + if (iters == 0 || chi_sq_previous > chi_sq) + { + //compute update and shift current position 
accordingly + float updx = JTJinv[0]*JTe_x + JTJinv[1]*JTe_y; + float updy = JTJinv[2]*JTe_x + JTJinv[3]*JTe_y; + float new_u = u-updx; + float new_v = v-updy; + + //check if we drifted outside the image + if (((int16_t)new_u < HALF_PATCH_SIZE) || (int16_t)new_u > (iwidth-HALF_PATCH_SIZE-1) || ((int16_t)new_v < HALF_PATCH_SIZE) || (int16_t)new_v > (iwidth-HALF_PATCH_SIZE-1)) + { + result_good = false; + break; + } + else + { + u = new_u; + v = new_v; + } + } else - *pixel_flow_y = comp_y; + { + chi_sum += chi_sq_previous; + chicount++; + break; + } + chi_sq_previous = chi_sq; } - else + if (l > 0) { - *pixel_flow_y = histflowy; + // TODO: evaluate recording failure at each level to calculate a final quality value + us[k] = u * (1<value; + float v2 = ((const struct flow_res_dim_value *)elem2)->value; + if(v1 < v2) + return -1; + return v1 > v2; +} +uint8_t flow_extract_result(flow_raw_result *in, uint16_t result_count, float *px_flow_x, float *px_flow_y, + float accuracy_p, float accuracy_px) +{ + /* extract all valid results: */ + struct flow_res_dim_value xvalues[result_count]; + struct flow_res_dim_value yvalues[result_count]; + uint16_t valid_c = 0; + for (int i = 0; i < result_count; i++) { + if (in[i].quality > 0) { + xvalues[valid_c].value = in[i].x; + xvalues[valid_c].idx = i; + yvalues[valid_c].value = in[i].y; + yvalues[valid_c].idx = i; + valid_c++; } - else - { - *pixel_flow_x = 0.0f; - *pixel_flow_y = 0.0f; + } + if (valid_c < (result_count + 2) / 3 || valid_c < 3) { + *px_flow_x = 0; + *px_flow_y = 0; + return 0; + } + struct flow_res_dim_value *axes[2] = {xvalues, yvalues}; + float *output[2] = {px_flow_x, px_flow_y}; + float max_spread_val[2] = {}; + float spread_val[2] = {}; + uint16_t total_avg_c = 0; + for (int i = 0; i < 2; ++i) { + struct flow_res_dim_value *axis = axes[i]; + /* sort them */ + qsort(axis, valid_c, sizeof(struct flow_res_dim_value), flow_res_dim_value_compare); + spread_val[i] = axis[valid_c * 3 / 4].value - axis[valid_c / 4].value; + /* start with one element */ + uint16_t s = valid_c / 2; + uint16_t e = valid_c / 2 + 1; + uint16_t s_res; + uint16_t e_res; + float avg_sum = axis[s].value; + uint16_t avg_c = 1; + float avg; + while (1) { + s_res = s; + e_res = e; + /* calculate average and maximum spread to throw away outliers */ + avg = avg_sum / avg_c; + float max_spread = fabsf(avg) * accuracy_p; + max_spread = accuracy_px * accuracy_px / (accuracy_px + max_spread) + max_spread; + max_spread_val[i] = max_spread; + /* decide on which side to add new data-point (to remain centered in the sorted set) */ + if (s > valid_c - e && s > 0) { + s--; + avg_sum += axis[s].value; + } else if (e < valid_c) { + e++; + avg_sum += axis[e - 1].value; + } else { + break; + } + avg_c++; + /* check maximum spread */ + if (axis[e - 1].value - axis[s].value > max_spread) break; + /* its good. continue .. 
*/ + } + if (e_res - s_res > 1) { + /* we have a result */ + *output[i] = avg; + total_avg_c += e_res - s_res; + } else { + *px_flow_x = 0; + *px_flow_y = 0; return 0; } } - else - { - *pixel_flow_x = 0.0f; - *pixel_flow_y = 0.0f; - return 0; + if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_FLOW_OUTL])) { + static int ctr = 0; + if (ctr++ % 20 == 0) { + mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "SPREAD_X", get_boot_time_us(), spread_val[0], max_spread_val[0], *output[0]); + mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "SPREAD_Y", get_boot_time_us(), spread_val[1], max_spread_val[1], *output[1]); + } } - - /* calc quality */ - uint8_t qual = (uint8_t)(meancount * 255 / (NUM_BLOCKS*NUM_BLOCKS)); - - return qual; + total_avg_c = total_avg_c / 2; + return (total_avg_c * 255) / result_count; } + diff --git a/src/modules/flow/main.c b/src/modules/flow/main.c index 6104364..f77104b 100644 --- a/src/modules/flow/main.c +++ b/src/modules/flow/main.c @@ -5,6 +5,7 @@ * Dominik Honegger * Petri Tanskanen * Samuel Zihlmann + * Simon Laube * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -50,23 +51,22 @@ #include "mavlink_bridge_header.h" #include #include "settings.h" -#include "utils.h" -#include "led.h" +#include "result_accumulator.h" #include "flow.h" +#include "timer.h" #include "dcmi.h" #include "mt9v034.h" #include "gyro.h" -#include "i2c.h" +#include "fmu_comm.h" #include "usart.h" -#include "sonar.h" +#include "distance.h" #include "communication.h" -#include "debug.h" #include "usbd_cdc_core.h" #include "usbd_usr.h" #include "usbd_desc.h" #include "usbd_cdc_vcp.h" #include "main.h" -#include +#include "hrt.h" #include //#define CONFIG_USE_PROBES @@ -79,160 +79,116 @@ #endif - -/* prototypes */ -void delay(unsigned msec); -void buffer_reset(void); - __ALIGN_BEGIN USB_OTG_CORE_HANDLE USB_OTG_dev __ALIGN_END; -/* fast image buffers for calculations */ -uint8_t image_buffer_8bit_1[FULL_IMAGE_SIZE] __attribute__((section(".ccm"))); -uint8_t image_buffer_8bit_2[FULL_IMAGE_SIZE] __attribute__((section(".ccm"))); -uint8_t buffer_reset_needed; +#define FLOW_IMAGE_SIZE (64) -/* boot time in milliseconds ticks */ -volatile uint32_t boot_time_ms = 0; -/* boot time in 10 microseconds ticks */ -volatile uint32_t boot_time10_us = 0; +static void check_for_frame(void); +static float latest_ground_distance(void); /* timer constants */ -#define NTIMERS 9 -#define TIMER_CIN 0 -#define TIMER_LED 1 -#define TIMER_DELAY 2 -#define TIMER_SONAR 3 -#define TIMER_SYSTEM_STATE 4 -#define TIMER_RECEIVE 5 -#define TIMER_PARAMS 6 -#define TIMER_IMAGE 7 -#define TIMER_LPOS 8 -#define MS_TIMER_COUNT 100 /* steps in 10 microseconds ticks */ -#define LED_TIMER_COUNT 500 /* steps in milliseconds ticks */ -#define SONAR_TIMER_COUNT 100 /* steps in milliseconds ticks */ -#define SYSTEM_STATE_COUNT 1000/* steps in milliseconds ticks */ -#define PARAMS_COUNT 100 /* steps in milliseconds ticks */ +#define DISTANCE_POLL_MS 100 /* steps in milliseconds ticks */ +#define SYSTEM_STATE_MS 1000/* steps in milliseconds ticks */ +#define PARAMS_MS 100 /* steps in milliseconds ticks */ #define LPOS_TIMER_COUNT 100 /* steps in milliseconds ticks */ -static volatile unsigned timer[NTIMERS]; -static volatile unsigned timer_ms = MS_TIMER_COUNT; - -/* timer/system booleans */ -bool send_system_state_now = true; -bool receive_now = true; -bool send_params_now = true; -bool send_image_now = true; -bool send_lpos_now = true; - -/* local position 
estimate without orientation, useful for unit testing w/o FMU */ -static struct lpos_t { - float x; - float y; - float z; - float vx; - float vy; - float vz; -} lpos; - -/** - * @brief Increment boot_time_ms variable and decrement timer array. - * @param None - * @retval None - */ -void timer_update_ms(void) -{ - boot_time_ms++; - - /* each timer decrements every millisecond if > 0 */ - for (unsigned i = 0; i < NTIMERS; i++) - if (timer[i] > 0) - timer[i]--; - - - if (timer[TIMER_LED] == 0) - { - /* blink activitiy */ - LEDToggle(LED_ACT); - timer[TIMER_LED] = LED_TIMER_COUNT; - } - - if (timer[TIMER_SONAR] == 0) - { - sonar_trigger(); - timer[TIMER_SONAR] = SONAR_TIMER_COUNT; +static camera_ctx cam_ctx; +static camera_img_param img_stream_param; + +static void distance_update_fn(void) { + static bool state = 0; + if (state == 0) { + distance_trigger(); + state = 1; + } else { + distance_readback(); + state = 0; } +} - if (timer[TIMER_SYSTEM_STATE] == 0) +static void system_state_send_fn(void) { + /* every second */ + if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_STATE])) { - send_system_state_now = true; - timer[TIMER_SYSTEM_STATE] = SYSTEM_STATE_COUNT; + communication_system_state_send(); } +} - if (timer[TIMER_RECEIVE] == 0) - { - receive_now = true; - timer[TIMER_RECEIVE] = SYSTEM_STATE_COUNT; - } +static void system_receive_fn(void) { + /* test every 0.5s */ + communication_receive(); + communication_receive_usb(); +} - if (timer[TIMER_PARAMS] == 0) - { - send_params_now = true; - timer[TIMER_PARAMS] = PARAMS_COUNT; +const camera_image_buffer *previous_image = NULL; + +static void mavlink_send_image(const camera_image_buffer *image) { + uint32_t img_size = (uint32_t)image->param.p.size.x * (uint32_t)image->param.p.size.y; + mavlink_msg_data_transmission_handshake_send( + MAVLINK_COMM_2, + MAVLINK_DATA_STREAM_IMG_RAW8U, + img_size, + image->param.p.size.x, + image->param.p.size.y, + img_size / MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN + 1, + MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN, + 100); + uint16_t frame = 0; + for (frame = 0; frame < img_size / MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN + 1; frame++) { + mavlink_msg_encapsulated_data_send(MAVLINK_COMM_2, frame, &((uint8_t *)image->buffer)[frame * MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN]); } +} - if (timer[TIMER_IMAGE] == 0) - { - send_image_now = true; - timer[TIMER_IMAGE] = global_data.param[PARAM_VIDEO_RATE]; - } +static void send_video_fn(void) { + /* update the rate */ + timer_update(send_video_fn, global_data.param[PARAM_VIDEO_RATE]); - if (timer[TIMER_LPOS] == 0) + if (previous_image == NULL) return; + + /* transmit raw 8-bit image */ + if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_VIDEO])) { - send_lpos_now = true; - timer[TIMER_LPOS] = LPOS_TIMER_COUNT; + mavlink_send_image(previous_image); } } -/** - * @brief Increment boot_time10_us variable and decrement millisecond timer, triggered by timer interrupt - * @param None - * @retval None - */ -void timer_update(void) -{ - boot_time10_us++; - - /* decrements every 10 microseconds*/ - timer_ms--; +static void send_params_fn(void) { + communication_parameter_send(); +} - if (timer_ms == 0) - { - timer_update_ms(); - timer_ms = MS_TIMER_COUNT; +/*void switch_params_fn(void) { + switch (img_stream_param.binning) { + case 1: img_stream_param.binning = 4; break; + case 2: img_stream_param.binning = 1; break; + case 4: img_stream_param.binning = 2; break; } + camera_img_stream_schedule_param_change(&cam_ctx, &img_stream_param); +}*/ +void 
notify_changed_camera_parameters(void) { + camera_reconfigure_general(&cam_ctx); } +#define FLOW_IMAGE_SIZE (64) -uint32_t get_boot_time_ms(void) -{ - return boot_time_ms; -} +static uint8_t image_buffer_8bit_1[FLOW_IMAGE_SIZE * FLOW_IMAGE_SIZE] __attribute__((section(".ccm"))); +static uint8_t image_buffer_8bit_2[FLOW_IMAGE_SIZE * FLOW_IMAGE_SIZE] __attribute__((section(".ccm"))); +static uint8_t image_buffer_8bit_3[FLOW_IMAGE_SIZE * FLOW_IMAGE_SIZE] __attribute__((section(".ccm"))); +static uint8_t image_buffer_8bit_4[FLOW_IMAGE_SIZE * FLOW_IMAGE_SIZE] __attribute__((section(".ccm"))); +static uint8_t image_buffer_8bit_5[FLOW_IMAGE_SIZE * FLOW_IMAGE_SIZE] __attribute__((section(".ccm"))); -uint32_t get_boot_time_us(void) -{ - return boot_time10_us*10;// *10 to return microseconds -} +static flow_klt_image flow_klt_images[2] __attribute__((section(".ccm"))); -void delay(unsigned msec) -{ - timer[TIMER_DELAY] = msec; - while (timer[TIMER_DELAY] > 0) {}; -} +/* variables */ +uint32_t counter = 0; -void buffer_reset(void) { - buffer_reset_needed = 1; -} +result_accumulator_ctx mavlink_accumulator; +uint32_t fps_timing_start; +uint16_t fps_counter = 0; +uint16_t fps_skipped_counter = 0; + +uint32_t last_frame_index = 0; +uint32_t last_processed_frame_timestamp; /** * @brief Main function. @@ -245,32 +201,14 @@ int main(void) global_data_reset_param_defaults(); global_data_reset(); PROBE_INIT(); - /* init led */ - LEDInit(LED_ACT); - LEDInit(LED_COM); - LEDInit(LED_ERR); - LEDOff(LED_ACT); - LEDOff(LED_COM); - LEDOff(LED_ERR); - board_led_rgb(255,255,255, 1); - board_led_rgb( 0, 0,255, 0); - board_led_rgb( 0, 0, 0, 0); - board_led_rgb(255, 0, 0, 1); - board_led_rgb(255, 0, 0, 2); - board_led_rgb(255, 0, 0, 3); - board_led_rgb( 0,255, 0, 3); - board_led_rgb( 0, 0,255, 4); - + + board_led_initialize(); + /* enable FPU on Cortex-M4F core */ SCB_CPACR |= ((3UL << 10 * 2) | (3UL << 11 * 2)); /* set CP10 Full Access and set CP11 Full Access */ - /* init clock */ - if (SysTick_Config(SystemCoreClock / 100000))/*set timer to trigger interrupt every 10 microsecond */ - { - /* capture clock error */ - LEDOn(LED_ERR); - while (1); - } + /* init timers */ + timer_init(); /* init usb */ USBD_Init( &USB_OTG_dev, @@ -282,432 +220,298 @@ int main(void) /* init mavlink */ communication_init(); - /* enable image capturing */ - enable_image_capture(); - - /* gyro config */ - gyro_config(); - - /* init and clear fast image buffers */ - for (int i = 0; i < global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]; i++) + /* initialize camera: */ + img_stream_param.size.x = FLOW_IMAGE_SIZE; + img_stream_param.size.y = FLOW_IMAGE_SIZE; + img_stream_param.binning = 4; { - image_buffer_8bit_1[i] = 0; - image_buffer_8bit_2[i] = 0; + camera_image_buffer buffers[5] = { + BuildCameraImageBuffer(image_buffer_8bit_1), + BuildCameraImageBuffer(image_buffer_8bit_2), + BuildCameraImageBuffer(image_buffer_8bit_3), + BuildCameraImageBuffer(image_buffer_8bit_4), + BuildCameraImageBuffer(image_buffer_8bit_5) + }; + camera_init(&cam_ctx, mt9v034_get_sensor_interface(), dcmi_get_transport_interface(), + mt9v034_get_clks_per_row(64, 4) * 1, mt9v034_get_clks_per_row(64, 4) * 128, 2.0, + &img_stream_param, buffers, 5); } - uint8_t * current_image = image_buffer_8bit_1; - uint8_t * previous_image = image_buffer_8bit_2; + /* gyro config */ + gyro_config(); /* usart config*/ usart_init(); - - /* i2c config*/ - i2c_init(); - - /* sonar config*/ - float sonar_distance_filtered = 0.0f; // distance in meter - float 
sonar_distance_raw = 0.0f; // distance in meter - bool distance_valid = false; - sonar_config(); + fmu_comm_init(); + distance_init(); /* reset/start timers */ - timer[TIMER_SONAR] = SONAR_TIMER_COUNT; - timer[TIMER_SYSTEM_STATE] = SYSTEM_STATE_COUNT; - timer[TIMER_RECEIVE] = SYSTEM_STATE_COUNT / 2; - timer[TIMER_PARAMS] = PARAMS_COUNT; - timer[TIMER_IMAGE] = global_data.param[PARAM_VIDEO_RATE]; - - /* variables */ - uint32_t counter = 0; - uint8_t qual = 0; - - /* bottom flow variables */ - float pixel_flow_x = 0.0f; - float pixel_flow_y = 0.0f; - float pixel_flow_x_sum = 0.0f; - float pixel_flow_y_sum = 0.0f; - float velocity_x_sum = 0.0f; - float velocity_y_sum = 0.0f; - float velocity_x_lp = 0.0f; - float velocity_y_lp = 0.0f; - int valid_frame_count = 0; - int pixel_flow_count = 0; - - static float accumulated_flow_x = 0; - static float accumulated_flow_y = 0; - static float accumulated_gyro_x = 0; - static float accumulated_gyro_y = 0; - static float accumulated_gyro_z = 0; - static uint16_t accumulated_framecount = 0; - static uint16_t accumulated_quality = 0; - static uint32_t integration_timespan = 0; - static uint32_t lasttime = 0; - uint32_t time_since_last_sonar_update= 0; - - uavcan_start(); + timer_register(distance_update_fn, DISTANCE_POLL_MS/2); + timer_register(system_state_send_fn, SYSTEM_STATE_MS); + timer_register(system_receive_fn, SYSTEM_STATE_MS / 2); + timer_register(send_params_fn, PARAMS_MS); + timer_register(send_video_fn, global_data.param[PARAM_VIDEO_RATE]); + //timer_register(switch_params_fn, 2000); + + result_accumulator_init(&mavlink_accumulator); + fps_timing_start = last_processed_frame_timestamp = get_boot_time_us(); + /* main loop */ while (1) { - PROBE_1(false); - uavcan_run(); - PROBE_1(true); - /* reset flow buffers if needed */ - if(buffer_reset_needed) - { - buffer_reset_needed = 0; - for (int i = 0; i < global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]; i++) - { - image_buffer_8bit_1[i] = 0; - image_buffer_8bit_2[i] = 0; - } - delay(500); - continue; - } - - /* calibration routine */ - if(FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - { - while(FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - { - dcmi_restart_calibration_routine(); - - /* waiting for first quarter of image */ - while(get_frame_counter() < 2){} - dma_copy_image_buffers(¤t_image, &previous_image, FULL_IMAGE_SIZE, 1); - - /* waiting for second quarter of image */ - while(get_frame_counter() < 3){} - dma_copy_image_buffers(¤t_image, &previous_image, FULL_IMAGE_SIZE, 1); - - /* waiting for all image parts */ - while(get_frame_counter() < 4){} - - send_calibration_image(&previous_image, ¤t_image); - - if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_STATE])) - communication_system_state_send(); - - communication_receive_usb(); - debug_message_send_one(); - communication_parameter_send(); - - LEDToggle(LED_COM); - } - - dcmi_restart_calibration_routine(); - LEDOff(LED_COM); - } + /* check timers */ + timer_check(); + fmu_comm_run(); + check_for_frame(); + } +} - uint16_t image_size = global_data.param[PARAM_IMAGE_WIDTH] * global_data.param[PARAM_IMAGE_HEIGHT]; +static float latest_ground_distance(void) { + float distance_filtered, distance_raw; + bool distance_valid = distance_read(&distance_filtered, &distance_raw); + if (!distance_valid) { + return -1; + } else if(FLOAT_AS_BOOL(global_data.param[PARAM_SONAR_FILTERED])) { + return distance_filtered; + } else { + return distance_raw; + } +} +static void check_for_frame(void) { + /* get recent 
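The timer_register()/timer_check()/timer_update() calls above replace the old decrementing timer array with registered callbacks polled from the main loop; a minimal sketch of such a scheduler is shown below. The real implementation is the timer.c added by this patch; the version here is an assumption for illustration and takes the current time explicitly so it stays self-contained.

#include <stdint.h>
#include <stddef.h>

#define SKETCH_MAX_TIMERS 8

typedef void (*timer_fn)(void);

static struct { timer_fn fn; uint32_t period_ms; uint32_t due_ms; } sketch_timers[SKETCH_MAX_TIMERS];

/* arm (or re-arm, as send_video_fn() does to pick up a new rate) a periodic callback */
static void sketch_timer_register(timer_fn fn, uint32_t period_ms, uint32_t now_ms)
{
	for (int i = 0; i < SKETCH_MAX_TIMERS; i++) {
		if (sketch_timers[i].fn == fn || sketch_timers[i].fn == NULL) {
			sketch_timers[i].fn = fn;
			sketch_timers[i].period_ms = period_ms;
			sketch_timers[i].due_ms = now_ms + period_ms;
			return;
		}
	}
}

/* called from the main loop; runs every callback whose deadline has passed */
static void sketch_timer_check(uint32_t now_ms)
{
	for (int i = 0; i < SKETCH_MAX_TIMERS; i++) {
		if (sketch_timers[i].fn != NULL && (int32_t)(now_ms - sketch_timers[i].due_ms) >= 0) {
			sketch_timers[i].due_ms += sketch_timers[i].period_ms;
			sketch_timers[i].fn();
		}
	}
}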
images */ + camera_image_buffer *frames[2]; + if (camera_img_stream_get_buffers(&cam_ctx, frames, 2, true)) { /* new gyroscope data */ float x_rate_sensor, y_rate_sensor, z_rate_sensor; int16_t gyro_temp; gyro_read(&x_rate_sensor, &y_rate_sensor, &z_rate_sensor,&gyro_temp); - /* gyroscope coordinate transformation */ - float x_rate = y_rate_sensor; // change x and y rates + /* gyroscope coordinate transformation to flow sensor coordinates */ + float x_rate = y_rate_sensor; // change x and y rates float y_rate = - x_rate_sensor; - float z_rate = z_rate_sensor; // z is correct - - /* calculate focal_length in pixel */ - const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / (4.0f * 6.0f) * 1000.0f; //original focal lenght: 12mm pixelsize: 6um, binning 4 enabled - - /* get sonar data */ - distance_valid = sonar_read(&sonar_distance_filtered, &sonar_distance_raw); - - /* reset to zero for invalid distances */ - if (!distance_valid) { - sonar_distance_filtered = 0.0f; - sonar_distance_raw = 0.0f; - } - - /* compute optical flow */ - if (FLOAT_EQ_INT(global_data.param[PARAM_SENSOR_POSITION], BOTTOM)) + float z_rate = z_rate_sensor; // z is correct + + bool use_klt = !FLOAT_EQ_INT(global_data.param[PARAM_ALGORITHM_CHOICE], 0); + + uint32_t start_computations = get_boot_time_us(); + + int frame_delta = ((int32_t)frames[0]->frame_number - (int32_t)last_frame_index); + last_frame_index = frames[0]->frame_number; + fps_skipped_counter += frame_delta - 1; + + flow_klt_image *klt_images[2] = {NULL, NULL}; { - /* copy recent image to faster ram */ - dma_copy_image_buffers(¤t_image, &previous_image, image_size, 1); - - /* compute optical flow */ - qual = compute_flow(previous_image, current_image, x_rate, y_rate, z_rate, &pixel_flow_x, &pixel_flow_y); - - /* - * real point P (X,Y,Z), image plane projection p (x,y,z), focal-length f, distance-to-scene Z - * x / f = X / Z - * y / f = Y / Z - */ - float flow_compx = pixel_flow_x / focal_length_px / (get_time_between_images() / 1000000.0f); - float flow_compy = pixel_flow_y / focal_length_px / (get_time_between_images() / 1000000.0f); - - /* integrate velocity and output values only if distance is valid */ - if (distance_valid) - { - /* calc velocity (negative of flow values scaled with distance) */ - float new_velocity_x = - flow_compx * sonar_distance_filtered; - float new_velocity_y = - flow_compy * sonar_distance_filtered; - - time_since_last_sonar_update = (get_boot_time_us()- get_sonar_measure_time()); - - if (qual > 0) - { - velocity_x_sum += new_velocity_x; - velocity_y_sum += new_velocity_y; - valid_frame_count++; - - uint32_t deltatime = (get_boot_time_us() - lasttime); - integration_timespan += deltatime; - accumulated_flow_x += pixel_flow_y / focal_length_px * 1.0f; //rad axis swapped to align x flow around y axis - accumulated_flow_y += pixel_flow_x / focal_length_px * -1.0f;//rad - accumulated_gyro_x += x_rate * deltatime / 1000000.0f; //rad - accumulated_gyro_y += y_rate * deltatime / 1000000.0f; //rad - accumulated_gyro_z += z_rate * deltatime / 1000000.0f; //rad - accumulated_framecount++; - accumulated_quality += qual; - - /* lowpass velocity output */ - velocity_x_lp = global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW] * new_velocity_x + - (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp; - velocity_y_lp = global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW] * new_velocity_y + - (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp; - } - else - { - /* taking flow as zero */ - 
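The frame_number/meta comparisons above key each buffer to the frame it was last preprocessed for, so a buffer that comes around again reuses its KLT pyramid instead of being preprocessed twice. Stripped down (with an assumed payload; only the meta tag follows the patch), the lookup amounts to:

#include <stdint.h>
#include <stddef.h>

typedef struct {
	uint32_t meta;   /* frame_number this preprocessed image belongs to */
	/* ... preprocessed pyramid data ... */
} klt_cache_entry;

/* return the cached entry for frame_number, or NULL so the caller grabs a
 * free slot, runs klt_preprocess_image() and tags it */
static klt_cache_entry *klt_cache_find(klt_cache_entry cache[2], uint32_t frame_number)
{
	for (int j = 0; j < 2; ++j) {
		if (cache[j].meta == frame_number) {
			return &cache[j];
		}
	}
	return NULL;
}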
velocity_x_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp; - velocity_y_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp; + /* make sure that the new images get the correct treatment */ + /* this algorithm will still work if both images are new */ + int i; + bool used_klt_image[2] = {false, false}; + for (i = 0; i < 2; ++i) { + if (frames[i]->frame_number != frames[i]->meta) { + /* update meta data to mark it as an up-to date image: */ + frames[i]->meta = frames[i]->frame_number; + } else { + // the image has the preprocessing already applied. + if (use_klt) { + int j; + /* find the klt image that matches: */ + for (j = 0; j < 2; ++j) { + if (flow_klt_images[j].meta == frames[i]->frame_number) { + used_klt_image[j] = true; + klt_images[i] = &flow_klt_images[j]; + } + } + } } } - else - { - /* taking flow as zero */ - velocity_x_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_x_lp; - velocity_y_lp = (1.0f - global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW]) * velocity_y_lp; - } - //update lasttime - lasttime = get_boot_time_us(); - - pixel_flow_x_sum += pixel_flow_x; - pixel_flow_y_sum += pixel_flow_y; - pixel_flow_count++; - - } - - counter++; - - if (FLOAT_EQ_INT(global_data.param[PARAM_SENSOR_POSITION], BOTTOM)) - { - /* send bottom flow if activated */ - - float ground_distance = 0.0f; - - - if(FLOAT_AS_BOOL(global_data.param[PARAM_SONAR_FILTERED])) - { - ground_distance = sonar_distance_filtered; - } - else - { - ground_distance = sonar_distance_raw; - } - - uavcan_define_export(i2c_data, legacy_12c_data_t, ccm); - uavcan_define_export(range_data, range_data_t, ccm); - uavcan_timestamp_export(i2c_data); - uavcan_assign(range_data.time_stamp_utc, i2c_data.time_stamp_utc); - //update I2C transmitbuffer - if(valid_frame_count>0) - { - update_TX_buffer(pixel_flow_x, pixel_flow_y, velocity_x_sum/valid_frame_count, velocity_y_sum/valid_frame_count, qual, - ground_distance, x_rate, y_rate, z_rate, gyro_temp, uavcan_use_export(i2c_data)); - } - else - { - update_TX_buffer(pixel_flow_x, pixel_flow_y, 0.0f, 0.0f, qual, - ground_distance, x_rate, y_rate, z_rate, gyro_temp, uavcan_use_export(i2c_data)); - } - PROBE_2(false); - uavcan_publish(range, 40, range_data); - PROBE_2(true); - - PROBE_3(false); - uavcan_publish(flow, 40, i2c_data); - PROBE_3(true); - - //serial mavlink + usb mavlink output throttled - if (counter % (uint32_t)global_data.param[PARAM_BOTTOM_FLOW_SERIAL_THROTTLE_FACTOR] == 0)//throttling factor - { - - float flow_comp_m_x = 0.0f; - float flow_comp_m_y = 0.0f; - - if(FLOAT_AS_BOOL(global_data.param[PARAM_BOTTOM_FLOW_LP_FILTERED])) - { - flow_comp_m_x = velocity_x_lp; - flow_comp_m_y = velocity_y_lp; - } - else - { - if(valid_frame_count>0) - { - flow_comp_m_x = velocity_x_sum/valid_frame_count; - flow_comp_m_y = velocity_y_sum/valid_frame_count; - } - else - { - flow_comp_m_x = 0.0f; - flow_comp_m_y = 0.0f; + if (use_klt) { + /* only for KLT: */ + /* preprocess the images if they are not yet preprocessed */ + for (i = 0; i < 2; ++i) { + if (klt_images[i] == NULL) { + // need processing. 
find unused KLT image: + int j; + for (j = 0; j < 2; ++j) { + if (!used_klt_image[j]) { + used_klt_image[j] = true; + klt_images[i] = &flow_klt_images[j]; + break; + } + } + klt_preprocess_image(frames[i]->buffer, klt_images[i]); + klt_images[i]->meta = frames[i]->frame_number; } } + } + } + + float frame_dt = calculate_time_delta_us(frames[0]->timestamp, frames[1]->timestamp) * 0.000001f; + float dropped_dt = calculate_time_delta_us(frames[1]->timestamp, last_processed_frame_timestamp) * 0.000001f; + last_processed_frame_timestamp = frames[0]->timestamp; - - // send flow - mavlink_msg_optical_flow_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], - pixel_flow_x_sum * 10.0f, pixel_flow_y_sum * 10.0f, - flow_comp_m_x, flow_comp_m_y, qual, ground_distance); - - mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], - integration_timespan, accumulated_flow_x, accumulated_flow_y, - accumulated_gyro_x, accumulated_gyro_y, accumulated_gyro_z, - gyro_temp, accumulated_quality/accumulated_framecount, - time_since_last_sonar_update,ground_distance); - - /* send approximate local position estimate without heading */ - if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_LPOS])) - { - /* rough local position estimate for unit testing */ - lpos.x += ground_distance*accumulated_flow_x; - lpos.y += ground_distance*accumulated_flow_y; - lpos.z = -ground_distance; - /* velocity not directly measured and not important for testing */ - lpos.vx = 0; - lpos.vy = 0; - lpos.vz = 0; - - } else { - /* toggling param allows user reset */ - lpos.x = 0; - lpos.y = 0; - lpos.z = 0; - lpos.vx = 0; - lpos.vy = 0; - lpos.vz = 0; - } - - if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_FLOW])) - { - mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], - pixel_flow_x_sum * 10.0f, pixel_flow_y_sum * 10.0f, - flow_comp_m_x, flow_comp_m_y, qual, ground_distance); - - - mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], - integration_timespan, accumulated_flow_x, accumulated_flow_y, - accumulated_gyro_x, accumulated_gyro_y, accumulated_gyro_z, - gyro_temp, accumulated_quality/accumulated_framecount, - time_since_last_sonar_update,ground_distance); - } - - - if(FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_GYRO])) - { - mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "GYRO", get_boot_time_us(), x_rate, y_rate, z_rate); - } - - integration_timespan = 0; - accumulated_flow_x = 0; - accumulated_flow_y = 0; - accumulated_framecount = 0; - accumulated_quality = 0; - accumulated_gyro_x = 0; - accumulated_gyro_y = 0; - accumulated_gyro_z = 0; - - velocity_x_sum = 0.0f; - velocity_y_sum = 0.0f; - pixel_flow_x_sum = 0.0f; - pixel_flow_y_sum = 0.0f; - valid_frame_count = 0; - pixel_flow_count = 0; + /* calculate focal_length in pixel */ + const float focal_length_px = (global_data.param[PARAM_FOCAL_LENGTH_MM]) / + ((float)frames[0]->param.p.binning * 0.006f); // pixel-size: 6um + + /* extract the raw flow from the images: */ + flow_raw_result flow_rslt[32]; + uint16_t flow_rslt_count = 0; + /* make sure both images are taken with same binning mode: */ + if (frames[0]->param.p.binning == frames[1]->param.p.binning) { + /* compute gyro rate in pixels and change to image coordinates */ + float x_rate_px = - y_rate * (focal_length_px * frame_dt); + float y_rate_px = x_rate * (focal_length_px * frame_dt); + float z_rate_fr = - z_rate * frame_dt; + + /* compute 
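A numeric example for the compensation terms above, using the 12 mm lens and 6 um pixel size quoted in the removed code (the 4 ms frame time is only illustrative): with binning 4, focal_length_px = 12 / (4 * 0.006) = 500 px, so a 1 rad/s rotation over one 4 ms frame predicts 500 * 1 * 0.004 = 2 px of apparent motion, which is the magnitude handed to the matcher as x_rate_px / y_rate_px.

/* same arithmetic as a helper (sketch; parameter values are examples) */
static float gyro_comp_px(float rate_rad_s, float focal_mm, int binning, float dt_s)
{
	const float focal_px = focal_mm / ((float)binning * 0.006f);   /* 6 um pixels */
	return rate_rad_s * focal_px * dt_s;                           /* 1.0 * 500 * 0.004 = 2.0 px */
}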
optical flow in pixels */ + if (!use_klt) { + flow_rslt_count = compute_flow(frames[1]->buffer, frames[0]->buffer, x_rate_px, y_rate_px, z_rate_fr, + flow_rslt, sizeof(flow_rslt) / sizeof(flow_rslt[0])); + } else { + flow_rslt_count = compute_klt(klt_images[1], klt_images[0], x_rate_px, y_rate_px, z_rate_fr, + flow_rslt, sizeof(flow_rslt) / sizeof(flow_rslt[0])); } + } else { + /* no result for this frame. */ + flow_rslt_count = 0; + } + /* determine velocity capability: */ + float flow_mv_cap; + if (!use_klt) { + flow_mv_cap = get_flow_capability(); + } else { + flow_mv_cap = get_flow_klt_capability(); } - /* forward flow from other sensors */ - if (counter % 2) + /* calculate flow value from the raw results */ + float pixel_flow_x; + float pixel_flow_y; + float outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_RATIO]; + float min_outlier_threshold = 0; + if(FLOAT_EQ_INT(global_data.param[PARAM_ALGORITHM_CHOICE], 0)) { - communication_receive_forward(); + min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_BLOCK]; + }else + { + min_outlier_threshold = global_data.param[PARAM_ALGORITHM_OUTLIER_THR_KLT]; } - - /* send system state, receive commands */ - if (send_system_state_now) + uint8_t qual = flow_extract_result(flow_rslt, flow_rslt_count, &pixel_flow_x, &pixel_flow_y, + outlier_threshold, min_outlier_threshold); + + /* create flow image if needed (previous_image is not needed anymore) + * -> can be used for debugging purpose + */ + previous_image = frames[1]; + if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_VIDEO])) { - /* every second */ - if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_STATE])) - { - communication_system_state_send(); + uint16_t frame_size = global_data.param[PARAM_IMAGE_WIDTH]; + uint8_t *prev_img = previous_image->buffer; + for (int i = 0; i < flow_rslt_count; i++) { + if (flow_rslt[i].quality > 0) { + prev_img[flow_rslt[i].at_y * frame_size + flow_rslt[i].at_x] = 255; + int ofs = (int)(flow_rslt[i].at_y + flow_rslt[i].y * 2 + 0.5f) * frame_size + (int)(flow_rslt[i].at_x + flow_rslt[i].x * 2 + 0.5f); + if (ofs >= 0 && ofs < frame_size * frame_size) { + prev_img[ofs] = 200; + } + } } - send_system_state_now = false; } - /* receive commands */ - if (receive_now) - { - /* test every second */ - communication_receive(); - communication_receive_usb(); - receive_now = false; - } + /* return the image buffers */ + camera_img_stream_return_buffers(&cam_ctx, frames, 2); + + result_accumulator_frame frame_data = { + .dt = frame_dt, + .dropped_dt = dropped_dt, + .x_rate = x_rate, + .y_rate = y_rate, + .z_rate = z_rate, + .gyro_temp = gyro_temp, + .qual = qual, + .pixel_flow_x = pixel_flow_x, + .pixel_flow_y = pixel_flow_y, + .rad_per_pixel = 1.0f / focal_length_px, + .ground_distance = latest_ground_distance(), + .distance_age = get_time_delta_us(get_distance_measure_time()), + .max_px_frame = flow_mv_cap, + }; + + /* update I2C transmit buffer */ + fmu_comm_update(&frame_data); + result_accumulator_feed(&mavlink_accumulator, &frame_data); + + uint32_t computation_time_us = get_time_delta_us(start_computations); - /* sending debug msgs and requested parameters */ - if (send_params_now) - { - debug_message_send_one(); - communication_parameter_send(); - send_params_now = false; - } + counter++; + fps_counter++; - /* send local position estimate, for testing only, doesn't account for heading */ - if (send_lpos_now) + /* serial mavlink + usb mavlink output throttled */ + if (counter % 
(uint32_t)global_data.param[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] == 0)//throttling factor { - if (FLOAT_AS_BOOL(global_data.param[PARAM_SYSTEM_SEND_LPOS])) + float fps = 0; + float fps_skip = 0; + if (fps_counter + fps_skipped_counter > 100) { + uint32_t dt = get_time_delta_us(fps_timing_start); + fps_timing_start += dt; + fps = (float)fps_counter / ((float)dt * 1e-6f); + fps_skip = (float)fps_skipped_counter / ((float)dt * 1e-6f); + fps_counter = 0; + fps_skipped_counter = 0; + + mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "TIMING", get_boot_time_us(), computation_time_us, fps, fps_skip); + } + mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "EXPOSURE", get_boot_time_us(), + frames[0]->param.exposure, frames[0]->param.analog_gain, cam_ctx.last_brightness); + + /* calculate the output values */ + result_accumulator_output_flow output_flow; + result_accumulator_output_flow_rad output_flow_rad; + int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO]; + result_accumulator_calculate_output_flow(&mavlink_accumulator, min_valid_ratio, &output_flow); + result_accumulator_calculate_output_flow_rad(&mavlink_accumulator, min_valid_ratio, &output_flow_rad); + + board_led_status_update(output_flow.quality / 255.0); + + // send flow + mavlink_msg_optical_flow_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], + output_flow.flow_x, output_flow.flow_y, + output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, + output_flow.quality, output_flow.ground_distance); + + mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_0, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], + output_flow_rad.integration_time, + output_flow_rad.integrated_x, output_flow_rad.integrated_y, + output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro, + output_flow_rad.temperature, output_flow_rad.quality, + output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance); + + if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_FLOW]) && (output_flow.quality > 0 || FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_QUAL_0]))) { - mavlink_msg_local_position_ned_send(MAVLINK_COMM_2, timer_ms, lpos.x, lpos.y, lpos.z, lpos.vx, lpos.vy, lpos.vz); + mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], + output_flow.flow_x, output_flow.flow_y, + output_flow.flow_comp_m_x, output_flow.flow_comp_m_y, + output_flow.quality, output_flow.ground_distance); + + mavlink_msg_optical_flow_rad_send(MAVLINK_COMM_2, get_boot_time_us(), global_data.param[PARAM_SENSOR_ID], + output_flow_rad.integration_time, + output_flow_rad.integrated_x, output_flow_rad.integrated_y, + output_flow_rad.integrated_xgyro, output_flow_rad.integrated_ygyro, output_flow_rad.integrated_zgyro, + output_flow_rad.temperature, output_flow_rad.quality, + output_flow_rad.time_delta_distance_us,output_flow_rad.ground_distance); } - send_lpos_now = false; - } - /* transmit raw 8-bit image */ - if (FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_VIDEO])&& send_image_now) - { - /* get size of image to send */ - uint16_t image_size_send; - uint16_t image_width_send; - uint16_t image_height_send; - - image_size_send = image_size; - image_width_send = global_data.param[PARAM_IMAGE_WIDTH]; - image_height_send = global_data.param[PARAM_IMAGE_HEIGHT]; - - mavlink_msg_data_transmission_handshake_send( - MAVLINK_COMM_2, - MAVLINK_DATA_STREAM_IMG_RAW8U, - image_size_send, - image_width_send, - image_height_send, - image_size_send / 
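The per-frame integration that used to be inlined in the main loop now happens inside result_accumulator_feed(); reconstructed from the removed code, the bookkeeping amounts to roughly the sketch below. Struct and field layout here are illustrative, not the module's real internals, and the axis swap/negation the old code applied when converting pixel flow to the OPTICAL_FLOW_RAD convention is omitted.

#include <stdint.h>
#include "result_accumulator.h"   /* result_accumulator_frame (header name assumed) */

typedef struct {
	float integrated_x, integrated_y;                            /* rad */
	float integrated_xgyro, integrated_ygyro, integrated_zgyro;  /* rad */
	uint32_t integration_time_us;
	uint32_t quality_sum;
	uint16_t frame_count;
} accum_sketch;

static void accum_feed_sketch(accum_sketch *a, const result_accumulator_frame *f)
{
	if (f->qual > 0) {
		a->integrated_x     += f->pixel_flow_x * f->rad_per_pixel;
		a->integrated_y     += f->pixel_flow_y * f->rad_per_pixel;
		a->integrated_xgyro += f->x_rate * f->dt;
		a->integrated_ygyro += f->y_rate * f->dt;
		a->integrated_zgyro += f->z_rate * f->dt;
		a->quality_sum      += f->qual;
		a->frame_count++;
	}
	a->integration_time_us += (uint32_t)(f->dt * 1e6f);
}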
MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN + 1, - MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN, - 100); - LEDToggle(LED_COM); - uint16_t frame = 0; - for (frame = 0; frame < image_size_send / MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN + 1; frame++) + if(FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_GYRO])) { - mavlink_msg_encapsulated_data_send(MAVLINK_COMM_2, frame, &((uint8_t *) current_image)[frame * MAVLINK_MSG_ENCAPSULATED_DATA_FIELD_DATA_LEN]); + mavlink_msg_debug_vect_send(MAVLINK_COMM_2, "GYRO", get_boot_time_us(), x_rate, y_rate, z_rate); } - send_image_now = false; + result_accumulator_reset(&mavlink_accumulator); } - else if (!FLOAT_AS_BOOL(global_data.param[PARAM_USB_SEND_VIDEO])) + + /* forward flow from other sensors */ + if (counter % 2) { - LEDOff(LED_COM); + communication_receive_forward(); } } } diff --git a/src/modules/flow/module.mk b/src/modules/flow/module.mk index aac44ff..21ec0a4 100644 --- a/src/modules/flow/module.mk +++ b/src/modules/flow/module.mk @@ -22,8 +22,6 @@ SRCS = $(ST_LIB)startup_stm32f4xx.s \ SRCS += main.c \ - utils.c \ - led.c \ settings.c \ communication.c \ flow.c \ @@ -31,15 +29,16 @@ SRCS += main.c \ mt9v034.c \ gyro.c \ usart.c \ - sonar.c \ - debug.c \ usb_bsp.c \ usbd_cdc_vcp.c \ usbd_desc.c \ usbd_usr.c \ - i2c.c \ reset.c \ - sonar_mode_filter.c + distance_mode_filter.c \ + camera.c \ + result_accumulator.c \ + timer.c + SRCS += $(ST_LIB)STM32F4xx_StdPeriph_Driver/src/misc.c \ $(ST_LIB)STM32F4xx_StdPeriph_Driver/src/stm32f4xx_rcc.c \ diff --git a/src/modules/flow/mt9v034.c b/src/modules/flow/mt9v034.c index e9bc681..7e52c69 100644 --- a/src/modules/flow/mt9v034.c +++ b/src/modules/flow/mt9v034.c @@ -5,6 +5,7 @@ * Dominik Honegger * Petri Tanskanen * Samuel Zihlmann + * Simon Laube * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -40,369 +41,790 @@ #include "stm32f4xx_dma.h" #include "stm32f4xx_rcc.h" #include "stm32f4xx_i2c.h" +#include "stm32f4xx_gpio.h" #include "mt9v034.h" +#include + +#define DEBUG_TEST_WORSTCASE_LATENCY (false) + +struct _mt9v034_sensor_ctx; +typedef struct _mt9v034_sensor_ctx mt9v034_sensor_ctx; + +bool mt9v034_init(void *usr, const camera_img_param_ex *img_param); +bool mt9v034_prepare_update_param(void *usr, const camera_img_param_ex *img_param); +void mt9v034_reconfigure_general(void *usr); +void mt9v034_restore_previous_param(void *usr); +void mt9v034_notify_readout_start(void *usr); +void mt9v034_notify_readout_end(void *usr); +bool mt9v034_update_exposure_param(void *usr, uint32_t exposure, float gain); +void mt9v034_get_current_param(void *usr, camera_img_param_ex *img_param, bool *img_data_valid); + +static bool mt9v034_init_hw(mt9v034_sensor_ctx *ctx); +static void mt9v034_configure_general(mt9v034_sensor_ctx *ctx, bool initial); +static bool mt9v034_configure_context(mt9v034_sensor_ctx *ctx, int context_idx, const camera_img_param_ex *img_param, bool full_refresh); + /** - * @brief Configures the mt9v034 camera with two context (binning 4 and binning 2). + * @brief Reads from a specific Camera register + * @param Addr: mt9v034 register address. + * @retval data read from the specific register or 0xFFFF if timeout condition + * occurred. */ -void mt9v034_context_configuration(void) -{ - /* Chip Control - * - * bits | 15 | ... | 9 | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 | - * ------------------------------------------------------------------- - * current values | 0 | ... 
| 0 | 1 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | - * - * (0:2) Scan Mode (Progressive scan) - * (3:4) Sensor Operation Mode (Master mode) - * (5) Stereoscopy Mode (Disable) - * (6) Stereoscopic Master/Slave mode (not used) - * (7) Parallel Output Enable (Enable) - * (8) Simultaneous/Sequential Mode (Simultaneous mode) - * (9) Reserved - * - * (15)Context A (0) / Context B (1) - * - */ +static uint16_t mt9v034_ReadReg(uint16_t Addr); +/** + * @brief Writes a byte at a specific Camera register + * @param Addr: mt9v034 register address. + * @param Data: Data to be written to the specific register + * @retval true when operation is successful. + */ +static bool mt9v034_WriteReg(uint16_t Addr, uint16_t Data); + +struct _mt9v034_sensor_ctx { + /* sequencing */ + + volatile bool seq_i2c_in_use; + + /* context switching */ + + volatile int cur_context; ///< The current camera sensor context that has been activated. + volatile bool do_switch_context;///< When true the mt9v034_notify_readout_start function will switch the camera sensor context. + volatile int desired_context; ///< The desired context when do_switch_context is true. + volatile int previous_context; ///< The previous context index. + + camera_img_param_ex context_p[2]; ///< The parameters of the camera sensor register sets. (for context A and B) + + /* mirrored registers of the sensor */ + + uint16_t chip_control_reg; ///< The current chip control register value. + uint16_t pixel_frame_line_ctrl_reg; ///< The current pixel frame, line control register value. + uint16_t ver_blnk_ctx_a_reg; ///< Vertical blanking context A register value. + uint16_t ver_blnk_ctx_b_reg; ///< Vertical blanking context A register value. + + /* current frame parameter simulation model */ + + camera_img_param_ex cur_param; ///< The parameters of the frame that is beeing read out. + bool cur_param_data_valid; /**< Flag telling wether the image data is going to be valid. + * The sensor outputs garbage on the first frame after changing resolution. + * TODO: verify that this is always the case and why. */ + camera_img_param_ex exp_param; ///< Because exposure parameters take one more frame time to propagate to the output this holds the exposure settings. 
+}; + +static mt9v034_sensor_ctx mt9v034_ctx; + +const camera_sensor_interface mt9v034_sensor_interface = { + .usr = &mt9v034_ctx, + .init = mt9v034_init, + .prepare_update_param = mt9v034_prepare_update_param, + .reconfigure_general = mt9v034_reconfigure_general, + .restore_previous_param = mt9v034_restore_previous_param, + .notify_readout_start = mt9v034_notify_readout_start, + .notify_readout_end = mt9v034_notify_readout_end, + .update_exposure_param= mt9v034_update_exposure_param, + .get_current_param = mt9v034_get_current_param +}; + +const camera_sensor_interface *mt9v034_get_sensor_interface() { + return &mt9v034_sensor_interface; +} - uint16_t new_control; - - if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - new_control = 0x8188; // Context B - else - new_control = 0x0188; // Context A - - /* image dimentions */ - uint16_t new_width_context_a = global_data.param[PARAM_IMAGE_WIDTH] * 4; // windowing off, row + col bin reduce size - uint16_t new_height_context_a = global_data.param[PARAM_IMAGE_HEIGHT] * 4; - uint16_t new_width_context_b = FULL_IMAGE_ROW_SIZE * 4; // windowing off, row + col bin reduce size - uint16_t new_height_context_b = FULL_IMAGE_COLUMN_SIZE * 4; - - /* blanking settings */ - uint16_t new_hor_blanking_context_a = 350 + MINIMUM_HORIZONTAL_BLANKING;// 350 is minimum value without distortions - uint16_t new_ver_blanking_context_a = 10; // this value is the first without image errors (dark lines) - uint16_t new_hor_blanking_context_b = MAX_IMAGE_WIDTH - new_width_context_b + MINIMUM_HORIZONTAL_BLANKING; - uint16_t new_ver_blanking_context_b = 10; - - - /* Read Mode - * - * bits | ... | 10 | 9 | 8 | 7 | 6 | 5 | 4 | 3 | 2 | 1 | 0 | - * ------------------------------------------------------------------- - * current values | ... | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | - * - * (1:0) Row Bin - * (3:2) Column Bin - * (9:8) Reserved - * +static uint16_t mt9v034_hor_blanking(uint16_t raw_width, int binning) { + /* + * Horizontal Blanking + * Number of blank columns in a row. + * Minimum horizontal blanking is 61 for normal mode, + * 71 for column bin 2 mode, and 91 for column bin 4 mode. + * + * The MT9V034 uses column parallel analog-digital converters, thus short row timing is not possible. + * The minimum total row time is 690 columns (horizontal width + horizontal blanking). The minimum + * horizontal blanking is 61( / 71 / 91). When the window width is set below 627, horizontal blanking + * must be increased. + * + * In old code for bin 4 and 64 pixels it was 350 + 91. 
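Taken together, the handlers registered in mt9v034_sensor_interface form the sensor-side half of a driver contract with camera.c; the call order sketched below is inferred from the handler names and comments in this file, not from camera.c itself.

/* Assumed call order (sketch):
 *   iface->init(usr, &initial_param);                 once at boot
 *   per frame:
 *     iface->notify_readout_start(usr);               latch parameters, apply a
 *                                                     queued context switch
 *     iface->get_current_param(usr, &param, &valid);  what this frame really is
 *     iface->notify_readout_end(usr);                 pixel-clock workaround
 *   asynchronously:
 *     iface->update_exposure_param(usr, exp, gain);   software AE / AGC
 *     iface->prepare_update_param(usr, &new_param);   schedule size/binning change
 *     iface->restore_previous_param(usr);             abort a scheduled change
 */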
*/ - uint16_t new_readmode_context_a = 0x30A ; // row + col bin 4 enable, (9:8) default - uint16_t new_readmode_context_b = 0x305 ; // row bin 2 col bin 4 enable, (9:8) default + uint16_t hor_blanking = 61; + /* init to minimum: */ + switch (binning) { + case 1: hor_blanking = 61; break; + case 2: hor_blanking = 71; break; + case 4: hor_blanking = 91; break; + } + /* increase if window is smaller: */ + if (raw_width < 800 - hor_blanking) { + hor_blanking = 800 - raw_width; + } + return hor_blanking; +} +uint32_t mt9v034_get_clks_per_row(uint16_t width, int binning) { + width = width * binning; + uint16_t hor_blanking = mt9v034_hor_blanking(width, binning); + return width + hor_blanking; +} + +bool mt9v034_init(void *usr, const camera_img_param_ex *img_param) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* init context: */ + memset(ctx, 0, sizeof(mt9v034_sensor_ctx)); + ctx->cur_context = 0; + ctx->desired_context = 0; + ctx->previous_context = 0; + ctx->do_switch_context = false; + ctx->seq_i2c_in_use = false; + /* init hardware: */ + if (!mt9v034_init_hw(ctx)) return false; + /* configure sensor: */ + ctx->seq_i2c_in_use = true; + mt9v034_configure_context(ctx, 0, img_param, true); + mt9v034_configure_context(ctx, 1, img_param, true); + mt9v034_configure_general(ctx, true); + /* initialize the cur and exp param: */ + ctx->cur_param = *img_param; + ctx->exp_param = *img_param; + ctx->cur_param_data_valid = false; + /* do a forced context update next. */ + ctx->do_switch_context = true; + ctx->seq_i2c_in_use = false; + return true; +} + +void mt9v034_reconfigure_general(void *usr) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* signal that we are going to use i2c: */ + ctx->seq_i2c_in_use = true; + /* reconfigure general settings: */ + mt9v034_configure_general(ctx, false); + /* i2c usage done: */ + ctx->seq_i2c_in_use = false; +} + +bool mt9v034_prepare_update_param(void *usr, const camera_img_param_ex *img_param) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* deassert pending context switch command: */ + ctx->seq_i2c_in_use = true; + /* determine which context to use to do the parameter update: */ + int cur_ctx_idx = ctx->cur_context; + int new_ctx_idx = !cur_ctx_idx; + if (!mt9v034_configure_context(ctx, new_ctx_idx, img_param, false)) { + // invalid parameters. + ctx->seq_i2c_in_use = false; + return false; + } + /* setup context switching */ + ctx->previous_context = ctx->cur_context; + ctx->desired_context = new_ctx_idx; + ctx->do_switch_context = true; + ctx->seq_i2c_in_use = false; + return true; +} + +static bool mt9v034_configure_exposure_param(mt9v034_sensor_ctx *ctx, int context_idx, uint32_t exposure, float gain, bool full_refresh) { + camera_img_param_ex *ctx_param = &ctx->context_p[context_idx]; + + uint16_t row_time = mt9v034_get_clks_per_row(ctx_param->p.size.x, ctx_param->p.binning); + /* - * Settings for both context: - * - * Exposure time should not affect frame time - * so we set max on 64 (lines) = 0x40 + * Coarse Shutter Width Total + * Total integration time in number of rows. This value is used only when AEC is disabled. 
+ * Default: 0x01E0 */ - uint16_t min_exposure = 0x0001; // default - uint16_t max_exposure = 0x01E0; // default - uint16_t new_max_gain = 64; // VALID RANGE: 16-64 (default) - uint16_t pixel_count = 4096; //64x64 take all pixels to estimate exposure time // VALID RANGE: 1-65535 - - uint16_t desired_brightness = 58; // default - uint16_t resolution_ctrl = 0x0203; // default - uint16_t hdr_enabled = 0x0100; // default - uint16_t aec_agc_enabled = 0x0003; // default - uint16_t coarse_sw1 = 0x01BB; // default from context A - uint16_t coarse_sw2 = 0x01D9; // default from context A - uint16_t shutter_width_ctrl = 0x0164; // default from context A - uint16_t total_shutter_width = 0x01E0; // default from context A - uint16_t aec_update_freq = 0x02; // default Number of frames to skip between changes in AEC VALID RANGE: 0-15 - uint16_t aec_low_pass = 0x01; // default VALID RANGE: 0-2 - uint16_t agc_update_freq = 0x02; // default Number of frames to skip between changes in AGC VALID RANGE: 0-15 - uint16_t agc_low_pass = 0x02; // default VALID RANGE: 0-2 - - - if (FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_LOW_LIGHT])) - { - min_exposure = 0x0001; - max_exposure = 0x0040; - desired_brightness = 58; // VALID RANGE: 8-64 - resolution_ctrl = 0x0202;//10 bit linear - hdr_enabled = 0x0000; // off - aec_agc_enabled = 0x0303; // on - coarse_sw1 = 0x01BB; // default from context A - coarse_sw2 = 0x01D9; // default from context A - shutter_width_ctrl = 0x0164; // default from context A - total_shutter_width = 0x01E0; // default from context A + uint16_t coarse_sw_total = exposure / row_time; + uint16_t fine_sw_total = exposure % row_time; + /* ensure minimum: */ + if (coarse_sw_total < 1 && fine_sw_total < 260) { + fine_sw_total = 260; } - else - { - min_exposure = 0x0001; - max_exposure = 0x0040; - desired_brightness = 16; // VALID RANGE: 8-64 - resolution_ctrl = 0x0202;//10bit linear - hdr_enabled = 0x0000; // off - aec_agc_enabled = 0x0303; // on - coarse_sw1 = 0x01BB; // default from context A - coarse_sw2 = 0x01D9; // default from context A - shutter_width_ctrl = 0x0164; // default from context A - total_shutter_width = 0x01E0; // default from context A + uint32_t real_exposure = coarse_sw_total * (uint32_t)row_time + fine_sw_total; + bool update_exposure = (ctx_param->exposure != real_exposure); + /** + * Analog Gain + * [6:0] Analog Gain: Range 16 - 64 + * [15] Force 0.75X: 0 = Disabled. + */ + uint16_t analog_gain = gain * 16 + 0.5f; + /* limit: */ + if (analog_gain < 16) { + analog_gain = 16; + } else if (analog_gain > 64) { + analog_gain = 64; } + bool update_gain = ((uint16_t)(ctx_param->analog_gain * 16 + 0.5f) != analog_gain); + float real_analog_gain = analog_gain * 0.0625f; + + uint16_t sw_for_blanking = coarse_sw_total; + if (sw_for_blanking < ctx_param->p.size.y) { + sw_for_blanking = ctx_param->p.size.y; + } + /* + * Vertical Blanking + * Number of blank rows in a frame. V-Blank value must meet the following minimums: + * Linear Mode: + * V-Blank (min) = SW_total - R0x08 + 7 + * R0x08 (Coarse Shutter Width 1) + * If manual exposure then SW_total = R0x0B. (Coarse Shutter Width Total) + * If auto-exposure mode then SW_total = R0xAD. (Maximum Coarse Shutter Width) + * + * If Auto-Knee Point enabled, then V-Blank (min) = (t2 + t3 + 7). 
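Concretely, with the 800-clock row time of the 64 px binning-4 stream, a requested exposure of 10000 clocks splits into coarse = 10000 / 800 = 12 rows plus fine = 10000 % 800 = 400 clocks, and real_exposure = 12 * 800 + 400 reproduces the request exactly; the analog gain is quantized to 1/16 steps and clamped to 1.0 .. 4.0 (register values 16 .. 64). The same arithmetic as a small sketch:

/* sketch only; the numbers mirror the computation above */
static void exposure_split_example(void)
{
	const uint16_t row_time = 800;                /* 64 px, binning 4 */
	const uint32_t exposure = 10000;              /* requested, in pixel clocks */
	uint16_t coarse = exposure / row_time;        /* 12 rows */
	uint16_t fine   = exposure % row_time;        /* 400 clocks */
	uint32_t real   = coarse * (uint32_t)row_time + fine;    /* 10000 */

	const float gain = 1.7f;
	uint16_t gain_reg = (uint16_t)(gain * 16 + 0.5f);        /* 27 -> 27/16 = 1.6875 */
	(void)real; (void)gain_reg;
}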
+ * t2 = Tint_tot * (1/2) ^ T2 Ratio -> 4 + * t3 = Tint_tot * (1/2) ^ T3 Ratio -> 1 + */ + uint16_t ver_blanking = (sw_for_blanking >> 4) + (sw_for_blanking >> 6) + 7; + + switch (context_idx) { + case 0: + if (full_refresh || update_exposure || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_COARSE_SW_TOTAL_REG_A, coarse_sw_total); + mt9v034_WriteReg(MTV_FINE_SW_TOTAL_REG_A, fine_sw_total); + } + if (full_refresh || update_gain || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_ANALOG_GAIN_CTRL_REG_A, analog_gain); + } + if (full_refresh || ver_blanking != ctx->ver_blnk_ctx_a_reg || DEBUG_TEST_WORSTCASE_LATENCY) { + ctx->ver_blnk_ctx_a_reg = ver_blanking; + mt9v034_WriteReg(MTV_VER_BLANKING_REG_A, ctx->ver_blnk_ctx_a_reg); + } + break; + case 1: + if (full_refresh || update_exposure || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_COARSE_SW_TOTAL_REG_B, coarse_sw_total); + mt9v034_WriteReg(MTV_FINE_SW_TOTAL_REG_B, fine_sw_total); + } + if (full_refresh || update_gain || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_ANALOG_GAIN_CTRL_REG_B, analog_gain); + } + if (full_refresh || ver_blanking != ctx->ver_blnk_ctx_b_reg || DEBUG_TEST_WORSTCASE_LATENCY) { + ctx->ver_blnk_ctx_b_reg = ver_blanking; + mt9v034_WriteReg(MTV_VER_BLANKING_REG_B, ctx->ver_blnk_ctx_b_reg); + } + break; + } + + /* update the settings: */ + ctx_param->exposure = real_exposure; + ctx_param->analog_gain = real_analog_gain; + + return true; +} - uint16_t row_noise_correction = 0x0000; // default - uint16_t test_data = 0x0000; // default +bool mt9v034_update_exposure_param(void *usr, uint32_t exposure, float gain) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* check whether i2c is in use: */ + if (ctx->seq_i2c_in_use) { + return false; + } + + int context_idx; + if (ctx->do_switch_context) { + context_idx = ctx->desired_context; + } else { + context_idx = ctx->cur_context; + } + + return mt9v034_configure_exposure_param(ctx, context_idx, exposure, gain, false); +} - if(FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_ROW_NOISE_CORR]) && !FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_TEST_PATTERN])) +void mt9v034_restore_previous_param(void *usr) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* switch back to previous context: */ + if (!ctx->seq_i2c_in_use) { + /* update chip control register bit 15: */ + int desired_ctx_idx = ctx->previous_context; + switch(desired_ctx_idx) { + case 0: ctx->chip_control_reg &= 0x7FFF; break; + case 1: ctx->chip_control_reg |= 0x8000; break; + } + mt9v034_WriteReg(MTV_CHIP_CONTROL_REG, ctx->chip_control_reg); + /* done. 
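The shift expression above is the datasheet V-blank minimum evaluated with the knee ratios this driver programs (T2 ratio 4 and T3 ratio 6, via the 0x0164 shutter width control written in mt9v034_configure_general()): t2 = SW * (1/2)^4 = SW >> 4 and t3 = SW * (1/2)^6 = SW >> 6, plus the fixed 7 rows.

/* e.g. for the 64-row flow window with a short exposure, sw_for_blanking
 * clamps to the window height: (64 >> 4) + (64 >> 6) + 7 = 4 + 1 + 7 = 12 rows */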
*/ + ctx->cur_context = desired_ctx_idx; + ctx->do_switch_context = false; + } else { + ctx->desired_context = ctx->previous_context; + ctx->do_switch_context = true; + } +} + +void mt9v034_notify_readout_start(void *usr) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + camera_img_param_ex *cur_ctx_param = &ctx->context_p[ctx->cur_context]; + /* update the sensor parameter simulation: */ + /* transfer exposure settings from exp_param into cur_param */ + ctx->cur_param.exposure = ctx->exp_param.exposure; + /* transfer exposure settings of currently active context into exp_param */ + ctx->exp_param.exposure = cur_ctx_param->exposure; + /* transfer other settings of currently active context into cur_param */ + if (cur_ctx_param->p.size.x != ctx->cur_param.p.size.x || + cur_ctx_param->p.size.y != ctx->cur_param.p.size.y || + cur_ctx_param->p.binning!= ctx->cur_param.p.binning) { + // first frame after changing resolution or binning is invalid. + ctx->cur_param_data_valid = false; + } else { + ctx->cur_param_data_valid = true; + } + ctx->cur_param.p.size = cur_ctx_param->p.size; + ctx->cur_param.p.binning = cur_ctx_param->p.binning; + ctx->cur_param.analog_gain = cur_ctx_param->analog_gain; + /* handle context switching: */ + if (ctx->do_switch_context && !ctx->seq_i2c_in_use) { + /* update chip control register bit 15: */ + int desired_ctx_idx = ctx->desired_context; + switch(desired_ctx_idx) { + case 0: ctx->chip_control_reg &= 0x7FFF; break; + case 1: ctx->chip_control_reg |= 0x8000; break; + } + mt9v034_WriteReg(MTV_CHIP_CONTROL_REG, ctx->chip_control_reg); + /* done. */ + ctx->cur_context = desired_ctx_idx; + ctx->do_switch_context = false; + } +} + +void mt9v034_notify_readout_end(void *usr) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + /* adjust pix clock inversion as a work-around: */ + if (!ctx->seq_i2c_in_use) { + bool inv_clk = false; + const bool invert_conf[3] = CONFIG_CLOCK_INVERSION_WORKAROUND; + camera_img_param_ex *cur_ctx_param = &ctx->context_p[ctx->cur_context]; + switch (cur_ctx_param->p.binning) { + case 1: inv_clk = invert_conf[0]; break; + case 2: inv_clk = invert_conf[1]; break; + case 4: inv_clk = invert_conf[2]; break; + } + uint16_t reg = ctx->pixel_frame_line_ctrl_reg & ~0x0010; + if (inv_clk) reg |= 0x0010; + if (reg != ctx->pixel_frame_line_ctrl_reg || DEBUG_TEST_WORSTCASE_LATENCY) { + ctx->pixel_frame_line_ctrl_reg = reg; + mt9v034_WriteReg(MTV_PIXEL_FRAME_LINE_CTRL_REG, ctx->pixel_frame_line_ctrl_reg); + } + } +} + +void mt9v034_get_current_param(void *usr, camera_img_param_ex *img_param, bool *img_data_valid) { + mt9v034_sensor_ctx *ctx = (mt9v034_sensor_ctx *)usr; + *img_param = ctx->cur_param; + *img_data_valid = ctx->cur_param_data_valid; +} + +static void mt9v034_configure_general(mt9v034_sensor_ctx *ctx, bool initial) { + if (initial) { + /* + * Chip control register. + * [2:0] Scan Mode: 0 = Progressive scan. + * [4:3] Sensor operating mode: 1 = Master mode + * [5] Stereoscopy Mode: 0 = Disabled + * [6] Stereoscopic Master/Slave: 0 = Master + * [7] Parallel output: 1 = Enabled + * [8] Simultaneous mode: 1 = Enabled. Pixel and column readout take place in conjunction with exposure. + * [15] Context A / B select 0 = Context A registers are used. + */ + ctx->chip_control_reg = 0x0188; + mt9v034_WriteReg(MTV_CHIP_CONTROL_REG, ctx->chip_control_reg); + + /* + * Pixel Clock, FRAME and LINE VALID Control + * [0] Invert LINE_VALID: 0 = Do not invert. + * [1] Invert FRAME_VALID: 0 = Do not invert. 
+ * [2] XOR LINE_VALID: 0 = Normal operation. + * [3] Continuous LINE_VALID: 0 = Normal operation. + * [4] Invert Pixel Clock: 0 = Do not invert. + */ + ctx->pixel_frame_line_ctrl_reg = 0x0000; + mt9v034_WriteReg(MTV_PIXEL_FRAME_LINE_CTRL_REG, ctx->pixel_frame_line_ctrl_reg); + + // Write reserved registers per Rev G datasheet table 8 recommendations + mt9v034_WriteReg(0x13, 0x2D2E); + mt9v034_WriteReg(0x20, 0x03C7); + mt9v034_WriteReg(0x24, 0x001B); + mt9v034_WriteReg(0x2B, 0x0003); + mt9v034_WriteReg(0x2F, 0x0003); + } + + /* settings that are the same for both contexts: */ + + /* + * ADC Companding Mode + * [1:0] ADC Mode Context A: 2 = 10-bit linear + * [9:8] ADC Mode Context B: 2 = 10-bit linear + */ + mt9v034_WriteReg(MTV_ADC_RES_CTRL_REG, 0x0202); + + /* + * Row Noise Correction Control + * [0] Noise Correction Ctx A: 0 / 1 = Disable / Enable + * [1] Noise Use Black Level Avg Ctx A: 0 = Disable + * [8] Noise Correction Ctx B: 0 / 1 = Disable / Enable + * [9] Noise Use Black Level Avg Ctx B: 0 = Disable + */ + uint16_t row_noise_correction; + if (FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_ROW_NOISE_CORR]) && !FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_TEST_PATTERN])) { row_noise_correction = 0x0101; - else + } else { row_noise_correction = 0x0000; - - if (FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_TEST_PATTERN])) - test_data = 0x3000; //enable vertical gray shade pattern - else + } + mt9v034_WriteReg(MTV_ROW_NOISE_CORR_CTRL_REG, row_noise_correction); + + /* + * Shutter Width Control + * When Exposure Knee Point Auto Adjust is enabled, + * then one-half to the power of this value indicates the + * ratio of duration time t2, when saturation control + * gate is adjusted to level V2, to total coarse + * integration. + * [3:0] T2 Ratio: 4 -> Tint_tot * (1/2) ^ n + * [7:4] T3 Ratio: 6 -> Tint_tot * (1/2) ^ n + * [8] Exposure Knee Auto Adj: 1 = Enabled. + * [9] Single Knee Enable: 0 = Disabled. + * + * * Tint_tot = Coarse Shutter Width Total + * + * NOTE: update vertical blanking if changing parameters here! + */ + uint16_t coarse_sw_ctrl = 0x0164; + mt9v034_WriteReg(MTV_COARSE_SW_CTRL_REG_A, coarse_sw_ctrl); + mt9v034_WriteReg(MTV_COARSE_SW_CTRL_REG_B, coarse_sw_ctrl); + /* + * Minimum Coarse Shutter Width + * Set to minimum. (1) + */ + mt9v034_WriteReg(MTV_MIN_COARSE_SW_REG, 0x0001); + /* + * Maximum Coarse Shutter Width + */ + mt9v034_WriteReg(MTV_MAX_COARSE_SW_REG, CONFIG_MAX_EXPOSURE_ROWS); + /* + * Maximum Analog Gain + */ + mt9v034_WriteReg(MTV_ANALOG_MAX_GAIN_REG, CONFIG_MAX_ANALOG_GAIN); + + /* + * AEC/AGC Desired Bin + * User-defined “desired bin” that gives a measure of how bright the image is intended to be. + * Range: 1 - 64. 
+ */ + uint16_t desired_brightness; + if(FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_LOW_LIGHT])) { + desired_brightness = 58; + } else { + desired_brightness = 16; + } + mt9v034_WriteReg(MTV_AGC_AEC_DESIRED_BIN_REG, desired_brightness); + /* + * AEC Update Frequency + * Number of frames to skip between changes in AEC + * Range: 0-15 + */ + mt9v034_WriteReg(MTV_AEC_UPDATE_FREQ_REG, 2); + /* + * AEC Low Pass Filter + * Range: 0-2 + */ + mt9v034_WriteReg(MTV_AEC_LPF_REG, 1); + /* + * AGC Output Update Frequency + * Number of frames to skip between changes in AGC + * Range: 0-15 + */ + mt9v034_WriteReg(MTV_AGC_UPDATE_FREQ_REG, 2); + /* + * AGC Low Pass Filter + * Range: 0-2 + */ + mt9v034_WriteReg(MTV_AGC_LPF_REG, 2); + /* + * AGC/AEC Enable + * [0] AEC Enable Ctx A: 0 = Enable + * [1] AGC Enable Ctx A: 0 = Enable + * [8] AEC Enable Ctx B: 0 = Enable + * [9] AGC Enable Ctx B: 0 = Enable + */ + mt9v034_WriteReg(MTV_AEC_AGC_ENABLE_REG, 0x0000); + /* + * Sensor Type Control + * [0] HDR Enable Ctx A: 0 = Disable + * [1] Color/Mono Sensor Control: 0 = Monochrome + * [8] HDR Enable Ctx B: 0 = Disable + */ + mt9v034_WriteReg(MTV_SENSOR_TYPE_CTRL_REG, 0x0000); + + /* + * Digital Test Pattern + * [9:0] TWI Test Data: 0 + * [10] Use TWI Test Data: 0 = Disable. + *[12:11] Gray Shade Test Pattern: 0 / 2 = None / Horizontal Shades. + * [13] Test Enable: 0 / 1 = Disable / Enable + * [14] Flip TWI Test Data: 0 = Disable. + */ + uint16_t test_data; + if(FLOAT_AS_BOOL(global_data.param[PARAM_IMAGE_TEST_PATTERN])) { + test_data = 0x3000; + } else { test_data = 0x0000; + } + mt9v034_WriteReg(MTV_TEST_PATTERN_REG, test_data);//enable test pattern - uint16_t version = mt9v034_ReadReg16(MTV_CHIP_VERSION_REG); - - if (version == 0x1324) - { - mt9v034_WriteReg16(MTV_CHIP_CONTROL_REG, new_control); - - /* Context A */ - mt9v034_WriteReg16(MTV_WINDOW_WIDTH_REG_A, new_width_context_a); - mt9v034_WriteReg16(MTV_WINDOW_HEIGHT_REG_A, new_height_context_a); - mt9v034_WriteReg16(MTV_HOR_BLANKING_REG_A, new_hor_blanking_context_a); - mt9v034_WriteReg16(MTV_VER_BLANKING_REG_A, new_ver_blanking_context_a); - mt9v034_WriteReg16(MTV_READ_MODE_REG_A, new_readmode_context_a); - mt9v034_WriteReg16(MTV_COLUMN_START_REG_A, (MAX_IMAGE_WIDTH - new_width_context_a) / 2 + MINIMUM_COLUMN_START); // Set column/row start point for lower resolutions (center window) - mt9v034_WriteReg16(MTV_ROW_START_REG_A, (MAX_IMAGE_HEIGHT - new_height_context_a) / 2 + MINIMUM_ROW_START); - mt9v034_WriteReg16(MTV_COARSE_SW_1_REG_A, coarse_sw1); - mt9v034_WriteReg16(MTV_COARSE_SW_2_REG_A, coarse_sw2); - mt9v034_WriteReg16(MTV_COARSE_SW_CTRL_REG_A, shutter_width_ctrl); - mt9v034_WriteReg16(MTV_V2_CTRL_REG_A, total_shutter_width); - - - /* Context B */ - mt9v034_WriteReg16(MTV_WINDOW_WIDTH_REG_B, new_width_context_b); - mt9v034_WriteReg16(MTV_WINDOW_HEIGHT_REG_B, new_height_context_b); - mt9v034_WriteReg16(MTV_HOR_BLANKING_REG_B, new_hor_blanking_context_b); - mt9v034_WriteReg16(MTV_VER_BLANKING_REG_B, new_ver_blanking_context_b); - mt9v034_WriteReg16(MTV_READ_MODE_REG_B, new_readmode_context_b); - mt9v034_WriteReg16(MTV_COLUMN_START_REG_B, MINIMUM_COLUMN_START); // default - mt9v034_WriteReg16(MTV_ROW_START_REG_B, MINIMUM_ROW_START); - mt9v034_WriteReg16(MTV_COARSE_SW_1_REG_B, coarse_sw1); - mt9v034_WriteReg16(MTV_COARSE_SW_2_REG_B, coarse_sw2); - mt9v034_WriteReg16(MTV_COARSE_SW_CTRL_REG_B, shutter_width_ctrl); - mt9v034_WriteReg16(MTV_V2_CTRL_REG_B, total_shutter_width); - - /* General Settings */ - mt9v034_WriteReg16(MTV_ROW_NOISE_CORR_CTRL_REG, 
row_noise_correction); - mt9v034_WriteReg16(MTV_AEC_AGC_ENABLE_REG, aec_agc_enabled); // disable AEC/AGC for both contexts - mt9v034_WriteReg16(MTV_HDR_ENABLE_REG, hdr_enabled); // disable HDR on both contexts - mt9v034_WriteReg16(MTV_MIN_EXPOSURE_REG, min_exposure); - mt9v034_WriteReg16(MTV_MAX_EXPOSURE_REG, max_exposure); - mt9v034_WriteReg16(MTV_MAX_GAIN_REG, new_max_gain); - mt9v034_WriteReg16(MTV_AGC_AEC_PIXEL_COUNT_REG, pixel_count); - mt9v034_WriteReg16(MTV_AGC_AEC_DESIRED_BIN_REG, desired_brightness); - mt9v034_WriteReg16(MTV_ADC_RES_CTRL_REG, resolution_ctrl); // here is the way to regulate darkness :) - - mt9v034_WriteReg16(MTV_DIGITAL_TEST_REG, test_data);//enable test pattern - - mt9v034_WriteReg16(MTV_AEC_UPDATE_REG,aec_update_freq); - mt9v034_WriteReg16(MTV_AEC_LOWPASS_REG,aec_low_pass); - mt9v034_WriteReg16(MTV_AGC_UPDATE_REG,agc_update_freq); - mt9v034_WriteReg16(MTV_AGC_LOWPASS_REG,agc_low_pass); - + if (initial) { /* Reset */ - mt9v034_WriteReg16(MTV_SOFT_RESET_REG, 0x01); + mt9v034_WriteReg(MTV_SOFT_RESET_REG, 0x01); } - + + /* + * NOTES: + * Old code unexpectedly used: + * - 12 to 10bit companding mode on 64x64 pixel image. => TESTED: produces lots of noise, I don't see the gain. + * - Enabled AGC & AEC only on 64x64pixel image. => AGC and AEC is not good when resolution is changed. Implemented in software. + * - Enabled HDR mode on big image. + * - Used 64 x 64 = 4096 Pixels for AGC & AEC + * - row / col bin 4 on 64x64 + * - row / col bin 2 on big image + */ } -/** - * @brief Changes sensor context based on settings - */ -void mt9v034_set_context() -{ - uint16_t new_control; - if (FLOAT_AS_BOOL(global_data.param[PARAM_VIDEO_ONLY])) - new_control = 0x8188; // Context B - else - new_control = 0x0188; // Context A - - mt9v034_WriteReg16(MTV_CHIP_CONTROL_REG, new_control); +static bool mt9v034_configure_context(mt9v034_sensor_ctx *ctx, int context_idx, const camera_img_param_ex *img_param, bool full_refresh) { + bool update_size = full_refresh; + bool update_binning = full_refresh; + if (context_idx < 0 || context_idx >= 2) return false; + camera_img_param_ex *ctx_param = &ctx->context_p[context_idx]; + /* check which sets of parameters we need to update: */ + if (!full_refresh) { + if (ctx_param->p.size.x != img_param->p.size.x || + ctx_param->p.size.y != img_param->p.size.y) { + update_size = true; + } + if (ctx_param->p.binning != img_param->p.binning) { + update_binning = true; + } + } + + /* image dimensions */ + uint16_t width = img_param->p.size.x * img_param->p.binning; + uint16_t height = img_param->p.size.y * img_param->p.binning; + if (width < 1 || width > 752) return false; + if (height < 1 || height > 480) return false; + + uint16_t col_start = 1 + (752 - width) / 2; + uint16_t row_start = 4 + (480 - height) / 2; + + uint16_t hor_blanking = mt9v034_hor_blanking(width, img_param->p.binning); + + /* + * Read Mode + * [1:0] Row Bin: 0 / 1 / 2 = Bin 1 / Bin 2 / Bin 4 + * [3:2] Col Bin: 0 / 1 / 2 = Bin 1 / Bin 2 / Bin 4 + * [4] Row flip: 0 = Disable. + * [5] Column flip: 0 = Disable. + * [6] Show Dark Rows: 0 = Disable. + * [7] Show Dark Columns: 0 = Disable. + * [9:8] Reserved: 3 = Must be 3. 
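Two quick consistency checks on this function: for binning 4 the read mode set just below becomes 0x0300 | 0x000A = 0x030A, matching the hard-coded 0x30A the removed context-A setup used, and for the 64 px binning-4 window (raw 256 x 256) the centering above gives col_start = 1 + (752 - 256) / 2 = 249 and row_start = 4 + (480 - 256) / 2 = 116. The same arithmetic as a sketch:

#include <stdint.h>

/* illustrative check of the geometry/readmode values for the flow stream */
static void context_geometry_example(void)
{
	const uint16_t width = 64 * 4, height = 64 * 4;   /* raw size, binning 4 */
	uint16_t readmode  = 0x0300 | 0x000A;             /* == 0x030A */
	uint16_t col_start = 1 + (752 - width) / 2;       /* == 249 */
	uint16_t row_start = 4 + (480 - height) / 2;      /* == 116 */
	(void)readmode; (void)col_start; (void)row_start;
}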
+ */ + uint16_t readmode = 0x0300; + switch (img_param->p.binning) { + case 1: readmode |= 0x0000; break; + case 2: readmode |= 0x0005; break; + case 4: readmode |= 0x000A; break; + default: return false; + } + + switch (context_idx) { + case 0: + if (update_size || update_binning || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_WINDOW_WIDTH_REG_A, width); + mt9v034_WriteReg(MTV_WINDOW_HEIGHT_REG_A, height); + mt9v034_WriteReg(MTV_HOR_BLANKING_REG_A, hor_blanking); + mt9v034_WriteReg(MTV_READ_MODE_REG_A, readmode); + mt9v034_WriteReg(MTV_COLUMN_START_REG_A, col_start); + mt9v034_WriteReg(MTV_ROW_START_REG_A, row_start); + } + break; + case 1: + if (update_size || update_binning || DEBUG_TEST_WORSTCASE_LATENCY) { + mt9v034_WriteReg(MTV_WINDOW_WIDTH_REG_B, width); + mt9v034_WriteReg(MTV_WINDOW_HEIGHT_REG_B, height); + mt9v034_WriteReg(MTV_HOR_BLANKING_REG_B, hor_blanking); + mt9v034_WriteReg(MTV_READ_MODE_REG_B, readmode); + mt9v034_WriteReg(MTV_COLUMN_START_REG_B, col_start); + mt9v034_WriteReg(MTV_ROW_START_REG_B, row_start); + } + break; + } + + /* update the current settings: */ + ctx_param->p = img_param->p; + + mt9v034_configure_exposure_param(ctx, context_idx, img_param->exposure, img_param->analog_gain, full_refresh); + + return true; } -/** - * @brief Writes a byte at a specific Camera register - * @param Addr: mt9v034 register address. - * @param Data: Data to be written to the specific register - * @retval 0x00 if write operation is OK. - * 0xFF if timeout condition occured (device not connected or bus error). - */ -uint8_t mt9v034_WriteReg(uint16_t Addr, uint8_t Data) -{ - uint32_t timeout = TIMEOUT_MAX; - - /* Generate the Start Condition */ - I2C_GenerateSTART(I2C2, ENABLE); - - /* Test on I2C2 EV5 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_MODE_SELECT)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send DCMI selcted device slave Address for write */ - I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_WRITE_ADDRESS, I2C_Direction_Transmitter); - - /* Test on I2C2 EV6 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send I2C2 location address LSB */ - I2C_SendData(I2C2, (uint8_t)(Addr)); - - /* Test on I2C2 EV8 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_BYTE_TRANSMITTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send Data */ - I2C_SendData(I2C2, Data); - - /* Test on I2C2 EV8 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_BYTE_TRANSMITTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send I2C2 STOP Condition */ - I2C_GenerateSTOP(I2C2, ENABLE); - - /* If operation is OK, return 0 */ - return 0; +static bool mt9v034_init_hw(mt9v034_sensor_ctx *ctx) { + GPIO_InitTypeDef GPIO_InitStructure; + I2C_InitTypeDef I2C_InitStruct; + + /* I2C2 clock enable */ + RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C2, ENABLE); + + /* GPIOB clock enable */ + RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOB, ENABLE); + + /* Connect I2C2 pins to AF4 */ + 
GPIO_PinAFConfig(GPIOB, GPIO_PinSource10, GPIO_AF_I2C2); + GPIO_PinAFConfig(GPIOB, GPIO_PinSource11, GPIO_AF_I2C2); + + /* Configure I2C2 GPIOs */ + GPIO_InitStructure.GPIO_Pin = GPIO_Pin_10 | GPIO_Pin_11; + GPIO_InitStructure.GPIO_Mode = GPIO_Mode_AF; + GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; + GPIO_InitStructure.GPIO_OType = GPIO_OType_OD; + GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL; + GPIO_Init(GPIOB, &GPIO_InitStructure); + + /* I2C DeInit */ + I2C_DeInit(I2C2); + + /* Enable the I2C peripheral */ + I2C_Cmd(I2C2, ENABLE); + + /* Set the I2C structure parameters */ + I2C_InitStruct.I2C_Mode = I2C_Mode_I2C; + I2C_InitStruct.I2C_DutyCycle = I2C_DutyCycle_2; + I2C_InitStruct.I2C_OwnAddress1 = 0xFE; + I2C_InitStruct.I2C_Ack = I2C_Ack_Enable; + I2C_InitStruct.I2C_AcknowledgedAddress = I2C_AcknowledgedAddress_7bit; + I2C_InitStruct.I2C_ClockSpeed = 400000; + + /* Initialize the I2C peripheral w/ selected parameters */ + I2C_Init(I2C2, &I2C_InitStruct); + + /* Initialize GPIOs for EXPOSURE and STANDBY lines of the camera */ + GPIO_InitStructure.GPIO_Pin = GPIO_Pin_2 | GPIO_Pin_3; + GPIO_InitStructure.GPIO_Mode = GPIO_Mode_OUT; + GPIO_InitStructure.GPIO_OType = GPIO_OType_PP; + GPIO_InitStructure.GPIO_Speed = GPIO_Speed_2MHz; + GPIO_InitStructure.GPIO_PuPd = GPIO_PuPd_NOPULL; + GPIO_Init(GPIOA, &GPIO_InitStructure); + GPIO_ResetBits(GPIOA, GPIO_Pin_2 | GPIO_Pin_3); + + /* test I2C: */ + uint16_t version = mt9v034_ReadReg(MTV_CHIP_VERSION_REG); + if (version != 0x1324) return false; + + return true; } -/** - * @brief Writes to a specific Camera register - */ -uint8_t mt9v034_WriteReg16(uint16_t address, uint16_t Data) -{ - uint8_t result = mt9v034_WriteReg(address, (uint8_t)( Data >> 8)); // write upper byte - result |= mt9v034_WriteReg(0xF0, (uint8_t) Data); // write lower byte - return result; +static bool mt9v034_I2cWaitEvent(uint32_t i2c_event) { + uint32_t timeout = TIMEOUT_MAX; /* Initialize timeout value */ + while(!I2C_CheckEvent(I2C2, i2c_event)) { + /* If the timeout delay is exceeded, exit with error code */ + if ((timeout--) == 0) return false; + } + return true; } -/** - * @brief Reads a byte from a specific Camera register - * @param Addr: mt9v034 register address. - * @retval data read from the specific register or 0xFF if timeout condition - * occured. 
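The removed mt9v034_WriteReg16() issued two separate 8-bit writes (high byte to the register address, low byte via register 0xF0), whereas the new mt9v034_WriteReg() further below sends both data bytes in a single transaction. For symmetry, a 16-bit read along the same lines could look like the sketch below; it reuses mt9v034_I2cWaitEvent() from this file and the device address macros from mt9v034.h, and it is illustrative only since the patch's own mt9v034_ReadReg() continues past this excerpt.

static uint16_t mt9v034_ReadReg_sketch(uint16_t Addr)
{
	/* address the sensor in write mode to select the register */
	I2C_GenerateSTART(I2C2, ENABLE);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_MODE_SELECT)) return 0xFFFF;
	I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_WRITE_ADDRESS, I2C_Direction_Transmitter);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED)) return 0xFFFF;
	I2C_SendData(I2C2, (uint8_t)Addr);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_TRANSMITTED)) return 0xFFFF;

	/* repeated start, then read the two data bytes (MSB first) */
	I2C_GenerateSTART(I2C2, ENABLE);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_MODE_SELECT)) return 0xFFFF;
	I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_READ_ADDRESS, I2C_Direction_Receiver);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_RECEIVER_MODE_SELECTED)) return 0xFFFF;

	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_RECEIVED)) return 0xFFFF;
	uint16_t msb = I2C_ReceiveData(I2C2);

	/* NACK and STOP after the last byte */
	I2C_AcknowledgeConfig(I2C2, DISABLE);
	I2C_GenerateSTOP(I2C2, ENABLE);
	if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_RECEIVED)) return 0xFFFF;
	uint16_t lsb = I2C_ReceiveData(I2C2);
	I2C_AcknowledgeConfig(I2C2, ENABLE);

	return (uint16_t)((msb << 8) | lsb);
}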
- */ -uint8_t mt9v034_ReadReg(uint16_t Addr) -{ - uint32_t timeout = TIMEOUT_MAX; - uint8_t Data = 0; - - /* Generate the Start Condition */ - I2C_GenerateSTART(I2C2, ENABLE); - - /* Test on I2C2 EV5 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_MODE_SELECT)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send DCMI selcted device slave Address for write */ - I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_READ_ADDRESS, I2C_Direction_Transmitter); - - /* Test on I2C2 EV6 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send I2C2 location address LSB */ - I2C_SendData(I2C2, (uint8_t)(Addr)); - - /* Test on I2C2 EV8 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_BYTE_TRANSMITTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Clear AF flag if arised */ - I2C2->SR1 |= (uint16_t)0x0400; - - /* Generate the Start Condition */ - I2C_GenerateSTART(I2C2, ENABLE); - - /* Test on I2C2 EV6 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_MODE_SELECT)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Send DCMI selcted device slave Address for write */ - I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_READ_ADDRESS, I2C_Direction_Receiver); - - /* Test on I2C2 EV6 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_RECEIVER_MODE_SELECTED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Prepare an NACK for the next data received */ - I2C_AcknowledgeConfig(I2C2, DISABLE); - - /* Test on I2C2 EV7 and clear it */ - timeout = TIMEOUT_MAX; /* Initialize timeout value */ - while(!I2C_CheckEvent(I2C2, I2C_EVENT_MASTER_BYTE_RECEIVED)) - { - /* If the timeout delay is exeeded, exit with error code */ - if ((timeout--) == 0) return 0xFF; - } - - /* Prepare Stop after receiving data */ - I2C_GenerateSTOP(I2C2, ENABLE); - - /* Receive the Data */ - Data = I2C_ReceiveData(I2C2); - - /* return the read data */ - return Data; +static bool mt9v034_WriteReg(uint16_t Addr, uint16_t Data) { + /* Generate the Start Condition */ + I2C_GenerateSTART(I2C2, ENABLE); + + /* Test on I2C2 EV5 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_MODE_SELECT)) + return false; + + /* Send DCMI selected device slave Address for write */ + I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_WRITE_ADDRESS, I2C_Direction_Transmitter); + + /* Test on I2C2 EV6 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED)) + return false; + + /* Send I2C2 location address LSB */ + I2C_SendData(I2C2, (uint8_t)(Addr)); + + /* Test on I2C2 EV8 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_TRANSMITTED)) + return false; + + /* Send Data MSB */ + I2C_SendData(I2C2, Data >> 8); + + /* Test on I2C2 EV8 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_TRANSMITTED)) + return false; + + /* Send Data LSB */ + I2C_SendData(I2C2, Data); + + /* Test on I2C2 EV8 and 
clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_TRANSMITTED)) + return false; + + /* Send I2C2 STOP Condition */ + I2C_GenerateSTOP(I2C2, ENABLE); + + /* If operation is OK, return true */ + return true; } -/** - * @brief Reads from a specific Camera register - */ -uint16_t mt9v034_ReadReg16(uint8_t address) -{ - uint16_t result = mt9v034_ReadReg(address) << 8; // read upper byte - result |= mt9v034_ReadReg(0xF0); // read lower byte - return result; +static uint16_t mt9v034_ReadReg(uint16_t Addr) { + uint16_t Data = 0; + + /* Generate the Start Condition */ + I2C_GenerateSTART(I2C2, ENABLE); + + /* Test on I2C2 EV5 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_MODE_SELECT)) + return 0xFFFF; + + /* Send DCMI selected device slave Address for write */ + I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_READ_ADDRESS, I2C_Direction_Transmitter); + + /* Test on I2C2 EV6 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED)) + return 0xFFFF; + + /* Send I2C2 location address LSB */ + I2C_SendData(I2C2, (uint8_t)(Addr)); + + /* Test on I2C2 EV8 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_TRANSMITTED)) + return 0xFFFF; + + /* Clear AF flag if set */ + I2C2->SR1 |= (uint16_t)0x0400; + + /* Generate the Start Condition */ + I2C_GenerateSTART(I2C2, ENABLE); + + /* Test on I2C2 EV6 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_MODE_SELECT)) + return 0xFFFF; + + /* Send DCMI selected device slave Address for write */ + I2C_Send7bitAddress(I2C2, mt9v034_DEVICE_READ_ADDRESS, I2C_Direction_Receiver); + + /* Test on I2C2 EV6 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_RECEIVER_MODE_SELECTED)) + return 0xFFFF; + + /* Prepare an ACK for the next data received */ + I2C_AcknowledgeConfig(I2C2, ENABLE); + + /* Test on I2C2 EV7 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_RECEIVED)) + return 0xFFFF; + + /* Receive the Data MSB */ + Data = ((uint16_t)I2C_ReceiveData(I2C2)) << 8; + + /* Prepare an NACK for the next data received */ + I2C_AcknowledgeConfig(I2C2, DISABLE); + + /* Test on I2C2 EV7 and clear it */ + if (!mt9v034_I2cWaitEvent(I2C_EVENT_MASTER_BYTE_RECEIVED)) + return 0xFFFF; + + /* Receive the Data LSB */ + Data |= I2C_ReceiveData(I2C2); + + /* Prepare Stop after receiving data */ + I2C_GenerateSTOP(I2C2, ENABLE); + + /* return the read data */ + return Data; } diff --git a/src/modules/flow/result_accumulator.c b/src/modules/flow/result_accumulator.c new file mode 100644 index 0000000..9ea0906 --- /dev/null +++ b/src/modules/flow/result_accumulator.c @@ -0,0 +1,277 @@ +/**************************************************************************** + * + * Copyright (C) 2013 PX4 Development Team. All rights reserved. + * Author: Petri Tanskanen + * Lorenz Meier + * Samuel Zihlmann + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. 
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ ****************************************************************************/
+
+#include <stdint.h>
+#include <string.h>
+#include <math.h>
+
+#include "result_accumulator.h"
+#include "i2c_frame.h"
+#include "settings.h"
+#include "gyro.h"
+
+void result_accumulator_init(result_accumulator_ctx *ctx)
+{
+    /* set everything to zero */
+    memset(ctx, 0, sizeof(result_accumulator_ctx));
+    /* set non-zero initial values */
+    ctx->min_quality = 255;
+    ctx->last_ground_distance = -1;
+}
+
+void result_accumulator_feed(result_accumulator_ctx *ctx, const result_accumulator_frame* frame)
+{
+    ctx->last_ground_distance = frame->ground_distance;
+    ctx->last_distance_age = frame->distance_age;
+    ctx->last_gyro_temp = frame->gyro_temp;
+
+    /* accumulate the values */
+    if (frame->qual > 0) {
+        ctx->px_flow_x_accu += frame->pixel_flow_x;
+        ctx->px_flow_y_accu += frame->pixel_flow_y;
+
+        /* Convert the flow from pixels to rad:
+         * This is done with a linear relationship because the involved angles are low.
+         *
+         * Calculation for maximum possible error: (for an assumed drone configuration)
+         * With a 10Hz accumulation period and 5m/s at 1m distance the real speed in rad is 0.463rad per accumulation period.
+         * Each frame (taken at 400Hz) measures a distance of 4.16px (at 8mm focal length, 4x binning -> 3mm/px).
+         * With the linear formula this equates to 0.0125rad per frame.
+         * Accumulated value in rad is thus 40 * 0.0125 = 0.5rad. Error: 8%.
+         * At half the speed the error is just 2%.
*/ + float flow_x_rad = frame->pixel_flow_y * frame->rad_per_pixel; + float flow_y_rad = - frame->pixel_flow_x * frame->rad_per_pixel; + ctx->rad_flow_x_accu += flow_x_rad; + ctx->rad_flow_y_accu += flow_y_rad; + + ctx->gyro_x_accu += frame->x_rate * frame->dt; + ctx->gyro_y_accu += frame->y_rate * frame->dt; + ctx->gyro_z_accu += frame->z_rate * frame->dt; + + /* the quality is the minimum of all quality measurements */ + if (frame->qual < ctx->min_quality || ctx->valid_data_count == 0) { + ctx->min_quality = frame->qual; + } + + ctx->valid_data_count++; + ctx->valid_time += frame->dt; + + if(ctx->last_ground_distance >= 0){ + ctx->m_flow_x_accu += flow_x_rad * ctx->last_ground_distance; + ctx->m_flow_y_accu += flow_y_rad * ctx->last_ground_distance; + ctx->valid_dist_time += frame->dt; + } + } + + /* convert the algorithm's capability into a velocity in rad / s: */ + float flow_cap_mv_rad = frame->max_px_frame * frame->rad_per_pixel / frame->dt; + + if (ctx->data_count > 0) { + if (flow_cap_mv_rad < ctx->flow_cap_mv_rad) { + ctx->flow_cap_mv_rad = flow_cap_mv_rad; + } + } else { + ctx->flow_cap_mv_rad = flow_cap_mv_rad; + } + + ctx->data_count++; + ctx->frame_count++; + ctx->full_time += frame->dt + frame->dropped_dt; +} + +void result_accumulator_calculate_output_flow(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow *out) +{ + if (ctx->valid_data_count * 100u >= ctx->data_count * min_valid_data_count_percent && ctx->valid_data_count > 0) { + /*** calculate the output values for the flow mavlink message ***/ + /* scale the averaged values from valid time to full time (we extrapolate the invalid data sets) */ + float time_scaling_f = ctx->full_time / ctx->valid_time; + out->flow_x = floor(ctx->px_flow_x_accu * time_scaling_f * 10.0f + 0.5f); + out->flow_y = floor(ctx->px_flow_y_accu * time_scaling_f * 10.0f + 0.5f); + if(ctx->valid_dist_time > 0){ + float time_scaling_dist = ctx->full_time / ctx->valid_dist_time; + out->flow_comp_m_x = ctx->m_flow_x_accu * time_scaling_dist; + out->flow_comp_m_y = ctx->m_flow_y_accu * time_scaling_dist; + }else{ + out->flow_comp_m_x = 0; + out->flow_comp_m_y = 0; + } + out->quality = ctx->min_quality; + } else { + /* not enough valid data */ + out->flow_x = 0; + out->flow_y = 0; + out->flow_comp_m_x = 0; + out->flow_comp_m_y = 0; + out->quality = 0; + } + /* averaging the distance is no use */ + out->ground_distance = ctx->last_ground_distance; +} + +void result_accumulator_calculate_output_flow_rad(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow_rad *out) +{ + if (ctx->valid_data_count * 100u >= ctx->data_count * min_valid_data_count_percent && ctx->valid_data_count > 0) { + /*** calculate the output values for the flow_rad mavlink message ***/ + out->integration_time = ctx->valid_time * 1000000.0f; + out->integrated_x = ctx->rad_flow_x_accu; + out->integrated_y = ctx->rad_flow_y_accu; + out->integrated_xgyro = ctx->gyro_x_accu; + out->integrated_ygyro = ctx->gyro_y_accu; + out->integrated_zgyro = ctx->gyro_z_accu; + out->quality = ctx->min_quality; + /* averaging the distance and temperature is no use */ + out->temperature = ctx->last_gyro_temp; + } else { + /* not enough valid data */ + out->integration_time = 0; + out->integrated_x = 0; + out->integrated_y = 0; + out->integrated_xgyro = 0; + out->integrated_ygyro = 0; + out->integrated_zgyro = 0; + out->quality = 0; + out->temperature = ctx->last_gyro_temp; + } + out->time_delta_distance_us = 
ctx->last_distance_age; + out->ground_distance = ctx->last_ground_distance; +} + +void result_accumulator_calculate_output_flow_i2c(result_accumulator_ctx *ctx, uint16_t min_valid_data_count_percent, result_accumulator_output_flow_i2c *out) +{ + if (ctx->valid_data_count * 100u >= ctx->data_count * min_valid_data_count_percent && ctx->valid_data_count > 0) { + /*** calculate the output values for the i2c message ***/ + out->frame_count = ctx->frame_count; + out->valid_frames = ctx->valid_data_count; + out->integration_time = ctx->valid_time * 1000000.0f; + out->rad_flow_x = floor(ctx->rad_flow_x_accu * 10000.0f + 0.5f); + out->rad_flow_y = floor(ctx->rad_flow_y_accu * 10000.0f + 0.5f); + out->integrated_gyro_x = floor(ctx->gyro_x_accu * 10000.0f + 0.5f); + out->integrated_gyro_y = floor(ctx->gyro_y_accu * 10000.0f + 0.5f); + out->integrated_gyro_z = floor(ctx->gyro_z_accu * 10000.0f + 0.5f); + out->gyro_x = floor(ctx->gyro_x_accu * (100.0f / ctx->valid_time) + 0.5f); + out->gyro_y = floor(ctx->gyro_y_accu * (100.0f / ctx->valid_time) + 0.5f); + out->gyro_z = floor(ctx->gyro_z_accu * (100.0f / ctx->valid_time) + 0.5f); + out->quality = ctx->min_quality; + out->temperature = ctx->last_gyro_temp; + } else { + /* not enough valid data */ + out->frame_count = ctx->frame_count; + out->valid_frames = 0; + out->integration_time = 0; + out->rad_flow_x = 0; + out->rad_flow_y = 0; + out->integrated_gyro_x = 0; + out->integrated_gyro_y = 0; + out->integrated_gyro_z = 0; + out->gyro_x = 0; + out->gyro_y = 0; + out->gyro_z = 0; + out->quality = 0; + out->temperature = ctx->last_gyro_temp; + } + out->time_delta_distance_us = ctx->last_distance_age; + if (ctx->last_ground_distance >= 0) { + out->ground_distance = floor(ctx->last_ground_distance * 1000.0f + 0.5f); + } else { + out->ground_distance = 0; + } +} + +// TODO: why doesn't the above function write these structs directly? 
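For orientation: the functions above form a feed/derive/reset cycle. result_accumulator_feed() runs once per processed camera frame, one of the result_accumulator_calculate_output_*() helpers is invoked when a consumer reads the data, and result_accumulator_reset() opens the next accumulation window. A minimal consumer sketch (illustration only, not part of this patch; the readout trigger and the 50% valid-frame threshold are assumed values):

#include <stdbool.h>
#include "result_accumulator.h"

static result_accumulator_ctx acc;  /* result_accumulator_init(&acc) is assumed to run once at startup */

void example_flow_consumer(const result_accumulator_frame *frame, bool readout_due)
{
    /* one call per processed camera frame */
    result_accumulator_feed(&acc, frame);

    if (readout_due) {
        result_accumulator_output_flow_rad out;

        /* the flow outputs are zeroed unless at least 50% of the accumulated frames were valid */
        result_accumulator_calculate_output_flow_rad(&acc, 50, &out);

        /* ... publish 'out', e.g. as an OPTICAL_FLOW_RAD message ... */

        /* start the next accumulation window */
        result_accumulator_reset(&acc);
    }
}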
+void result_accumulator_fill_i2c_data(result_accumulator_ctx *ctx, i2c_frame* f, i2c_integral_frame* f_integral) { + result_accumulator_output_flow output_flow; + result_accumulator_output_flow_i2c output_i2c; + int min_valid_ratio = global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO]; + result_accumulator_calculate_output_flow(ctx, min_valid_ratio, &output_flow); + result_accumulator_calculate_output_flow_i2c(ctx, min_valid_ratio, &output_i2c); + + /* write the i2c_frame */ + f->frame_count = ctx->frame_count; + f->pixel_flow_x_sum = output_flow.flow_x; + f->pixel_flow_y_sum = output_flow.flow_y; + f->flow_comp_m_x = floor(output_flow.flow_comp_m_x * 1000.0f + 0.5f); + f->flow_comp_m_y = floor(output_flow.flow_comp_m_y * 1000.0f + 0.5f); + f->qual = output_flow.quality; + f->ground_distance = output_i2c.ground_distance; + + f->gyro_x_rate = output_i2c.gyro_x; + f->gyro_y_rate = output_i2c.gyro_y; + f->gyro_z_rate = output_i2c.gyro_z; + f->gyro_range = getGyroRange(); + + if (output_i2c.time_delta_distance_us < 255 * 1000) { + f->distance_timestamp = output_i2c.time_delta_distance_us / 1000; //convert to ms + } else { + f->distance_timestamp = 255; + } + + /* write the i2c_integral_frame */ + f_integral->frame_count_since_last_readout = output_i2c.valid_frames; + f_integral->gyro_x_rate_integral = output_i2c.integrated_gyro_x; //mrad*10 + f_integral->gyro_y_rate_integral = output_i2c.integrated_gyro_y; //mrad*10 + f_integral->gyro_z_rate_integral = output_i2c.integrated_gyro_z; //mrad*10 + f_integral->pixel_flow_x_integral = output_i2c.rad_flow_x; //mrad*10 + f_integral->pixel_flow_y_integral = output_i2c.rad_flow_y; //mrad*10 + f_integral->integration_timespan = output_i2c.integration_time; //microseconds + f_integral->ground_distance = output_i2c.ground_distance; //mmeters + f_integral->distance_timestamp = output_i2c.time_delta_distance_us; //microseconds + f_integral->qual = output_i2c.quality; //0-255 linear quality measurement 0=bad, 255=best + f_integral->gyro_temperature = output_i2c.temperature; //Temperature * 100 in centi-degrees Celsius +} + +void result_accumulator_reset(result_accumulator_ctx *ctx) +{ + ctx->px_flow_x_accu = 0; + ctx->px_flow_y_accu = 0; + + ctx->rad_flow_x_accu = 0; + ctx->rad_flow_y_accu = 0; + + ctx->m_flow_x_accu = 0; + ctx->m_flow_y_accu = 0; + + ctx->gyro_x_accu = 0; + ctx->gyro_y_accu = 0; + ctx->gyro_z_accu = 0; + + ctx->min_quality = 255; + + ctx->valid_data_count = 0; + ctx->data_count = 0; + ctx->valid_time = 0; + ctx->full_time = 0; + ctx->valid_dist_time = 0; +} diff --git a/src/modules/flow/settings.c b/src/modules/flow/settings.c index d367d5b..f82d029 100644 --- a/src/modules/flow/settings.c +++ b/src/modules/flow/settings.c @@ -77,9 +77,6 @@ void global_data_reset_param_defaults(void){ strcpy(global_data.param_name[PARAM_SYSTEM_SEND_LPOS], "SYS_SEND_LPOS"); global_data.param_access[PARAM_SYSTEM_SEND_LPOS] = READ_WRITE; - global_data.param[PARAM_SENSOR_POSITION] = 0; // BOTTOM - strcpy(global_data.param_name[PARAM_SENSOR_POSITION], "POSITION"); - global_data.param_access[PARAM_SENSOR_POSITION] = READ_WRITE; global_data.param[PARAM_USART2_BAUD] = 115200; strcpy(global_data.param_name[PARAM_USART2_BAUD], "USART_2_BAUD"); @@ -90,15 +87,17 @@ void global_data_reset_param_defaults(void){ strcpy(global_data.param_name[PARAM_USART3_BAUD], "USART_3_BAUD"); global_data.param_access[PARAM_USART3_BAUD] = READ_ONLY; + global_data.param[PARAM_FOCAL_LENGTH_MM] = 16.0f; strcpy(global_data.param_name[PARAM_FOCAL_LENGTH_MM], "LENS_FOCAL_LEN"); 
global_data.param_access[PARAM_FOCAL_LENGTH_MM] = READ_WRITE; - global_data.param[PARAM_IMAGE_WIDTH] = BOTTOM_FLOW_IMAGE_WIDTH; + + global_data.param[PARAM_IMAGE_WIDTH] = FLOW_IMAGE_WIDTH; strcpy(global_data.param_name[PARAM_IMAGE_WIDTH], "IMAGE_WIDTH"); global_data.param_access[PARAM_IMAGE_WIDTH] = READ_ONLY; - global_data.param[PARAM_IMAGE_HEIGHT] = BOTTOM_FLOW_IMAGE_HEIGHT; + global_data.param[PARAM_IMAGE_HEIGHT] = FLOW_IMAGE_HEIGHT; strcpy(global_data.param_name[PARAM_IMAGE_HEIGHT], "IMAGE_HEIGHT"); global_data.param_access[PARAM_IMAGE_HEIGHT] = READ_ONLY; @@ -115,6 +114,49 @@ void global_data_reset_param_defaults(void){ strcpy(global_data.param_name[PARAM_IMAGE_TEST_PATTERN], "IMAGE_TEST_PAT"); global_data.param_access[PARAM_IMAGE_TEST_PATTERN] = READ_WRITE; + global_data.param[PARAM_IMAGE_INTERVAL] = 1; + strcpy(global_data.param_name[PARAM_IMAGE_INTERVAL], "IMAGE_INTERVAL"); + global_data.param_access[PARAM_IMAGE_INTERVAL] = READ_WRITE; + + + global_data.param[PARAM_ALGORITHM_CHOICE] = 1; + strcpy(global_data.param_name[PARAM_ALGORITHM_CHOICE], "ALG_CHOICE"); + global_data.param_access[PARAM_ALGORITHM_CHOICE] = READ_WRITE; + + global_data.param[PARAM_ALGORITHM_CORNER_KAPPA] = 0.06; + strcpy(global_data.param_name[PARAM_ALGORITHM_CORNER_KAPPA], "ALG_CORN_KAPPA"); + global_data.param_access[PARAM_ALGORITHM_CORNER_KAPPA] = READ_WRITE; + + global_data.param[PARAM_ALGORITHM_OUTLIER_THR_BLOCK] = 1.0f; + strcpy(global_data.param_name[PARAM_ALGORITHM_OUTLIER_THR_BLOCK], "ALG_THR_BM"); + global_data.param_access[PARAM_ALGORITHM_OUTLIER_THR_BLOCK] = READ_WRITE; + + global_data.param[PARAM_ALGORITHM_OUTLIER_THR_KLT] = 0.2f; + strcpy(global_data.param_name[PARAM_ALGORITHM_OUTLIER_THR_KLT], "ALG_THR_KLT"); + global_data.param_access[PARAM_ALGORITHM_OUTLIER_THR_KLT] = READ_WRITE; + + global_data.param[PARAM_ALGORITHM_OUTLIER_THR_RATIO] = 0.15f; + strcpy(global_data.param_name[PARAM_ALGORITHM_OUTLIER_THR_RATIO], "ALG_THR_RATIO"); + global_data.param_access[PARAM_ALGORITHM_OUTLIER_THR_RATIO] = READ_WRITE; + + global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO] = 50; + strcpy(global_data.param_name[PARAM_ALGORITHM_MIN_VALID_RATIO], "ALG_VALID_RATIO"); + global_data.param_access[PARAM_ALGORITHM_MIN_VALID_RATIO] = READ_WRITE; + + + global_data.param[PARAM_KLT_MAX_ITERS] = 5; + strcpy(global_data.param_name[PARAM_KLT_MAX_ITERS], "KLT_MAX_ITERS"); + global_data.param_access[PARAM_KLT_MAX_ITERS] = READ_WRITE; + + global_data.param[PARAM_KLT_DET_VALUE_MIN] = 90; + strcpy(global_data.param_name[PARAM_KLT_DET_VALUE_MIN], "KLT_DET_MIN"); + global_data.param_access[PARAM_KLT_DET_VALUE_MIN] = READ_WRITE; + + global_data.param[PARAM_KLT_GYRO_ASSIST] = 1; + strcpy(global_data.param_name[PARAM_KLT_GYRO_ASSIST], "KLT_GYRO_ASSIST"); + global_data.param_access[PARAM_KLT_GYRO_ASSIST] = READ_WRITE; + + global_data.param[PARAM_GYRO_SENSITIVITY_DPS] = 250; strcpy(global_data.param_name[PARAM_GYRO_SENSITIVITY_DPS], "GYRO_SENS_DPS"); global_data.param_access[PARAM_GYRO_SENSITIVITY_DPS] = READ_WRITE; @@ -135,6 +177,7 @@ void global_data_reset_param_defaults(void){ strcpy(global_data.param_name[PARAM_SONAR_KALMAN_L2], "SONAR_KAL_L2"); global_data.param_access[PARAM_SONAR_KALMAN_L2] = READ_WRITE; + global_data.param[PARAM_USB_SEND_VIDEO] = 1; // send video over USB strcpy(global_data.param_name[PARAM_USB_SEND_VIDEO], "USB_SEND_VIDEO"); global_data.param_access[PARAM_USB_SEND_VIDEO] = READ_WRITE; @@ -155,54 +198,45 @@ void global_data_reset_param_defaults(void){ strcpy(global_data.param_name[PARAM_USB_SEND_DEBUG], 
"USB_SEND_DEBUG"); global_data.param_access[PARAM_USB_SEND_DEBUG] = READ_WRITE; - global_data.param[PARAM_VIDEO_ONLY] = 0; - strcpy(global_data.param_name[PARAM_VIDEO_ONLY], "VIDEO_ONLY"); - global_data.param_access[PARAM_VIDEO_ONLY] = READ_WRITE; + global_data.param[PARAM_USB_SEND_FLOW_OUTL] = 1; // send flow outlier debug + strcpy(global_data.param_name[PARAM_USB_SEND_FLOW_OUTL], "USB_SEND_FLW_OL"); + global_data.param_access[PARAM_USB_SEND_FLOW_OUTL] = READ_WRITE; + + global_data.param[PARAM_USB_SEND_QUAL_0] = 1; + strcpy(global_data.param_name[PARAM_USB_SEND_QUAL_0], "USB_SEND_Q_0"); + global_data.param_access[PARAM_USB_SEND_QUAL_0] = READ_WRITE; global_data.param[PARAM_VIDEO_RATE] = 150; strcpy(global_data.param_name[PARAM_VIDEO_RATE], "VIDEO_RATE"); global_data.param_access[PARAM_VIDEO_RATE] = READ_WRITE; - global_data.param[PARAM_MAX_FLOW_PIXEL] = BOTTOM_FLOW_SEARCH_WINDOW_SIZE; - strcpy(global_data.param_name[PARAM_MAX_FLOW_PIXEL], "BFLOW_MAX_PIX"); - global_data.param_access[PARAM_MAX_FLOW_PIXEL] = READ_ONLY; - -// global_data.param[PARAM_BOTTOM_FLOW_VALUE_THRESHOLD] = 8 * 8 * 20; - global_data.param[PARAM_BOTTOM_FLOW_VALUE_THRESHOLD] = 5000; // threshold is irrelevant with this value - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_VALUE_THRESHOLD], "BFLOW_V_THLD"); - global_data.param_access[PARAM_BOTTOM_FLOW_VALUE_THRESHOLD] = READ_WRITE; -// global_data.param[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD] = 100; - global_data.param[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD] = 30; // threshold is irrelevant with this value - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD], "BFLOW_F_THLD"); - global_data.param_access[PARAM_BOTTOM_FLOW_FEATURE_THRESHOLD] = READ_WRITE; + global_data.param[PARAM_FLOW_MAX_PIXEL] = FLOW_SEARCH_WINDOW_SIZE; + strcpy(global_data.param_name[PARAM_FLOW_MAX_PIXEL], "FLOW_MAX_PIX"); + global_data.param_access[PARAM_FLOW_MAX_PIXEL] = READ_ONLY; - global_data.param[PARAM_BOTTOM_FLOW_HIST_FILTER] = 0; -// global_data.param[PARAM_BOTTOM_FLOW_HIST_FILTER] = 1; - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_HIST_FILTER], "BFLOW_HIST_FIL"); - global_data.param_access[PARAM_BOTTOM_FLOW_HIST_FILTER] = READ_WRITE; +// global_data.param[PARAM_FLOW_VALUE_THRESHOLD] = 8 * 8 * 20; + global_data.param[PARAM_FLOW_VALUE_THRESHOLD] = 5000; // threshold is irrelevant with this value + strcpy(global_data.param_name[PARAM_FLOW_VALUE_THRESHOLD], "FLOW_V_THLD"); + global_data.param_access[PARAM_FLOW_VALUE_THRESHOLD] = READ_WRITE; -// global_data.param[PARAM_BOTTOM_FLOW_GYRO_COMPENSATION] = 0; - global_data.param[PARAM_BOTTOM_FLOW_GYRO_COMPENSATION] = 0; - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_GYRO_COMPENSATION], "BFLOW_GYRO_COM"); - global_data.param_access[PARAM_BOTTOM_FLOW_GYRO_COMPENSATION] = READ_WRITE; +// global_data.param[PARAM_FLOW_FEATURE_THRESHOLD] = 100; + global_data.param[PARAM_FLOW_FEATURE_THRESHOLD] = 30; // threshold is irrelevant with this value + strcpy(global_data.param_name[PARAM_FLOW_FEATURE_THRESHOLD], "FLOW_F_THLD"); + global_data.param_access[PARAM_FLOW_FEATURE_THRESHOLD] = READ_WRITE; - global_data.param[PARAM_BOTTOM_FLOW_LP_FILTERED] = 0; - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_LP_FILTERED], "BFLOW_LP_FIL"); - global_data.param_access[PARAM_BOTTOM_FLOW_LP_FILTERED] = READ_WRITE; + global_data.param[PARAM_FLOW_GYRO_COMPENSATION] = 0; + strcpy(global_data.param_name[PARAM_FLOW_GYRO_COMPENSATION], "FLOW_GYRO_COM"); + global_data.param_access[PARAM_FLOW_GYRO_COMPENSATION] = READ_WRITE; - 
global_data.param[PARAM_BOTTOM_FLOW_WEIGHT_NEW] = 0.3f; - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_WEIGHT_NEW], "BFLOW_W_NEW"); - global_data.param_access[PARAM_BOTTOM_FLOW_WEIGHT_NEW] = READ_WRITE; + global_data.param[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] = 10.0f; + strcpy(global_data.param_name[PARAM_FLOW_SERIAL_THROTTLE_FACTOR], "FLOW_THROTT"); + global_data.param_access[PARAM_FLOW_SERIAL_THROTTLE_FACTOR] = READ_WRITE; - global_data.param[PARAM_BOTTOM_FLOW_SERIAL_THROTTLE_FACTOR] = 10.0f; - strcpy(global_data.param_name[PARAM_BOTTOM_FLOW_SERIAL_THROTTLE_FACTOR], "BFLOW_THROTT"); - global_data.param_access[PARAM_BOTTOM_FLOW_SERIAL_THROTTLE_FACTOR] = READ_WRITE; global_data.param[DEBUG_VARIABLE] = 1; strcpy(global_data.param_name[DEBUG_VARIABLE], "DEBUG"); global_data.param_access[DEBUG_VARIABLE] = READ_WRITE; - } /** @@ -213,25 +247,3 @@ void global_data_reset(void) // not in use anymore } -/** - * @brief changes read only settings depending on sensor position - */ -void set_sensor_position_settings(uint8_t sensor_position) -{ - - switch(sensor_position) - { - case(BOTTOM): - global_data.param[PARAM_IMAGE_WIDTH] = BOTTOM_FLOW_IMAGE_WIDTH; - global_data.param[PARAM_IMAGE_HEIGHT] = BOTTOM_FLOW_IMAGE_HEIGHT; - break; - - default: - debug_int_message_buffer("Unused sensor position:", sensor_position); - return; - } - - debug_int_message_buffer("Set sensor position:", sensor_position); - return; -} - diff --git a/src/modules/flow/stm32f4xx_it.c b/src/modules/flow/stm32f4xx_it.c index eca0dc4..8e2eb69 100644 --- a/src/modules/flow/stm32f4xx_it.c +++ b/src/modules/flow/stm32f4xx_it.c @@ -30,6 +30,7 @@ #include "stm32f4xx_it.h" #include "stm32f4xx_conf.h" #include "main.h" +#include "timer.h" #include "usb_core.h" #include "usb_dcd_int.h" @@ -135,7 +136,7 @@ void PendSV_Handler(void) */ void SysTick_Handler(void) { - timer_update(); + timer_interrupt_fn(); } /******************************************************************************/ diff --git a/src/modules/flow/system_stm32f4xx.c b/src/modules/flow/system_stm32f4xx.c index a7e7563..fe62cd6 100644 --- a/src/modules/flow/system_stm32f4xx.c +++ b/src/modules/flow/system_stm32f4xx.c @@ -46,9 +46,9 @@ *----------------------------------------------------------------------------- * System Clock source | PLL (HSE) *----------------------------------------------------------------------------- - * SYSCLK(Hz) | 168000000 + * SYSCLK(Hz) | 192000000 *----------------------------------------------------------------------------- - * HCLK(Hz) | 168000000 + * HCLK(Hz) | 192000000 *----------------------------------------------------------------------------- * AHB Prescaler | 1 *----------------------------------------------------------------------------- @@ -60,11 +60,11 @@ *----------------------------------------------------------------------------- * PLL_M | 24 *----------------------------------------------------------------------------- - * PLL_N | 336 + * PLL_N | 384 *----------------------------------------------------------------------------- * PLL_P | 2 *----------------------------------------------------------------------------- - * PLL_Q | 7 + * PLL_Q | 8 *----------------------------------------------------------------------------- * PLLI2S_N | NA *----------------------------------------------------------------------------- diff --git a/src/modules/flow/timer.c b/src/modules/flow/timer.c new file mode 100644 index 0000000..ff89990 --- /dev/null +++ b/src/modules/flow/timer.c @@ -0,0 +1,211 @@ + 
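For orientation before the new timer module: timer.c below replaces the old SysTick hook (SysTick_Handler now calls timer_interrupt_fn(), see the stm32f4xx_it.c hunk above) with a table of soft timers. The interrupt only flags expired timers; timer_check() clears the flag with the LDREXB/STREXB pair and runs the callback in thread context. A minimal usage sketch (illustration only, not part of this patch; the task name, its 1000 ms period and the superloop are assumptions):

#include <stdint.h>
#include "timer.h"

static void heartbeat_task(void)            /* hypothetical periodic task */
{
    /* runs from timer_check(), i.e. in thread context, not in the SysTick ISR */
}

void example_main_loop(void)
{
    timer_init();                           /* configures SysTick for 1 ms ticks */
    timer_register(heartbeat_task, 1000);   /* request a callback every 1000 ms */

    uint32_t t_start = get_boot_time_us();
    for (;;) {
        timer_check();                      /* dispatches any timers flagged by the ISR */

        if (get_time_delta_us(t_start) > 1000000) {
            /* ... roughly once per second ... */
            t_start = get_boot_time_us();
        }
    }
}

Note that timer_update() now re-configures the period of an already registered callback; it is no longer the SysTick entry point.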
+/****************************************************************************
+ *
+ * Copyright (C) 2013 PX4 Development Team. All rights reserved.
+ * Author: Petri Tanskanen
+ *         Lorenz Meier
+ *         Samuel Zihlmann
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. Neither the name PX4 nor the names of its contributors may be
+ * used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ ****************************************************************************/
+
+#include <stdint.h>
+#include <stdbool.h>
+#include
+#include "stm32f4xx.h"
+
+#include "timer.h"
+
+#define __INLINE inline
+#include "core_cmInstr.h"
+
+/* boot time in millisecond ticks */
+volatile uint32_t boot_time_ms = 0;
+volatile uint32_t boot_time_us_base = 999;
+
+static uint32_t ticks_per_ms;
+static uint32_t ticks_per_us;
+
+#define TIMER_FLAG_ENABLED 0x01
+#define TIMER_FLAG_TRIGGERED 0x02
+
+typedef struct _timer_info {
+    uint32_t period;
+    uint32_t countdown;
+    void (*timer_fn)(void);
+    volatile uint8_t flags;
+} timer_info;
+
+static timer_info timer[NTIMERS];
+
+/**
+ * @brief Triggered by system timer interrupt every millisecond.
+ * @param None + * @retval None + */ +void timer_interrupt_fn(void) +{ + boot_time_ms++; + /* we do it like this to have correct roll-over behaviour */ + boot_time_us_base += 1000u; + + /* each timer decrements every millisecond if > 0 */ + for (unsigned i = 0; i < NTIMERS; i++) { + /* dont need exclusive access for reading the enabled flag because it will not be changed very often */ + if (timer[i].flags & TIMER_FLAG_ENABLED) { + if (timer[i].countdown > 0) { + timer[i].countdown--; + } else { + timer[i].countdown = timer[i].period - 1; + /* set the flag: */ + timer[i].flags |= TIMER_FLAG_TRIGGERED; + /* force a fail of the flags access instructions: */ + __CLREX(); + } + } + } +} + +void timer_check(void) { + for (unsigned i = 0; i < NTIMERS; i++) { + bool triggered = false; + uint8_t tmp; + do { + tmp = __LDREXB(&timer[i].flags); + triggered = (tmp & TIMER_FLAG_TRIGGERED); + if (triggered) { + tmp &= ~TIMER_FLAG_TRIGGERED; + } + } while (__STREXB(tmp, &timer[i].flags)); + if (triggered) { + timer[i].timer_fn(); + } + } +} + +void timer_register(void (*timer_fn)(void), uint32_t period_ms) { + /* find free timer: */ + int idx; + for (idx = 0; idx < NTIMERS; idx++) { + if ((timer[idx].flags & TIMER_FLAG_ENABLED) == 0) { + break; + } + } + if (idx >= NTIMERS) { + /* capture error */ + while (1); + } + /* setup the info struct: */ + timer[idx].period = period_ms; + timer[idx].countdown = period_ms - 1; + timer[idx].timer_fn = timer_fn; + /* enable it: */ + timer[idx].flags = TIMER_FLAG_ENABLED; +} + +int timer_update(void (*timer_fn)(void), uint32_t period_ms) { + /* find the timer: */ + int idx; + for (idx = 0; idx < NTIMERS; idx++) { + if ((timer[idx].flags & TIMER_FLAG_ENABLED) != 0 && timer[idx].timer_fn == timer_fn) { + break; + } + } + if (idx >= NTIMERS) { + return -1; + } + /* update the period */ + if (timer[idx].period != period_ms) { + timer[idx].period = period_ms; + } + return 0; +} + +void timer_init(void) +{ + /* init clock */ + ticks_per_ms = SystemCoreClock / 1000u; + ticks_per_us = ticks_per_ms / 1000u; + /* init all timers */ + for (int idx = 0; idx < NTIMERS; idx++) { + /* disable it: */ + timer[idx].flags = 0; + } + /* enable systick timer: */ + if (SysTick_Config(ticks_per_ms)) /* set timer to trigger interrupt every 1 millisecond */ + { + /* capture clock error */ + while (1); + } +} + +uint32_t get_boot_time_ms(void) +{ + // clear the COUNTFLAG: + volatile bool dummy __attribute__((unused)) = (SysTick->CTRL & SysTick_CTRL_COUNTFLAG_Msk) != 0; + volatile uint32_t val; + do { + // read the value: + val = boot_time_ms; + // make sure it did not roll over in the mean-time: + } while(SysTick->CTRL & SysTick_CTRL_COUNTFLAG_Msk); + return val; +} + +uint32_t get_boot_time_us(void) +{ + // clear the COUNTFLAG: + volatile bool dummy __attribute__((unused)) = (SysTick->CTRL & SysTick_CTRL_COUNTFLAG_Msk) != 0; + volatile uint32_t val_us_base; + volatile uint32_t val_tick; + do { + // read the value: + val_us_base = boot_time_us_base; + if (SCB->ICSR & SCB_ICSR_PENDSTSET_Msk) val_us_base += 1000; + val_tick = SysTick->VAL; + // make sure it did not roll over in the mean-time: + } while(SysTick->CTRL & SysTick_CTRL_COUNTFLAG_Msk); + // return the calculated value: + return val_us_base - ((val_tick & SysTick_VAL_CURRENT_Msk) / ticks_per_us); +} + +uint32_t calculate_time_delta_us(uint32_t end, uint32_t start) { + return end - start; +} + +uint32_t get_time_delta_us(uint32_t start) +{ + uint32_t now = get_boot_time_us(); + return calculate_time_delta_us(now, start); +} + +void 
delay(uint16_t ms) { + int32_t endt = get_boot_time_us() + (uint32_t)ms * 1000u; + while ((endt - (int32_t)get_boot_time_us()) > 0); +} diff --git a/src/modules/flow/usart.c b/src/modules/flow/usart.c index fe17546..cf85bb0 100644 --- a/src/modules/flow/usart.c +++ b/src/modules/flow/usart.c @@ -295,7 +295,7 @@ void usart_init(void) /* Enable the USARTx Interrupt */ NVIC_InitStructure.NVIC_IRQChannel = USART2_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 1; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); diff --git a/src/modules/flow/usb_bsp.c b/src/modules/flow/usb_bsp.c index 5f19232..07906f4 100644 --- a/src/modules/flow/usb_bsp.c +++ b/src/modules/flow/usb_bsp.c @@ -87,7 +87,7 @@ void USB_OTG_BSP_EnableInterrupt(USB_OTG_CORE_HANDLE *pdev) NVIC_PriorityGroupConfig(NVIC_PriorityGroup_1); NVIC_InitStructure.NVIC_IRQChannel = OTG_FS_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 1; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 2; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 3; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); diff --git a/src/modules/flow/utils.c b/src/modules/flow/utils.c deleted file mode 100644 index 3252f23..0000000 --- a/src/modules/flow/utils.c +++ /dev/null @@ -1,185 +0,0 @@ -/**************************************************************************** - * - * Copyright (C) 2013 PX4 Development Team. All rights reserved. - * -> Code from CodeForge.com - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions - * are met: - * - * 1. Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in - * the documentation and/or other materials provided with the - * distribution. - * 3. Neither the name PX4 nor the names of its contributors may be - * used to endorse or promote products derived from this software - * without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS - * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE - * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, - * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, - * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS - * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED - * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT - * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN - * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * - ****************************************************************************/ - -#include -#include "utils.h" -#include - -typedef union { -long L; -float F; -} LF_t; - -char *ftoa(float f) //, int *status) -{ - long mantissa, int_part, frac_part; - short exp2; - LF_t x; - char *p; - static char outbuf[15]; - - //*status = 0; -#pragma GCC diagnostic push -#pragma GCC diagnostic ignored "-Wfloat-equal" - if (f == 0.0f) - { -#pragma GCC diagnostic pop - outbuf[0] = '0'; - outbuf[1] = '.'; - outbuf[2] = '0'; - outbuf[3] = 0; - return outbuf; - } - x.F = f; - - exp2 = (unsigned char)(x.L >> 23) - 127; - mantissa = (x.L & 0xFFFFFF) | 0x800000; - frac_part = 0; - int_part = 0; - - if (exp2 >= 31) - { - //*status = _FTOA_TOO_LARGE; - return 0; - } - else if (exp2 < -23) - { - //*status = _FTOA_TOO_SMALL; - return 0; - } - else if (exp2 >= 23) - int_part = mantissa << (exp2 - 23); - else if (exp2 >= 0) - { - int_part = mantissa >> (23 - exp2); - frac_part = (mantissa << (exp2 + 1)) & 0xFFFFFF; - } - else /* if (exp2 < 0) */ - frac_part = (mantissa & 0xFFFFFF) >> -(exp2 + 1); - - p = outbuf; - - if (x.L < 0) - *p++ = '-'; - - if (int_part == 0) - *p++ = '0'; - else - { - ltoa(p, int_part, 10); - while (*p) - p++; - } - *p++ = '.'; - - if (frac_part == 0) - *p++ = '0'; - else - { - char m, max; - - max = sizeof (outbuf) - (p - outbuf) - 1; - if (max > 7) - max = 7; - /* print BCD */ - for (m = 0; m < max; m++) - { - /* frac_part *= 10; */ - frac_part = (frac_part << 3) + (frac_part << 1); - - *p++ = (frac_part >> 24) + '0'; - frac_part &= 0xFFFFFF; - } - /* delete ending zeroes */ - for (--p; p[0] == '0' && p[-1] != '.'; --p) - ; - ++p; - } - *p = 0; - - return outbuf; -} - - -void ltoa(char *buf, unsigned long i, int base) -{ - char *s; - #define LEN 25 - int rem; - char rev[LEN+1]; - - if (i == 0) - s = "0"; - else - { - rev[LEN] = 0; - s = &rev[LEN]; - while (i) - { - rem = i % base; - if (rem < 10) - *--s = rem + '0'; - else if (base == 16) - *--s = "abcdef"[rem - 10]; - i /= base; - } - } - strcpy(buf, s); -} - -void itoa(char *buf, unsigned int i, int base) -{ - char *s; - const int len = 10; - int rem; - char rev[len+1]; - - if (i == 0) - s = "0"; - else - { - rev[len] = 0; - s = &rev[len]; - while (i) - { - rem = i % base; - if (rem < 10) - *--s = rem + '0'; - else if (base == 16) - *--s = "abcdef"[rem - 10]; - i /= base; - } - } - strcpy(buf, s); -} diff --git a/src/modules/flow/i2c.c b/src/modules/i2c_slave/i2c_slave.c similarity index 56% rename from src/modules/flow/i2c.c rename to src/modules/i2c_slave/i2c_slave.c index 097ce6e..b85ed0b 100644 --- a/src/modules/flow/i2c.c +++ b/src/modules/i2c_slave/i2c_slave.c @@ -30,35 +30,29 @@ * POSSIBILITY OF SUCH DAMAGE. * ****************************************************************************/ - -/** - * @file i2c.c - * Definition of i2c frames. 
- * @author Thomas Boehm - * @author James Goppert - */ + +#include +#include +#include "fmu_comm.h" #include "px4_config.h" #include "px4_macros.h" -#include "i2c.h" #include "stm32f4xx.h" #include "stm32f4xx_i2c.h" #include "stm32f4xx_rcc.h" #include "stm32f4xx_gpio.h" +#include "stm32f4xx_conf.h" -#include "led.h" #include "i2c_frame.h" #include "gyro.h" -#include "sonar.h" #include "main.h" +#include "result_accumulator.h" -#include "mavlink_bridge_header.h" -#include +#define I2C1_OWNADDRESS_1_BASE 0x42 //7bit base address /* prototypes */ void I2C1_EV_IRQHandler(void); void I2C1_ER_IRQHandler(void); -char readI2CAddressOffset(void); static char offset = 0; uint8_t dataRX = 0; @@ -72,8 +66,21 @@ uint8_t readout_done_frame1 = 1; uint8_t readout_done_frame2 = 1; uint8_t stop_accumulation = 0; -void i2c_init() { +static char readI2CAddressOffset(void) { + //read 3bit address offset of 7 bit address + offset = 0x00; + offset = GPIO_ReadInputData(GPIOC ) >> 13; //bit 0 + offset = offset | ((GPIO_ReadInputData(GPIOC ) >> 14) << 1); //bit 1 + offset = offset | ((GPIO_ReadInputData(GPIOC ) >> 15) << 2); //bit 2 + offset = (~offset) & 0x07; + return offset; +} + +static char i2c_get_ownaddress1(void) { + return (I2C1_OWNADDRESS_1_BASE + readI2CAddressOffset()) << 1; //add offset to base and shift 1 bit to generate valid 7 bit address +} +__EXPORT void fmu_comm_init(void) { I2C_DeInit(I2C1 ); //Deinit and reset the I2C to avoid it locking up I2C_SoftwareResetCmd(I2C1, ENABLE); I2C_SoftwareResetCmd(I2C1, DISABLE); @@ -107,13 +114,13 @@ void i2c_init() { NVIC_InitTypeDef NVIC_InitStructure, NVIC_InitStructure2; NVIC_InitStructure.NVIC_IRQChannel = I2C1_EV_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 1; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); NVIC_InitStructure2.NVIC_IRQChannel = I2C1_ER_IRQn; - NVIC_InitStructure2.NVIC_IRQChannelPreemptionPriority = 0; + NVIC_InitStructure2.NVIC_IRQChannelPreemptionPriority = 1; NVIC_InitStructure2.NVIC_IRQChannelSubPriority = 0; NVIC_InitStructure2.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure2); @@ -135,26 +142,22 @@ void i2c_init() { I2C_Cmd(I2C1, ENABLE); } +__EXPORT void fmu_comm_run(void) {} + void I2C1_EV_IRQHandler(void) { //uint8_t dataRX; static uint8_t txDataIndex1 = 0x00; static uint8_t txDataIndex2 = 0x00; static uint8_t rxDataIndex = 0x00; - switch (I2C_GetLastEvent(I2C1 )) { - - case I2C_EVENT_SLAVE_RECEIVER_ADDRESS_MATCHED : { - I2C1 ->SR1; - I2C1 ->SR2; - rxDataIndex = 0; - break; - } - case I2C_EVENT_SLAVE_TRANSMITTER_ADDRESS_MATCHED : { - I2C1 ->SR1; - I2C1 ->SR2; - break; - } - case I2C_EVENT_SLAVE_BYTE_RECEIVED : { + + if (I2C_GetITStatus(I2C1, I2C_IT_ADDR) == SET) { + uint8_t sr2 = I2C1->SR2; + if (sr2 & I2C_SR2_TRA) { + // Write + rxDataIndex = 0; + } + } else if (I2C_GetITStatus(I2C1, I2C_IT_RXNE)) { //receive address offset dataRX = I2C_ReceiveData(I2C1 ); rxDataIndex++; @@ -169,11 +172,7 @@ void I2C1_EV_IRQHandler(void) { //indicate sending readout_done_frame1 = 0; readout_done_frame2 = 0; - break; - } - case I2C_EVENT_SLAVE_BYTE_TRANSMITTING : - case I2C_EVENT_SLAVE_BYTE_TRANSMITTED : { - + } else if (I2C_GetITStatus(I2C1, I2C_IT_TXE)) { if (txDataIndex1 < (I2C_FRAME_SIZE)) { I2C_SendData(I2C1, txDataFrame1[publishedIndexFrame1][txDataIndex1]); @@ -184,7 +183,6 @@ void I2C1_EV_IRQHandler(void) { if (txDataIndex2 < I2C_INTEGRAL_FRAME_SIZE) { txDataIndex2++; } 
- } //check whether last byte is read frame1 @@ -197,20 +195,10 @@ void I2C1_EV_IRQHandler(void) { readout_done_frame2 = 1; stop_accumulation = 1; } - - break; - } - - case I2C_EVENT_SLAVE_ACK_FAILURE : { - I2C1 ->SR1 &= 0x00FF; - break; - } - - case I2C_EVENT_SLAVE_STOP_DETECTED : { - I2C1 ->SR1; - I2C1 ->CR1 |= 0x1; - break; - } + } else if (I2C_GetITStatus(I2C1, I2C_IT_AF)) { + I2C_ClearITPendingBit(I2C1, I2C_IT_AF); + } else if (I2C_GetITStatus(I2C1, I2C_IT_STOPF)) { + I2C_Cmd(I2C1, ENABLE); } } @@ -223,112 +211,36 @@ void I2C1_ER_IRQHandler(void) { } } -void update_TX_buffer(float pixel_flow_x, float pixel_flow_y, - float flow_comp_m_x, float flow_comp_m_y, uint8_t qual, - float ground_distance, float gyro_x_rate, float gyro_y_rate, - float gyro_z_rate, int16_t gyro_temp, legacy_12c_data_t *pd) { - static uint16_t frame_count = 0; - - i2c_frame f; - i2c_integral_frame f_integral; - - f.frame_count = frame_count; - f.pixel_flow_x_sum = pixel_flow_x * 10.0f; - f.pixel_flow_y_sum = pixel_flow_y * 10.0f; - f.flow_comp_m_x = flow_comp_m_x * 1000; - f.flow_comp_m_y = flow_comp_m_y * 1000; - f.qual = qual; - f.ground_distance = ground_distance * 1000; - - f.gyro_x_rate = gyro_x_rate * getGyroScalingFactor(); - f.gyro_y_rate = gyro_y_rate * getGyroScalingFactor(); - f.gyro_z_rate = gyro_z_rate * getGyroScalingFactor(); - f.gyro_range = getGyroRange(); - - uint32_t time_since_last_sonar_update; - time_since_last_sonar_update = (get_boot_time_us() - - get_sonar_measure_time()); +__EXPORT void fmu_comm_update(const result_accumulator_frame* frame_data) { + static result_accumulator_ctx accumulator; + static bool initialized = false; - if (time_since_last_sonar_update < 255 * 1000) { - f.sonar_timestamp = time_since_last_sonar_update / 1000; //convert to ms - } else { - f.sonar_timestamp = 255; + if (!initialized) { + result_accumulator_init(&accumulator); + initialized = true; } - static float accumulated_flow_x = 0; - static float accumulated_flow_y = 0; - static uint16_t accumulated_framecount = 0; - static uint16_t accumulated_quality = 0; - static float accumulated_gyro_x = 0; - static float accumulated_gyro_y = 0; - static float accumulated_gyro_z = 0; - static uint32_t integration_timespan = 0; - static uint32_t lasttime = 0; - - /* calculate focal_length in pixel */ - const float focal_length_px = ((global_data.param[PARAM_FOCAL_LENGTH_MM]) - / (4.0f * 6.0f) * 1000.0f); //original focal lenght: 12mm pixelsize: 6um, binning 4 enabled - - // reset if readout has been performed + /* reset if readout has been performed */ if (stop_accumulation == 1) { - - //debug output -// mavlink_msg_optical_flow_send(MAVLINK_COMM_2, get_boot_time_us(), -// global_data.param[PARAM_SENSOR_ID], accumulated_flow_x * 10.0f, -// accumulated_gyro_x * 10.0f, integration_timespan, -// accumulated_framecount, (uint8_t) (accumulated_quality / accumulated_framecount), ground_distance); - - integration_timespan = 0; - accumulated_flow_x = 0; //mrad - accumulated_flow_y = 0; //mrad - accumulated_framecount = 0; - accumulated_quality = 0; - accumulated_gyro_x = 0; //mrad - accumulated_gyro_y = 0; //mrad - accumulated_gyro_z = 0; //mrad + result_accumulator_reset(&accumulator); stop_accumulation = 0; } - //accumulate flow and gyro values between sucessive I2C readings - //update only if qual !=0 - if (qual > 0) { - uint32_t deltatime = (get_boot_time_us() - lasttime); - integration_timespan += deltatime; - accumulated_flow_x += pixel_flow_y * 1000.0f / focal_length_px * 1.0f;//mrad axis swapped to align x flow around y 
axis - accumulated_flow_y += pixel_flow_x * 1000.0f / focal_length_px * -1.0f; //mrad - accumulated_framecount++; - accumulated_quality += qual; - accumulated_gyro_x += gyro_x_rate * deltatime * 0.001f; //mrad gyro_x_rate * 1000.0f*deltatime/1000000.0f; - accumulated_gyro_y += gyro_y_rate * deltatime * 0.001f; //mrad - accumulated_gyro_z += gyro_z_rate * deltatime * 0.001f; //mrad - } + /* feed the accumulator and recalculate */ + result_accumulator_feed(&accumulator, frame_data); + + i2c_frame f; + i2c_integral_frame f_integral; - //update lasttime - lasttime = get_boot_time_us(); - - f_integral.frame_count_since_last_readout = accumulated_framecount; - f_integral.gyro_x_rate_integral = accumulated_gyro_x * 10.0f; //mrad*10 - f_integral.gyro_y_rate_integral = accumulated_gyro_y * 10.0f; //mrad*10 - f_integral.gyro_z_rate_integral = accumulated_gyro_z * 10.0f; //mrad*10 - f_integral.pixel_flow_x_integral = accumulated_flow_x * 10.0f; //mrad*10 - f_integral.pixel_flow_y_integral = accumulated_flow_y * 10.0f; //mrad*10 - f_integral.integration_timespan = integration_timespan; //microseconds - f_integral.ground_distance = ground_distance * 1000; //mmeters - f_integral.sonar_timestamp = time_since_last_sonar_update; //microseconds - f_integral.qual = - (uint8_t) (accumulated_quality / accumulated_framecount); //0-255 linear quality measurement 0=bad, 255=best - f_integral.gyro_temperature = gyro_temp;//Temperature * 100 in centi-degrees Celsius + result_accumulator_fill_i2c_data(&accumulator, &f, &f_integral); + /* perform buffer swapping */ notpublishedIndexFrame1 = 1 - publishedIndexFrame1; // choose not the current published 1 buffer notpublishedIndexFrame2 = 1 - publishedIndexFrame2; // choose not the current published 2 buffer - // HACK!! To get the data -TODO(TODO:Tom Please fix this); - uavcan_export(&pd->frame, &f, I2C_FRAME_SIZE); - uavcan_export(&pd->integral_frame, &f_integral, I2C_INTEGRAL_FRAME_SIZE); - // fill I2C transmitbuffer1 with frame1 values + // fill I2C transmitbuffer1 with frame1 values memcpy(&(txDataFrame1[notpublishedIndexFrame1]), &f, I2C_FRAME_SIZE); @@ -345,21 +257,4 @@ TODO(TODO:Tom Please fix this); if (readout_done_frame2) { publishedIndexFrame2 = 1 - publishedIndexFrame2; } - - frame_count++; - -} - -char readI2CAddressOffset(void) { - //read 3bit address offset of 7 bit address - offset = 0x00; - offset = GPIO_ReadInputData(GPIOC ) >> 13; //bit 0 - offset = offset | ((GPIO_ReadInputData(GPIOC ) >> 14) << 1); //bit 1 - offset = offset | ((GPIO_ReadInputData(GPIOC ) >> 15) << 2); //bit 2 - offset = (~offset) & 0x07; - return offset; -} - -char i2c_get_ownaddress1(void) { - return (I2C1_OWNADDRESS_1_BASE + readI2CAddressOffset()) << 1; //add offset to base and shift 1 bit to generate valid 7 bit address } diff --git a/src/modules/i2c_slave/module.mk b/src/modules/i2c_slave/module.mk new file mode 100644 index 0000000..4753321 --- /dev/null +++ b/src/modules/i2c_slave/module.mk @@ -0,0 +1,5 @@ +DEFAULT_VISIBILITY=default + +SRCS += i2c_slave.c + +$(info SRCS=$(SRCS)) diff --git a/src/modules/libc/module.mk b/src/modules/libc/module.mk index 260d945..1d3ae31 100644 --- a/src/modules/libc/module.mk +++ b/src/modules/libc/module.mk @@ -55,8 +55,9 @@ STRINGSRCS = string/lib_strncmp.c \ STDIOSRCS = stdio/lib_printf.c \ STDLIBSRCS= stdlib/lib_strtol.c \ - stdlib/lib_strtoul.c \ - stdlib/lib_checkbase.c \ + stdlib/lib_strtoul.c \ + stdlib/lib_checkbase.c \ + stdlib/lib_qsort.c \ SRCS = init.c \ diff --git a/src/modules/libc/stdlib/lib_qsort.c 
b/src/modules/libc/stdlib/lib_qsort.c index 1a13773..5b229e4 100644 --- a/src/modules/libc/stdlib/lib_qsort.c +++ b/src/modules/libc/stdlib/lib_qsort.c @@ -220,15 +220,15 @@ void qsort(void *base, size_t nmemb, size_t size, pn = (char *) base + nmemb * size; r = min(pa - (char *)base, pb - pa); vecswap(base, pb - r, r); - r = min(pd - pc, pn - pd - size); + r = min(pd - pc, pn - pd - (int)size); vecswap(pb, pn - r, r); - if ((r = pb - pa) > size) + if ((r = pb - pa) > (int)size) { qsort(base, r / size, size, compar); } - if ((r = pd - pc) > size) + if ((r = pd - pc) > (int)size) { /* Iterate rather than recurse to save stack space */ diff --git a/src/modules/lidar-lite/lidar.c b/src/modules/lidar-lite/lidar.c new file mode 100644 index 0000000..b792ca7 --- /dev/null +++ b/src/modules/lidar-lite/lidar.c @@ -0,0 +1,315 @@ +/**************************************************************************** + * + * Copyright (c) 2013-2015 PX4 Development Team. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. + * + ****************************************************************************/ + +#include +#include "distance.h" +#include "usbd_cdc_vcp.h" +#include "main.h" +#include "distance_mode_filter.h" +#include "timer.h" + +#include "stm32f4xx.h" +#include "stm32f4xx_i2c.h" +#include "stm32f4xx_rcc.h" +#include "stm32f4xx_gpio.h" +#include "misc.h" + +//Stated range of sensor +#define MINIMUM_DISTANCE 0.0f +#define MAXIMUM_DISTANCE 40.0f + +#define LIDAR_ADDRESS 0x62 +#define REG_CONTROL 0x0 +#define REG_STATUS 0x1 +//for bias calibration +#define REG_OFFSET 0x13 +#define REG_HWVER 0x41 +#define REG_STATUS2 0x47 +#define REG_SWVER 0x4f +#define REG_DATA 0x8f +//REG_DATA is 2-byte read... 
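For orientation, the register map above encodes the measurement sequence this driver performs: write the acquire-with-correction command to REG_CONTROL, give the sensor time to range, then read the two distance bytes starting at REG_DATA. The driver further below does this asynchronously through an interrupt-driven state machine; the blocking sketch here is illustration only and not part of this patch. i2c_write_reg() and i2c_read_regs() are hypothetical helpers, 0x04 is the acquire bit defined just below, and the centimetre scaling follows the LIDAR-Lite register documentation rather than anything in this diff:

#include <stdint.h>
#include <stdbool.h>
#include "timer.h"   /* for delay() */

/* hypothetical blocking helpers, not provided by this patch */
bool i2c_write_reg(uint8_t addr, uint8_t reg, uint8_t val);
bool i2c_read_regs(uint8_t addr, uint8_t reg, uint8_t *buf, uint8_t len);

static float lidar_read_blocking(void)
{
    uint8_t raw[2];

    /* start one ranging acquisition with receiver bias correction */
    i2c_write_reg(LIDAR_ADDRESS, REG_CONTROL, 0x04);

    /* the driver below likewise allows roughly 20 ms between trigger and readback */
    delay(20);

    /* REG_DATA (0x8f) returns the high and then the low distance byte */
    i2c_read_regs(LIDAR_ADDRESS, REG_DATA, raw, 2);

    uint16_t distance_cm = ((uint16_t)raw[0] << 8) | raw[1];
    return distance_cm / 100.0f;   /* metres */
}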
+
+//CONTROL
+#define BITS_ACQUIRE_WITH_CORRECTION 0x04
+//STATUS
+#define BITS_HEALTH 0x01
+#define BITS_INVALID 0x08
+//STATUS2
+#define BITS_BUSY 0x01
+
+bool i2c_started = false;
+
+static void i2c_init_master(void)
+{
+    //Reset I2C
+    I2C_DeInit(I2C1);
+    I2C_SoftwareResetCmd(I2C1, ENABLE);
+    I2C_SoftwareResetCmd(I2C1, DISABLE);
+
+    //Turn on clocks
+    RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C1, ENABLE);
+    RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOB, ENABLE);
+
+    //Route I2C out to B8 (SCL) and B9 (SDA)
+    GPIO_InitTypeDef gpio_init;
+    gpio_init.GPIO_Pin = GPIO_Pin_8 | GPIO_Pin_9;
+    gpio_init.GPIO_Mode = GPIO_Mode_AF;
+    gpio_init.GPIO_Speed = GPIO_Speed_50MHz;
+    gpio_init.GPIO_PuPd = GPIO_PuPd_UP;
+    gpio_init.GPIO_OType = GPIO_OType_OD;
+    GPIO_Init(GPIOB, &gpio_init);
+    GPIO_PinAFConfig(GPIOB, GPIO_PinSource8, GPIO_AF_I2C1); //SCL
+    GPIO_PinAFConfig(GPIOB, GPIO_PinSource9, GPIO_AF_I2C1); //SDA
+
+    //NVIC
+    NVIC_InitTypeDef nvic_init;
+    nvic_init.NVIC_IRQChannel = I2C1_EV_IRQn;
+    nvic_init.NVIC_IRQChannelPreemptionPriority = 0;
+    nvic_init.NVIC_IRQChannelSubPriority = 0;
+    nvic_init.NVIC_IRQChannelCmd = ENABLE;
+    NVIC_Init(&nvic_init);
+    nvic_init.NVIC_IRQChannel = I2C1_ER_IRQn;
+    NVIC_Init(&nvic_init);
+
+    //I2C block setup
+    I2C_InitTypeDef i2c_init;
+    i2c_init.I2C_ClockSpeed = 100000;
+    i2c_init.I2C_Mode = I2C_Mode_I2C;
+    i2c_init.I2C_DutyCycle = I2C_DutyCycle_2;
+    i2c_init.I2C_OwnAddress1 = 0x0;
+    i2c_init.I2C_Ack = I2C_Ack_Disable;
+    i2c_init.I2C_AcknowledgedAddress = I2C_AcknowledgedAddress_7bit;
+    I2C_Init(I2C1, &i2c_init);
+    I2C_Cmd(I2C1, ENABLE);
+
+    i2c_started = true;
+}
+
+/* Data from the LIDAR */
+
+float sample, sample_filter;
+uint32_t measure_time = 0;
+bool sample_valid = false;
+
+__EXPORT void distance_init(void)
+{
+    i2c_init_master();
+
+    /* Run interrupt-based drivers after config */
+    I2C_ITConfig(I2C1, I2C_IT_EVT, ENABLE);
+    I2C_ITConfig(I2C1, I2C_IT_ERR, ENABLE);
+    I2C_ITConfig(I2C1, I2C_IT_BUF, ENABLE);
+
+}
+
+/* In order to minimize busy-wait time, everything is done with interrupt callbacks after config.
+ * distance_trigger should be called periodically, and distance_readback should be called periodically at least
+ * 20ms after distance_trigger is called.
+ * An attempt is made to provide a clean interface (used in e.g. distance_trigger), so that all the
+ * state machine nastiness is encapsulated in I2C1_EV_IRQHandler...
+ * Async I2C stuff eventually ends up in lidar_process...
+ */ + +#define IDLE 0 +#define WRITE 1 +#define READ 2 +//idle -> WRITE -> idle +//idle -> READ -> idle +uint8_t ld_state = IDLE; +uint8_t ld_reg = 0; +uint8_t ld_buffer[2] = {0}; +uint8_t ld_length = 0; +uint8_t ld_xferred = 0; +uint8_t ld_address = 0; +uint32_t ld_nextev = 0; + +__EXPORT void distance_trigger(void) +{ + if(!i2c_started) + return; + + if (ld_state != IDLE) { + // Previous reading did not complete + distance_init(); + sample_valid = false; + } + + ld_state = WRITE; + ld_reg = REG_CONTROL; + ld_buffer[0] = BITS_ACQUIRE_WITH_CORRECTION; + ld_length = 1; + ld_xferred = 0; + ld_address = LIDAR_ADDRESS; + ld_nextev = I2C_EVENT_MASTER_MODE_SELECT; + I2C_GenerateSTART(I2C1, ENABLE); +} + +__EXPORT void distance_readback(void) +{ + if(!i2c_started) + return; + ld_state = READ; + ld_reg = REG_DATA; + ld_length = 2; + ld_xferred = 0; + ld_address = LIDAR_ADDRESS; + ld_nextev = I2C_EVENT_MASTER_MODE_SELECT; + I2C_GenerateSTART(I2C1, ENABLE); +} + +static void lidar_process(void); + +void I2C1_EV_IRQHandler(void); +__EXPORT void I2C1_EV_IRQHandler(void) +{ + if(ld_nextev != 0 && I2C_CheckEvent(I2C1, ld_nextev)) { + switch(ld_nextev) { + //invariant: state != IDLE + case I2C_EVENT_MASTER_MODE_SELECT: + if(ld_state == WRITE || (ld_state == READ && ld_xferred == 0)) { + I2C_Send7bitAddress(I2C1, ld_address << 1, I2C_Direction_Transmitter); + ld_nextev = I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED; + } else if(ld_state == READ) { + I2C_Send7bitAddress(I2C1, ld_address << 1, I2C_Direction_Receiver); + ld_nextev = I2C_EVENT_MASTER_BYTE_RECEIVED; + if(ld_xferred == ld_length) { // last byte + I2C_AcknowledgeConfig(I2C1, DISABLE); + I2C_GenerateSTOP(I2C1, ENABLE); + } else { + I2C_AcknowledgeConfig(I2C1, ENABLE); + } + } else { + I2C_GenerateSTOP(I2C1, ENABLE), ld_state = IDLE, ld_nextev = 0; + } + break; + //invariant: state != IDLE, ld_xferred == 0 + case I2C_EVENT_MASTER_TRANSMITTER_MODE_SELECTED: + if(ld_state != IDLE && ld_xferred == 0) { + I2C_SendData(I2C1, ld_reg), ld_xferred++; + ld_nextev = I2C_EVENT_MASTER_BYTE_TRANSMITTED; + } else { + I2C_GenerateSTOP(I2C1, ENABLE), ld_state = IDLE, ld_nextev = 0; + } + break; + + //invariant: state == WRITE, 0 < ld_xferred <= length+1 + //or: state == READ, ld_xferred == 1 + case I2C_EVENT_MASTER_BYTE_TRANSMITTED: + if(ld_state == WRITE && ld_xferred == (ld_length + 1)) { + I2C_GenerateSTOP(I2C1, ENABLE); + ld_nextev = 0, ld_state = IDLE; + } else if(ld_state == WRITE && ld_xferred > 0 && ld_xferred < (ld_length + 1)) { + I2C_SendData(I2C1, ld_buffer[ld_xferred-1]); + ld_xferred++; + } else if(ld_state == READ && ld_xferred == 1) { + I2C_GenerateSTOP(I2C1, ENABLE); + I2C_GenerateSTART(I2C1, ENABLE); + ld_nextev = I2C_EVENT_MASTER_MODE_SELECT; + } else { + I2C_GenerateSTOP(I2C1, ENABLE), ld_state = IDLE, ld_nextev = 0; + } + break; + + //invariant: state == READ, 0 < ld_xferred < length + 1 + case I2C_EVENT_MASTER_BYTE_RECEIVED: + if(ld_state == READ && ld_xferred == ld_length) { //last byte + ld_buffer[ld_xferred-1] = I2C_ReceiveData(I2C1); + lidar_process(); + ld_state = IDLE, ld_nextev = 0; + } else if(ld_state == READ && ld_xferred < ld_length && ld_xferred > 0) { + ld_buffer[ld_xferred-1] = I2C_ReceiveData(I2C1); + ld_xferred++; + } else { + I2C_GenerateSTOP(I2C1, ENABLE), ld_state = IDLE, ld_nextev = 0; + } + if(ld_nextev == I2C_EVENT_MASTER_BYTE_RECEIVED) { + if(ld_xferred == ld_length) { // last byte + I2C_AcknowledgeConfig(I2C1, DISABLE); + I2C_GenerateSTOP(I2C1, ENABLE); + } else { + I2C_AcknowledgeConfig(I2C1, ENABLE); + } + } + break; 
+ default: + break; + } + } else if (I2C1->SR2 == 0 && I2C1->SR1 == 0x40) { + I2C_ReceiveData(I2C1); + } +} + +void I2C1_ER_IRQHandler(void); +__EXPORT void I2C1_ER_IRQHandler(void) { + if((I2C_ReadRegister(I2C1, I2C_Register_SR1) & 0xFF00) != 0x00) { + I2C_GenerateSTOP(I2C1, ENABLE); + I2C1->SR1 &= 0x00FF; + ld_state = IDLE; + } + sample_valid = false; +} + +static void lidar_process(void) +{ + if(ld_reg == REG_DATA) { + int16_t sample16 = (((int16_t)ld_buffer[1]) << 8) | ld_buffer[0]; + if(sample16 <= 0) { // 0 or less means invalid data + sample_valid = false; + return; + } + sample16 = ((sample16 >> 8) & 0xFF) | ((sample16 << 8) & 0xFF00); + sample = (sample16 / 100.0f); /* convert cm->m */ + if(sample < MINIMUM_DISTANCE || sample > MAXIMUM_DISTANCE) { + sample_valid = false; + return; + } + sample_filter = insert_distance_value_and_get_mode_value(sample); + measure_time = get_boot_time_us(); + sample_valid = true; + } +} + +__EXPORT bool distance_read(float* distance_filtered, float* distance_raw) +{ + if(sample_valid) { + *distance_raw = sample; + *distance_filtered = sample_filter; + return true; + } + return false; +} + +__EXPORT uint32_t get_distance_measure_time(void) +{ + return measure_time; +} diff --git a/src/modules/lidar-lite/module.mk b/src/modules/lidar-lite/module.mk new file mode 100644 index 0000000..3984c4b --- /dev/null +++ b/src/modules/lidar-lite/module.mk @@ -0,0 +1,5 @@ +DEFAULT_VISIBILITY=default + +SRCS += lidar.c + +$(info SRCS=$(SRCS)) diff --git a/src/modules/lidar-sf10/lidar.c b/src/modules/lidar-sf10/lidar.c new file mode 100644 index 0000000..0a532f8 --- /dev/null +++ b/src/modules/lidar-sf10/lidar.c @@ -0,0 +1,191 @@ +/**************************************************************************** + * + * Copyright (c) 2013-2015 PX4 Development Team. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * 3. Neither the name PX4 nor the names of its contributors may be + * used to endorse or promote products derived from this software + * without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS + * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE + * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, + * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS + * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED + * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN + * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ * + ****************************************************************************/ + +#include +#include "distance.h" +#include "usbd_cdc_vcp.h" +#include "main.h" +#include "distance_mode_filter.h" +#include "timer.h" + +#include "stm32f4xx.h" +#include "stm32f4xx_i2c.h" +#include "stm32f4xx_rcc.h" +#include "stm32f4xx_gpio.h" +#include "misc.h" + +//Stated range of sensor +#define MINIMUM_DISTANCE 0.0f +#define MAXIMUM_DISTANCE 60.0f + +#define LIDAR_ADDRESS 0x55 + +bool i2c_started = false; + +static void i2c_init_master(void) +{ + //Reset I2C + I2C_DeInit(I2C1); + I2C_SoftwareResetCmd(I2C1, ENABLE); + I2C_SoftwareResetCmd(I2C1, DISABLE); + + //Turn on clocks + RCC_APB1PeriphClockCmd(RCC_APB1Periph_I2C1, ENABLE); + RCC_AHB1PeriphClockCmd(RCC_AHB1Periph_GPIOB, ENABLE); + + //Route I2C out to B8 (SCL) and B9 (SDA) + GPIO_InitTypeDef gpio_init; + gpio_init.GPIO_Pin = GPIO_Pin_8 | GPIO_Pin_9; + gpio_init.GPIO_Mode = GPIO_Mode_AF; + gpio_init.GPIO_Speed = GPIO_Speed_50MHz; + gpio_init.GPIO_PuPd = GPIO_PuPd_UP; + gpio_init.GPIO_OType = GPIO_OType_OD; + GPIO_Init(GPIOB, &gpio_init); + GPIO_PinAFConfig(GPIOB, GPIO_PinSource8, GPIO_AF_I2C1); //SCL + GPIO_PinAFConfig(GPIOB, GPIO_PinSource9, GPIO_AF_I2C1); //SDA + + //NVIC + NVIC_InitTypeDef nvic_init; + nvic_init.NVIC_IRQChannel = I2C1_EV_IRQn; + nvic_init.NVIC_IRQChannelPreemptionPriority = 0; + nvic_init.NVIC_IRQChannelSubPriority = 0; + nvic_init.NVIC_IRQChannelCmd = ENABLE; + NVIC_Init(&nvic_init); + nvic_init.NVIC_IRQChannel = I2C1_ER_IRQn; + NVIC_Init(&nvic_init); + + //I2C block setup + I2C_InitTypeDef i2c_init; + i2c_init.I2C_ClockSpeed = 100000; + i2c_init.I2C_Mode = I2C_Mode_I2C; + i2c_init.I2C_DutyCycle = I2C_DutyCycle_2; + i2c_init.I2C_OwnAddress1 = 0x0; + i2c_init.I2C_Ack = I2C_Ack_Disable; + i2c_init.I2C_AcknowledgedAddress = I2C_AcknowledgedAddress_7bit; + I2C_Init(I2C1, &i2c_init); + I2C_Cmd(I2C1, ENABLE); + + i2c_started = true; +} + +/* Data from the LIDAR */ + +float sample, sample_filter; +uint32_t measure_time = 0; +bool sample_valid = false; + +__EXPORT void distance_init(void) +{ + i2c_init_master(); + + /* Run interrupt-based drivers after config */ + I2C_ITConfig(I2C1, I2C_IT_EVT, ENABLE); + I2C_ITConfig(I2C1, I2C_IT_ERR, ENABLE); +} + + +__EXPORT void distance_trigger(void) +{ + +} + +__EXPORT void distance_readback(void) +{ + if(!i2c_started) + return; + I2C_GenerateSTART(I2C1, ENABLE); +} + +static void lidar_process(uint16_t value); +void I2C1_EV_IRQHandler(void); +void I2C1_ER_IRQHandler(void); + +void I2C1_EV_IRQHandler(void); +__EXPORT void I2C1_EV_IRQHandler(void) +{ + if (I2C_GetITStatus(I2C1, I2C_IT_SB)) { + I2C_Send7bitAddress(I2C1, LIDAR_ADDRESS << 1, I2C_Direction_Receiver); + } else if (I2C_GetITStatus(I2C1, I2C_IT_ADDR)) { + I2C_AcknowledgeConfig(I2C1, ENABLE); + I2C_GenerateSTOP(I2C1, DISABLE); + (void) I2C1->SR2; + } else if (I2C_GetITStatus(I2C1, I2C_IT_BTF)) { + I2C_AcknowledgeConfig(I2C1, DISABLE); + I2C_GenerateSTOP(I2C1, ENABLE); + + uint16_t value = I2C_ReceiveData(I2C1); + value |= I2C_ReceiveData(I2C1) << 8; + lidar_process(value); + } +} + +void I2C1_ER_IRQHandler(void); +__EXPORT void I2C1_ER_IRQHandler(void) { + if((I2C_ReadRegister(I2C1, I2C_Register_SR1) & 0xFF00) != 0x00) { + I2C_GenerateSTOP(I2C1, ENABLE); + I2C1->SR1 &= 0x00FF; + } + sample_valid = false; +} + +static void lidar_process(uint16_t sample16) +{ + if(sample16 <= 0) { // 0 or less means invalid data + sample_valid = false; + return; + } + sample16 = ((sample16 >> 8) & 0xFF) | ((sample16 << 8) & 0xFF00); + sample = (sample16 / 
100.0f); /* convert cm->m */ + if(sample < MINIMUM_DISTANCE || sample > MAXIMUM_DISTANCE) { + sample_valid = false; + return; + } + sample_filter = insert_distance_value_and_get_mode_value(sample); + measure_time = get_boot_time_us(); + sample_valid = true; +} + +__EXPORT bool distance_read(float* distance_filtered, float* distance_raw) +{ + if(sample_valid) { + *distance_raw = sample; + *distance_filtered = sample_filter; + return true; + } + return false; +} + +__EXPORT uint32_t get_distance_measure_time(void) +{ + return measure_time; +} diff --git a/src/modules/lidar-sf10/module.mk b/src/modules/lidar-sf10/module.mk new file mode 100644 index 0000000..3984c4b --- /dev/null +++ b/src/modules/lidar-sf10/module.mk @@ -0,0 +1,5 @@ +DEFAULT_VISIBILITY=default + +SRCS += lidar.c + +$(info SRCS=$(SRCS)) diff --git a/src/modules/sonar/module.mk b/src/modules/sonar/module.mk new file mode 100644 index 0000000..b03a009 --- /dev/null +++ b/src/modules/sonar/module.mk @@ -0,0 +1,5 @@ +DEFAULT_VISIBILITY=default + +SRCS += sonar.c + +$(info SRCS=$(SRCS)) diff --git a/src/modules/flow/sonar.c b/src/modules/sonar/sonar.c similarity index 93% rename from src/modules/flow/sonar.c rename to src/modules/sonar/sonar.c index 7a1f017..4d6a80e 100644 --- a/src/modules/flow/sonar.c +++ b/src/modules/sonar/sonar.c @@ -37,6 +37,7 @@ #include #include +#include #include #include "stm32f4xx.h" #include "stm32f4xx_gpio.h" @@ -46,11 +47,10 @@ #include "stm32f4xx_tim.h" #include "stm32f4xx_dma.h" #include "misc.h" -#include "utils.h" #include "usart.h" #include "settings.h" -#include "sonar.h" -#include "sonar_mode_filter.h" +#include "distance.h" +#include "distance_mode_filter.h" #define SONAR_SCALE 1000.0f #define SONAR_MIN 0.12f /** 0.12m sonar minimum distance */ @@ -88,14 +88,17 @@ bool sonar_valid = false; /**< the mode of all sonar measurements */ * * see datasheet for more info */ -void sonar_trigger(){ +__EXPORT void distance_trigger(){ GPIO_SetBits(GPIOE, GPIO_Pin_8); } +__EXPORT void distance_readback() {} + /** * @brief Sonar interrupt handler */ -void UART4_IRQHandler(void) +void UART4_IRQHandler(void); +__EXPORT void UART4_IRQHandler(void) { if (USART_GetITStatus(UART4, USART_IT_RXNE) != RESET) { @@ -136,7 +139,7 @@ void UART4_IRQHandler(void) dt = ((float)(measure_time - last_measure_time)) / 1000000.0f; valid_data = temp; - sonar_mode = insert_sonar_value_and_get_mode_value(valid_data / SONAR_SCALE); + sonar_mode = insert_distance_value_and_get_mode_value(valid_data / SONAR_SCALE); new_value = 1; sonar_valid = true; } else { @@ -181,7 +184,7 @@ static void sonar_filter(void) * @param sonar_value_filtered Filtered return value * @param sonar_value_raw Raw return value */ -bool sonar_read(float* sonar_value_filtered, float* sonar_value_raw) +__EXPORT bool distance_read(float* sonar_value_filtered, float* sonar_value_raw) { /* getting new data with only around 10Hz */ if (new_value) { @@ -204,7 +207,7 @@ bool sonar_read(float* sonar_value_filtered, float* sonar_value_raw) /** * @brief Configures the sonar sensor Peripheral. 
*/ -void sonar_config(void) +__EXPORT void distance_init(void) { valid_data = 0; @@ -226,7 +229,7 @@ void sonar_config(void) /* Enable the USARTx Interrupt */ NVIC_InitStructure.NVIC_IRQChannel = UART4_IRQn; - NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 0; + NVIC_InitStructure.NVIC_IRQChannelPreemptionPriority = 1; NVIC_InitStructure.NVIC_IRQChannelSubPriority = 0; NVIC_InitStructure.NVIC_IRQChannelCmd = ENABLE; NVIC_Init(&NVIC_InitStructure); @@ -268,13 +271,8 @@ void sonar_config(void) } -uint32_t get_sonar_measure_time() +__EXPORT uint32_t get_distance_measure_time() { return sonar_measure_time; } -uint32_t get_sonar_measure_time_interrupt() -{ - return sonar_measure_time_interrupt; -} - diff --git a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.LegacyRawSample.uavcan b/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.LegacyRawSample.uavcan deleted file mode 100644 index d65cea2..0000000 --- a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.LegacyRawSample.uavcan +++ /dev/null @@ -1,6 +0,0 @@ -# Current time. -# Note that the data type "uavcan.Timestamp" is defined by the UAVCAN specification. -uavcan.Timestamp time - -I2CFrame frame -I2CFrameIntegral integral diff --git a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.RawSample.uavcan b/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.RawSample.uavcan new file mode 100644 index 0000000..eec44ce --- /dev/null +++ b/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/1110.RawSample.uavcan @@ -0,0 +1,15 @@ +# Raw (unfused) optical flow sample + +uavcan.Timestamp timestamp + +# Lateral flow (displacement) in radians & delta angle over timestep +float16[2] flow_integral_xy_radians +float16[3] gyro_rate_integral_xyz_radians +# The time period over which the flow & gyro data was collected +uint24 integration_time_usec +# The maximum velocity (rad/s) for which the flow data can be trusted +float16 max_axis_velocity_radians_sec +# The percentage (scaled from 0 to 255) of tiles for which the optical flow feature tracker could find a displacement +uint8 samples_matched_pct +# Gyro temperature +float16 gyro_temperature_celsius diff --git a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrame.uavcan b/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrame.uavcan deleted file mode 100644 index a20cd65..0000000 --- a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrame.uavcan +++ /dev/null @@ -1,17 +0,0 @@ -# -# Nested Type -# Legacy I2C -# - -uint16 frame_count -int16 pixel_flow_x_sum -int16 pixel_flow_y_sum -int16 flow_comp_m_x -int16 flow_comp_m_y -int16 qual -int16 gyro_x_rate -int16 gyro_y_rate -int16 gyro_z_rate -uint8 gyro_range -uint8 sonarimestamp -int16 ground_distance diff --git a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrameIntegral.uavcan b/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrameIntegral.uavcan deleted file mode 100644 index 978898e..0000000 --- a/src/modules/uavcannode/dsdl/uavcan/threedr/equipment/flow/optical_flow/I2CFrameIntegral.uavcan +++ /dev/null @@ -1,16 +0,0 @@ -# -# Nested Type -# Legacy I2C Integral -# - -uint16 frame_count_since_last_readout -int16 pixel_flow_x_integral -int16 pixel_flow_y_integral -int16 gyro_x_rate_integral -int16 gyro_y_rate_integral -int16 gyro_z_rate_integral -uint32 
integration_timespan -uint32 sonar_timestamp -uint16 ground_distance -int16 gyro_temperature -uint8 qual diff --git a/src/modules/uavcannode/uavcannode_main.cpp b/src/modules/uavcannode/uavcannode_main.cpp index edae336..a65dac5 100644 --- a/src/modules/uavcannode/uavcannode_main.cpp +++ b/src/modules/uavcannode/uavcannode_main.cpp @@ -40,6 +40,15 @@ #include "boot_app_shared.h" #include +#include +#include + +extern "C" { + #include "settings.h" + #include "hrt.h" + #include "fmu_comm.h" + #include "result_accumulator.h" +} #define FW_GIT STRINGIFY(GIT_VERSION) @@ -93,10 +102,10 @@ UavcanNode *UavcanNode::_instance; UavcanNode::UavcanNode(uavcan::ICanDriver &can_driver, uavcan::ISystemClock &system_clock) : active_bitrate(0), _node(can_driver, system_clock), - _fw_update_listner(_node), + _fw_update_listener(_node), _time_sync_slave(_node), - _flow_pulisher(_node), - _range_pulisher(_node), + _flow_publisher(_node), + _range_publisher(_node), _reset_timer(_node) { @@ -220,7 +229,6 @@ void UavcanNode::cb_beginfirmware_update(const uavcan::ReceivedDataStructuretime_stamp_utc; - m.sensor_id = pdata->sensor_id; - m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[0] = pdata->roll * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; - m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[1] = pdata->pitch * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; - m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[2] = pdata->yaw * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; - m.beam_orientation_in_body_frame.orientation_defined = true; - m.field_of_view = pdata->field_of_view; - m.sensor_type = pdata->sensor_type; - m.reading_type= pdata->reading_type; - m.range = pdata->range; - _range_pulisher.broadcast(m); + _range_publisher.broadcast(m); return PX4_OK; } -int UavcanNode::publish(legacy_12c_data_t *pdata) + +int UavcanNode::publish(::threedr::equipment::flow::optical_flow::RawSample &r) { - ::threedr::equipment::flow::optical_flow::LegacyRawSample r; - r.time.usec = pdata->time_stamp_utc; - r.frame.frame_count = pdata->frame.frame_count; - r.frame.pixel_flow_x_sum = pdata->frame.pixel_flow_x_sum; - r.frame.pixel_flow_y_sum = pdata->frame.pixel_flow_y_sum; - r.frame.flow_comp_m_x = pdata->frame.flow_comp_m_x; - r.frame.flow_comp_m_y = pdata->frame.flow_comp_m_y; - r.frame.qual = pdata->frame.qual; - r.frame.gyro_x_rate = pdata->frame.gyro_x_rate; - r.frame.gyro_y_rate = pdata->frame.gyro_y_rate; - r.frame.gyro_z_rate = pdata->frame.gyro_z_rate; - r.frame.gyro_range = pdata->frame.gyro_range; - r.integral.frame_count_since_last_readout = pdata->integral_frame.frame_count_since_last_readout; - r.integral.pixel_flow_x_integral = pdata->integral_frame.pixel_flow_x_integral; - r.integral.pixel_flow_y_integral = pdata->integral_frame.pixel_flow_y_integral; - r.integral.gyro_x_rate_integral = pdata->integral_frame.gyro_x_rate_integral; - r.integral.gyro_y_rate_integral = pdata->integral_frame.gyro_y_rate_integral; - r.integral.gyro_z_rate_integral = pdata->integral_frame.gyro_z_rate_integral; - r.integral.integration_timespan = pdata->integral_frame.integration_timespan; - r.integral.sonar_timestamp = pdata->integral_frame.sonar_timestamp; - r.integral.ground_distance = pdata->integral_frame.ground_distance; - r.integral.gyro_temperature = pdata->integral_frame.gyro_temperature; - r.integral.qual = pdata->integral_frame.qual; - _flow_pulisher.broadcast(r); + _flow_publisher.broadcast(r); return PX4_OK; } @@ -344,16 +319,6 @@ UavcanNode::teardown() return 0; } - -/* - * App 
entry point - */ -static void print_usage() -{ - PX4_INFO("usage: \n" - "\tuavcannode {start|status|stop|arm|disarm}"); -} - static int uavcannode_start() { board_app_initialize(); @@ -394,69 +359,72 @@ TODO(Need non vol Paramter sotrage) return UavcanNode::start(node_id, bitrate); } -__EXPORT int uavcannode_publish_flow(legacy_12c_data_t *pdata) -{ - - UavcanNode *const inst = UavcanNode::instance(); +static result_accumulator_ctx accumulator; - if (!inst) { - PX4_ERR( "application not running"); - return 1; - } - return inst->publish(pdata); +__EXPORT void fmu_comm_init() { + result_accumulator_init(&accumulator); + if (!UavcanNode::instance()) { + uavcannode_start(); + } } -__EXPORT int uavcannode_publish_range(range_data_t *pdata) -{ +__EXPORT void fmu_comm_run() { + UavcanNode *const inst = UavcanNode::instance(); - UavcanNode *const inst = UavcanNode::instance(); + if (!inst) { + PX4_ERR( "application not running"); + return; + } - if (!inst) { - PX4_ERR( "application not running"); - return 1; - } - return inst->publish(pdata); + inst->run(); } -__EXPORT int uavcannode_run() -{ - UavcanNode *const inst = UavcanNode::instance(); - - if (!inst) { - PX4_ERR( "application not running"); - return 1; - } - return inst->run(); - -} +__EXPORT void fmu_comm_update(const result_accumulator_frame* frame_data) { + /* feed the accumulator and recalculate */ + result_accumulator_feed(&accumulator, frame_data); -int uavcannode_main(bool start_not_stop) -{ - if (start_not_stop) { + if (accumulator.frame_count % 32 == 0) { + UavcanNode *const inst = UavcanNode::instance(); - if (UavcanNode::instance()) { - PX4_ERR("already started"); - return 1; + if (!inst) { + PX4_ERR( "UAVCAN not running"); + return; } - return uavcannode_start(); - } - - /* commands below require the app to be started */ - UavcanNode *const inst = UavcanNode::instance(); - - if (!inst) { - PX4_ERR( "application not running"); - return 1; - } - - - if (!start_not_stop) { - inst->stop(); - return 0; + uint64_t timestamp = hrt_absolute_time(); + + result_accumulator_output_flow_rad flow_rad; + result_accumulator_calculate_output_flow_rad(&accumulator, global_data.param[PARAM_ALGORITHM_MIN_VALID_RATIO], &flow_rad); + ::threedr::equipment::flow::optical_flow::RawSample r; + r.timestamp.usec = timestamp; + r.flow_integral_xy_radians[0] = flow_rad.integrated_x; + r.flow_integral_xy_radians[1] = flow_rad.integrated_y; + r.gyro_rate_integral_xyz_radians[0] = flow_rad.integrated_xgyro; + r.gyro_rate_integral_xyz_radians[1] = flow_rad.integrated_ygyro; + r.gyro_rate_integral_xyz_radians[2] = flow_rad.integrated_zgyro; + r.integration_time_usec = flow_rad.integration_time; + r.max_axis_velocity_radians_sec = accumulator.flow_cap_mv_rad; + r.samples_matched_pct = flow_rad.quality; + r.gyro_temperature_celsius = flow_rad.temperature / 100.0f; + inst->publish(r); + + ::uavcan::equipment::range_sensor::Measurement m; + m.timestamp.usec = timestamp; + m.sensor_id = 0; + m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[0] = 0 * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; + m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[1] = 0 * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; + m.beam_orientation_in_body_frame.fixed_axis_roll_pitch_yaw[2] = 0 * m.beam_orientation_in_body_frame.ANGLE_MULTIPLIER; + m.beam_orientation_in_body_frame.orientation_defined = true; + m.field_of_view = 0; + m.sensor_type = ::uavcan::equipment::range_sensor::Measurement::SENSOR_TYPE_LIDAR; + m.reading_type = (accumulator.last_ground_distance 
>= 0) + ? ::uavcan::equipment::range_sensor::Measurement::READING_TYPE_VALID_RANGE + : ::uavcan::equipment::range_sensor::Measurement::READING_TYPE_UNDEFINED; + m.range = accumulator.last_ground_distance; + inst->publish(m); + + result_accumulator_reset(&accumulator); } - - print_usage(); - return 1; } + diff --git a/src/modules/uavcannode/uavcannode_main.hpp b/src/modules/uavcannode/uavcannode_main.hpp index e88b66e..92ba97a 100644 --- a/src/modules/uavcannode/uavcannode_main.hpp +++ b/src/modules/uavcannode/uavcannode_main.hpp @@ -38,11 +38,9 @@ #include #include #include -#include +#include #include -#include "uavcan_if.h" - /** * @file uavcan_main.hpp * @@ -103,9 +101,9 @@ class UavcanNode static UavcanNode *instance() { return _instance; } - int run(); - int publish(legacy_12c_data_t *pdata); - int publish(range_data_t *pdata); + int run(); + int publish(::threedr::equipment::flow::optical_flow::RawSample&); + int publish(uavcan::equipment::range_sensor::Measurement&); /* The bit rate that can be passed back to the bootloader */ @@ -127,10 +125,10 @@ class UavcanNode uavcan::ServiceResponseDataStructure &)> BeginFirmwareUpdateCallBack; - uavcan::ServiceServer _fw_update_listner; + uavcan::ServiceServer _fw_update_listener; uavcan::GlobalTimeSyncSlave _time_sync_slave; - uavcan::Publisher<::threedr::equipment::flow::optical_flow::LegacyRawSample> _flow_pulisher; - uavcan::Publisher _range_pulisher; + uavcan::Publisher< ::threedr::equipment::flow::optical_flow::RawSample> _flow_publisher; + uavcan::Publisher _range_publisher; void cb_beginfirmware_update(const uavcan::ReceivedDataStructure &req, uavcan::ServiceResponseDataStructure &rsp); diff --git a/src/platforms/compiler.h b/src/platforms/compiler.h index 9cf1192..afe5295 100644 --- a/src/platforms/compiler.h +++ b/src/platforms/compiler.h @@ -58,9 +58,6 @@ # define CONFIG_HAVE_FUNCTIONNAME 1 /* Has __FUNCTION__ */ # define CONFIG_HAVE_FILENAME 1 /* Has __FILE__ */ -/* Indicate that a local variable is not used */ - -# define UNUSED(a) ((void)(a)) /* Attributes * diff --git a/unittests/Makefile b/unittests/Makefile index d33b015..054fe81 100644 --- a/unittests/Makefile +++ b/unittests/Makefile @@ -4,7 +4,7 @@ CFLAGS=-I. -I../src -lm all: tests -TEST_FILES=../src/sonar_mode_filter.c \ +TEST_FILES=../src/distance_mode_filter.c \ tests.cpp tests: $(TEST_FILES) diff --git a/unittests/tests.c b/unittests/tests.c index 4dd7135..2edeb95 100644 --- a/unittests/tests.c +++ b/unittests/tests.c @@ -4,7 +4,7 @@ #include -#include "../inc/sonar_mode_filter.h" +#include "../inc/distance_mode_filter.h" int main(int argc, char *argv[]) { @@ -18,7 +18,7 @@ int main(int argc, char *argv[]) { float inf = in / 10.0f; - float out = insert_sonar_value_and_get_mode_value(inf); + float out = insert_distance_value_and_get_mode_value(inf); printf("in: %f\tout: %f\n", (double)inf, (double)out); }
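
For reference, the usage contract spelled out in the lidar-lite driver comment (call distance_trigger() periodically, call distance_readback() at least 20 ms later, and pick up results through distance_read()) can be driven by a simple polling loop along the lines below. This is a sketch only: the distance_* calls, get_boot_time_us() and the "distance.h"/"timer.h" headers come from this patch, but distance_poll_example(), the 100 ms trigger period and the pacing logic are assumptions made for illustration, not code from the diff.

/*
 * Illustrative sketch (not part of the patch): pacing the distance_*
 * interface from a periodic task. Assumes distance_init() was already
 * called once at startup.
 */
#include <stdbool.h>
#include <stdint.h>

#include "distance.h"   /* distance_trigger(), distance_readback(), distance_read() */
#include "timer.h"      /* get_boot_time_us() */

void distance_poll_example(void)
{
	static uint32_t last_trigger_us;
	static bool readback_pending;

	uint32_t now = get_boot_time_us();

	/* Start a new acquisition roughly every 100 ms (period is an assumption). */
	if (now - last_trigger_us >= 100000) {
		distance_trigger();
		last_trigger_us = now;
		readback_pending = true;
	}

	/* Kick off the I2C readback at least 20 ms after the trigger,
	 * as required by the driver comment. */
	if (readback_pending && (now - last_trigger_us >= 20000)) {
		distance_readback();
		readback_pending = false;
	}

	/* The interrupt handlers fill in the sample asynchronously;
	 * distance_read() reports the latest valid measurement in metres. */
	float dist_filtered, dist_raw;

	if (distance_read(&dist_filtered, &dist_raw)) {
		/* use dist_filtered / dist_raw, e.g. feed the flow pipeline */
	}
}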