Hardware components | ||||||
![]() |
| × | 6 | |||
![]() |
| × | 6 | |||
![]() |
| × | 1 | |||
Software apps and online services | ||||||
![]() |
| |||||
![]() |
|
En este proyecto, he creado un modelo de reconocimiento de voz utilizando Edge Impulse Studio para detectar las 3 palabras principales (Stop, Left y Right). Para el entrenamiento se utilizan datos de voz; para ello grabé 3 voces diferentes para que el modelo tuviera más variedad de entrenamiento. Cada audio dura 3 minutos, es decir, al final son 9 audios de 3 minutos cada uno.
El modelo final se implementa en Arduino Nano 33 BLE Sense, utilizando también el acelerómetro de 3 ejes de la misma tarjeta, esto con el fin de simular un volante, además, con los 6 leds se simulan las señales direccionales del auto, si alguien dice "Izquierda" se encienden los primeros 3 leds, en cambio si dice "Derecha" se encienden los otros 3 leds. Cuando alguien dice "Stop", todos los leds se encienden. Los leds parpadean según lo ordenado. Para desactivar el "parpadeo", debe girar la placa de prueba hacia el lado derecho y luego volver a colocarla en la posición correcta. El video muestra un poco más de detalle sobre esto.
/* Edge Impulse ingestion SDK
* Copyright (c) 2022 EdgeImpulse Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
// If your target is limited in memory remove this macro to save 10K RAM
#define EIDSP_QUANTIZE_FILTERBANK 0
/**
* Define the number of slices per model window. E.g. a model window of 1000 ms
* with slices per model window set to 4. Results in a slice size of 250 ms.
* For more info: https://docs.edgeimpulse.com/docs/continuous-audio-sampling
*/
#define EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW 4
/*
** NOTE: If you run into TFLite arena allocation issue.
**
** This may be due to may dynamic memory fragmentation.
** Try defining "-DEI_CLASSIFIER_ALLOCATION_STATIC" in boards.local.txt (create
** if it doesn't exist) and copy this file to
** `<ARDUINO_CORE_INSTALL_PATH>/arduino/hardware/<mbed_core>/<core_version>/`.
**
** See
** (https://support.arduino.cc/hc/en-us/articles/360012076960-Where-are-the-installed-cores-located-)
** to find where Arduino installs cores on your machine.
**
** If the problem persists then there's not enough memory for this model and application.
*/
/* Includes ---------------------------------------------------------------- */
#include <PDM.h>
#include <Inteligencia3.0_inferencing.h>
#include "MadgwickAHRS.h"
#include <Arduino_LSM9DS1.h>
/** Audio buffers, pointers and selectors */
typedef struct {
signed short *buffers[2]; // ping-pong slice buffers, n_samples samples each
unsigned char buf_select; // index of the buffer currently being filled by the PDM callback
unsigned char buf_ready;  // set to 1 by the callback when a buffer is full; cleared by the consumer
unsigned int buf_count;   // samples written into the active buffer so far
unsigned int n_samples;   // capacity of each buffer, in samples
} inference_t;
static inference_t inference;
static bool record_ready = false; // gates the PDM callback; set once buffers are allocated
static signed short *sampleBuffer; // raw PDM staging buffer the hardware reads into
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal
static int print_results = -(EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW);
// Madgwick AHRS filter used to derive a yaw angle from the LSM9DS1 readings.
Madgwick filter;
float yaw = 0.0; // yaw angle from the previous direccion() pass
float yawThreshold = 5.0; // threshold for detecting a yaw movement
float gx,gy,gz; // gyroscope readings (deg/s)
float ax,ay,az; // accelerometer readings (g)
float mx,my,mz; // magnetometer readings (uT)
int dire;  // detected turn: 0 = centered, 1 / 2 = opposite turn directions
int color; // last recognized voice command: 1, 2 or 3 (see loop())
// Scores of the model's first five labels, copied out after each window.
// NOTE(review): assumes the model has at least 5 labels — confirm against
// EI_CLASSIFIER_LABEL_COUNT in the generated model header.
float a,b,c,d,e;
// External turn-signal LEDs: ledPin1-3 form one bank, ledPin4-6 the other.
const int ledPin1 = 2;
const int ledPin2 = 3;
const int ledPin3 = 4;
const int ledPin4 = 5;
const int ledPin5 = 6;
const int ledPin6 = 7;
/**
* @brief Arduino setup function
*/
void setup()
{
    // Serial first, so init failures below can be reported.
    Serial.begin(115200);
    // comment out the below line to cancel the wait for USB connection (needed for native USB)
    while (!Serial);

    if (!IMU.begin()) {
        Serial.println("Failed to initialize IMU!");
        while (1);
    }
    // Madgwick filter sample rate (Hz); direccion() is throttled to ~10 Hz by
    // its delay(100) — NOTE(review): confirm this rate matches actual call rate.
    filter.begin(100);

    Serial.println("Edge Impulse Inferencing Demo");

    // External turn-signal LED banks (1-3 and 4-6).
    pinMode(ledPin1, OUTPUT);
    pinMode(ledPin2, OUTPUT);
    pinMode(ledPin3, OUTPUT);
    pinMode(ledPin4, OUTPUT);
    pinMode(ledPin5, OUTPUT);
    pinMode(ledPin6, OUTPUT);
    // On-board RGB LED is active-low: writing HIGH turns it off.
    pinMode(LEDR, OUTPUT);
    pinMode(LEDG, OUTPUT);
    pinMode(LEDB, OUTPUT);
    digitalWrite(LEDR, HIGH); // will turn the LED off
    digitalWrite(LEDG, HIGH); // will turn the LED off
    digitalWrite(LEDB, HIGH);

    // summary of inferencing settings (from model_metadata.h)
    ei_printf("Inferencing settings:\n");
    ei_printf("\tInterval: %.2f ms.\n", (float)EI_CLASSIFIER_INTERVAL_MS);
    ei_printf("\tFrame size: %d\n", EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE);
    ei_printf("\tSample length: %d ms.\n", EI_CLASSIFIER_RAW_SAMPLE_COUNT / 16);
    ei_printf("\tNo. of classes: %d\n", sizeof(ei_classifier_inferencing_categories) /
                                        sizeof(ei_classifier_inferencing_categories[0]));

    run_classifier_init();
    if (microphone_inference_start(EI_CLASSIFIER_SLICE_SIZE) == false) {
        ei_printf("ERR: Could not allocate audio buffer (size %d), this could be due to the window length of your model\r\n", EI_CLASSIFIER_RAW_SAMPLE_COUNT);
        // Halt here, consistent with the IMU failure path above: returning
        // would let loop() run with unallocated audio buffers.
        while (1);
    }
}
/**
* @brief Arduino main function. Runs the inferencing loop.
*/
void micro()
{
bool m = microphone_inference_record();
if (!m) {
ei_printf("ERR: Failed to record audio...\n");
return;
}
signal_t signal;
signal.total_length = EI_CLASSIFIER_SLICE_SIZE;
signal.get_data = µphone_audio_signal_get_data;
ei_impulse_result_t result = {0};
EI_IMPULSE_ERROR r = run_classifier_continuous(&signal, &result, debug_nn);
if (r != EI_IMPULSE_OK) {
ei_printf("ERR: Failed to run classifier (%d)\n", r);
return;
}
if (++print_results >= (EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW)) {
// print the predictions
ei_printf("Predictions ");
ei_printf("(DSP: %d ms., Classification: %d ms., Anomaly: %d ms.)",
result.timing.dsp, result.timing.classification, result.timing.anomaly);
ei_printf(": \n");
for (size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++) {
ei_printf(" %s: %.5f\n", result.classification[ix].label,
result.classification[ix].value);
}
#if EI_CLASSIFIER_HAS_ANOMALY == 1
ei_printf(" anomaly score: %.3f\n", result.anomaly);
#endif
print_results = 0;
}
a = result.classification[0].value;
b = result.classification[1].value;
c = result.classification[2].value;
d = result.classification[3].value;
e = result.classification[4].value;
}
/**
 * Reads the IMU, feeds the Madgwick filter and classifies the yaw change
 * since the previous call into the global `dire`:
 *   0 = steady, 1 / 2 = a turn in each direction.
 * NOTE(review): the original Spanish comments for dire=1/dire=2 contradicted
 * the commented-out prints ("derecha" vs "Izquierda"); in loop(), dire==1 is
 * paired with the "Left" command and dire==2 with "Right" — confirm on device.
 */
void direccion(){
if (IMU.accelerationAvailable() && IMU.gyroscopeAvailable() && IMU.magneticFieldAvailable()) {
IMU.readAcceleration(ax,ay,az);
IMU.readGyroscope(gx,gy,gz);
IMU.readMagneticField(mx,my,mz);
// Copy the accelerometer, gyroscope and magnetometer readings
float accelX = ax;
float accelY = ay;
float accelZ = az;
float gyroX = gx;
float gyroY = gy;
float gyroZ = gz;
float magX = mx;
float magY = my;
float magZ = mz;
// Update the Madgwick filter with the sensor data
filter.update(gyroX, gyroY, gyroZ, accelX, accelY, accelZ, magX, magY, magZ);
// Get the yaw angle from the filter
float currentYaw = filter.getYaw();
// Print the yaw angle
Serial.print("Yaw: ");
Serial.println(currentYaw);
// Detect yaw movement; the *5 amplifies small per-step changes so they
// cross yawThreshold — tuned empirically, presumably for the 100 ms cadence.
float yawDifference = (currentYaw - yaw)*5;
//Serial.println(yawDifference);
//Serial.println(yawThreshold);
if (abs(yawDifference) < yawThreshold) {
// Not moving (centered)
//Serial.println("Centro");
dire = 0;
} else if (yawDifference > yawThreshold) {
// Turn one way — matched with the "Left" command (color==1) in loop()
//Serial.println("Izquierda");
dire = 1;
} else if (yawDifference < -yawThreshold) {
// Turn the other way — matched with the "Right" command (color==2) in loop()
//Serial.println("Derecha");
dire = 2;
}
// Remember the yaw value for the next delta
yaw = currentYaw;
delay(100);
}
}
/**
 * @brief Blinks one LED bank four times: 250 ms on / 250 ms off per cycle,
 *        with no trailing delay after the final off (matches the original
 *        unrolled sequence).
 */
static void blink_bank(int p1, int p2, int p3)
{
    for (int k = 0; k < 4; k++) {
        digitalWrite(p1, HIGH);
        digitalWrite(p2, HIGH);
        digitalWrite(p3, HIGH);
        delay(250);
        digitalWrite(p1, LOW);
        digitalWrite(p2, LOW);
        digitalWrite(p3, LOW);
        if (k < 3) {
            delay(250);
        }
    }
}

/**
 * @brief Main loop: run a slice of inference, read the steering direction,
 *        then drive the turn-signal LED banks.
 *
 * a/c/d hold the scores copied out by micro(); a >= 0.80 is treated as
 * "Left", c as "Right", d as "Stop". The blink only runs once the board is
 * also tilted in the matching direction (dire set by direccion()).
 */
void loop(){
    micro();
    direccion();

    if (a >= 0.80) {
        Serial.println("Left LED on");
        digitalWrite(LEDR, LOW);  // on-board red on (active-low)
        digitalWrite(LEDG, HIGH); // off
        digitalWrite(LEDB, HIGH); // off
        color = 1;
        digitalWrite(ledPin3, HIGH);
        digitalWrite(ledPin2, HIGH);
        digitalWrite(ledPin1, HIGH);
    }
    else if (c >= 0.80) {
        Serial.println("Right LED on");
        digitalWrite(LEDR, HIGH); // off
        digitalWrite(LEDG, LOW);  // on-board green on
        digitalWrite(LEDB, HIGH); // off
        color = 2;
        digitalWrite(ledPin4, HIGH);
        digitalWrite(ledPin5, HIGH);
        digitalWrite(ledPin6, HIGH);
    }
    else if (d >= 0.80) {
        Serial.println("Stop LED on");
        color = 3;
    }

    // FIX: the original used bitwise '&' where logical '&&' was intended.
    // Result was the same here (both operands are 0/1), but '&&' states the
    // intent and short-circuits.
    if (color == 1 && dire == 1) {
        digitalWrite(LEDR, HIGH);
        blink_bank(ledPin1, ledPin2, ledPin3);
    }
    else if (color == 2 && dire == 2) {
        digitalWrite(LEDG, LOW);
        blink_bank(ledPin4, ledPin5, ledPin6);
    }
    else if (color == 3) {
        digitalWrite(LEDR, HIGH); // off
        digitalWrite(LEDG, HIGH); // off
        digitalWrite(LEDB, LOW);  // on-board blue on
        // "Stop": flash all six LEDs together at a slower 500 ms cadence.
        digitalWrite(ledPin1, HIGH);
        digitalWrite(ledPin2, HIGH);
        digitalWrite(ledPin3, HIGH);
        digitalWrite(ledPin4, HIGH);
        digitalWrite(ledPin5, HIGH);
        digitalWrite(ledPin6, HIGH);
        delay(500);
        digitalWrite(ledPin1, LOW);
        digitalWrite(ledPin2, LOW);
        digitalWrite(ledPin3, LOW);
        digitalWrite(ledPin4, LOW);
        digitalWrite(ledPin5, LOW);
        digitalWrite(ledPin6, LOW);
        delay(500);
    }
}
/**
* @brief PDM buffer full callback
* Get data and call audio thread callback
*/
/**
 * @brief PDM buffer full callback.
 *        Copies freshly captured samples into the active ping-pong buffer and
 *        flips buffers when one fills up. Runs in interrupt context.
 */
static void pdm_data_ready_inference_callback(void)
{
    int bytesAvailable = PDM.available();

    // read into the sample buffer
    int bytesRead = PDM.read((char *)&sampleBuffer[0], bytesAvailable);

    if (record_ready == true) {
        // Two bytes per 16-bit sample, so bytesRead >> 1 samples were read.
        // Parenthesized explicitly: the original "i<bytesRead>> 1" relied on
        // '>>' binding tighter than '<', which is easy to misread.
        for (int i = 0; i < (bytesRead >> 1); i++) {
            inference.buffers[inference.buf_select][inference.buf_count++] = sampleBuffer[i];

            if (inference.buf_count >= inference.n_samples) {
                // Active buffer is full: hand it to the consumer and switch.
                inference.buf_select ^= 1;
                inference.buf_count = 0;
                inference.buf_ready = 1;
            }
        }
    }
}
/**
* @brief Init inferencing struct and setup/start PDM
*
* @param[in] n_samples The n samples
*
* @return { description_of_the_return_value }
*/
/**
 * @brief Allocate the double buffers, configure the PDM microphone and start
 *        capture.
 *
 * @param[in] n_samples Samples per slice buffer.
 *
 * @return true on success; false if any allocation or PDM startup fails
 *         (all buffers allocated so far are freed on failure).
 */
static bool microphone_inference_start(uint32_t n_samples)
{
    inference.buffers[0] = (signed short *)malloc(n_samples * sizeof(signed short));
    if (inference.buffers[0] == NULL) {
        return false;
    }

    inference.buffers[1] = (signed short *)malloc(n_samples * sizeof(signed short));
    if (inference.buffers[1] == NULL) {
        free(inference.buffers[0]);
        return false;
    }

    // Staging buffer is half a slice: the callback drains it more than once
    // per slice.
    sampleBuffer = (signed short *)malloc((n_samples >> 1) * sizeof(signed short));
    if (sampleBuffer == NULL) {
        free(inference.buffers[0]);
        free(inference.buffers[1]);
        return false;
    }

    inference.buf_select = 0;
    inference.buf_count = 0;
    inference.n_samples = n_samples;
    inference.buf_ready = 0;

    // configure the data receive callback
    PDM.onReceive(&pdm_data_ready_inference_callback);

    PDM.setBufferSize((n_samples >> 1) * sizeof(int16_t));

    // initialize PDM with:
    // - one channel (mono mode)
    // - the model's sample rate (16 kHz for this impulse)
    if (!PDM.begin(1, EI_CLASSIFIER_FREQUENCY)) {
        ei_printf("Failed to start PDM!");
        // FIX: the original printed the error but still returned true,
        // leaking the buffers and letting the sketch run without a mic.
        free(inference.buffers[0]);
        free(inference.buffers[1]);
        free(sampleBuffer);
        return false;
    }

    // set the gain (PDM library default is 20)
    PDM.setGain(127);

    record_ready = true;

    return true;
}
/**
* @brief Wait on new data
*
* @return True when finished
*/
/**
 * @brief Block until the PDM callback marks a slice buffer ready.
 *
 * @return true when a fresh buffer was waited for normally; false when the
 *         buffer was already full on entry (overrun: the previous slice was
 *         not consumed in time).
 */
static bool microphone_inference_record(void)
{
    // A buffer already flagged ready before we start waiting means we fell
    // behind the capture rate.
    bool overrun = (inference.buf_ready == 1);
    if (overrun) {
        ei_printf(
            "Error sample buffer overrun. Decrease the number of slices per model window "
            "(EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW)\n");
    }

    // Busy-wait (with a 1 ms sleep) for the ISR to fill the next buffer.
    while (inference.buf_ready == 0) {
        delay(1);
    }
    inference.buf_ready = 0;

    return !overrun;
}
/**
* Get raw audio signal data
*/
/**
 * Signal callback for the classifier: converts `length` int16 samples,
 * starting at `offset` in the inactive (already filled) ping-pong buffer,
 * to floats in `out_ptr`. Always returns 0.
 */
static int microphone_audio_signal_get_data(size_t offset, size_t length, float *out_ptr)
{
    // buf_select points at the buffer being filled; XOR 1 selects the
    // completed one that inference should read.
    signed short *src = &inference.buffers[inference.buf_select ^ 1][offset];
    numpy::int16_to_float(src, out_ptr, length);

    return 0;
}
/**
* @brief Stop PDM and release buffers
*/
static void microphone_inference_end(void)
{
PDM.end();
free(inference.buffers[0]);
free(inference.buffers[1]);
free(sampleBuffer);
}
#if !defined(EI_CLASSIFIER_SENSOR) || EI_CLASSIFIER_SENSOR != EI_CLASSIFIER_SENSOR_MICROPHONE
#error "Invalid model for current sensor."
#endif
<html><head><meta http-equiv="Content-Type" content="text/html;charset=UTF-8"/><title>/* Edge Impulse ingestion SDK</title></head><body><p dir="ltr">/* Edge Impulse ingestion SDK<br>
* Copyright (c) 2022 EdgeImpulse Inc.<br>
*<br>
* Licensed under the Apache License, Version 2.0 (the "License");<br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at<br>
* http://www.apache.org/licenses/LICENSE-2.0<br>
*<br>
* Unless required by applicable law or agreed to in writing, software<br>
* distributed under the License is distributed on an "AS IS" BASIS,<br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<br>
* See the License for the specific language governing permissions and<br>
* limitations under the License.<br>
*<br>
*/</p>
<p dir="ltr">// If your target is limited in memory remove this macro to save 10K RAM<br>
#define EIDSP_QUANTIZE_FILTERBANK 0</p>
<p dir="ltr">/**<br>
* Define the number of slices per model window. E.g. a model window of 1000 ms<br>
* with slices per model window set to 4. Results in a slice size of 250 ms.<br>
* For more info: https://docs.edgeimpulse.com/docs/continuous-audio-sampling<br>
*/<br>
#define EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW 4</p>
<p dir="ltr">/*<br>
** NOTE: If you run into TFLite arena allocation issue.<br>
**<br>
** This may be due to may dynamic memory fragmentation.<br>
** Try defining "-DEI_CLASSIFIER_ALLOCATION_STATIC" in boards.local.txt (create<br>
** if it doesn't exist) and copy this file to<br>
** `<ARDUINO_CORE_INSTALL_PATH>/arduino/hardware/<mbed_core>/<core_version>/`.<br>
**<br>
** See<br>
** (https://support.arduino.cc/hc/en-us/articles/360012076960-Where-are-the-installed-cores-located-)<br>
** to find where Arduino installs cores on your machine.<br>
**<br>
** If the problem persists then there's not enough memory for this model and application.<br>
*/</p>
<p dir="ltr">/* Includes ---------------------------------------------------------------- */<br>
#include <PDM.h><br>
#include <Inteligencia3.0_inferencing.h><br>
#include "MadgwickAHRS.h"<br>
#include <Arduino_LSM9DS1.h><br>
/** Audio buffers, pointers and selectors */<br>
typedef struct {<br>
signed short *buffers[2];<br>
unsigned char buf_select;<br>
unsigned char buf_ready;<br>
unsigned int buf_count;<br>
unsigned int n_samples;<br>
} inference_t;</p>
<p dir="ltr">static inference_t inference;<br>
static bool record_ready = false;<br>
static signed short *sampleBuffer;<br>
static bool debug_nn = false; // Set this to true to see e.g. features generated from the raw signal<br>
static int print_results = -(EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW);<br>
Madgwick filter;<br>
float yaw = 0.0;<br>
float yawThreshold = 5.0; // Umbral para detectar movimiento de yaw<br>
float gx,gy,gz;<br>
float ax,ay,az;<br>
float mx,my,mz;<br>
int dire;<br>
int color;<br>
float a,b,c,d,e;<br>
const int ledPin1 = 2;<br>
const int ledPin2 = 3;<br>
const int ledPin3 = 4;<br>
const int ledPin4 = 5;<br>
const int ledPin5 = 6;<br>
const int ledPin6 = 7;<br>
/**<br>
* @brief Arduino setup function<br>
*/<br>
void setup()<br>
{<br>
// put your setup code here, to run once:<br>
Serial.begin(115200);<br>
// comment out the below line to cancel the wait for USB connection (needed for native USB)<br>
while (!Serial);<br>
if (!IMU.begin()) {<br>
Serial.println("Failed to initialize IMU!");<br>
while (1);<br>
}<br>
filter.begin(100);<br>
Serial.println("Edge Impulse Inferencing Demo");<br>
pinMode(ledPin1, OUTPUT);<br>
pinMode(ledPin2, OUTPUT);<br>
pinMode(ledPin3, OUTPUT);<br>
pinMode(ledPin4, OUTPUT);<br>
pinMode(ledPin5, OUTPUT);<br>
pinMode(ledPin6, OUTPUT);<br>
pinMode(LEDR, OUTPUT);<br>
pinMode(LEDG, OUTPUT);<br>
pinMode(LEDB, OUTPUT);<br>
digitalWrite(LEDR, HIGH); // will turn the LED off<br>
digitalWrite(LEDG, HIGH); // will turn the LED off<br>
digitalWrite(LEDB, HIGH);<br>
// summary of inferencing settings (from model_metadata.h)<br>
ei_printf("Inferencing settings:\n");<br>
ei_printf("\tInterval: %.2f ms.\n", (float)EI_CLASSIFIER_INTERVAL_MS);<br>
ei_printf("\tFrame size: %d\n", EI_CLASSIFIER_DSP_INPUT_FRAME_SIZE);<br>
ei_printf("\tSample length: %d ms.\n", EI_CLASSIFIER_RAW_SAMPLE_COUNT / 16);<br>
ei_printf("\tNo. of classes: %d\n", sizeof(ei_classifier_inferencing_categories) /<br>
sizeof(ei_classifier_inferencing_categories[0]));</p>
<p dir="ltr"> run_classifier_init();<br>
if (microphone_inference_start(EI_CLASSIFIER_SLICE_SIZE) == false) {<br>
ei_printf("ERR: Could not allocate audio buffer (size %d), this could be due to the window length of your model\r\n", EI_CLASSIFIER_RAW_SAMPLE_COUNT);<br>
return;<br>
}<br>
}</p>
<p dir="ltr">/**<br>
* @brief Arduino main function. Runs the inferencing loop.<br>
*/<br>
void micro()<br>
{<br>
bool m = microphone_inference_record();<br>
if (!m) {<br>
ei_printf("ERR: Failed to record audio...\n");<br>
return;<br>
}</p>
<p dir="ltr"> signal_t signal;<br>
signal.total_length = EI_CLASSIFIER_SLICE_SIZE;<br>
signal.get_data = &microphone_audio_signal_get_data;<br>
ei_impulse_result_t result = {0};</p>
<p dir="ltr"> EI_IMPULSE_ERROR r = run_classifier_continuous(&signal, &result, debug_nn);<br>
if (r != EI_IMPULSE_OK) {<br>
ei_printf("ERR: Failed to run classifier (%d)\n", r);<br>
return;<br>
}</p>
<p dir="ltr"> if (++print_results >= (EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW)) {<br>
// print the predictions<br>
ei_printf("Predictions ");<br>
ei_printf("(DSP: %d ms., Classification: %d ms., Anomaly: %d ms.)",<br>
result.timing.dsp, result.timing.classification, result.timing.anomaly);<br>
ei_printf(": \n");<br>
for (size_t ix = 0; ix < EI_CLASSIFIER_LABEL_COUNT; ix++) {<br>
ei_printf(" %s: %.5f\n", result.classification[ix].label,<br>
result.classification[ix].value);<br>
<br>
}<br>
#if EI_CLASSIFIER_HAS_ANOMALY == 1<br>
ei_printf(" anomaly score: %.3f\n", result.anomaly);<br>
#endif</p>
<p dir="ltr"> print_results = 0;<br>
<br>
}</p>
<p dir="ltr">a = result.classification[0].value;<br>
b = result.classification[1].value;<br>
c = result.classification[2].value;<br>
d = result.classification[3].value;<br>
e = result.classification[4].value;<br>
}<br>
void direccion(){<br>
if (IMU.accelerationAvailable() && IMU.gyroscopeAvailable() && IMU.magneticFieldAvailable()) {<br>
IMU.readAcceleration(ax,ay,az);<br>
IMU.readGyroscope(gx,gy,gz);<br>
IMU.readMagneticField(mx,my,mz);</p>
<p dir="ltr"> // Obtener las mediciones de aceleración, giroscopio y magnetómetro<br>
float accelX = ax;<br>
float accelY = ay;<br>
float accelZ = az;<br>
float gyroX = gx;<br>
float gyroY = gy;<br>
float gyroZ = gz;<br>
float magX = mx;<br>
float magY = my;<br>
float magZ = mz;</p>
<p dir="ltr"> // Actualizar el filtro de Madgwick con los datos de los sensores<br>
filter.update(gyroX, gyroY, gyroZ, accelX, accelY, accelZ, magX, magY, magZ);</p>
<p dir="ltr"> // Obtener el ángulo de yaw del filtro<br>
float currentYaw = filter.getYaw();</p>
<p dir="ltr"> // Imprimir el ángulo de yaw<br>
Serial.print("Yaw: ");<br>
Serial.println(currentYaw);</p>
<p dir="ltr"> // Detectar el movimiento de yaw<br>
float yawDifference = (currentYaw - yaw)*5;<br>
//Serial.println(yawDifference);<br>
//Serial.println(yawThreshold);<br>
<br>
if (abs(yawDifference) < yawThreshold) {<br>
// No se mueve (centro)<br>
//Serial.println("Centro");<br>
dire = 0;<br>
} else if (yawDifference > yawThreshold) {<br>
// Movimiento a la derecha<br>
//Serial.println("Izquierda");<br>
dire = 1;<br>
} else if (yawDifference < -yawThreshold) {<br>
// Movimiento a la izquierda<br>
//Serial.println("Derecha");<br>
dire = 2;<br>
}<br>
</p>
<p dir="ltr"> // Actualizar el valor de yaw<br>
yaw = currentYaw;<br>
delay(100);<br>
}<br>
<br>
<br>
}<br><br><br><br><br><br></p>
<p dir="ltr">void loop(){<br>
micro();<br>
direccion();<br>
if (a >= 0.80) {<br>
Serial.println("Left LED on");<br>
digitalWrite(LEDR, LOW); // will turn the LED on<br>
digitalWrite(LEDG, HIGH); // will turn the LED off<br>
digitalWrite(LEDB, HIGH); // will turn the LED off<br>
color = 1;<br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin1, HIGH);<br>
</p>
<p dir="ltr">}<br>
else if (c >= 0.80) {<br>
Serial.println("Right LED on");<br>
digitalWrite(LEDR, HIGH); // will turn the LED on<br>
digitalWrite(LEDG, LOW); // will turn the LED off<br>
digitalWrite(LEDB, HIGH); // will turn the LED off<br>
color = 2;<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
}<br>
else if (d >= 0.80) {<br>
Serial.println("Stop LED on");<br>
color = 3;<br>
<br>
}</p>
<p dir="ltr">if (color == 1 & dire == 1){<br>
digitalWrite(LEDR, HIGH); <br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin1, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin1, LOW);<br>
digitalWrite(ledPin2, LOW);<br>
digitalWrite(ledPin3, LOW);<br>
delay(250);<br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin1, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin1, LOW);<br>
digitalWrite(ledPin2, LOW);<br>
digitalWrite(ledPin3, LOW);<br>
delay(250);<br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin1, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin1, LOW);<br>
digitalWrite(ledPin2, LOW);<br>
digitalWrite(ledPin3, LOW);<br>
delay(250);<br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin1, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin1, LOW);<br>
digitalWrite(ledPin2, LOW);<br>
digitalWrite(ledPin3, LOW);<br>
}<br>
else if (color == 2 & dire == 2){<br>
digitalWrite(LEDG, LOW);<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin4, LOW);<br>
digitalWrite(ledPin5, LOW);<br>
digitalWrite(ledPin6, LOW);<br>
delay(250);<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin4, LOW);<br>
digitalWrite(ledPin5, LOW);<br>
digitalWrite(ledPin6, LOW);<br>
delay(250);<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin4, LOW);<br>
digitalWrite(ledPin5, LOW);<br>
digitalWrite(ledPin6, LOW);<br>
delay(250);<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
delay(250);<br>
digitalWrite(ledPin4, LOW);<br>
digitalWrite(ledPin5, LOW);<br>
digitalWrite(ledPin6, LOW);<br>
}</p>
<p dir="ltr">else if (color == 3){<br>
digitalWrite(LEDR, HIGH); // will turn the LED on<br>
digitalWrite(LEDG, HIGH); // will turn the LED off<br>
digitalWrite(LEDB, LOW); // will turn the LED off<br>
digitalWrite(ledPin1, HIGH);<br>
digitalWrite(ledPin2, HIGH);<br>
digitalWrite(ledPin3, HIGH);<br>
digitalWrite(ledPin4, HIGH);<br>
digitalWrite(ledPin5, HIGH);<br>
digitalWrite(ledPin6, HIGH);<br>
delay(500);<br>
digitalWrite(ledPin1, LOW);<br>
digitalWrite(ledPin2, LOW);<br>
digitalWrite(ledPin3, LOW);<br>
digitalWrite(ledPin4, LOW);<br>
digitalWrite(ledPin5, LOW);<br>
digitalWrite(ledPin6, LOW);<br>
delay(500);<br>
<br>
<br>
<br>
}<br>
}<br>
/**<br>
* @brief PDM buffer full callback<br>
* Get data and call audio thread callback<br>
*/<br>
static void pdm_data_ready_inference_callback(void)<br>
{<br>
int bytesAvailable = PDM.available();</p>
<p dir="ltr"> // read into the sample buffer<br>
int bytesRead = PDM.read((char *)&sampleBuffer[0], bytesAvailable);</p>
<p dir="ltr"> if (record_ready == true) {<br>
for (int i = 0; i<bytesRead>> 1; i++) {<br>
inference.buffers[inference.buf_select][inference.buf_count++] = sampleBuffer[i];</p>
<p dir="ltr"> if (inference.buf_count >= inference.n_samples) {<br>
inference.buf_select ^= 1;<br>
inference.buf_count = 0;<br>
inference.buf_ready = 1;<br>
}<br>
}<br>
}<br>
}</p>
<p dir="ltr">/**<br>
* @brief Init inferencing struct and setup/start PDM<br>
*<br>
* @param[in] n_samples The n samples<br>
*<br>
* @return { description_of_the_return_value }<br>
*/<br>
static bool microphone_inference_start(uint32_t n_samples)<br>
{<br>
inference.buffers[0] = (signed short *)malloc(n_samples * sizeof(signed short));</p>
<p dir="ltr"> if (inference.buffers[0] == NULL) {<br>
return false;<br>
}</p>
<p dir="ltr"> inference.buffers[1] = (signed short *)malloc(n_samples * sizeof(signed short));</p>
<p dir="ltr"> if (inference.buffers[1] == NULL) {<br>
free(inference.buffers[0]);<br>
return false;<br>
}</p>
<p dir="ltr"> sampleBuffer = (signed short *)malloc((n_samples >> 1) * sizeof(signed short));</p>
<p dir="ltr"> if (sampleBuffer == NULL) {<br>
free(inference.buffers[0]);<br>
free(inference.buffers[1]);<br>
return false;<br>
}</p>
<p dir="ltr"> inference.buf_select = 0;<br>
inference.buf_count = 0;<br>
inference.n_samples = n_samples;<br>
inference.buf_ready = 0;</p>
<p dir="ltr"> // configure the data receive callback<br>
PDM.onReceive(&pdm_data_ready_inference_callback);</p>
<p dir="ltr"> PDM.setBufferSize((n_samples >> 1) * sizeof(int16_t));</p>
<p dir="ltr"> // initialize PDM with:<br>
// - one channel (mono mode)<br>
// - a 16 kHz sample rate<br>
if (!PDM.begin(1, EI_CLASSIFIER_FREQUENCY)) {<br>
ei_printf("Failed to start PDM!");<br>
}</p>
<p dir="ltr"> // set the gain, defaults to 20<br>
PDM.setGain(127);</p>
<p dir="ltr"> record_ready = true;</p>
<p dir="ltr"> return true;<br>
}</p>
<p dir="ltr">/**<br>
* @brief Wait on new data<br>
*<br>
* @return True when finished<br>
*/<br>
static bool microphone_inference_record(void)<br>
{<br>
bool ret = true;</p>
<p dir="ltr"> if (inference.buf_ready == 1) {<br>
ei_printf(<br>
"Error sample buffer overrun. Decrease the number of slices per model window "<br>
"(EI_CLASSIFIER_SLICES_PER_MODEL_WINDOW)\n");<br>
ret = false;<br>
}</p>
<p dir="ltr"> while (inference.buf_ready == 0) {<br>
delay(1);<br>
}</p>
<p dir="ltr"> inference.buf_ready = 0;</p>
<p dir="ltr"> return ret;<br>
}</p>
<p dir="ltr">/**<br>
* Get raw audio signal data<br>
*/<br>
static int microphone_audio_signal_get_data(size_t offset, size_t length, float *out_ptr)<br>
{<br>
numpy::int16_to_float(&inference.buffers[inference.buf_select ^ 1][offset], out_ptr, length);</p>
<p dir="ltr"> return 0;<br>
}</p>
<p dir="ltr">/**<br>
* @brief Stop PDM and release buffers<br>
*/<br>
static void microphone_inference_end(void)<br>
{<br>
PDM.end();<br>
free(inference.buffers[0]);<br>
free(inference.buffers[1]);<br>
free(sampleBuffer);<br>
}</p>
<p dir="ltr">#if !defined(EI_CLASSIFIER_SENSOR) || EI_CLASSIFIER_SENSOR != EI_CLASSIFIER_SENSOR_MICROPHONE<br>
#error "Invalid model for current sensor."<br>
#endif</p>
</body></html>
Comments