Sohaib Arif, Alex Polonsky, Juanito Yu-Co, David W
Published under the MIT license

Farmaid: Plant Disease Detection Robot

Robot that drives around autonomously in greenhouse environment and identifies diseases.

Advanced · Full instructions provided · Over 2 days · 5,113 views
Farmaid: Plant Disease Detection Robot

Things used in this project

Hardware components

Mountain Ark SR 13 chassis
×1
Raspberry Pi 3 Model B
Raspberry Pi 3 Model B
×2
Arduino Nano R3
Arduino Nano R3
×2
Jumper wires (generic)
Jumper wires (generic)
×1
SainSmart Wide Angle Fish-Eye Camera Lenses for Raspberry Pi Arduino
×1
ibt-2 h-bridge
×4

Software apps and online services

TensorFlow
TensorFlow
Arduino IDE
Arduino IDE

Story

Read more

Schematics

Farmaid Arduino Wiring

The tool we used did not have the IBT-2 and some other components but the basic wiring is the same. Please see the doc for details.

Code

getArduinoData.py

Python
# import curses and GPIO
import curses
import serial
import time
from picamera.array import PiRGBArray
from picamera import PiCamera
import cv2
import numpy as np
import datetime

# --- Hardware setup -----------------------------------------------------------
# Serial link to the Arduino motor controller.  Single-byte commands:
# b'1' forward, b'2' reverse, b'3' left, b'4' right, b'5' pause,
# b'6' speed up, b'7' slow down (must match the Arduino sketch).
ser = serial.Serial("/dev/ttyACM0", 9600)
cap = cv2.VideoCapture(0)  # USB camera fallback

piCam = False
# Prefer the Pi camera when one is attached; otherwise fall back to USB.
try:
    camera = PiCamera()
    camera.resolution = (224, 224)
    camera.framerate = 20
    rawCapture = PiRGBArray(camera, size=(224, 224))
    piCam = True
except Exception:  # no Pi camera (or picamera failed to initialize)
    print("Pi camera does not exist, using USB camera")

# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
screen = curses.initscr()
curses.noecho()
curses.cbreak()
screen.keypad(True)

keyRec = open('key_strokes.txt', 'w+')

# One-hot label format:
# ['up', 'down', 'left', 'right', 'pause', 'speedup', 'slowdown']
PAUSE = [0, 0, 0, 0, 1, 0, 0]

# Pressed character -> (serial command, one-hot label).
KEY_MAP = {
    ord('w'): (b'1', [1, 0, 0, 0, 0, 0, 0]),
    ord('s'): (b'2', [0, 1, 0, 0, 0, 0, 0]),
    ord('a'): (b'3', [0, 0, 1, 0, 0, 0, 0]),
    ord('d'): (b'4', [0, 0, 0, 1, 0, 0, 0]),
    ord(' '): (b'5', [0, 0, 0, 0, 1, 0, 0]),
    ord('q'): (b'6', [0, 0, 0, 0, 0, 1, 0]),
    ord('e'): (b'7', [0, 0, 0, 0, 0, 0, 1]),
}


def record_sample(image_np, key, time_diff, save_every):
    """Append one (label, frame, dt) training sample and checkpoint periodically."""
    train_data.append({"input": key, "image": image_np, "time_diff": time_diff})
    keyRec.write(str(key) + "\n")
    # Periodic checkpoint so a crash does not lose the whole session.
    if len(train_data) % save_every == 0:
        np.save("train_data.npy", train_data)


train_data = []
curr_time = time.time()
try:
    if piCam:
        for frame in camera.capture_continuous(rawCapture, format="bgr",
                                               use_video_port=True):
            image_np = np.array(frame.array)
            rawCapture.truncate(0)  # reset the stream buffer for the next frame

            char = screen.getch()
            prev_time = curr_time
            curr_time = time.time()

            if char == ord('x'):
                # Stop the robot, save everything, and quit.
                # (Bug fix: the original broke out of the inner for-loop only,
                # so an enclosing while-loop restarted capture_continuous
                # instead of exiting the program.)
                ser.write(b'5')
                np.save("train_data.npy", train_data)
                break

            cmd, key = KEY_MAP.get(char, (None, PAUSE))
            if cmd is not None:  # unknown key: record "pause", send nothing
                ser.write(cmd)
            record_sample(image_np, key, curr_time - prev_time, save_every=50)
    else:
        # No Pi camera: poll the USB camera instead.
        while True:
            ret, image_np = cap.read()

            char = screen.getch()
            prev_time = curr_time
            curr_time = time.time()

            if char == ord('x'):
                np.save("train_data.npy", train_data)
                ser.write(b'5')
                break

            # Bug fix: the original default here was a 5-element list while all
            # other labels have 7 elements; normalized to the 7-element format.
            cmd, key = KEY_MAP.get(char, (None, PAUSE))
            if cmd is not None:
                ser.write(cmd)
            record_sample(image_np, key, curr_time - prev_time, save_every=100)
finally:
    # Close down curses properly, inc turn echo back on!  Cleanup happens here
    # exactly once (the original also tore curses down inside the 'x'
    # handlers, making these calls run a second time after endwin()).
    keyRec.close()
    curses.nocbreak(); screen.keypad(0); curses.echo()
    curses.endwin()

keyboardArduino.py

Python
import curses
import serial
import numpy as np
# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
screen = curses.initscr()
curses.noecho()
curses.cbreak()
screen.keypad(True)

keyRec = open('key_strokes.txt', 'w+')
ser = serial.Serial("/dev/ttyUSB0", "9600")

# One-hot format: ['up', 'down', 'left', 'right', 'pause', 'speedup', 'slowdown']
# Start in "pause" so an unknown first keypress cannot hit an undefined `key`
# (the original raised NameError at keyRec.write in that case).
key = [0, 0, 0, 0, 1, 0, 0]
try:
    while True:
        char = screen.getch()
        if char == ord(' '):
            ser.write(b'5')
            key = [0, 0, 0, 0, 1, 0, 0]

        elif char == ord('q'):
            ser.write(b'6')
            key = [0, 0, 0, 0, 0, 1, 0]

        elif char == ord('e'):
            ser.write(b'7')
            key = [0, 0, 0, 0, 0, 0, 1]

        elif char == ord('w'):
            ser.write(b'1')
            key = [1, 0, 0, 0, 0, 0, 0]

        elif char == ord('s'):
            ser.write(b'2')
            key = [0, 1, 0, 0, 0, 0, 0]

        elif char == ord('a'):
            ser.write(b'3')
            key = [0, 0, 1, 0, 0, 0, 0]

        elif char == ord('d'):
            ser.write(b'4')
            key = [0, 0, 0, 1, 0, 0, 0]
        elif char == ord('x'):
            # 'x' exits; the finally block restores the terminal.
            break
        else:
            print("unknown command, pausing, key not recorded")
            ser.write(b'5')  # bug fix: serial.write() needs bytes, not str
        keyRec.write(str(key) + "\n")
finally:
    # Close down curses properly, inc turn echo back on!
    keyRec.close()
    curses.nocbreak(); screen.keypad(0); curses.echo()
    curses.endwin()

test_training_file.py

Python
Tests the keyboard/serial control loop (note: despite the filename, this script is a duplicate of keyboardArduino.py and does not use the trained model)
import curses
import serial
import numpy as np
# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
# Get the curses window, turn off echoing of keyboard to screen, turn on
# instant (no waiting) key response, and use special values for cursor keys
screen = curses.initscr()
curses.noecho()
curses.cbreak()
screen.keypad(True)

keyRec = open('key_strokes.txt', 'w+')
ser = serial.Serial("/dev/ttyUSB0", "9600")

# One-hot format: ['up', 'down', 'left', 'right', 'pause', 'speedup', 'slowdown']
# Start in "pause" so an unknown first keypress cannot hit an undefined `key`
# (the original raised NameError at keyRec.write in that case).
key = [0, 0, 0, 0, 1, 0, 0]
try:
    while True:
        char = screen.getch()
        if char == ord(' '):
            ser.write(b'5')
            key = [0, 0, 0, 0, 1, 0, 0]

        elif char == ord('q'):
            ser.write(b'6')
            key = [0, 0, 0, 0, 0, 1, 0]

        elif char == ord('e'):
            ser.write(b'7')
            key = [0, 0, 0, 0, 0, 0, 1]

        elif char == ord('w'):
            ser.write(b'1')
            key = [1, 0, 0, 0, 0, 0, 0]

        elif char == ord('s'):
            ser.write(b'2')
            key = [0, 1, 0, 0, 0, 0, 0]

        elif char == ord('a'):
            ser.write(b'3')
            key = [0, 0, 1, 0, 0, 0, 0]

        elif char == ord('d'):
            ser.write(b'4')
            key = [0, 0, 0, 1, 0, 0, 0]
        elif char == ord('x'):
            # 'x' exits; the finally block restores the terminal.
            break
        else:
            print("unknown command, pausing, key not recorded")
            ser.write(b'5')  # bug fix: serial.write() needs bytes, not str
        keyRec.write(str(key) + "\n")
finally:
    # Close down curses properly, inc turn echo back on!
    keyRec.close()
    curses.nocbreak(); screen.keypad(0); curses.echo()
    curses.endwin()

drive_model.py

Python
the driving model
from tensorflow.python.keras.layers import Input
from tensorflow.python.keras.models import Model
from tensorflow.python.keras.layers import Convolution2D, MaxPool2D, Reshape
from tensorflow.python.keras.layers import Dropout, Flatten, Dense
from tensorflow.python.keras.callbacks import ModelCheckpoint, EarlyStopping
import tensorflow as tf
import os
import numpy as np

def donkey_model():
    """Donkey Car-style CNN drive classifier.

    Input ('img_in'): a 224x224x3 camera frame.
    Output ('control_out'): softmax probabilities over 5 drive classes,
    trained with categorical cross-entropy.
    """
    img_in = Input(shape=(224, 224, 3), name='img_in')
    x = img_in

    # Convolution2D class name is an alias for Conv2D
    x = Convolution2D(filters=24, kernel_size=(5, 5), strides=(2, 2), activation='relu')(x)
    x = Convolution2D(filters=32, kernel_size=(5, 5), strides=(2, 2), activation='relu')(x)
    x = Convolution2D(filters=64, kernel_size=(5, 5), strides=(2, 2), activation='relu')(x)
    x = Convolution2D(filters=64, kernel_size=(3, 3), strides=(2, 2), activation='relu')(x)
    x = Convolution2D(filters=64, kernel_size=(3, 3), strides=(1, 1), activation='relu')(x)

    x = Flatten(name='flattened')(x)
    x = Dense(units=100, activation='linear')(x)
    x = Dropout(rate=.1)(x)
    x = Dense(units=50, activation='linear')(x)
    x = Dropout(rate=.1)(x)

    # Bug fix: the head used activation='linear', but categorical_crossentropy
    # expects a probability distribution -- softmax makes the loss
    # well-defined (argmax-based inference is unchanged).
    control_out = Dense(units=5, activation='softmax', name='control_out')(x)

    model = Model(inputs=[img_in], outputs=[control_out])

    model.compile(optimizer='adam',
                  loss={'control_out': 'categorical_crossentropy'},
                  metrics=['acc'])

    return model

def default_linear():
    """Fully convolutional version of the default linear model.

    Input ('img_in'): a 224x224x3 camera frame.
    Output ('control_out'): softmax probabilities over 5 drive classes
    (the final 5-filter conv collapses to 5 values after pooling).
    """
    img_in = Input(shape=(224, 224, 3), name='img_in')
    x = img_in

    # Convolution2D class name is an alias for Conv2D
    x = Convolution2D(filters=64, kernel_size=(5, 5), strides=(2, 2), activation='elu')(x) #output shape 110x110

    x = Convolution2D(filters=64, kernel_size=(3, 3), strides=(2, 2), activation='elu')(x) #output shape 27x27
    x = MaxPool2D(pool_size=(2,2))(x)
    x = Convolution2D(filters=64, kernel_size=(3, 3), strides=(2, 2), activation='relu')(x) #output shape 7x7
    x = MaxPool2D(pool_size=(2,2))(x)
    x = Convolution2D(filters=5, kernel_size=(3,3), strides=(2, 2), activation='relu')(x)
    x = MaxPool2D(pool_size=(2,2))(x) #output 5x5
    features = Flatten(name='flattened')(x)
    # Bug fix: the raw flattened ReLU activations were fed straight into
    # categorical_crossentropy, which expects probabilities.  Apply a softmax;
    # the layer keeps the name 'control_out' so the compile() loss dict and
    # existing callers still match.
    control_out = tf.keras.layers.Softmax(name='control_out')(features)

    model = Model(inputs=[img_in], outputs=[control_out])

    model.compile(optimizer='adam',
                  loss={'control_out': 'categorical_crossentropy'},
                  metrics=['acc'])

    return model

def main():
    """Train the drive model on samples recorded by getArduinoData.py."""
    # allow_pickle=True is required on NumPy >= 1.16.3 because
    # train_data.npy holds an object array of dicts, not plain numbers.
    data = np.load("train_data.npy", encoding='latin1', allow_pickle=True)

    # NOTE(review): the data-collection scripts record 7-element one-hot keys
    # while the models output 5 classes -- confirm the label format used for
    # training before relying on the accuracy numbers.
    images = np.array([sample['image'] for sample in data])
    labels = np.array([sample['input'] for sample in data])

    model = default_linear()
    print(model.summary())

    model.fit(images, labels, batch_size=20, epochs=500, validation_split=0.2)
    model.save_weights("training_1/model.h5")

    # Quick sanity check: predicted vs. recorded class on the first 10 frames.
    preds = model.predict(images[:10])
    for pred, label in zip(preds, labels[:10]):
        print(np.argmax(pred), np.argmax(label))

SonarMovement.ino

Arduino
sonar movement code for arduino
// ---------------------------------------------------------------------------
// This example shows how to use NewPing's ping_timer method which uses the Timer2 interrupt to get the
// ping time. The advantage of using this method over the standard ping method is that it permits a more
// event-driven sketch which allows you to appear to do two things at once. An example would be to ping
// an ultrasonic sensor for a possible collision while at the same time navigating. This allows a
// properly developed sketch to multitask. Be aware that because the ping_timer method uses Timer2,
// other features or libraries that also use Timer2 would be effected. For example, the PWM function on
// pins 3 & 11 on Arduino Uno (pins 9 and 11 on Arduino Mega) and the Tone library. Note, only the PWM
// functionality of the pins is lost (as they use Timer2 to do PWM), the pins are still available to use.
// NOTE: For Teensy/Leonardo (ATmega32U4) the library uses Timer4 instead of Timer2.
// ---------------------------------------------------------------------------
#include <NewPing.h>

#define TRIGGER_PIN_F   13 // Arduino pin tied to trigger pin on ping sensor.
#define ECHO_PIN_F      12 // Arduino pin tied to echo pin on ping sensor.
#define TRIGGER_PIN_R     4
#define ECHO_PIN_R       2
#define MAX_DISTANCE 200 // Maximum distance we want to ping for (in centimeters). Maximum sensor distance is rated at 400-500cm.
#define RIGHT_FORWARD 9
#define RIGHT_REVERSE 10
#define LEFT_FORWARD 5
#define LEFT_REVERSE 6

NewPing sonar_f(TRIGGER_PIN_F, ECHO_PIN_F, MAX_DISTANCE); // Front sonar: NewPing setup of pins and maximum distance.
NewPing sonar_r(TRIGGER_PIN_R, ECHO_PIN_R, MAX_DISTANCE); // Rear sonar: NewPing setup of pins and maximum distance.

unsigned int pingSpeed = 50; // How frequently are we going to send out a ping (in milliseconds). 50ms would be 20 times a second.
unsigned long pingTimer;     // Holds the next ping time.
float dist_f;                // Latest front distance in cm, written by echoCheck().
float dist_r;                // Latest rear distance in cm, written by echoCheck().
int speed = 100;             // Target motor PWM; ramped up in steps by forward()/reverse().
int thresh = 40;             // NOTE(review): declared but never read in this sketch.
bool movingForward = false;  // NOTE(review): only referenced in commented-out code below.
bool movingReverse = false;  // NOTE(review): only referenced in commented-out code below.
void setup() {
  Serial.begin(19200); // Open serial monitor at 19200 baud to see ping results (the original comment said 115200, which did not match the code).
  pingTimer = millis(); // Start now.
}

void loop() {
  // Notice how there's no delays in this sketch to allow you to do other processing in-line while doing distance pings.
  if (millis() >= pingTimer) {   // pingSpeed milliseconds since last ping, do another ping.
    pingTimer += pingSpeed;      // Set the next ping time.
    sonar_f.ping_timer(echoCheck); // Send out the ping, calls "echoCheck" function every 24uS where you can check the ping status.
    sonar_r.ping_timer(echoCheck);
    // NOTE(review): both sensors share the single Timer2-driven ping_timer
    // and the same callback; starting the rear ping immediately after the
    // front one may cancel or interleave the front measurement -- confirm
    // against the NewPing documentation (it supports one active timed ping
    // per timer at a time).
  }
  // Do other stuff here, really. Think of it as multi-tasking.
}
// Ramp both motors forward up to `speed` PWM counts in steps of 20, pausing
// `dly` ms between steps for a smooth start; reverse pins are held at 0.
// Bug fix: the function was declared `float` but never returned a value,
// which is undefined behavior in C++.  It performs side effects only, so the
// correct return type is void (callers ignore the result).
void forward(int speed, int dly=40){
  //movingForward = true;
  //movingReverse = false;
  for(int i=0; i< speed; i+= 20){
    analogWrite(RIGHT_FORWARD, i);
    analogWrite(LEFT_FORWARD, i);
    analogWrite(RIGHT_REVERSE, 0);
    analogWrite(LEFT_REVERSE, 0);
    delay(dly);
  }
}
// Ramp both motors in reverse up to `speed` PWM counts in steps of 20,
// pausing `dly` ms between steps; forward pins are held at 0.
// Bug fix: declared `float` with no return value (undefined behavior in
// C++); changed to void since it performs side effects only.
void reverse(int speed, int dly=40){
  //movingForward = false;
  //movingReverse = true;
  for(int i=0; i< speed; i+= 20){
    analogWrite(RIGHT_FORWARD, 0);
    analogWrite(LEFT_FORWARD, 0);
    analogWrite(RIGHT_REVERSE, i);
    analogWrite(LEFT_REVERSE, i);
    delay(dly);
  }
}
// Stop immediately: drive all four H-bridge inputs to 0.
// Bug fix: declared `float` with no return value (undefined behavior in
// C++); changed to void since it performs side effects only.
void pause(){
  analogWrite(RIGHT_FORWARD, 0);
  analogWrite(LEFT_FORWARD, 0);
  analogWrite(RIGHT_REVERSE, 0);
  analogWrite(LEFT_REVERSE, 0);
  //movingForward = false;
  //movingReverse = false;
}
void echoCheck() { // Timer2 interrupt calls this function every 24uS where you can check the ping status.
  // Don't do anything here!
  // NOTE(review): despite the warning above, this callback does Serial
  // prints, motor ramps and delay() calls.  It runs in Timer2 interrupt
  // context, where delay() (which depends on timer interrupts) can stall --
  // confirm on hardware; the usual pattern is to set a flag here and act on
  // it in loop().
  if (sonar_f.check_timer() && sonar_r.check_timer()) { // This is how you check to see if the ping was received.
    // NOTE(review): this requires BOTH pings to have completed in the same
    // 24us check; if they rarely finish together this branch may seldom run.
    // Here's where you can add code.
    dist_f = sonar_f.ping_result / US_ROUNDTRIP_CM; // integer uS -> whole cm (truncating division)
    dist_r = sonar_r.ping_result / US_ROUNDTRIP_CM;
    if(dist_f > 60){
      // More than 60 cm clear ahead: drive forward.
      Serial.println("moving forward");
      forward(speed);
    }
    else if(dist_r > 60){
      // Blocked ahead but clear behind: back up.
      Serial.println("moving reverse");
      reverse(speed);

    }
    else{
      // Obstacles in both directions: stop.
      Serial.println("pausing");
      pause();
    }
    Serial.print("Ping front: ");Serial.print(dist_f);  Serial.print("cm  "); // Ping returned, uS result in ping_result, convert to cm with US_ROUNDTRIP_CM.
    Serial.print("Ping rear: ");Serial.print(dist_r);  Serial.println("cm"); 
  }
  // Don't do anything here!
}

Farmaid Bot Code

The motor controls folder: 1. keyboardArduino.py — tests the motor controls without recording data; 2. getData.py — gathers data into an .npy file to train the model using TensorFlow/Keras. The videoClassification folder: 1. pi_classification.py — classifies images of plants.

Credits

Sohaib Arif

Sohaib Arif

3 projects • 34 followers
Alex Polonsky

Alex Polonsky

1 project • 27 followers
Juanito Yu-Co

Juanito Yu-Co

1 project • 21 followers
David W

David W

2 projects • 23 followers

Comments