20. Project development¶
My final project is going to be a second iteration of an artistic installation that I worked on for my Fabricademy Certification. I would like to re-design it, create my own boards that would be easy to set up in multiple locations, and add an interactive input element.
Research¶
More research on the ideation and the process can be seen on my Fabricademy website.
In Fabricademy, I used commercial boards and components:
- ESP32 Wroom DA
- Relays
- 12V Pumps
- 12V Power Supply
- LED Strip (Neopixel)
These were connected to ThingSpeak, which served as my web server, pulling data from a pre-trained Facial Emotion Recognition (FER) AI model.
In the first iteration of the project, the input was the FER and the output was the soft robotics, networked through ThingSpeak.
In this iteration I would like to add a physical input alongside the FER, possibly a capacitive sensor, design my own boards, and build a setup that is sturdy and easy to install.
Boards:
- Controller: ESP32-S3-Wroom-1
- Input: Capacitive Sensors
- Output: Pumps
First Iteration¶
Electronic Components¶
| Component | Quantity | Price |
| --- | --- | --- |
| ESP32 microcontroller | 1 | $20 |
| WS2812B LED strip | 8 meters / 960 LEDs | $84 |
| 12V air pump | 2 | $20 |
| Relays | 2 | $8 |
Code¶
#include <WiFi.h>
#include <HTTPClient.h>
#include <Adafruit_NeoPixel.h>
const char* ssid = "Iaac-Wifi P100";
const char* password = "EnterIaac22@";
const char* thingspeak_read_api_key = "M6NP3QNQUWXCWY9F";
const char* thingspeak_channel_id = "2406762";
#define LED_PIN 27
#define NUM_LEDS 400
Adafruit_NeoPixel strip(NUM_LEDS, LED_PIN, NEO_GRB + NEO_KHZ800);
int currentRed = 0, currentGreen = 0, currentBlue = 0;
const int relayPin1 = 33; // Pump 1
const int relayPin2 = 13; // Pump 2
unsigned long previousMillis = 0;
const long interval = 5000;
unsigned long lastPumpMillis1 = 0;
unsigned long lastPumpMillis2 = 0;
bool pumpState1 = false;
bool pumpState2 = false;
float lastIntensity = -1;
long colorData = 0;
float intensity = 0;
bool transitioning = false;
int emotionNumber = 0;
void setup() {
Serial.begin(115200);
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED) {
delay(500);
Serial.println("Connecting to WiFi...");
}
Serial.println("WiFi Connected Successfully");
Serial.print("IP Address: ");
Serial.println(WiFi.localIP());
strip.begin();
strip.show();
pinMode(relayPin1, OUTPUT);
pinMode(relayPin2, OUTPUT);
digitalWrite(relayPin1, LOW);
digitalWrite(relayPin2, LOW);
}
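// Non-blocking fade: steps the whole strip from its current colour towards the
// target colour fetched from ThingSpeak (scaled by intensity) over 50 steps of 10 ms.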
void transitionToColor() {
static unsigned long lastTransitionMillis = 0;
static int step = 0;
const int transitionSteps = 50;
const int stepDelay = 10;
if (!transitioning) return;
unsigned long currentMillis = millis();
if (currentMillis - lastTransitionMillis > stepDelay) {
float progress = (float)step / (float)transitionSteps;
int redValue = currentRed + (int)((colorData >> 16 & 0xFF) * intensity - currentRed) * progress;
int greenValue = currentGreen + (int)((colorData >> 8 & 0xFF) * intensity - currentGreen) * progress;
int blueValue = currentBlue + (int)((colorData & 0xFF) * intensity - currentBlue) * progress;
for (int i = 0; i < NUM_LEDS; i++) {
strip.setPixelColor(i, strip.Color(redValue, greenValue, blueValue));
}
strip.show();
lastTransitionMillis = millis();
step++;
if (step > transitionSteps) {
transitioning = false;
step = 0;
currentRed = redValue;
currentGreen = greenValue;
currentBlue = blueValue;
}
}
}
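// Drives the relays according to the detected emotion: pumps stay off below intensity 0.3,
// pump 1 runs for 5 s and pump 2 for 10 s before being switched off again.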
void pumpControl() {
unsigned long currentMillis = millis();
Serial.println("PUMP CONTROL");
// If intensity is less than 0.3, keep pumps off
if (intensity <= 0.3) {
digitalWrite(relayPin1, LOW);
digitalWrite(relayPin2, LOW);
Serial.println("LOW INTENSITY PUMP OFF");
return;
}
// Turn on pumps based on detected emotion
if (pumpState1 && (currentMillis - lastPumpMillis1 > 5000)) { // Pump on duration
digitalWrite(relayPin1, LOW); // Turn pump 1 off
pumpState1 = false;
Serial.println("Pump 1 turned off");
return;
}
if (pumpState2 && (currentMillis - lastPumpMillis2 > 10000)) { // Pump on duration
digitalWrite(relayPin2, LOW); // Turn pump 2 off
pumpState2 = false;
Serial.println("Pump 2 turned off");
return;
}
if (!pumpState1 && !pumpState2) {
// Check detected emotion and turn on respective pumps
Serial.println("pumping");
Serial.println(emotionNumber);
if (intensity > 0.3) {
if (emotionNumber == 1) { // Angry
digitalWrite(relayPin1, HIGH); // Turn pump 1 on
digitalWrite(relayPin2, HIGH); // Turn pump 2 on
pumpState1 = true;
pumpState2 = true;
lastPumpMillis1 = currentMillis;
lastPumpMillis2 = currentMillis;
Serial.println("Pump 1 and 2 turned on (Angry)");
} else if (emotionNumber == 2) { // Sad
digitalWrite(relayPin2, HIGH); // Turn pump 2 on
pumpState2 = true;
lastPumpMillis2 = currentMillis;
Serial.println("Pump 2 turned on (Sad)");
} else if (emotionNumber == 3) { // Happy
digitalWrite(relayPin1, HIGH); // Turn pump 1 on
pumpState1 = true;
lastPumpMillis1 = currentMillis;
Serial.println("Pump 1 turned on (Happy)");
} else if (emotionNumber == 4) { // Surprise
digitalWrite(relayPin1, HIGH); // Turn pump 1 on
pumpState1 = true;
lastPumpMillis1 = currentMillis;
Serial.println("Pump 1 turned on (Surprise)");
} else if (emotionNumber == 5) { // Disgust
digitalWrite(relayPin2, HIGH); // Turn pump 2 on
pumpState2 = true;
lastPumpMillis2 = currentMillis;
Serial.println("Pump 2 turned on (Disgust)");
} else if (emotionNumber == 6) { // Fear
digitalWrite(relayPin1, HIGH); // Turn pump 1 on
digitalWrite(relayPin2, HIGH); // Turn pump 2 on
pumpState1 = true;
pumpState2 = true;
lastPumpMillis1 = currentMillis;
lastPumpMillis2 = currentMillis;
Serial.println("Pump 1 and 2 turned on (Fear)");
}
}
} else {
Serial.println("DOING NOTHING");
}
}
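// Polls the latest ThingSpeak feed entry every `interval` milliseconds and parses
// field1 (hex colour), field2 (intensity) and field3 (emotion number),
// starting a new colour transition whenever the values change.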
void fetchData() {
unsigned long currentMillis = millis();
if (currentMillis - previousMillis > interval) {
previousMillis = currentMillis; // Reset the timer so only one request goes out per interval
HTTPClient http;
String serverPath = "http://api.thingspeak.com/channels/" + String(thingspeak_channel_id) + "/feeds/last.json?api_key=" + String(thingspeak_read_api_key);
http.begin(serverPath);
int httpResponseCode = http.GET();
if (httpResponseCode == 200) {
String payload = http.getString();
Serial.println("Received data from ThingSpeak:");
Serial.println(payload);
long newColorData = strtol(payload.substring(payload.indexOf("field1\":\"") + 9, payload.indexOf("\",\"field2\"")).c_str(), NULL, 16);
float newIntensity = payload.substring(payload.indexOf("field2\":\"") + 9, payload.indexOf("\"}")).toFloat();
int newEmotionNumber = payload.substring(payload.indexOf("field3\":\"") + 9, payload.indexOf("}")).toInt();
Serial.println(newEmotionNumber);
if (newIntensity == 0) { // No faces detected, turn off the light
colorData = 0;
intensity = 0;
} else if (newColorData != colorData || newIntensity != intensity || newEmotionNumber != emotionNumber) {
colorData = newColorData;
intensity = newIntensity;
emotionNumber = newEmotionNumber;
transitioning = true; // Start new transition
}
} else {
Serial.print("Error on HTTP request, code: ");
Serial.println(httpResponseCode);
}
http.end();
}
}
void loop() {
fetchData();
transitionToColor();
pumpControl();
}
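The code above covers only the read side of the ThingSpeak link. For reference, the write side (the FER script pushing its results to the channel) could look roughly like the minimal Python sketch below, assuming the same field mapping the ESP32 parses (field1 = hex colour, field2 = intensity, field3 = emotion number); the write API key is a placeholder, not the project's real key.

```python
# Sketch only: push one FER result to ThingSpeak (field mapping assumed from
# the ESP32 read code: field1 = hex colour, field2 = intensity, field3 = emotion number).
import requests

THINGSPEAK_WRITE_URL = "https://api.thingspeak.com/update"

def push_emotion(color_hex, intensity, emotion_number, write_api_key="YOUR_WRITE_API_KEY"):
    params = {
        "api_key": write_api_key,
        "field1": color_hex,        # e.g. "FF0000"
        "field2": round(intensity, 2),
        "field3": emotion_number,   # 1 = angry, 2 = sad, 3 = happy, ...
    }
    response = requests.get(THINGSPEAK_WRITE_URL, params=params, timeout=5)
    # ThingSpeak answers with the new entry id, or "0" if the update was rejected
    return response.ok and response.text.strip() != "0"
```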
Second Iteration¶
ESP32-S3-Wroom-1 Controller, Board Design¶
To design this board, I started from a board designed by Josep Marti, keeping his initial placements and adjusting them to my needs.
Components:
- ESP32-S3-Wroom-1
- Reset Button
- LED
- Boot Button
- USB
- As many exposed pins as possible (to create a shield later and give me flexibility in the inputs and outputs)
Schematic
PCB Design
Input PCB Design¶
| Component | Quantity |
| --- | --- |
| 10kOhm resistor | 6 |
| Male connectors | 15 |
PCB
Pressure Matrix Sensor Making
Output PCB Design¶
The design and making of the output board are explained in Week 12: Output Devices.
| Component | Quantity |
| --- | --- |
| MOSFET 50V 16A | 2 |
| 2-pin connector 5mm | 3 |
| 10kOhm resistor | 2 |
| 0Ohm resistor | 1 |
| Male connectors | 9 |
| External power supply 12V | 1 |
| Pump 12V | 2 |
Schematic
PCB
Networking¶
The ESP32 connects the pressure sensor matrix directly to the pumps: depending on which sensor points are activated and on their values, the pump intensity is mapped accordingly.
The code is activated by the Facial Emotion Recognition.
The system is meant to work as follows:
- A face is detected
- The detected emotion is marked on the face with a rectangle
- A text pops up saying "touch me"
- Pressure on the matrix is detected
- The pressure values and the intensity of the emotion are translated into pump intensity
- Depending on the case, a distinct text pops up
ESP32 Code
This code runs on the ESP32 microcontroller. Its functions are to:
- Connect to WiFi
- Establish a WebSocket connection to the server (allowing real-time data exchange)
- Read pressure matrix
- Control pumps based on pressure readings
#include <WebSocketsClient.h>
#include <WiFi.h>
// WiFi credentials
const char* ssid = "Iaac-Wifi";
const char* password = "EnterIaac22@";
// WebSocket server address
const char* webSocketServerAddress = "192.168.0.10";
const uint16_t webSocketServerPort = 5000; // Must match the port the Python WebSocket server listens on
// WebSocket client
WebSocketsClient webSocket;
// Flag to track whether we are connected to the WebSocket server
bool webSocketConnected = false;
// Pin definitions
#define PUMP1_PIN 17
#define PUMP2_PIN 36
#define numRows 3
#define numCols 3
#define sensorPoints numRows * numCols
int rows[] = { 4, 5, 6 }; // Analog input pins (ADC1 channels)
int cols[] = { 40, 41, 42 }; // Digital output pins (GPIO pins)
int incomingValues[sensorPoints] = {}; // Array to store sensor values
// Callback function for WebSocket events
void onWebSocketEvent(WStype_t type, uint8_t * payload, size_t length) {
switch(type) {
case WStype_DISCONNECTED:
Serial.println("Disconnected from WebSocket server");
webSocketConnected = false;
break;
case WStype_CONNECTED:
Serial.println("Connected to WebSocket server");
webSocketConnected = true;
break;
case WStype_TEXT:
Serial.print("Received message: ");
Serial.println((char*)payload);
break;
default:
break;
}
}
void setup() {
// Initialize Serial communication
Serial.begin(115200);
delay(100);
// Connect to WiFi
Serial.println();
Serial.println("Connecting to WiFi...");
WiFi.begin(ssid, password);
while (WiFi.status() != WL_CONNECTED) {
delay(500);
Serial.print(".");
}
Serial.println("");
Serial.println("WiFi connected");
Serial.println("IP address: ");
Serial.println(WiFi.localIP());
// Initialize WebSocket client
webSocket.onEvent(onWebSocketEvent);
// Initialize pump pins
pinMode(PUMP1_PIN, OUTPUT);
pinMode(PUMP2_PIN, OUTPUT);
// Initialize the pressure-matrix column pins as outputs (the rows are analog inputs)
for (int i = 0; i < numCols; i++) {
pinMode(cols[i], OUTPUT);
}
// Connect to WebSocket server
webSocket.begin(webSocketServerAddress, webSocketServerPort);
}
void loop() {
readPressureMatrix();
controlPumps();
webSocket.loop();
}
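// Scans the 3x3 pressure matrix: drives each column HIGH in turn and reads the
// analog value of every row into incomingValues.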
void readPressureMatrix() {
for (int colCount = 0; colCount < numCols; colCount++) {
digitalWrite(cols[colCount], HIGH); // Activate the current column
for (int rowCount = 0; rowCount < numRows; rowCount++) {
int index = colCount * numRows + rowCount;
incomingValues[index] = analogRead(rows[rowCount]); // Read analog value from the row
}
digitalWrite(cols[colCount], LOW); // Deactivate the current column
}
}
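// Maps each sensor reading to a 0-255 PWM value; pump 1 follows the strongest of
// points 1-5 and pump 2 the strongest of points 6-9.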
void controlPumps() {
int pump1Intensity = 0;
int pump2Intensity = 0;
for (int i = 0; i < sensorPoints; i++) {
int pressureIntensity = map(incomingValues[i], 0, 4095, 0, 255); // ESP32 ADC readings are 12-bit (0-4095)
if (i < 5) { // Points 1-5 control pump 1
pump1Intensity = max(pump1Intensity, pressureIntensity);
} else { // Points 6-9 control pump 2
pump2Intensity = max(pump2Intensity, pressureIntensity);
}
}
analogWrite(PUMP1_PIN, pump1Intensity);
analogWrite(PUMP2_PIN, pump2Intensity);
}
Python Code
This code runs on the laptop and does the following:
- Detects emotions using a webcam and classifies them (using a pre-trained model)
- Sets up a WebSocket server to receive and handle the messages
import cv2
import numpy as np
import dlib
from imutils import face_utils
from keras.models import load_model
from statistics import mode
from utils.datasets import get_labels
from utils.inference import draw_text, draw_bounding_box, apply_offsets, preprocess_input
import threading
import asyncio
import websockets
import json
# Constants
emotion_model_path = 'models/emotion_model.hdf5'
emotion_labels = get_labels('fer2013')
frame_window = 10
emotion_offsets = (20, 40)
# client_address = "192.168.0.17"
client_address = "172.16.22.121"
# Load models
detector = dlib.get_frontal_face_detector()
emotion_classifier = load_model(emotion_model_path)
emotion_target_size = emotion_classifier.input_shape[1:3]
emotion_window = []
# Shared variable for emotions
emotion_data = {'emotion': "", 'probability': 0.0}
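# WebSocket handler: updates the shared emotion_data dict from incoming JSON messages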
async def handle_emotion(websocket, path):
global emotion_data
try:
async for message in websocket:
data = json.loads(message)
emotion_data['emotion'] = data['emotion']
emotion_data['probability'] = data['probability']
print(f"Received emotion: {emotion_data['emotion']}, Probability: {emotion_data['probability']}")
except Exception as e:
print(f"Error: {e}")
def detect_emotions():
global emotion_data
cap = cv2.VideoCapture(0)
while cap.isOpened():
ret, bgr_image = cap.read()
if not ret:
continue
gray_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2GRAY)
rgb_image = cv2.cvtColor(bgr_image, cv2.COLOR_BGR2RGB)
faces = detector(rgb_image)
for face_coordinates in faces:
x1, x2, y1, y2 = apply_offsets(face_utils.rect_to_bb(face_coordinates), emotion_offsets)
gray_face = gray_image[y1:y2, x1:x2]
try:
gray_face = cv2.resize(gray_face, (emotion_target_size))
except:
continue
gray_face = preprocess_input(gray_face, True)
gray_face = np.expand_dims(gray_face, 0)
gray_face = np.expand_dims(gray_face, -1)
emotion_prediction = emotion_classifier.predict(gray_face)
emotion_probability = np.max(emotion_prediction)
emotion_label_arg = np.argmax(emotion_prediction)
emotion_text = emotion_labels[emotion_label_arg]
emotion_window.append(emotion_text)
if len(emotion_window) > frame_window:
emotion_window.pop(0)
try:
emotion_mode = mode(emotion_window)
except:
continue
emotion_data = {'emotion': emotion_text, 'probability': float(emotion_probability)}
# Control pumps based on the received emotion
control_pumps(emotion_data['emotion'], emotion_data['probability'])
bgr_image = cv2.cvtColor(rgb_image, cv2.COLOR_RGB2BGR)
# cv2.imshow('Emotion Detection', bgr_image)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cap.release()
cv2.destroyAllWindows()
def control_pumps(emotion, probability):
# Control pumps based on the received emotion and probability
if emotion == 'angry':
pump1_intensity = int(probability * 255)
pump2_intensity = 0
sentence = "Is it really worth it? Let it go."
elif emotion == 'sad':
pump1_intensity = 0
pump2_intensity = int(probability * 255)
sentence = "Its just a moment! Feel better soon."
elif emotion == 'happy':
pump1_intensity = int(probability * 255)
pump2_intensity = int(probability * 255)
sentence = "Yay! Lets party."
elif emotion == 'surprise':
pump1_intensity = int(probability * 255)
pump2_intensity = int(probability * 255 / 2)
sentence = "Wow."
else:
pump1_intensity = 0
pump2_intensity = 0
sentence = "What is feeling?"
# Apply pump intensities
# Replace these with your actual pump control logic
print(f"Pump 1 Intensity: {pump1_intensity}")
print(f"Pump 2 Intensity: {pump2_intensity}")
# Write the emotion, probability, and sentence to a text file
with open('emotion_sentences.txt', 'a') as file:
file.write(f"{sentence}\n")
if __name__ == "__main__":
threading.Thread(target=detect_emotions).start()
start_server = websockets.serve(handle_emotion, '0.0.0.0', 5000)
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
The ESP32 and the Python script communicate over a WebSocket channel, with the Python script acting as the server at 192.168.0.10 on port 5000.
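As written, the two sketches above only print what they receive; the emotion data itself is not yet pushed from the laptop to the board. One minimal way to do that with the same `websockets` server would be to keep track of connected clients and broadcast a small JSON payload. The `send_emotion` helper and the message format below are assumptions for illustration, not part of the current code.

```python
# Hypothetical extension of the Python server: remember connected ESP32 clients
# and push the latest emotion reading to them as JSON.
import json
import websockets

connected_clients = set()

async def handle_emotion(websocket, path):
    connected_clients.add(websocket)
    try:
        async for message in websocket:
            print(f"From ESP32: {message}")   # e.g. raw pressure-matrix values
    finally:
        connected_clients.discard(websocket)

async def send_emotion(emotion, probability):
    # Call from the asyncio event loop; from the detection thread, wrap it with
    # asyncio.run_coroutine_threadsafe(send_emotion(...), loop) instead.
    payload = json.dumps({"emotion": emotion, "probability": probability})
    for ws in list(connected_clients):
        try:
            await ws.send(payload)
        except websockets.ConnectionClosed:
            connected_clients.discard(ws)
```

On the ESP32 side, the `WStype_TEXT` case in `onWebSocketEvent` is where such a payload would be parsed.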
Assembly¶
3D Model of the case
Final¶
Tracking¶
Throughout the project, the most useful tool has been to 'document as you go' while working in parallel. In this project there was no room for spiral development, which is why the focus will be on spiralling through future developments.
Documenting while working through the weekly assignments made it easier to follow my own progress and make sure I stayed on track: I design, document, produce, document, and then check the documentation to make sure it matches what the project needs.
Future Developments¶
In the future, I would change the felted wool side of the matrix to layers of activated-charcoal-dyed wool felted together with regular wool, so I wouldn't have to add copper tape on that side. I would also change the recipe for the recycled cardboard to make it more flexible and elastic, giving the sensor a more sensitive range of values.
As for the text output, I would like to test creating a Flask server and building a better, more immersive interface.
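As a first step toward that interface, a minimal Flask sketch could simply serve the latest sentence written to `emotion_sentences.txt` by the detection script. The route, port, and fallback text below are illustrative assumptions, not part of the current project.

```python
# Sketch of a possible future Flask interface: returns the most recent sentence
# appended to emotion_sentences.txt by the emotion-detection script.
from flask import Flask

app = Flask(__name__)

@app.route("/")
def latest_sentence():
    try:
        with open("emotion_sentences.txt") as f:
            lines = [line.strip() for line in f if line.strip()]
        return lines[-1] if lines else "What is feeling?"
    except FileNotFoundError:
        return "No emotions recorded yet."

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5001)
```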