A project utilizing the motion detection and sound functions of the toio robots. Each toio bot will detect the five different motions mentioned in the section above; each motion will correlate to a different sound. When Person A performs a motion on Bot A, Bot B will play the corresponding sound. Additions that may be utilized are LED lights, identification, limbs, etc. The concept explores the absence of sight, a sense people often become highly reliant on. The exploration of disabilities and the inclusivity of physical telepresence is the direction I would like to go in; further down the line, we could add vibration feedback to make the experience accessible to users who are deaf or hard of hearing.
// Credit: built with the p5.toio library — https://tetunori.github.io/p5.toio/
/**
 * p5.js entry point: create a full-window canvas and paint a
 * light-grey (220) backdrop once at startup.
 */
function setup() {
  const canvasW = windowWidth;
  const canvasH = windowHeight;
  createCanvas(canvasW, canvasH);
  background(220);
}
// Cubes are appended here in connection order by mouseClicked():
// index 0 is the sender (motion source), index 1 is the receiver (sound player).
const connectedCubeArray = [];

// Sound ID passed to playSingleNote() for each detected posture.
// Frozen so the shared mapping cannot be mutated at runtime.
// NOTE(review): the original note-name labels (C4, D4, ...) do not match
// standard MIDI numbering (0x50 = 80 = G#5, not C4) — verify against the
// toio Core Cube sound specification before relying on the names.
const postures = Object.freeze({
  up: 0x50,
  down: 0x52,
  right: 0x54,
  left: 0x55,
  front: 0x57,
  back: 0x59,
});
/**
 * Wire posture changes on one cube to sounds on another: whenever cube1
 * reports a new orientation, cube2 plays the matching note from `postures`
 * with a duration of 0x1E. Unrecognized posture strings are ignored.
 * @param {Object} cube1 - the sender cube whose orientation is watched (may be null/undefined).
 * @param {Object} cube2 - the receiver cube that plays the sound (may be null/undefined).
 */
function getPosture(cube1, cube2) {
  const eventName = 'sensorposturechange';
  // Posture string reported by p5.toio -> sound ID to play.
  const noteFor = new Map([
    ['top', postures.up],
    ['bottom', postures.down],
    ['right', postures.right],
    ['left', postures.left],
    ['front', postures.front],
    ['back', postures.back],
  ]);
  cube1?.addEventListener(eventName, (posture) => {
    console.log(eventName, posture);
    if (noteFor.has(posture)) {
      cube2?.playSingleNote(noteFor.get(posture), 0x1E);
    }
  });
}
/**
 * p5.js mouse handler: each click connects one more toio cube over BLE.
 * The first connected cube lights white (the motion sender), the second
 * lights green (the sound receiver); once both are present, posture events
 * on the first are wired to sounds on the second via getPosture().
 */
function mouseClicked() {
  P5tCube.connectNewP5tCube()
    .then((cube) => {
      connectedCubeArray.push(cube);
      const [cube1, cube2] = connectedCubeArray;
      // Optional chaining guards the first click, when only one cube is
      // connected (the original called cube2.turnLightOn unconditionally
      // and threw a TypeError inside the .then).
      cube1?.turnLightOn('white');
      cube2?.turnLightOn('#4CAF50');
      if (cube1 && cube2) {
        getPosture(cube1, cube2);
      }
    })
    .catch((err) => {
      // Surface BLE/connection failures instead of leaving a floating rejection.
      console.error('toio cube connection failed:', err);
    });
}