5Square — Final Project, PComp + ICM

Computational Media, Physical Computing

5Square

Make your own Music

Collaborators:

Amena Hayat

Max Horwich

Arnav Wagh

Please click the link to go to my previous blog post for the final project proposal:

GESTURAL GRAPHIC INTERFACE

 

This project consists of:
1. code built on the Tone.js library
2. animations for the visuals
3. a glove that adds effects to the music
———————————————————————————————–

Tone.js is a framework for creating interactive music in the browser. It provides advanced scheduling capabilities, synths and effects, and intuitive musical abstractions built on top of the Web Audio API.

We used Tone.js to create a musical interface that is easy, fun, interactive and engaging. The intended audience is a non-musician who wants to create music.
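
Before diving into the full sketch, here is the basic pattern everything below is built on: a minimal illustrative example, not part of our project code, using the same pre-v14 Tone.js API (e.g. toMaster()) as the sketch that follows.

// Minimal sketch of the Tone.js pattern used throughout this project:
// build a synth, route it to the output, and schedule it on the Transport.
var demoSynth = new Tone.MembraneSynth().toMaster();

// A Loop fires its callback on a musical interval ("4n" = quarter note),
// passing the exact audio-clock time at which to trigger the sound.
var demoLoop = new Tone.Loop(function(time) {
demoSynth.triggerAttackRelease("C2", "8n", time);
}, "4n");

Tone.Transport.bpm.value = 60; // every interval is relative to this tempo
demoLoop.start(0);             // schedule the loop from the beginning
Tone.Transport.start();        // nothing sounds until the Transport runs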

CODE

var kickOn = false;
var snareOn = false;
var pianoOn = false;
var bassOn = false;

//var upstate = false;
var cChordPlaying = false;
var fChordPlaying = false;
var gChordPlaying = false;
var amChordPlaying = false;

var cBassPlaying = false;
var fBassPlaying = false;
var gBassPlaying = false;
var amBassPlaying = false;

var serial;
var flex1 = 0,
flex2 = 0,
flex3 = 0,
flex4 = 0;

var pianoAnimation = false;

//KICK SOUND DEFINED
var kick = new Tone.MembraneSynth({
"envelope": {
"sustain": 0,
"attack": 0.02,
"decay": 0.8
},
"octaves": 10
}).toMaster();

//KICK LOOP SET
var kickPart = new Tone.Loop(function(time) {
kick.triggerAttack("C2");
}, "2n");

//SNARE FILTER
var snareFilter = new Tone.AutoFilter({
frequency: 1,
type: "sine",
depth: 1,
baseFrequency: 400,
octaves: 2.6,
filter: {
type: "bandpass",
rolloff: -12,
Q: 1
}
}).toMaster();

//SNARE SOUND DEFINED
var snare = new Tone.MetalSynth({
volume: -10,
frequency: 60,
envelope: {
attack: 0.001,
decay: 0.4,
release: 0.2
},
harmonicity: 5.1,
modulationIndex: 1,
resonance: 800,
octaves: 1.5
}).connect(snareFilter);

//SNARE LOOP SET
var snarePart = new Tone.Sequence(function(time, freq) {
snare.frequency.setValueAtTime(freq, time);
snare.triggerAttack(time);
}, [null, 350, null, 350], "2n");

//PIANO DELAY
var pianoDelay = new Tone.PingPongDelay({
"delayTime": "4t",
"maxDelayTime": 2,
"wet": .3,
"feedback": .1
}).toMaster();

//PIANO TONE DEFINED
var piano = new Tone.PolySynth(4, Tone.Synth, { //PolySynth: this (polyphony, voice, options) signature belongs to PolySynth, not MonoSynth
"volume": -7,
"oscillator": {
"partials": [1, 2, 1],
},
"envelope": {
"attack": 0.001,
"decay": 0.1,
"sustain": 0.3,
"release": 1
},
"portamento": 0.001
}).connect(pianoDelay);

//PIANO CHORDS DEFINED
/*
var cChord = ["C4", ["E4", "G4"]];
var gChord = [["B3", "D4"], "G4"];
var amChord = [["C4", "E4"], "A4"];
var fChord = [["C4"], "F4", ["A4"]];
*/
var cChord = ["C4", "E4", "G4"];
var gChord = ["B3", "D4", "G4"];
var amChord = ["C4", "E4", "A4"];
var fChord = ["C4", "F4", "A4"];

var ellipseCoord = {};

//PIANO LOOP SET

var pianoPart = new Tone.Sequence(function(time, note) {
piano.triggerAttackRelease(note, "16n", time);
//SEQUENCE OF CHORDS
}, [cChord]);
//pianoPart.probability = 0.5;

//Bass FFT & waveform
var fft = new Tone.FFT(32);
var spectrum = new Tone.Waveform(1024);

var bassDist = new Tone.Distortion({
"distortion": 0.4,
"oversample": "2x"
}).connect(spectrum).toMaster();

//BASS TONE DEFINED
var bass = new Tone.MonoSynth({
"volume": -10,
"envelope": {
"attack": 0.1,
"decay": 0.3,
"release": 2,
},
"filterEnvelope": {
"attack": 0.001,
"decay": 0.01,
"sustain": 0.5,
"baseFrequency": 200,
"octaves": 2.6
}
}).connect(bassDist);

//BASS LOOP SET
var bassPart = new Tone.Sequence(function(time, note) {
bass.triggerAttackRelease(note, "16n", time);
//SEQUENCE OF BASS NOTES
}, ["C2"]);
//bassPart.probability = 0.5;

//LEAD DELAY
var leadDelay = new Tone.PingPongDelay({
"delayTime": "8n",
"maxDelayTime": 1,
"feedback": 0.82,
"wet": .40

}).toMaster();

//LEAD TONE DEFINED
var leadPaint = new Tone.PolySynth({
"volume": -10,
"oscillator": {
"type": "square"
},
"envelope": {
"attack": 0.2
},
"portamento": 0.05

}).connect(leadDelay);

//FX SENDS
var delayKickSend = kick.send("delayKick", -Infinity);
var delaySnareSend = snare.send("delaySnare", -Infinity);
var crushSend = piano.send("crush", -Infinity);
var chebySend = bass.send("cheby", -Infinity);

var delayKick = new Tone.FeedbackDelay("4t", 0.38)
.receive("delayKick")
.toMaster();
var delaySnare = new Tone.FeedbackDelay("8t", 0.25)
.receive("delaySnare")
.toMaster();
var crushPiano = new Tone.BitCrusher(4)
.receive("crush")
.toMaster();
var chebyBass = new Tone.Chebyshev(10)
.receive("cheby")
.toMaster();

//SLOWEST POSSIBLE TEMPO
//ALL OTHERS ARE SET AS MULTIPLE OF THIS
//
Tone.Transport.bpm.value = 60;

//HIT IT!!!
Tone.Transport.start();

//—————————————————————-
//BEGINNING OF SETUP

function setup() {
createCanvas(windowWidth, windowWidth/2);
background(230);

serial = new p5.SerialPort();
serial.on('list', printList);
serial.on('data', serialEvent);
serial.list();
serial.open("/dev/cu.usbmodem1431");
//ALWAYS CHECK IF USB PORT IS CORRECT FOR YOUR PERSONAL LAPTOP

sliderKick = createSlider(1, 6, 2);
sliderKick.position(5, height + 20);
sliderKick.size(width / 4 - 10, 10);
sliderBass = createSlider(1, 4, 1);
sliderBass.position(width / 4 + 5, height + 40);
sliderBass.size(width / 4 - 10, 10);
sliderPiano = createSlider(1, 4, 1);
sliderPiano.position(width / 4 + 5, height + 20);
sliderPiano.size(width / 4 - 10, 10);
sliderSnare = createSlider(1, 6, 2);
sliderSnare.position(5, height + 40);
sliderSnare.size(width / 4 - 10, 10);
//leadDelayPaint = createSlider(1,6,1);

sliderFX1 = createSlider(-100,0,-100);
sliderFX1.position(5, height+60);
sliderFX1.size(width/4-10, 10);
sliderFX2 = createSlider(-100,0,-100);
sliderFX2.position(5, height+80);
sliderFX2.size(width/4-10, 10);
sliderFX3 = createSlider(-100,0,-100);
sliderFX3.position(5, height+100);
sliderFX3.size(width/4-10, 10);
sliderFX4 = createSlider(-100,0,-100);
sliderFX4.position(5, height+120);
sliderFX4.size(width/4-10, 10);

ellipseCoord.c = [width/16, (height/2)*0.2, width/8, (height/2)*0.2, width/5.33, (height/2)*0.2];
ellipseCoord.g = [width/16, (height/2)*0.4, width/8, (height/2)*0.4, width/5.33, (height/2)*0.4];
ellipseCoord.am = [width/16, (height/2)*0.6, width/8, (height/2)*0.6, width/5.33, (height/2)*0.6];
ellipseCoord.f = [width/16, (height/2)*0.8, width/8, (height/2)*0.8, width/5.33, (height/2)*0.8];

// ellipseCoord.c = [80, 80, 160, 80, 240, 80];
// ellipseCoord.g = [80, 80 + 55, 160, 80 + 55, 240, 80 + 55];
// ellipseCoord.am = [80, 80 + 110, 160, 80 + 110, 240, 80 + 110];
// ellipseCoord.f = [80, 80 + 165, 160, 80 + 165, 240, 80 + 165];

}

//END OF SETUP
//—————————————————————-
//BEGIN OF DRAW
function draw() {

var kickPulse = kickPart.progress;
var snarePulse = snarePart.progress;
var pianoPulse = pianoPart.progress;
var bassPulse = bassPart.progress;
var loopstate = pianoPart.state;

var pp = map(pianoPulse, 0, 1, 0.5, 1.1);
var pt = map(snarePulse, 0, 0.3, 1, 1.2);
var pg = map(snarePulse, 0, 1, 1, 0.5);
var pf = map(snarePulse, 0, 1, 1, 0.2);

var kickSwellOuter = map(kickPulse, 0, 1, 0, (width / 4.57)/35);
var kickSwellMiddle = map(kickPulse, 0, 1, 0, (width / 5.33)/9);
var kickSwellInner = map(kickPulse, 0, 1, 0, (width / 6.66)/3);

var alphaOuter = map(kickSwellOuter, 0, 5, 50, 20);
var alphaMiddle = map(kickSwellMiddle, 0, 16.7, 90, 30);

var wave = spectrum.getValue();
var fftwave = fft.getValue();

// fill(0);
// stroke(0);
// strokeWeight(2)
// //rect(width / 4, 0, width / 4, height / 2);
// rect(0,width,0,height);

//—————————–ANimations———————————
//KICK CIRCLE
push();
translate(0, 0);
fill(225);
noStroke();
rect(0, 0, width / 4, height / 2);
noFill();
ellipseMode(CENTER);

stroke(135, 206, 250, alphaOuter);
strokeWeight(2);
ellipse(width / 8, height / 4, width / 4.57 + (kickSwellOuter * -1));

stroke(135, 206, 250, alphaMiddle);
strokeWeight(2);
ellipse(width / 8, height / 4, width / 5.33 + (kickSwellMiddle * -1));

stroke(135, 206, 250);
strokeWeight(2);
ellipse(width / 8, height / 4, width / 6.66 + (kickSwellInner * -1));
//print(width/4-80);

pop();

//BASS SQUARE
push();
translate(width * 3 / 8, height * 3 / 4);
noStroke();
fill(225);
rect(0 – (width / 8), 0 – (height / 4), width / 4, height / 2)
//rect(0, 0, width / 4, height / 2)
noFill();
stroke(255, 100, 0); // waveform is red
strokeWeight(2);
rectMode(CENTER)
for (var i = 0; i < wave.length; i += 600) {
//var a = map(i, 0, wave.length, -3, 3);
var b = map(wave[i], -1, 1, -40, 40);
rect(0, 0, width / 6.15 + b, height / 3.07 + b);
}
pop();

//PIANO TRIANGLE
push();
translate(width * 1 / 8, height/1.25);
//print(height*1.25);
noStroke();
fill(225);
rect(0 – (width / 8), 0 – (height*0.25+height*0.05), width / 4, height / 2)
scale(pg);
stroke(255);
point(0, 0)
noFill();
strokeWeight(1);
triangle(width/-11.42, height/9.83, 0, height/-4.917, width/11.42, height/9.83);
//triangle(-70, 40.67, 0, -81.35, 70, 40.67); //for 800/400
pop();

//SLIDERS FOR TEMPO OF DIFFERENT INSTRUMENTS
//PLAYBACK RATE MULTIPLIES TEMPO FOR THAT PART
kickPart.playbackRate = sliderKick.value();
snarePart.playbackRate = sliderSnare.value();

// sliderKick.value(map(flex1, 0, 250, 1, 6));
// sliderSnare.value(map(flex1, 0, 250, 1, 6));
// sliderPiano.value(map(flex2, 0, 250, 1, 4));
// sliderBass.value(map(flex2, 0, 250, 1, 4));
// a = map(flex3, 0, 250, 0.1, 0.9);

push();
translate(width*0.25,0);
fill(225);
noStroke();
rect(0,0,width*0.25,height/2);
pop();

if (pianoAnimation == true) {
animateEllipseC(pianoPulse);
animateEllipseG(pianoPulse);
animateEllipseAM(pianoPulse);
animateEllipseF(pianoPulse);
}

//PIANO CHORD SLIDER
if (sliderPiano.value() == 1) {
if (cChordPlaying == false) {
//console.log("inside value 1");
pianoPart.removeAll();
pianoPart.add(0, cChord);
cChordPlaying = true;
fChordPlaying = false;
gChordPlaying = false;
amChordPlaying = false;
}
} else if (sliderPiano.value() == 2) {
if (fChordPlaying == false) {
pianoPart.removeAll();
pianoPart.add(0, fChord);
cChordPlaying = false;
fChordPlaying = true;
gChordPlaying = false;
amChordPlaying = false;
}
} else if (sliderPiano.value() == 3) {
if (gChordPlaying == false) {
pianoPart.removeAll();
pianoPart.add(0, gChord);
cChordPlaying = false;
fChordPlaying = false;
gChordPlaying = true;
amChordPlaying = false;
}
} else if (sliderPiano.value() == 4) {
if (amChordPlaying == false) {
pianoPart.removeAll();
pianoPart.add(0, amChord);
cChordPlaying = false;
fChordPlaying = false;
gChordPlaying = false;
amChordPlaying = true;
}
}

//BASS NOTE SLIDER
if (sliderBass.value() == 1) {
if (cBassPlaying == false) {
bassPart.remove(0);
bassPart.add(0, "C2");
cBassPlaying = true;
fBassPlaying = false;
gBassPlaying = false;
amBassPlaying = false;
}
} else if (sliderBass.value() == 2) {
if (fBassPlaying == false) {
bassPart.remove(0);
bassPart.add(0, "F2");
cBassPlaying = false;
fBassPlaying = true;
gBassPlaying = false;
amBassPlaying = false;
}
} else if (sliderBass.value() == 3) {
if (gBassPlaying == false) {
bassPart.remove(0);
bassPart.add(0, "G1");
cBassPlaying = false;
fBassPlaying = false;
gBassPlaying = true;
amBassPlaying = false;
}
} else if (sliderBass.value() == 4) {
if (amBassPlaying == false) {
bassPart.remove(0);
bassPart.add(0, "A1");
cBassPlaying = false;
fBassPlaying = false;
gBassPlaying = false;
amBassPlaying = true;
}
}

//FLEXER FX
delayKickSend.gain.value = sliderFX1.value();
delaySnareSend.gain.value = sliderFX2.value();
crushSend.gain.value = sliderFX3.value();
chebySend.gain.value = sliderFX4.value();

sliderFX1.value(map(flex1,310,400,-100,0));
sliderFX2.value(map(flex2,270,390,-100,0));
sliderFX3.value(map(flex3,230,340,-100,0));
sliderFX4.value(map(flex4,300,370,-100,0));

//RIGHT SIDE DRAWING
if (mouseX > width / 2 && mouseX < width &&
mouseY > 0 && mouseY < height) {
if (mouseIsPressed) {
for (var i = 0; i < 15; i++) {
noStroke();
fill(255, 0, 0);
//fill(255, 100 + i * 3, 100 + i * 5, 255 / i);
ellipse(mouseX, mouseY, i, i);
}
}

//SLOW FADE
fill(230,15);
noStroke();
rect(width / 2, 0, width / 2, height);

//ERASE DRAWING AND KILL LEAD
if (keyIsPressed) {
fill(230);
rect(width / 2, 0, width / 2, height);
leadPaint.triggerRelease();
}

}
}

//END OF DRAW MODE
//—————————————-
//BEGINNING OF FUNCTIONS

function animateEllipseC(pianoPulse) {
//console.log("inside chordAnimation");
if (sliderPiano.value() == 1) {
var coord = ellipseCoord.c;
//console.log(coord);
var index = 0;
if (pianoPulse > 0 && pianoPulse < 0.3) {
index = 0;
} else if (pianoPulse > 0.3 && pianoPulse < 0.6) {
index = 2;
} else if (pianoPulse > 0.6) {
index = 4;
}
push();
translate(width / 4, 0)
point(0, 0)
fill(124, 225, 0, 200);
noStroke();
var pp = map(pianoPulse, 0, 1, 0.5, 1.1);
ellipse(coord[index], coord[index + 1], pp*60, pp*60);
pop();
}
}

function animateEllipseF(pianoPulse) {
//console.log("inside chordAnimation");
if (sliderPiano.value() == 2) {
var coord = ellipseCoord.f;
//console.log(coord);
var index = 0;
if (pianoPulse > 0 && pianoPulse < 0.3) {
index = 0;
} else if (pianoPulse > 0.3 && pianoPulse < 0.6) {
index = 2;
} else if (pianoPulse > 0.6) {
index = 4;
}
push();
translate(width / 4, 0)
point(0, 0)
fill(140, 180, 121, 200);
noStroke();
var pp = map(pianoPulse, 0, 1, 0.5, 1.1);
ellipse(coord[index], coord[index + 1], pp*80, pp*80);
pop();
}
}

function animateEllipseG(pianoPulse) {
//console.log("inside chordAnimation");
if (sliderPiano.value() == 3) {
var coord = ellipseCoord.g;
//console.log(coord);
var index = 0;
if (pianoPulse > 0 && pianoPulse < 0.3) {
index = 0;
} else if (pianoPulse > 0.3 && pianoPulse < 0.6) {
index = 2;
} else if (pianoPulse > 0.6) {
index = 4;
}
push();
translate(width / 4, 0)
point(0, 0)
fill(100, 120, 190, 200);
noStroke();
var pp = map(pianoPulse, 0, 1, 0.5, 1.1);
ellipse(coord[index], coord[index + 1], pp*40, pp*40);
pop();
}
}

function animateEllipseAM(pianoPulse) {
//console.log("inside chordAnimation");
if (sliderPiano.value() == 4) {
var coord = ellipseCoord.am;
//console.log(coord);
var index = 0;
if (pianoPulse > 0 && pianoPulse < 0.3) {
index = 0;
} else if (pianoPulse > 0.3 && pianoPulse < 0.6) {
index = 2;
} else if (pianoPulse > 0.6) {
index = 4;
}
push();
translate(width / 4, 0)
point(0, 0)
fill(90, 100, 60, 200);
noStroke();
var pp = map(pianoPulse, 0, 1, 0.5, 1.1);
ellipse(coord[index], coord[index + 1], pp*50, pp*50);
pop();
}
}

//DRAG TO PLAY FUNCTION
function touchMoved() {
var paintNote = ["C4", "E4", "G4", "A4", "C5", "E5", "G5", "A5", "C6"];

//right side of canvas
if (mouseX > width / 2 && mouseX < width &&
mouseY > 0 && mouseY < height) {

//————NOTE GRID!!!
//column1
if (mouseX > width / 2 && mouseX < width * 5 / 8 &&
mouseY < height && mouseY > height * 3 / 4) {
leadPaint.triggerAttackRelease(paintNote[0], "8n");
} else if (mouseX > width / 2 && mouseX < width * 5 / 8 &&
mouseY < height * 3 / 4 && mouseY > height / 2) {
leadPaint.triggerAttackRelease(paintNote[1], "8n");
} else if (mouseX > width / 2 && mouseX < width * 5 / 8 &&
mouseY < height / 2 && mouseY > height / 4) {
leadPaint.triggerAttackRelease(paintNote[2], "8n");
} else if (mouseX > width / 2 && mouseX < width * 5 / 8 &&
mouseY < height / 4 && mouseY > 0) {
leadPaint.triggerAttackRelease(paintNote[3], "8n");
//column2
} else if (mouseX > width * 5 / 8 && mouseX < width * 3 / 4 &&
mouseY < height && mouseY > height * 3 / 4) {
leadPaint.triggerAttackRelease(paintNote[1], "8n");
} else if (mouseX > width * 5 / 8 && mouseX < width * 3 / 4 &&
mouseY < height * 3 / 4 && mouseY > height / 2) {
leadPaint.triggerAttackRelease(paintNote[2], "8n");
} else if (mouseX > width * 5 / 8 && mouseX < width * 3 / 4 &&
mouseY < height / 2 && mouseY > height / 4) {
leadPaint.triggerAttackRelease(paintNote[3], "8n");
} else if (mouseX > width * 5 / 8 && mouseX < width * 3 / 4 &&
mouseY < height / 4 && mouseY > 0) {
leadPaint.triggerAttackRelease(paintNote[4], "8n");
//column3
} else if (mouseX > width * 3 / 4 && mouseX < width * 7 / 8 &&
mouseY < height && mouseY > height * 3 / 4) {
leadPaint.triggerAttackRelease(paintNote[2], "8n");
} else if (mouseX > width * 3 / 4 && mouseX < width * 7 / 8 &&
mouseY < height * 3 / 4 && mouseY > height / 2) {
leadPaint.triggerAttackRelease(paintNote[3], "8n");
} else if (mouseX > width * 3 / 4 && mouseX < width * 7 / 8 &&
mouseY < height / 2 && mouseY > height / 4) {
leadPaint.triggerAttackRelease(paintNote[4], "8n");
} else if (mouseX > width * 3 / 4 && mouseX < width * 7 / 8 &&
mouseY < height / 4 && mouseY > 0) {
leadPaint.triggerAttackRelease(paintNote[5], "8n");
//column4
} else if (mouseX > width * 7 / 8 && mouseX < width &&
mouseY < height && mouseY > height * 3 / 4) {
leadPaint.triggerAttackRelease(paintNote[3], "8n");
} else if (mouseX > width * 7 / 8 && mouseX < width &&
mouseY < height * 3 / 4 && mouseY > height / 2) {
leadPaint.triggerAttackRelease(paintNote[4], "8n");
} else if (mouseX > width * 7 / 8 && mouseX < width &&
mouseY < height / 2 && mouseY > height / 4) {
leadPaint.triggerAttackRelease(paintNote[5], "8n");
} else if (mouseX > width * 7 / 8 && mouseX < width &&
mouseY < height / 4 && mouseY > 0) {
leadPaint.triggerAttackRelease(paintNote[6], "8n");
}
}

// print(flex1);
}

//CLICK 2 PLAY FUNCTION
function mousePressed() {
if (mouseX > 0 && mouseX < width / 4 &&
mouseY > 0 && mouseY < height / 2) {
loopKick();
} else if (mouseX > width / 4 && mouseX < width / 2 &&
mouseY > 0 && mouseY < height / 2) {
loopPiano();
pianoAnimation = true;
} else if (mouseX > 0 && mouseX < width / 4 &&
mouseY > height / 2 && mouseY < height) {
loopSnare();
} else if (mouseX > width / 4 && mouseX < width / 2 &&
mouseY > height / 2 && mouseY < height) {
loopBass();
}
}

//LOOP FUNCTIONS
function loopKick() {
if (!kickOn) {
kickPart.start(0);
kickOn = !kickOn;
} else {
kickPart.stop();
kickOn = !kickOn;
}
}

function loopSnare() {
if (!snareOn) {
snarePart.start(0);
snarePart.loop = true;
snareOn = !snareOn;
} else {
snarePart.loop = false;
snareOn = !snareOn;
}
}

function loopPiano() {
if (!pianoOn) {
pianoPart.start(0);
pianoPart.loop = true;
pianoOn = !pianoOn;
} else {
pianoPart.loop = false;
pianoOn = !pianoOn;
}
}

function loopBass() {
if (!bassOn) {
bassPart.start(0);
bassPart.loop = true;
bassOn = !bassOn;
} else {
bassPart.loop = false;
bassOn = !bassOn;
}
}

function printList(portList) {
for (var i = 0; i < portList.length; i++) {
print(i + " " + portList[i]);
}
}

function serialEvent() {
var stringFromSerial = serial.readLine();
if (stringFromSerial.length > 0) {
var trimmedString = trim(stringFromSerial);
var myArray = split(trimmedString, ",");
flex1 = Number(myArray[0]);
flex2 = Number(myArray[1]);
flex3 = Number(myArray[2]);
flex4 = Number(myArray[3]);
}
}

document.ontouchmove = function(event) {
event.preventDefault();
};

ANIMATIONS

BEAT

BASS

PIANO

SNARE

LEAD MELODY

 

THE SKETCH

Click around and draw on the fifth square!

GLOVE

To make the glove, we used the Adafruit FLORA, Adafruit's fully-featured wearable electronics platform. It's a round, sewable, Arduino-compatible microcontroller designed to empower wearables.

Diagram 1: Sensor position on the glove

Arduino Code:

Diagram 2: Circuit for the glove

Initial testing

Serial communication from breadboard circuit:

Soldering:

Using potentiometers for controls for user testing

—————————————————————————

5square

performed by Max Horwich

enjoy

 

Gestural Graphic Interface

Computational Media, Physical Computing

Partnering with two other people, my final project for ICM and PComp is one and the same: a graphic and musical interface that uses a glove to enable inexperienced people to create, enhance and play music. After brainstorming for about a week, letting our simple ideas multiply and evolve into something faithful to our project goals, and researching the scope and methods of similar projects, we decided to take on a multi-faceted, holistic music-making interface: the user draws basic musical notes as doodles on a screen, then enhances them with the glove. The program takes the coordinates of the drawings as input to play and loop them, and readings from the sensors in the glove to add dramatic effects, both musical and visual, showing the fun and the power of the art form itself.

Our inspiration through this research has been the Mi.Mu glove made by Imogen Heap, a glove that records, loops, plays and adds effects to sounds and music, while the user performs.

Inspirations:

Hand gestures — to make the music making process as intuitive as possible

To make this glove possible, we plan to build one using flex sensors, accelerometers, and perhaps even some tactile sensors, though that will depend greatly on the degrees of control we need.

Circuit diagram of one of the simplest glove designs

Our aim with the glove is to give the user superpowers when it comes to effecting change in the music. The first iteration might look similar to this one.

For the interface on the screen, our designs are still coming together, but the current target is around halfway between a traditional step sequencer and Kandinsky from the Chrome Music Lab Experiments. We researched a new library of functions for our project, Tone.js, and learned about its abilities and pitfalls.

Check it out:

https://musiclab.chromeexperiments.com/Kandinsky

The Progress:

We are currently researching the timeline, looping, and how to add and delete user-added sounds in an existing loop. It is challenging, but our progress is encouraging.

Using Tone.js, we have so far created a sketch that plays beats at different frequencies and note lengths, and a sketch that creates markers to let the user draw, mapping a sound to each coordinate (and adding it to an array).
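
As a rough sketch of the mapping idea in that second sketch (the names here are hypothetical, not our actual code, and it assumes it runs inside a p5.js sketch), each drawn point can be quantized to a note and remembered for looping:

// Hypothetical sketch of the markers-and-tones idea: quantize a drawn
// point's y-coordinate to a note and store it so it can be looped later.
var notePool = ["C4", "E4", "G4", "A4", "C5"]; // illustrative pitch set
var drawnNotes = [];                           // one entry per marker

function noteForY(y) {
// Higher on the canvas maps to a higher pitch
var i = floor(map(y, height, 0, 0, notePool.length - 1));
return notePool[constrain(i, 0, notePool.length - 1)];
}

function addMarker(x, y) {
var note = noteForY(y);
drawnNotes.push({ x: x, y: y, note: note }); // remember the coordinate + sound
return note;
}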

[Please refresh in case the sketches below do not work]

Or click the link:

TONE.JS

 

Or click the link:

MARKERS AND TONES

We are still getting our heads around the workings of Tone.js; the next step is to communicate serially with the glove and add more adjustable parameters to our sounds in code. We're still in the process of acquiring materials for the construction of the glove. This project has turned into an enormous undertaking, but it's moving steadily forward, and I can't wait to see how it develops.

 

FACES

Physical Computing

I started this project for PComp due to my strong interest in manipulating faces. My aim was to change facial features or expressions, or to change the entire face from one person's to another's, using different sensors. Since Halloween is right around the corner, I decided to turn a beautiful woman into a witch using a proximity sensor.

This is the first sketch I made, to use as the basis for the rest of the frames. It was my first shot at animation in any medium, but since I have always been an artist at heart, it came quite naturally:

Next, I built a little station to trace and alter my frames in sketch form, using a lamp and a transparent platform to illuminate the sketches from below.

Necessity is the mother of invention, right?

The FRAMES:

Here’s a GIF of the animation:

Sensor:

I used an ultrasonic sensor (HC-SR04) for this project because its range is 3 cm to 4 m, which is perfect for a person walking toward the sensor while the screen stays in visible range.

The sensor works by using one pin as an output to emit a sound wave, and the other as an input to receive it back after it has bounced off the object in front of it. So my code, which reads the sensor through the Arduino and returns a value for distance, had to include the simple distance = speed × time calculation using the speed of sound, halving the echo time to account for the round trip.
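
The full code is linked below; the core arithmetic reduces to a single conversion (a sketch of just the formula, with a made-up function name):

// Sketch of the HC-SR04 math: sound travels roughly 0.0343 cm per
// microsecond, and the echo time covers the trip out AND back, so halve it.
function echoToDistanceCm(echoMicroseconds) {
var speedOfSoundCmPerUs = 0.0343;
return (echoMicroseconds * speedOfSoundCmPerUs) / 2;
}
// e.g. echoToDistanceCm(5800) is about 99.5 cm, near the reliable limit I saw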

 

Once my sensor was reliably reporting distance in centimeters, I noticed that it did not work well beyond about 1 meter, and that it worked best with a smooth object in front of it rather than a person.

The code for serial communication and the animation is below:

FACES from Amena Hayat on Vimeo.

 

http://alpha.editor.p5js.org/amena91/sketches/B1rKrlaTb

Servo Motor Sunflower

Physical Computing

For physical computing this week, I wanted to play with a sensor that takes input from natural things, like light and sound. After many inspirational Instructables, I found someone making a sunflower that follows light, using photoresistors (facing opposite directions) as its sensors.

I decided to give it my time because it felt like a gateway to the things I want to make in the future. After getting the general idea, I drew up the circuit and coded the microcontroller to take not just light, but the direction of light, as its input.

Schematic

Code:

I played with delays, and decided I wanted a slow, more pronounced movement toward the light.
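
The Arduino code is shown above; in essence, the loop compares the two photoresistor readings and steps the servo toward the brighter side, with a delay setting the speed. Here is a JavaScript sketch of that logic (the names, threshold and step size are assumptions, not my actual values):

// Illustrative sketch of the sunflower logic (all names and values assumed):
// compare the two light readings and step the servo toward the brighter side.
var servoAngle = 90; // start centered between the two photoresistors

function updateSunflower(leftLight, rightLight) {
var deadband = 20; // ignore small differences so the servo doesn't jitter
var step = 1;      // a small step, paired with a delay, gives slow movement

if (leftLight > rightLight + deadband) {
servoAngle = Math.min(servoAngle + step, 180); // turn toward the left sensor
} else if (rightLight > leftLight + deadband) {
servoAngle = Math.max(servoAngle - step, 0);   // turn toward the right sensor
}
return servoAngle; // write this to the servo, then wait out the delay
}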

Watch the Sunflower in action:

Servo Motor Sunflower from Amena Hayat on Vimeo.

 

“what kind of sunflower are you? the sun is that way”

Glass Half Full

Physical Computing

“FULL” light goes on once the glass is full!

This circuit is designed to light different LEDs that show the state of a glass of water. The circuit completes when the glass fills up, connecting the ends of two wires (one at the bottom and one at the height considered "full"), and the microcontroller is programmed to take this input and light up the LEDs accordingly. For conductivity purposes, salt water is used.
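
The code itself is shown below; the decision logic amounts to a single check (a JavaScript sketch for illustration, with the names and states assumed, not the actual Arduino code):

// Hedged sketch of the glass-half-full check: the two wire ends act as a
// switch that closes once rising salt water bridges them.
function chooseLight(fullCircuitClosed) {
if (fullCircuitClosed) {
return "FULL";    // water reached the top wire: light the FULL LED
}
return "NOT FULL";  // circuit still open: light the other LED instead
}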

Salt water conducts electricity

The code:

Target Self Check-Out

Physical Computing

Target Self Checkout is, in my own experience, a faster, more autonomous, if introverted, means of checking out items from a Target store, with considerably shorter lines (or, in most cases, none at all).

To check out, you scan the barcode on an item and the item pops up on the screen, where you press continue to confirm, then place it in the bagging area. The machine weighs the product to ensure the right item is being placed there. If any discrepancy is noticed, the machine pops up a message asking you to confirm that the item placed matches the item scanned.

I decided to go and observe people at the interactive self-checkout kiosks installed at the local Target store in my neighbourhood. Self-checkout at stores is always my preference, giving me the opportunity to get through the transaction faster, but sometimes help from a store employee is required, especially in the case of human errors like scanning an item twice, or machine errors like not being able to scan at all. Consecutive unsuccessful tries also result in the machine requiring some form of supervision by a store cashier.

During the 30 minutes or so I spent observing customers using the kiosks, roughly 30-35 people checked out of the store. Only about a fifth of them chose this form of checkout, despite huge lines at the staffed registers; I guess people want to be mindless about these things. Those who did use the kiosks seemed to be regular users, experiencing little to no difficulty.

I decided to focus on 3 customers who had distinct experiences communicating with the kiosk. The first person used it seamlessly, knowing how items had to be scanned and placed. On the one-off instances when the machine displayed an error, they would delete the item, rescan it and proceed. This person had about 6-7 items, and the whole transaction took no more than 3-4 minutes. The second person had a much bigger shopping basket, with about 20 items. While prepackaged product barcodes were easy for the machine to locate and read, fresh items such as fruits and vegetables were quite the task. The machine would prompt them to scan the correct barcode again, and he was unable to delete the wrongly scanned items from the kiosk cart. After consecutive failures, the machine requested the assistance of a cashier, who helped the customer look the items up manually in the inventory list. From my observation this was the hardest task, as the cashier had to search for each item the customer wanted to bag. This transaction took much longer for 2 reasons: a larger shopping cart, and the help required from the store cashier. I therefore believe this kind of checkout is ideal for someone who has fewer than 10 items and does not want to wait 10 minutes to cash them out. The last interaction was actually the shortest: the person kept getting an error on the screen, as the weight in the bag and the weight of the item scanned did not match. They eventually had to get in line and wait their turn at a register.

As far as the quality of interaction goes, it was high, with most customers getting the full benefit of the kiosk: leaving Target without waiting in line and without any difficulty communicating with the machine, which is the goal of good interaction. The interface is intuitive and fairly straightforward, but there is a definite need to reduce the number of errors the machine produces and the need for employees to come over for guidance, so the interaction design can still be worked on.

 

Dino Switch

Physical Computing

Inspired by this image, the dino switch combines the conducting power of Play-Doh (yes, Play-Doh conducts, quite simply because of the concentrated salts in it) with aluminium foil, which I used to make the leaf to feed it. I went with cute this time; I will probably go for utility next time 😉

I <3 you, you <3 me, electrocute this monster already amiright

 

Feeding

And the LED turns on

Here, watch:

What Is Physical Interaction?

Physical Computing

Interaction, by definition, is the iterative process of two actors listening to each other, processing the information gathered, and responding. The key factors of good interaction are therefore: the reception of information (how accurately and how fast the instruction, or data, is transferred); the quality of processing (how well the problem is solved, the degree to which it answers the question posed, and the computational efficiency of the task at hand); and finally, the return.

Interaction is only possible when all three key factors are present and the process of exchange is repeated to solve problems. It is a spectrum, not a binary, as Chris Crawford explains with his Fridge Door Game example (a pretty dull interaction, but an interaction nonetheless). It shows that some interactions are superior to others in terms of the size of the problem they solve, their responsiveness, and the engagement of a reasonable mind.

Between humans, interactions mostly take place as conversation, where language is used for both listening and responding while the brain processes the incoming and outgoing information. Between humans and machines, however, additional key factors take on major roles. These are somewhat extrapolations of the original pillars of interaction, and because humans and machines have different capabilities and limitations, both have to dumb themselves down in order for the interaction to take place.

Joseph Faber’s Euphonia – as non-interactive as they come. Although it was a mechanical human voice generator, and therefore lacked any processing at all, its use at play was visionary.

Out of the numerous things humans use to interact with one another, from the spoken word to body language and pheromones, only a few sensibilities are put to use when in contact with a computer, like the written word, or touch. Similarly, among all of a machine's abilities, from seeing infra-red to sensing brain waves, only those that can respond to a human, and in turn be understood by her, are of importance, unless a proper channel is produced for the others.

Physical interaction between machines and humans needs to carefully and creatively consider the many degrees of ability found in each of them. In his brief rant, Bret Victor explains that any so-called visionary future that does not take into account the strongest tool available to both humans and machines is not visionary at all.

“A tool addresses human needs by amplifying human capabilities, that is, a tool converts what we can do into what we want to do. A great tool is designed to fit both sides.”

Technology is being invented at a rapid pace, and of course the newest machinery employs the most effective technology available to power it. But what designers of interaction need to visualise further is that technology must bend the knee to humans and human nature.

Victor goes on to describe the extent, intensity and versatility of the utility our hands provide us. He presents the inspiring vision of inventing technology that can take cues from all of these degrees of the human hand.

A physical interaction is good, therefore, when all three pillars of interaction support and complement the way humans interact with the world around them. Be it our hands or eyes, our gestures, our tone and emotionality, or simply our appearance: if it can be successfully input into a machine and meticulously processed, and if the machine can churn out exactly what solves the problem, that is good interaction.