
Commit aa4df13

Beads for multichannel audio (via Jack)
1 parent cfbdea3 commit aa4df13

3 files changed: +68 -42 lines changed

README.textile (+4 -4)

@@ -7,13 +7,14 @@ Patch for this exhibition: "http://www.southlondongallery.org/page/sally-golding
 * Edit each of the timeline arrays to determine when to fire events.
 * Change the probability to determine the likelihood of each timeline event firing.

-Will likely use Syphon to plug through VDMX for the final presentation!

 h2. Configure

 Run in "Processing":http://processing.org (3.0).

-It currently requires the "Processing Sound":https://processing.org/reference/libraries/sound/ library. "Install":https://github.com/processing/processing/wiki/How-to-Install-a-Contributed-Library with Sketch > Import Library > Add Library.
+It currently requires the "Beads Processing":http://www.beadsproject.net library. "Install":https://github.com/processing/processing/wiki/How-to-Install-a-Contributed-Library with Sketch > Import Library > Add Library.
+
+Beads depends on an install of "Jack":http://jackaudio.org for multi channel sound. See this "forum thread":https://forum.processing.org/two/discussion/16611/multichannel-audio#latest for more info.

 bc. # Data dir structure
 ####################
@@ -32,6 +33,5 @@ bc. # Data dir structure
 h2. Thanks

 - "Processing":http://processing.org
-- "Syphon":http://syphon.v002.info
-- "VDMX":http://vdmx.vidvox.net
+- "Beads":http://www.beadsproject.net
 - "South London Gallery":http://www.southlondongallery.org
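The Jack requirement added to the README above comes from how this commit opens the audio system: setup() in sally_slg.pde (diffed below) declares an IOAudioFormat with four outputs and hands Beads a Jack-backed AudioIO. A minimal sketch distilled from that setup code, using the same values as the commit (44.1 kHz, 16-bit, 2 in / 4 out, 512-sample buffer); these values are what the commit uses, not requirements:

import beads.*;
import org.jaudiolibs.beads.AudioServerIO;

AudioContext ac;

void setup() {
  // The format declared in this commit: 44.1 kHz, 16-bit, 2 inputs, 4 outputs.
  IOAudioFormat format = new IOAudioFormat(44100, 16, 2, 4);

  // Use the Jack server as the audio backend (Jack must already be running)
  // instead of Beads' default JavaSound output.
  ac = new AudioContext(new AudioServerIO.Jack(), 512, format);
  ac.start();
}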

file_utils.pde (+8 -4)

@@ -1,5 +1,6 @@
-static class FileUtils {
+import beads.*;

+static class FileUtils {

   static void loadImagesInto(ArrayList array, String path, PApplet sketch, int rWidth, int rHeight) {
     ArrayList<File> allFiles = FileUtils.listFilesRecursive(path);
@@ -16,15 +17,18 @@ static class FileUtils {
       }
     }
   }

-  static void loadSoundsInto(ArrayList array, String path, PApplet sketch) {
+  static String[] loadSounds(String path) {
     ArrayList<File> allFiles = FileUtils.listFilesRecursive(path);
+    ArrayList<String> sampleFileNames = new ArrayList<String>();

     for (File f: allFiles) {
       if (!f.isDirectory() && !f.getName().startsWith(".")) {
-        println("loading sound: " + f.getAbsolutePath());
-        array.add(new SoundFile(sketch, f.getAbsolutePath()));
+        //println("loading sound: " + f.getAbsolutePath());
+        sampleFileNames.add(f.getAbsolutePath());
       }
     }
+
+    return sampleFileNames.toArray(new String[sampleFileNames.size()]);
   }

   // This function returns all the files in a directory as an array of Strings
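With this refactor, loadSounds() no longer builds SoundFile objects; it only returns the absolute paths of the samples it finds, and the main sketch registers those paths with Beads' SampleManager as a named group. Roughly how the new return value is consumed, following the calls in sally_slg.pde (the group name and path are the commit's own):

// Collect every sample path under one projector's sounds folder.
String[] paths = FileUtils.loadSounds(sketchPath() + "/data/projector-1/sounds");

// Register the paths as a named Beads sample group, once at startup.
SampleManager.group("projector-1", paths);

// Later, pull a random Sample from that group and wrap it in a player.
SamplePlayer player = new SamplePlayer(audioContext, SampleManager.randomFromGroup("projector-1"));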

sally_slg.pde (+56 -34)

@@ -1,10 +1,13 @@
-import processing.sound.*;
-import java.util.Date;
+import beads.*;
+import org.jaudiolibs.beads.AudioServerIO;

-// ratio 768/1024 = 0.75
+// ratio 600/800 = 0.75
+int projectorWidth = 800;
+int projectorHeight = 600;
+boolean fullScreen = true;
 float scaler = 1; // needs to be set to 1 when using 800x600 projectors! Maybe, 1.5 or 2 when testing without external projectors
-int screenWidth = (int)(800/scaler) * 3;
-int screenHeight = (int)(600/scaler);
+int screenWidth = (int)(projectorWidth/scaler) * 3;
+int screenHeight = (int)(projectorHeight/scaler);
 int targetDisplay = 1; // triplehead is the only screen

 int keepImageForFramesCounter = 0;
@@ -14,38 +17,49 @@ ArrayList<PImage> images1 = new ArrayList<PImage>();
 ArrayList<PImage> images2 = new ArrayList<PImage>();
 ArrayList<PImage> images3 = new ArrayList<PImage>();

-ArrayList<SoundFile> sounds1 = new ArrayList<SoundFile>();
-ArrayList<SoundFile> sounds2 = new ArrayList<SoundFile>();
-ArrayList<SoundFile> sounds3 = new ArrayList<SoundFile>();
-
 PImage p1Image;
 PImage p2Image;
 PImage p3Image;

-SoundFile p1Sound;
-SoundFile p2Sound;
-SoundFile p3Sound;
+SamplePlayer p1Player;
+SamplePlayer p2Player;
+SamplePlayer p3Player;

 Timeline projector1;
 Timeline projector2;
 Timeline projector3;

+AudioContext audioContext;
+IOAudioFormat audioFormat;
+float sampleRate = 44100;
+int buffer = 512;
+int bitDepth = 16;
+int inputs = 2;
+int outputs = 4;
+
+boolean debugTimeline = true;
+
 // PROCESSING
 void settings() {
-  //size(screenWidth, screenHeight);
-  fullScreen(targetDisplay); // needs to be set to the correct display!
+  if (fullScreen) {
+    fullScreen(targetDisplay);
+  } else {
+    size(screenWidth, screenHeight);
+  }
 }

 void setup() {
-  //frameRate(60);
   background(0);

+  audioFormat = new IOAudioFormat(sampleRate, bitDepth, inputs, outputs);
+  audioContext = new AudioContext(new AudioServerIO.Jack(), buffer, audioFormat);
+
   FileUtils.loadImagesInto(images1, sketchPath()+"/data/projector-1/images", this, screenWidth/3, screenHeight);
   FileUtils.loadImagesInto(images2, sketchPath()+"/data/projector-2/images", this, screenWidth/3, screenHeight);
   FileUtils.loadImagesInto(images3, sketchPath()+"/data/projector-3/images", this, screenWidth/3, screenHeight);
-  FileUtils.loadSoundsInto(sounds1, sketchPath()+"/data/projector-1/sounds", this);
-  FileUtils.loadSoundsInto(sounds2, sketchPath()+"/data/projector-2/sounds", this);
-  FileUtils.loadSoundsInto(sounds3, sketchPath()+"/data/projector-3/sounds", this);
+  SampleManager.group("projector-1", FileUtils.loadSounds(sketchPath()+"/data/projector-1/sounds"));
+  SampleManager.group("projector-2", FileUtils.loadSounds(sketchPath()+"/data/projector-2/sounds"));
+  SampleManager.group("projector-3", FileUtils.loadSounds(sketchPath()+"/data/projector-3/sounds"));

   projector1 = new Timeline(Config.timeline1, new TimelineRenderer() {
     public void action() {
@@ -54,15 +68,17 @@ void setup() {
       int ypos = 0;

       if (chance > (100 - Config.likelihood)) {
-        println("!!!! Projector 1 fire");
+        if (debugTimeline) println("!!!! Projector 1 fire");
         p1Image = images1.get((int)random(images1.size()));
-        p1Sound = sounds1.get((int)random(sounds1.size()));
+        p1Player = new SamplePlayer(audioContext, SampleManager.randomFromGroup("projector-1"));

         // tint(random(255), random(255), random(255), random(255));
         image(p1Image,xpos,ypos);

-        //p1Sound.rate((int)random(0.5, 20));
-        p1Sound.play();
+        Gain g = new Gain(audioContext, 2, 0.2);
+        g.addInput(p1Player);
+        audioContext.out.addInput(0, g, 0); // OUT 1
+        audioContext.start();
       }
     }
   });
@@ -74,15 +90,17 @@ void setup() {
       int ypos = 0;

       if (chance > (100 - Config.likelihood)) {
-        println("!!!! Projector 2 fire");
+        if (debugTimeline) println("!!!! Projector 2 fire");
         xpos = screenWidth/3;
         p2Image = images2.get((int)random(images2.size()));
-        p2Sound = sounds2.get((int)random(sounds2.size()));
+        p2Player = new SamplePlayer(audioContext, SampleManager.randomFromGroup("projector-2"));

         image(p2Image,xpos,ypos);

-        //p2Sound.rate((int)random(0.5, 20));
-        p2Sound.play();
+        Gain g = new Gain(audioContext, 2, 0.2);
+        g.addInput(p2Player);
+        audioContext.out.addInput(1, g, 0); // OUT 2
+        audioContext.start();
       }
     }
   });
@@ -94,25 +112,29 @@ void setup() {
       int ypos = 0;

       if (chance > (100 - Config.likelihood)) {
-        println("!!!! Projector 3 fire");
+        if (debugTimeline) println("!!!! Projector 3 fire");
         xpos = (screenWidth/3) * 2;

         p3Image = images3.get((int)random(images3.size()));
-        p3Sound = sounds3.get((int)random(sounds3.size()));
+        p3Player = new SamplePlayer(audioContext, SampleManager.randomFromGroup("projector-3"));

         image(p3Image,xpos,ypos);

-        //p3Sound.rate((int)random(0.5, 20));
-        p3Sound.play();
+        Gain g = new Gain(audioContext, 2, 0.2);
+        g.addInput(p3Player);
+        audioContext.out.addInput(2, g, 0); // OUT 3
+        audioContext.start();
       }
     }
   });
 }

 void draw() {
-  print((int)frameRate+"\t"); print(projector1); print(projector2); print(projector3);
-  print("\t\t"+blurCounter+"\t >"+Config.blurFromFrame+"<"+Config.blurUntilFrame);
-  println("");
+  if (debugTimeline) {
+    print((int)frameRate+"\t"); print(projector1); print(projector2); print(projector3);
+    print("\t\t"+blurCounter+"\t >"+Config.blurFromFrame+"<"+Config.blurUntilFrame);
+    println("");
+  }

   projector1.draw();
   projector2.draw();
@@ -123,7 +145,7 @@ void draw() {
   if (blurCounter >= Config.blurUntilFrame) {
     blurCounter = 0;
   } else if (blurCounter >= Config.blurFromFrame) {
-    println("############### BLURING! ");
+    if (debugTimeline) println("############### BLURING! ");
     filter(BLUR, 2);
     blurCounter++;
   } else {
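Each timeline trigger above follows the same routing pattern: a fresh SamplePlayer reads a random sample from the projector's group, gets wrapped in a stereo Gain, and the gain is patched into one channel of audioContext.out, so projectors 1-3 land on separate Jack outputs (OUT 1-3 of the four declared). A condensed sketch of that repeated block, wrapped in a hypothetical helper that is not part of the commit:

// Hypothetical helper condensing the per-projector trigger used three times above.
// channel 0 = OUT 1, channel 1 = OUT 2, channel 2 = OUT 3 of the four declared outputs.
void fireProjector(String group, int channel) {
  SamplePlayer player = new SamplePlayer(audioContext, SampleManager.randomFromGroup(group));
  Gain g = new Gain(audioContext, 2, 0.2);     // 2-channel gain at 0.2
  g.addInput(player);
  audioContext.out.addInput(channel, g, 0);    // patch the gain's first output into one output channel
  audioContext.start();                        // the commit calls this on every trigger
}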
