ruby-processing 2.4.3 → 2.4.4

Files changed (76)
  1. checksums.yaml +4 -4
  2. data/.gitignore +3 -1
  3. data/CHANGELOG +5 -0
  4. data/README.md +4 -2
  5. data/lib/ruby-processing/exporters/application_exporter.rb +1 -0
  6. data/lib/ruby-processing/version.rb +1 -1
  7. data/library/boids/boids.rb +14 -13
  8. data/library/vecmath/lib/vec.rb +20 -4
  9. data/samples/contributed/circle_collision.rb +92 -149
  10. data/samples/contributed/drawolver.rb +13 -27
  11. data/samples/contributed/elegant_ball.rb +158 -0
  12. data/samples/contributed/fern.rb +16 -3
  13. data/samples/contributed/flight_patterns.rb +16 -4
  14. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/bumpy_surface_noise.rb +4 -9
  15. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/data/java_args.txt +0 -0
  16. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/library/custom_shape/custom_shape.rb +1 -1
  17. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/library/particle_system/particle_system.rb +7 -10
  18. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/library/surface/surface.rb +2 -2
  19. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/liquidy.rb +7 -7
  20. data/samples/external_library/java_processing/{pbox2d → box2d_processing}/polygons.rb +4 -9
  21. data/samples/external_library/java_processing/custom/README.md +15 -0
  22. data/samples/external_library/java_processing/custom/Rakefile +28 -0
  23. data/samples/external_library/java_processing/custom/landscape.rb +58 -0
  24. data/samples/external_library/java_processing/custom/src/nn/Connection.java +47 -0
  25. data/samples/external_library/java_processing/custom/src/nn/HiddenNeuron.java +20 -0
  26. data/samples/external_library/java_processing/custom/src/nn/InputNeuron.java +23 -0
  27. data/samples/external_library/java_processing/custom/src/nn/Network.java +136 -0
  28. data/samples/external_library/java_processing/custom/src/nn/Neuron.java +79 -0
  29. data/samples/external_library/java_processing/custom/src/nn/OutputNeuron.java +7 -0
  30. data/samples/external_library/java_processing/custom/xor.rb +88 -0
  31. data/samples/external_library/ruby_gem/README +1 -1
  32. data/samples/external_library/ruby_gem/data/data.json +8 -0
  33. data/samples/external_library/ruby_gem/draw_test.rb +171 -0
  34. data/samples/processing_app/basics/form/icosahedra.rb +39 -0
  35. data/samples/processing_app/basics/form/library/icosahedron/icosahedron.rb +60 -0
  36. data/samples/processing_app/basics/form/toroid.rb +78 -92
  37. data/samples/processing_app/basics/transform/birds.rb +6 -12
  38. data/samples/processing_app/basics/transform/cubes_in_cube.rb +25 -22
  39. data/samples/processing_app/basics/transform/library/cube/cube.rb +16 -16
  40. data/samples/processing_app/basics/transform/rotate_push_pop.rb +1 -1
  41. data/samples/processing_app/demos/graphics/bezier_patch.rb +18 -31
  42. data/samples/processing_app/demos/graphics/trefoil.rb +15 -15
  43. data/samples/processing_app/library/vecmath/acceleration_with_vectors.rb +3 -3
  44. data/samples/processing_app/library/vecmath/hilbert_fractal.rb +2 -2
  45. data/samples/processing_app/library/vecmath/library/flock/flock.rb +18 -21
  46. data/samples/processing_app/library/vecmath/library/hilbert/hilbert.rb +11 -8
  47. data/samples/processing_app/library/vecmath/library/wiggler/wiggler.rb +7 -15
  48. data/samples/processing_app/library/vecmath/seeking_neural.rb +172 -0
  49. data/samples/processing_app/topics/animation/animated_sprite.rb +5 -8
  50. data/samples/processing_app/topics/animation/sequential.rb +2 -3
  51. data/samples/processing_app/topics/create_shapes/library/particle/particle_system.rb +7 -7
  52. data/samples/processing_app/topics/create_shapes/particle_system_pshape.rb +2 -2
  53. data/samples/processing_app/topics/create_shapes/wiggle_pshape.rb +2 -1
  54. data/samples/processing_app/topics/lsystems/koch.rb +1 -1
  55. data/samples/processing_app/topics/lsystems/library/koch/koch_fractal.rb +24 -23
  56. data/samples/processing_app/topics/motion/circle_collision.rb +117 -160
  57. data/samples/processing_app/topics/motion/library/cube/cube.rb +1 -1
  58. data/samples/processing_app/topics/motion/morph.rb +1 -1
  59. data/samples/processing_app/topics/motion/reflection1.rb +17 -16
  60. data/samples/processing_app/topics/shaders/conway.rb +2 -2
  61. data/samples/processing_app/topics/shaders/data/conway.glsl +10 -10
  62. data/samples/processing_app/topics/shaders/glsl_heightmap_noise.rb +9 -8
  63. data/samples/processing_app/topics/shaders/landscape.rb +1 -1
  64. data/samples/processing_app/topics/simulate/flocking.rb +1 -1
  65. data/samples/processing_app/topics/simulate/library/flock/flock.rb +62 -57
  66. data/samples/processing_app/topics/simulate/multiple_particle_systems.rb +8 -28
  67. data/samples/processing_app/topics/simulate/simple_particle_system.rb +9 -7
  68. data/samples/processing_app/topics/simulate/smoke_particle_system.rb +12 -11
  69. data/vendors/Rakefile +2 -2
  70. metadata +26 -21
  71. data/samples/contributed/pong.rb +0 -177
  72. data/samples/contributed/simple_buffer.rb +0 -44
  73. data/samples/external_library/java_processing/pbox2d/contact_test.rb +0 -23
  74. data/samples/processing_app/basics/form/icosahedra/icosahedra.rb +0 -72
  75. data/samples/processing_app/basics/form/icosahedra/icosahedron.rb +0 -116
  76. data/samples/processing_app/basics/form/icosahedra/shape_3D.rb +0 -25
data/samples/external_library/java_processing/custom/README.md
@@ -0,0 +1,15 @@
+ Building the custom nn Library
+ ==============================
+ This is an example of a Java library that you can build yourself with the included Rakefile.
+ Ideally you will have both ant and JRuby installed (you should also have JDK 7+ installed).
+
+ Once that is done, compiling and building the jar is easy: just issue the following command in a console.
+
+ `jruby -S rake`
+
+ You may also need to set JAVA_HOME to point to your JDK, e.g.
+
+ `export JAVA_HOME=/opt/jdk1.7.0_51` for Linux users.
+
+ This example was taken from [The Nature of Code](https://github.com/shiffman/The-Nature-of-Code) by Dan Shiffman, except the example has been rubified (and even the Java code was doctored to be more up to date, and to prefer double to float).
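The README's build-and-use cycle ends with loading the jar from a sketch, which is exactly what `xor.rb` below does. A minimal consumption sketch, not shipped in the gem, with names grounded in the files in this change (under JRuby, `net.train` coerces the Ruby array to `double[]`):

```ruby
# Minimal sketch: after `jruby -S rake` has produced library/nn/nn.jar,
# a ruby-processing sketch can use the compiled classes directly.
load_library :nn        # resolves library/nn/nn.jar relative to the sketch
include_package 'nn'    # brings nn.Network et al. into scope

def setup
  size(200, 200)
  net = Network.new(2, 4)          # 2 inputs, 4 hidden neurons (plus biases)
  puts net.train([1.0, 0.0], 1.0)  # one XOR training step
end
```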
data/samples/external_library/java_processing/custom/Rakefile
@@ -0,0 +1,28 @@
+ require 'ant'
+
+ PROJECT_NAME = 'nn'
+ MAIN_SRC_DIR = 'src'
+ BUILD_DIR = 'build'
+ DIST_DIR = 'library/nn'
+
+ task :default => [:clean, :make_jars]
+
+ task :clean do
+   ant.delete :dir => BUILD_DIR
+   puts
+ end
+
+ task :make_jars => :clean do
+   make_jar MAIN_SRC_DIR, "#{PROJECT_NAME}.jar"
+ end
+
+ # Compile the Java sources into BUILD_DIR, then package them as
+ # DIST_DIR/jar_file_name and clean up the intermediate build directory.
+ def make_jar(source_folder, jar_file_name)
+   ant.mkdir :dir => BUILD_DIR
+   ant.mkdir :dir => DIST_DIR
+   ant.javac :srcdir => source_folder, :destdir => BUILD_DIR,
+     :source => "1.7", :target => "1.7", :debug => "yes", :includeantruntime => "no"
+   ant.jar :jarfile => "#{DIST_DIR}/#{jar_file_name}", :basedir => BUILD_DIR
+   ant.delete :dir => BUILD_DIR
+   puts
+ end
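The Rakefile leans on JRuby's bundled `ant` bindings: `ant.javac` compiles `src` into `build`, `ant.jar` packages `build` into `library/nn/nn.jar`, and the build directory is deleted afterwards. A quick smoke test of the artifact, assuming the build succeeded (a sketch for plain JRuby, outside ruby-processing):

```ruby
# Smoke test (a sketch): load the freshly built jar directly under JRuby.
require 'java'
require_relative 'library/nn/nn.jar'  # JRuby can require a jar by path
java_import 'nn.Network'

net = Network.new(2, 4)
# Untrained output is some value in (0, 1), thanks to the sigmoid.
puts net.feed_forward([0.0, 1.0].to_java(:double))
```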
data/samples/external_library/java_processing/custom/landscape.rb
@@ -0,0 +1,58 @@
+ # The Nature of Code
+ # Daniel Shiffman
+ # http://natureofcode.com
+
+ # "Landscape" example
+
+ class Landscape
+   include Processing::Proxy
+
+   attr_reader :scl, :w, :h, :rows, :cols, :z, :zoff
+
+   def initialize(scl, w, h)
+     @scl, @w, @h = scl, w, h
+     @cols = w / scl
+     @rows = h / scl
+     @z = Array.new(cols) { Array.new(rows, 0.0) }  # block form: one array per column
+   end
+
+   # Calculate height values (based off a neural network)
+   def calculate(nn)
+     val = ->(curr, nn, x, y) { curr * 0.95 + 0.05 * (nn.feed_forward([x, y]) * 280.0 - 140.0) }
+     @z = (0...cols).map do |i|
+       (0...rows).map { |j| val.call(z[i][j], nn, i * 1.0 / cols, j * 1.0 / rows) }
+     end
+   end
+
+   # Render landscape as grid of quads
+   def render
+     # Every cell is an individual quad
+     # (could use quad_strip here, but produces funny results, investigate this)
+     (0...z.size - 1).each do |x|
+       (0...z[0].size - 1).each do |y|
+         # one quad at a time
+         # each quad's color is determined by the height value at each vertex
+         # (clean this part up)
+         no_stroke
+         push_matrix
+         begin_shape(QUADS)
+         translate(x * scl - w * 0.5, y * scl - h * 0.5, 0)
+         fill(z[x][y] + 127, 220)
+         vertex(0, 0, z[x][y])
+         fill(z[x + 1][y] + 127, 220)
+         vertex(scl, 0, z[x + 1][y])
+         fill(z[x + 1][y + 1] + 127, 220)
+         vertex(scl, scl, z[x + 1][y + 1])
+         fill(z[x][y + 1] + 127, 220)
+         vertex(0, scl, z[x][y + 1])
+         end_shape
+         pop_matrix
+       end
+     end
+   end
+ end
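The `val` lambda in `Landscape#calculate` is an exponential moving average: each frame a cell keeps 95% of its current height and moves 5% of the way toward the network's prediction, which is mapped from the sigmoid range (0, 1) onto heights in (-140, 140). The same easing, isolated as a plain Ruby sketch (no Processing required):

```ruby
# Easing from Landscape#calculate, isolated: heights drift toward
# 280 * prediction - 140 at 5% per frame.
prediction = 0.75                    # a sigmoid output in (0, 1)
target = prediction * 280.0 - 140.0  # => 70.0
h = 0.0
60.times { h = h * 0.95 + 0.05 * target }
puts h  # ~66.8 after 60 frames, converging on 70.0
```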
data/samples/external_library/java_processing/custom/src/nn/Connection.java
@@ -0,0 +1,47 @@
+ // Daniel Shiffman
+ // The Nature of Code, Fall 2006
+ // Neural Network
+
+ // Class to describe a connection between two neurons
+
+ package nn;
+
+ public class Connection {
+
+   private final Neuron from; // Connection goes from. . .
+   private final Neuron to;   // To. . .
+   private double weight;     // Weight of the connection. . .
+
+   // Constructor builds a connection with a random weight
+   public Connection(Neuron a_, Neuron b_) {
+     from = a_;
+     to = b_;
+     weight = Math.random() * 2 - 1;
+   }
+
+   // In case I want to set the weights manually, using this for testing
+   public Connection(Neuron a_, Neuron b_, double w) {
+     from = a_;
+     to = b_;
+     weight = w;
+   }
+
+   public Neuron getFrom() {
+     return from;
+   }
+
+   public Neuron getTo() {
+     return to;
+   }
+
+   public double getWeight() {
+     return weight;
+   }
+
+   // Changing the weight of the connection
+   public void adjustWeight(double deltaWeight) {
+     weight += deltaWeight;
+   }
+ }
data/samples/external_library/java_processing/custom/src/nn/HiddenNeuron.java
@@ -0,0 +1,20 @@
+ // Daniel Shiffman
+ // The Nature of Code, Fall 2006
+ // Neural Network
+
+ // Hidden Neuron Class
+ // So far not necessary to differentiate these
+
+ package nn;
+
+ public class HiddenNeuron extends Neuron {
+
+   public HiddenNeuron() {
+     super();
+   }
+
+   public HiddenNeuron(int i) {
+     super(i);
+   }
+ }
data/samples/external_library/java_processing/custom/src/nn/InputNeuron.java
@@ -0,0 +1,23 @@
+ // Daniel Shiffman
+ // The Nature of Code, Fall 2006
+ // Neural Network
+
+ // Input Neuron Class
+ // Has additional functionality to receive beginning input
+
+ package nn;
+
+ public class InputNeuron extends Neuron {
+   public InputNeuron() {
+     super();
+   }
+
+   public InputNeuron(int i) {
+     super(i);
+   }
+
+   public void input(double d) {
+     output = d;
+   }
+ }
data/samples/external_library/java_processing/custom/src/nn/Network.java
@@ -0,0 +1,136 @@
+ // Daniel Shiffman
+ // The Nature of Code, Fall 2006
+ // Neural Network
+
+ // Class to describe the entire network
+ // Arrays for input_array neurons, hidden_array neurons, and output neuron
+
+ // Need to update this so that it would work with an array of outputs
+ // Rather silly that I didn't do this initially
+
+ // Also need to build in a "Layer" class so that there can easily
+ // be more than one hidden_array layer
+
+ package nn;
+
+ import java.util.ArrayList;
+
+ public class Network {
+
+   // Layers
+   InputNeuron[] input_array;
+   HiddenNeuron[] hidden_array;
+   OutputNeuron output;
+
+   public static final double LEARNING_CONSTANT = 0.5;
+
+   // Only one output now to start!!! (i can do better, really. . .)
+   // Constructor makes the entire network based on number of inputs & number of neurons in hidden_array layer
+   // Only one hidden_array layer!!! (fix this dood)
+   public Network(int inputs, int hidden_arraytotal) {
+
+     input_array = new InputNeuron[inputs + 1]; // Got to add a bias input_array
+     hidden_array = new HiddenNeuron[hidden_arraytotal + 1];
+
+     // Make input_array neurons
+     for (int i = 0; i < input_array.length - 1; i++) {
+       input_array[i] = new InputNeuron();
+     }
+
+     // Make hidden_array neurons
+     for (int i = 0; i < hidden_array.length - 1; i++) {
+       hidden_array[i] = new HiddenNeuron();
+     }
+
+     // Make bias neurons
+     input_array[input_array.length - 1] = new InputNeuron(1);
+     hidden_array[hidden_array.length - 1] = new HiddenNeuron(1);
+
+     // Make output neuron
+     output = new OutputNeuron();
+     for (InputNeuron input : input_array) {
+       for (int j = 0; j < hidden_array.length - 1; j++) {
+         // Create the connection object and put it in both neurons
+         Connection c = new Connection(input, hidden_array[j]);
+         input.addConnection(c);
+         hidden_array[j].addConnection(c);
+       }
+     }
+     for (HiddenNeuron hidden : hidden_array) {
+       Connection c = new Connection(hidden, output);
+       hidden.addConnection(c);
+       output.addConnection(c);
+     }
+   }
+
+   public double feedForward(double[] inputVals) {
+
+     // Feed the input_array with an array of inputs
+     for (int i = 0; i < inputVals.length; i++) {
+       input_array[i].input(inputVals[i]);
+     }
+
+     // Have the hidden_array layer calculate its output
+     for (int i = 0; i < hidden_array.length - 1; i++) {
+       hidden_array[i].calcOutput();
+     }
+
+     // Calculate the output of the output neuron
+     output.calcOutput();
+
+     // Return output
+     return output.getOutput();
+   }
+
+   public double train(double[] inputs, double answer) {
+     double result = feedForward(inputs);
+
+     // This is where the error correction all starts
+     // Derivative of sigmoid output function * diff between known and guess
+     double deltaOutput = result * (1 - result) * (answer - result);
+
+     // BACKPROPAGATION
+     // This is easier b/c we just have one output
+     // Apply delta to connections between hidden_array and output
+     ArrayList<Connection> connections = output.getConnections();
+     for (int i = 0; i < connections.size(); i++) {
+       Connection c = connections.get(i);
+       Neuron neuron = c.getFrom();
+       double temp_output = neuron.getOutput();
+       double deltaWeight = temp_output * deltaOutput;
+       c.adjustWeight(LEARNING_CONSTANT * deltaWeight);
+     }
+     for (HiddenNeuron hidden : hidden_array) {
+       connections = hidden.getConnections();
+       double sum = 0;
+       // Sum output delta * hidden_array layer connections (just one output)
+       for (int j = 0; j < connections.size(); j++) {
+         Connection c = connections.get(j);
+         // Is this a connection from hidden_array layer to next layer (output)?
+         if (c.getFrom() == hidden) {
+           sum += c.getWeight() * deltaOutput;
+         }
+       }
+       // Then adjust the incoming weights based on:
+       // above sum * derivative of sigmoid output function for hidden_array neurons
+       for (int j = 0; j < connections.size(); j++) {
+         Connection c = connections.get(j);
+         // Is this a connection from previous layer (input_array) to hidden_array layer?
+         if (c.getTo() == hidden) {
+           double temp_output = hidden.getOutput();
+           double deltaHidden = temp_output * (1 - temp_output); // Derivative of sigmoid(x)
+           deltaHidden *= sum; // Would sum for all outputs if more than one output
+           Neuron neuron = c.getFrom();
+           double deltaWeight = neuron.getOutput() * deltaHidden;
+           c.adjustWeight(LEARNING_CONSTANT * deltaWeight);
+         }
+       }
+     }
+     return result;
+   }
+ }
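For reference, `train` implements the classic delta rule for a single sigmoid output. With network output y, target t, upstream neuron outputs o, and learning rate η = `LEARNING_CONSTANT` = 0.5, the loops above perform (the hidden sum has a single term here, since there is one output):

```latex
\delta_{\mathrm{out}} = y\,(1 - y)\,(t - y)
\qquad
\Delta w_{h \to \mathrm{out}} = \eta\, o_h\, \delta_{\mathrm{out}}

\delta_h = o_h\,(1 - o_h) \sum_{\mathrm{out}} w_{h \to \mathrm{out}}\, \delta_{\mathrm{out}}
\qquad
\Delta w_{i \to h} = \eta\, o_i\, \delta_h
```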
data/samples/external_library/java_processing/custom/src/nn/Neuron.java
@@ -0,0 +1,79 @@
+ // Daniel Shiffman
+ // The Nature of Code, Fall 2006
+ // Neural Network
+
+ // Generic Neuron Class
+ // Can be a bias neuron (true or false)
+
+ package nn;
+
+ import java.util.ArrayList;
+
+ public class Neuron {
+
+   protected double output;
+   protected ArrayList<Connection> connections;
+   protected boolean bias = false;
+
+   // A regular Neuron
+   public Neuron() {
+     output = 0;
+     // Using an arraylist to store list of connections to other neurons
+     connections = new ArrayList<>();
+     bias = false;
+   }
+
+   // Constructor for a bias neuron
+   public Neuron(int i) {
+     output = i;
+     connections = new ArrayList<>();
+     bias = true;
+   }
+
+   // Function to calculate output of this neuron
+   // Output is sum of all inputs * weight of connections
+   public void calcOutput() {
+     if (bias == false) {
+       double sum = 0;
+       double bias_value = 0;
+       //System.out.println("Looking through " + connections.size() + " connections");
+       for (int i = 0; i < connections.size(); i++) {
+         Connection c = connections.get(i);
+         Neuron from = c.getFrom();
+         Neuron to = c.getTo();
+         // Is this connection moving forward to us?
+         // Ignore connections that we send our output to
+         if (to == this) {
+           // This isn't really necessary,
+           // but I am treating the bias individually in case I need to at some point
+           if (from.bias) {
+             bias_value = from.getOutput() * c.getWeight();
+           } else {
+             sum += from.getOutput() * c.getWeight();
+           }
+         }
+       }
+       // Output is result of sigmoid function
+       output = f(bias_value + sum);
+     }
+   }
+
+   void addConnection(Connection c) {
+     connections.add(c);
+   }
+
+   double getOutput() {
+     return output;
+   }
+
+   // Sigmoid function
+   public static double f(double x) {
+     return 1.0 / (1.0 + Math.exp(-x));
+   }
+
+   public ArrayList<Connection> getConnections() {
+     return connections;
+   }
+ }
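`calcOutput` is the standard weighted-sum-plus-bias activation; in symbols, a restatement of the loop rather than new behaviour, where b_j is the contribution of the bias connection:

```latex
o_j = f\Big(b_j + \sum_{i \to j} w_{ij}\, o_i\Big),
\qquad
f(x) = \frac{1}{1 + e^{-x}}
```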
data/samples/external_library/java_processing/custom/src/nn/OutputNeuron.java
@@ -0,0 +1,7 @@
+ package nn;
+
+ public class OutputNeuron extends Neuron {
+   public OutputNeuron() {
+     super();
+   }
+ }
data/samples/external_library/java_processing/custom/xor.rb
@@ -0,0 +1,88 @@
+ # The Nature of Code
+ # Daniel Shiffman
+ # http://natureofcode.com
+
+ # XOR Multi-Layered Neural Network Example
+ # Neural network Java code is all in the "src" folder
+ load_library :nn
+
+ require_relative './landscape'
+ include_package 'nn'
+
+ ITERATIONS_PER_FRAME = 5
+
+ attr_reader :inputs, :nn, :count, :land, :theta, :f, :result, :known
+
+ def setup
+   size(400, 400, P3D)
+   @theta = 0.0
+   # Create a landscape object
+   @land = Landscape.new(20, 300, 300)
+   @f = create_font("Courier", 12, true)
+
+   @nn = Network.new(2, 4)
+   @count = 0
+   # Create a list of 4 training inputs
+   @inputs = []
+   inputs << [1.0, 0.0]
+   inputs << [0.0, 1.0]
+   inputs << [1.0, 1.0]
+   inputs << [0.0, 0.0]
+ end
+
+ def draw
+   lights
+   ITERATIONS_PER_FRAME.times do
+     inp = inputs.sample
+     # Compute XOR
+     @known = ((inp[0] > 0.0 && inp[1] > 0.0) || (inp[0] < 1.0 && inp[1] < 1.0)) ? 0.0 : 1.0
+
+     # Train that sucker!
+     @result = nn.train(inp, known)
+     @count += 1
+   end
+
+   # Ok, visualize the solution space
+   background(175)
+   push_matrix
+   translate(width / 2, height / 2 + 20, -160)
+   rotate_x(Math::PI / 3)
+   rotate_z(theta)
+
+   # Put a little BOX on screen
+   push_matrix
+   stroke(50)
+   no_fill
+   translate(-10, -10, 0)
+   box(280)
+   # Draw the landscape
+   land.calculate(nn)
+   land.render
+   pop_matrix
+
+   @theta += 0.0025
+   pop_matrix
+
+   # Display overall neural net stats
+   network_status
+ end
+
+ def network_status
+   mse = 0.0
+   text_font(f)
+   fill(0)
+   text("Your friendly neighborhood neural network solving XOR.", 10, 20)
+   text("Total iterations: #{count}", 10, 40)
+   mse += (result - known) * (result - known)
+   rmse = Math::sqrt(mse / 4.0)
+   out = "Root mean squared error: #{format("%.5f", rmse)}"
+   hint DISABLE_DEPTH_SORT
+   text(out, 10, 60)
+   hint ENABLE_DEPTH_SORT
+ end
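The `@known` ternary in `draw` encodes the XOR truth table for this training set: `[1, 1]` and `[0, 0]` map to 0, mixed inputs map to 1. A standalone check (plain Ruby sketch, independent of the sketch above):

```ruby
# Sanity check of the XOR target expression used in draw.
[[1.0, 0.0], [0.0, 1.0], [1.0, 1.0], [0.0, 0.0]].each do |a, b|
  known = ((a > 0.0 && b > 0.0) || (a < 1.0 && b < 1.0)) ? 0.0 : 1.0
  puts format('%d XOR %d = %d', a, b, known)
end
# => 1 XOR 0 = 1, 0 XOR 1 = 1, 1 XOR 1 = 0, 0 XOR 0 = 0
```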