an, 5 years ago
parent
revision 69e5d97364
2 files changed, with 39 lines added and 22 lines deleted
  1. +28 −21 models/autoencoder.py
  2. +11 −1 models/swarm_test.py

+ 28 - 21
models/autoencoder.py

@@ -143,10 +143,10 @@ def view_encoder(encoder, N, samples=1000):
     pass
 
 def test_batch_sizes(autoencoder, sizes):
-    accuracy = []
+    cost = []
     for batch_size in sizes:
-        autoencoder.fit(x_train_ho, x_train_ho,
-                        epochs=1,
+        history = autoencoder.fit(x_train_ho, x_train_ho,
+                        epochs=4,
                         batch_size=batch_size,
                         shuffle=False,
                         validation_data=(x_test_ho, x_test_ho))
@@ -156,19 +156,18 @@ def test_batch_sizes(autoencoder, sizes):
 
         result = misc.int2bit_array(decoded_data.argmax(axis=1), n)
         print("Accuracy: %.4f" % accuracy_score(x_test_array, result.reshape(-1, )))
-        accuracy.append(accuracy_score(x_test_array, result.reshape(-1, )))
-    plt.plot(sizes,accuracy)
-    plt.xscale('log')
+        cost.append(history.history["loss"][-1])
+    plt.plot(sizes, cost)
     plt.grid()
     plt.xlabel('batch size')
-    plt.ylabel('Accuracy')
+    plt.ylabel('Cost')
     plt.legend()
     plt.show()
 
 def test_epoch_sizes(autoencoder, sizes):
-    accuracy = []
+    cost = []
     for epoch_size in sizes:
-        autoencoder.fit(x_train_ho, x_train_ho,
+        history = autoencoder.fit(x_train_ho, x_train_ho,
                         epochs=epoch_size,
                         shuffle=False,
                         validation_data=(x_test_ho, x_test_ho))
@@ -178,12 +177,12 @@ def test_epoch_sizes(autoencoder, sizes):
 
         result = misc.int2bit_array(decoded_data.argmax(axis=1), n)
         print("Accuracy: %.4f" % accuracy_score(x_test_array, result.reshape(-1, )))
-        accuracy.append(accuracy_score(x_test_array, result.reshape(-1, )))
-    plt.plot(sizes,accuracy)
+        cost.append(history.history["loss"][-1])
+    plt.plot(sizes, cost)
     plt.xscale('log')
     plt.grid()
     plt.xlabel('epoch size')
-    plt.ylabel('Accuracy')
+    plt.ylabel('Cost')
     plt.legend()
     plt.show()
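
Note on the two hunks above: in tf.keras, `Model.fit` returns a `History` callback whose `.history` attribute is a plain dict mapping each tracked metric to a per-epoch list, which is why the epoch-size loop must read `history.history["loss"]` rather than `history["loss"]` (a `History` object is not subscriptable). A minimal self-contained sketch; the toy model and random data are invented purely for illustration:

```python
import numpy as np
from tensorflow import keras

# Toy autoencoder-style model, used only to show what fit() returns.
x = np.random.rand(64, 8).astype("float32")
model = keras.Sequential([keras.layers.Dense(8, activation="sigmoid")])
model.compile(optimizer="adam", loss="mse")

history = model.fit(x, x, epochs=4, validation_split=0.25, verbose=0)

# history.history is a dict: {"loss": [...], "val_loss": [...]},
# one value per epoch, so [-1] is the final-epoch cost.
print(history.history["loss"][-1])
print(history.history["val_loss"][-1])
```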
 
@@ -331,20 +330,21 @@ if __name__ == '__main__':
 
     pass
 
-"""
+
     autoencoder = Autoencoder(n, -8)
     autoencoder.compile(optimizer='adam', loss=losses.MeanSquaredError())
 
-    batch_sizes = [10,20,30,40, 100,200,300,400,500, 1000,2000,3000,4000,5000]
+    batch_sizes = list(range(5, 100, 5))
     epoch_sizes = [1, 10, 20, 50, 100, 200, 500]
 
-    test_epoch_sizes(autoencoder, epoch_sizes)
-
+    test_batch_sizes(autoencoder, batch_sizes)
+"""
     pass
 
-"""
-    autoencoder.fit(x_train_ho, x_train_ho,
-                    epochs=1,
+    autoencoder = Autoencoder(n, -8)
+    autoencoder.compile(optimizer='adam', loss=losses.MeanSquaredError())
+    history = autoencoder.fit(x_train_ho, x_train_ho,
+                    epochs=100,
                     shuffle=False,
                     validation_data=(x_test_ho, x_test_ho))
 
@@ -353,6 +353,13 @@ if __name__ == '__main__':
 
     result = misc.int2bit_array(decoded_data.argmax(axis=1), n)
     print("Accuracy: %.4f" % accuracy_score(x_test_array, result.reshape(-1, )))
-    view_encoder(autoencoder.encoder, n)
-"""
+    # view_encoder(autoencoder.encoder, n)
+    plt.plot(history.history['loss'])
+    plt.plot(history.history['val_loss'])
+    plt.title('Cost vs Number of Epochs used in Training Neural Network')
+    plt.ylabel('Cost')
+    plt.xlabel('Number of Epochs')
+    plt.legend(['train', 'test'], loc='upper left')
+    plt.show()
+
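
The new loss-curve plot labels its two curves positionally via `plt.legend(['train', 'test'], loc='upper left')`, which silently mislabels them if the two `plt.plot` calls are ever reordered. A sketch of the same plot with explicit per-curve labels, assuming a `history` object as returned by `fit()` in the sketch above:

```python
import matplotlib.pyplot as plt

# Assumes `history` comes from a Keras fit() call with validation data.
plt.plot(history.history['loss'], label='train')
plt.plot(history.history['val_loss'], label='test')
plt.title('Cost vs Number of Epochs used in Training Neural Network')
plt.ylabel('Cost')
plt.xlabel('Number of Epochs')
plt.legend(loc='upper left')
plt.show()
```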
 

+ 11 - 1
models/swarm_test.py

@@ -3,10 +3,13 @@ import numpy as np
 import matplotlib.pyplot as plt
 from sklearn.datasets import load_iris
 from sklearn.metrics import accuracy_score
+from IPython.display import Image
 
 # Import PySwarms
 import pyswarms as ps
 import misc
+from pyswarms.utils.functions import single_obj as fx
+from pyswarms.utils.plotters import (plot_cost_history, plot_contour, plot_surface)
 
 def logits_function(p):
     """ Calculate roll-back the weights and biases
@@ -164,7 +167,14 @@ dimensions = (n_inputs * n_hidden) + (n_hidden * n_classes) + n_hidden + n_class
 optimizer = ps.single.GlobalBestPSO(n_particles=100, dimensions=dimensions, options=options)
 
 # Perform optimization
-cost, pos = optimizer.optimize(f, iters=80)
+cost, pos = optimizer.optimize(f, iters=200)
+plot_cost_history(cost_history=optimizer.cost_history)
+#plt.rcParams.update({'font.size': 18})
+plt.title('Cost vs Number of Iterations using Particle Swarm Optimisation')
+plt.ylabel('Cost')
+plt.xlabel('Number of Iterations')
+plt.legend(['train'], loc='upper left')
+plt.show()
 
 results = predict(x_test_ho, pos, n)
 print("Accuracy: %.4f" % accuracy_score(x_test_ho, results))