Fixed NN layer order

4 years ago
parent commit 5ef3501faf
3 changed files with 95 additions and 22 deletions
  1. scripts/neuron_net_test.py (+65, -10)
  2. scripts/number_conv.py (+1, -1)
  3. src/neural/layer.sv (+29, -11)

+ 65 - 10
scripts/neuron_net_test.py

@@ -13,15 +13,16 @@ WEIGTHS = [
         [-0.16126093, 0.5317601, 0.34316933, -0.7074082],
         [0.09219088, -0.624525, -0.61903083, -0.87057704]
     ],
-    [[0.36770403, -0.78046024, 0.3979908, 0.5494289, -0.13859335, 0.40053025, 0.08249452, -0.32528356],
-     [-0.17659009, 0.13901198, -0.45248222, -0.7894139, -0.81092286, -0.521815, 0.30632392, -0.3143816],
-     [-0.04314173, 0.14361085, 0.6259473, 0.3571782, -0.38011226, 0.01378736, 0.05794358, 0.09667788],
-     [-0.46864474, 0.36618456, -0.45595396, -0.39789405, 0.73964316, -0.30294785, 0.2482118, -0.2127953],
-     [-0.37941265, 0.45330787, -0.12066315, 0.5636705, 0.68990386, 0.6543718, 0.86367106, -0.5707757],
-     [-0.78606385, 0.24032554, -0.4472755, -0.24661142, -0.2698564, -0.8365823, -0.13674814, -0.39799848],
-     [0.11138931, 0.48950365, 0.12998834, 0.4947537, 0.516593, 0.82281274, 0.04789656, 0.30206403],
-     [0.23097174, 0.30290592, -0.596446, -0.40108407, 0.12246455, -0.47260976, -0.55030185, 0.44481543]
-     ]
+    [
+        [0.36770403, -0.78046024, 0.3979908, 0.5494289, -0.13859335, 0.40053025, 0.08249452, -0.32528356],
+        [-0.17659009, 0.13901198, -0.45248222, -0.7894139, -0.81092286, -0.521815, 0.30632392, -0.3143816],
+        [-0.04314173, 0.14361085, 0.6259473, 0.3571782, -0.38011226, 0.01378736, 0.05794358, 0.09667788],
+        [-0.46864474, 0.36618456, -0.45595396, -0.39789405, 0.73964316, -0.30294785, 0.2482118, -0.2127953],
+        [-0.37941265, 0.45330787, -0.12066315, 0.5636705, 0.68990386, 0.6543718, 0.86367106, -0.5707757],
+        [-0.78606385, 0.24032554, -0.4472755, -0.24661142, -0.2698564, -0.8365823, -0.13674814, -0.39799848],
+        [0.11138931, 0.48950365, 0.12998834, 0.4947537, 0.516593, 0.82281274, 0.04789656, 0.30206403],
+        [0.23097174, 0.30290592, -0.596446, -0.40108407, 0.12246455, -0.47260976, -0.55030185, 0.44481543]
+    ],
     [
         [0.5724262, 0.5853241, 0.3748752, -0.892384, -1.0270239, 0.2170913, -0.07271451, 0.14661156],
         [0.30391088, -0.92324615, 0.8088594, -1.0522624, 0.07374455, -0.550893, 0.8194236, -0.62796086]
@@ -36,10 +37,64 @@ BIAS = [
     [0.03787775, -0.03655371],
 ]
 
+RESULT = [
+    0x00000000, 0x3f800000,
+    0x3f800000, 0x00000000,
+    0x00000000, 0x00000000,
+    0x3f800000, 0x3f800000,
+    # 0x3f030126, 0x3f800000,
+    # 0x3e652010, 0x00000000,
+    # 0x3d2d25da, 0x3efea470,
+    # 0x3f1a4267, 0x3f06d0f4,
+]
+
 
 def generate_nn_values(dtype=np.float32):
     dsize = dtype_size(dtype)
 
 
+def sigm(x):
+    ex = 2.7182818 ** x
+    return ex / (ex + 1)
+
+
+def linr(x):
+    return x
+
+
+def process(value):
+    if value > 3 or value < 0:
+        raise ValueError("Value must be between 0 and 3")
+    onehot_mat = [
+        [0, 0, 0, 1],
+        [0, 0, 1, 0],
+        [0, 1, 0, 0],
+        [1, 0, 0, 0],
+    ]
+    L0 = onehot_mat[value]
+    L1_mult = [[L0[j] * WEIGTHS[0][i][j] for j in range(len(L0))] for i in range(8)]
+    L1 = [linr(sum(L1_mult[i]) + BIAS[0][i]) for i in range(8)]
+    L2 = [linr(sum([L1[j] * WEIGTHS[1][i][j] for j in range(len(L1))]) + BIAS[1][i]) for i in range(8)]
+    L3 = [sum([L2[j] * WEIGTHS[2][i][j] for j in range(len(L2))]) + BIAS[2][i] for i in range(2)]
+    # print(f"L0: {L0} \n\tL1mult: {L1_mult}\n\tL1: {L1} \n\tL2: {L2} \n\tL3: {L3}")
+    return [sigm(v) for v in L3]
+
+
+def show_results():
+    sim_data = np.array([process(0), process(1), process(2), process(3)]).T
+    hdl_data = np.frombuffer(b''.join([i.to_bytes(4, 'little') for i in RESULT]), dtype=np.float32).reshape(4, 2).T
+    print(hdl_data)
+
+    plt.plot(sim_data[0], sim_data[1], 'x', color='b')
+    plt.plot(hdl_data[0], hdl_data[1], 'x', color='r')
+    plt.xlabel('I')
+    plt.ylabel('Q')
+    # plt.xlim([0, 1])
+    # plt.ylim([0, 1])
+    plt.grid()
+    plt.show()
+
+
 if __name__ == '__main__':
-    generate_nn_values()
+    show_results()
+    # generate_nn_values()
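
For reference, each entry in the new RESULT list is the raw IEEE-754 single-precision bit pattern printed by the HDL testbench, and show_results() reinterprets those words with np.frombuffer before plotting them against the Python model. A minimal decoding sketch, assuming only the standard library (word_to_float is an illustrative helper, not part of the script):

import struct

def word_to_float(word):
    # Reinterpret a 32-bit integer as the float32 that shares its bit pattern,
    # matching the little-endian byte order used by to_bytes(4, 'little').
    return struct.unpack('<f', struct.pack('<I', word))[0]

print(word_to_float(0x3f800000))  # 1.0
print(word_to_float(0x00000000))  # 0.0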

+ 1 - 1
scripts/number_conv.py

@@ -7,7 +7,7 @@ def float2verilog(lines, dtype=np.float32):
     dsize = dtype_size(dtype)
     print("")
     for i, line in enumerate(lines):
-        arr = line.replace('[', '').replace(']', '').split()
+        arr = line.replace('[', '').replace(']', '').replace('#', '').replace('/', '').split()
         nums = np.array([float(f.strip(',')) for f in arr if f], dtype=dtype)
         b = nums.tobytes()
         print(f'[{i}] = {{' +
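
The extra replace() calls above let rows copied straight out of the Python source, including ones behind a leading '#' or '//', be pasted into float2verilog without breaking the float parsing. A rough sketch of the same cleaning and packing on a single pasted line, assuming numpy as in the script; the loop and the indexed-assignment print format of the real function are simplified away:

import numpy as np

# One weight row pasted verbatim from the Python file, comment marker included.
line = "#     [0.03787775, -0.03655371],"

# Same cleaning as the patched line: drop brackets and comment markers.
arr = line.replace('[', '').replace(']', '').replace('#', '').replace('/', '').split()
nums = np.array([float(f.strip(',')) for f in arr if f], dtype=np.float32)

# Reinterpret the packed float32 bytes as 32-bit words, one hex literal per value.
words = [f"32'h{int(w):08x}" for w in np.frombuffer(nums.tobytes(), dtype=np.uint32)]
print(words)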

+ 29 - 11
src/neural/layer.sv

@@ -20,15 +20,15 @@ module neuron_layer#(parameter C, K, N=32)(clk, rst, x, y, w, b, left, right);
 
     genvar i, j;
     generate
-        for(i=0; i<CONNS; i++) begin
+        for(i=0; i<CONNS; i++) begin: gen_signal_conn
             assign stb[i] = left[i].stb;
             assign left[i].ack = &ack_t[i];
         end
     endgenerate
 
     generate
-        for(i=0; i<NEURONS; i++) begin
-            for(j=0; j<CONNS; j++) begin
+        for(i=0; i<NEURONS; i++) begin: gen_neurons
+            for(j=0; j<CONNS; j++) begin: gen_cross
                 assign ack_t[j][i] = ack[i][j];
             end
             neuron#(.K(C), .N(N)) n(
@@ -68,21 +68,24 @@ module neuron_network_tb;
     abus_io left[3:0]();
     abus_io right[1:0]();
 
-    reg [31:0] layer1_w [7:0][3:0];
-    reg [31:0] layer1_b [7:0];
+    reg [31:0] layer1_w [0:7][3:0];
+    reg [31:0] layer1_b [0:7];
     reg [31:0] layer1_o [7:0];
     abus_io layer1_io [7:0]();
 
-    reg [31:0] layer2_w [7:0][7:0];
+    reg [31:0] layer2_w [0:7][7:0];
     reg [31:0] layer2_b [7:0];
     reg [31:0] layer2_o [7:0];
     abus_io layer2_io [7:0]();
 
-    reg [31:0] layer3_w [1:0][7:0];
+    reg [31:0] layer3_w [0:1][7:0];
     reg [31:0] layer3_b [1:0];
     reg [31:0] layer3_o [1:0];
     abus_io layer3_io [1:0]();
 
+    logic y_stb;
+    assign y_stb = right[0].stb & right[1].stb;
+
     neuron_layer#(.C(2), .K(3)) layer1(
         .clk(clk),
         .rst(rst),
@@ -174,19 +177,34 @@ module neuron_network_tb;
 
         #15;
         rst = 0;
-        x = {0, 0, 0 , 'h3f800000};
+        x = {0, 0, 0, 'h3f800000};
+        read_value();
+        x = {0, 0, 'h3f800000, 0};
+        read_value();
+        x = {0, 'h3f800000, 0, 0};
+        read_value();
+        x = {'h3f800000, 0, 0, 0};
+        read_value();
+    end
+
+    task read_value;
         left[0].stb = 1;
         left[1].stb = 1;
         left[2].stb = 1;
         left[3].stb = 1;
-
         #15;
         left[0].stb = 0;
         left[1].stb = 0;
         left[2].stb = 0;
         left[3].stb = 0;
-
-    end
+        wait(y_stb == 1);
+        right[0].ack = 1;
+        right[1].ack = 1;
+        #15;
+        right[0].ack = 0;
+        right[1].ack = 0;
+        $display("0x%H, 0x%H", y[0], y[1]);
+    endtask : read_value
 
 
 endmodule : neuron_network_tb
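
The testbench arrays' first unpacked dimension also changes from [7:0] to [0:7], presumably the "layer order" fix named in the commit title: one common reason such a change matters is that constructs that fill an unpacked array in declaration order (assignment patterns, the $readmem tasks without explicit addresses) start at the left bound, so with a descending range the first weight row would land at the highest index. A hypothetical Python illustration of that indexing effect (not the SystemVerilog itself):

# Rows in the order number_conv.py emits them.
rows = [f"row{i}" for i in range(8)]

# Filling from the left bound of the declared range:
descending = dict(zip(range(7, -1, -1), rows))  # declared [7:0]: row 0 ends up at index 7
ascending = dict(zip(range(8), rows))           # declared [0:7]: row 0 stays at index 0

print(descending[0], ascending[0])  # row7 vs row0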