|
using System;
using System.Threading.Tasks;
using Tensorflow.Keras.Optimizers;
using Tensorflow.NumPy;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;
|
6 | 7 |
|
7 | 8 | namespace Tensorflow.Keras.UnitTest
|
@@ -66,5 +67,79 @@ public void LeNetModel()
|
66 | 67 | var pred = model.predict((x1, x2));
|
67 | 68 | Console.WriteLine(pred);
|
68 | 69 | }
|
| 70 | + |
| 71 | + [TestMethod] |
| 72 | + public void LeNetModelDataset() |
| 73 | + { |
| 74 | + var inputs = keras.Input((28, 28, 1)); |
| 75 | + var conv1 = keras.layers.Conv2D(16, (3, 3), activation: "relu", padding: "same").Apply(inputs); |
| 76 | + var pool1 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv1); |
| 77 | + var conv2 = keras.layers.Conv2D(32, (3, 3), activation: "relu", padding: "same").Apply(pool1); |
| 78 | + var pool2 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv2); |
| 79 | + var flat1 = keras.layers.Flatten().Apply(pool2); |
| 80 | + |
| 81 | + var inputs_2 = keras.Input((28, 28, 1)); |
| 82 | + var conv1_2 = keras.layers.Conv2D(16, (3, 3), activation: "relu", padding: "same").Apply(inputs_2); |
| 83 | + var pool1_2 = keras.layers.MaxPooling2D((4, 4), 4).Apply(conv1_2); |
| 84 | + var conv2_2 = keras.layers.Conv2D(32, (1, 1), activation: "relu", padding: "same").Apply(pool1_2); |
| 85 | + var pool2_2 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv2_2); |
| 86 | + var flat1_2 = keras.layers.Flatten().Apply(pool2_2); |
| 87 | + |
| 88 | + var concat = keras.layers.Concatenate().Apply((flat1, flat1_2)); |
| 89 | + var dense1 = keras.layers.Dense(512, activation: "relu").Apply(concat); |
| 90 | + var dense2 = keras.layers.Dense(128, activation: "relu").Apply(dense1); |
| 91 | + var dense3 = keras.layers.Dense(10, activation: "relu").Apply(dense2); |
| 92 | + var output = keras.layers.Softmax(-1).Apply(dense3); |
| 93 | + |
| 94 | + var model = keras.Model((inputs, inputs_2), output); |
| 95 | + model.summary(); |
| 96 | + |
| 97 | + var data_loader = new MnistModelLoader(); |
| 98 | + |
| 99 | + var dataset = data_loader.LoadAsync(new ModelLoadSetting |
| 100 | + { |
| 101 | + TrainDir = "mnist", |
| 102 | + OneHot = false, |
| 103 | + ValidationSize = 59900, |
| 104 | + }).Result; |
| 105 | + |
| 106 | + var loss = keras.losses.SparseCategoricalCrossentropy(); |
| 107 | + var optimizer = new Adam(0.001f); |
| 108 | + model.compile(optimizer, loss, new string[] { "accuracy" }); |
| 109 | + |
| 110 | + NDArray x1 = np.reshape(dataset.Train.Data, (dataset.Train.Data.shape[0], 28, 28, 1)); |
| 111 | + |
| 112 | + var multiInputDataset = tf.data.Dataset.zip( |
| 113 | + tf.data.Dataset.from_tensor_slices(x1), |
| 114 | + tf.data.Dataset.from_tensor_slices(x1), |
| 115 | + tf.data.Dataset.from_tensor_slices(dataset.Train.Labels) |
| 116 | + ).batch(8); |
| 117 | + multiInputDataset.FirstInputTensorCount = 2; |
| 118 | + |
| 119 | + model.fit(multiInputDataset, epochs: 3); |
| 120 | + |
| 121 | + x1 = x1["0:8"]; |
| 122 | + |
| 123 | + multiInputDataset = tf.data.Dataset.zip( |
| 124 | + tf.data.Dataset.from_tensor_slices(x1), |
| 125 | + tf.data.Dataset.from_tensor_slices(x1), |
| 126 | + tf.data.Dataset.from_tensor_slices(dataset.Train.Labels["0:8"]) |
| 127 | + ).batch(8); |
| 128 | + multiInputDataset.FirstInputTensorCount = 2; |
| 129 | + |
| 130 | + (model as Engine.Model).evaluate(multiInputDataset); |
| 131 | + |
| 132 | + x1 = np.ones((1, 28, 28, 1), TF_DataType.TF_FLOAT); |
| 133 | + var x2 = np.zeros((1, 28, 28, 1), TF_DataType.TF_FLOAT); |
| 134 | + |
| 135 | + multiInputDataset = tf.data.Dataset.zip( |
| 136 | + tf.data.Dataset.from_tensor_slices(x1), |
| 137 | + tf.data.Dataset.from_tensor_slices(x2) |
| 138 | + ).batch(8); |
| 139 | + multiInputDataset.FirstInputTensorCount = 2; |
| 140 | + |
| 141 | + var pred = model.predict(multiInputDataset); |
| 142 | + Console.WriteLine(pred); |
| 143 | + } |
69 | 144 | }
|
70 | 145 | }
|
0 commit comments