|
2 | 2 | usingSystem; |
3 | 3 | usingTensorflow.Keras.Optimizers; |
4 | 4 | usingTensorflow.NumPy; |
| 5 | +usingstaticTensorflow.Binding; |
5 | 6 | usingstaticTensorflow.KerasApi; |
6 | 7 |
|
7 | 8 | namespaceTensorflow.Keras.UnitTest |
@@ -66,5 +67,79 @@ public void LeNetModel() |
66 | 67 | varpred=model.predict((x1,x2)); |
67 | 68 | Console.WriteLine(pred); |
68 | 69 | } |
| 70 | + |
[TestMethod]
public void LeNetModelDataset()
{
    // Builds a LeNet-style functional model with TWO 28x28x1 image inputs whose
    // convolutional towers are flattened and concatenated, then exercises the
    // tf.data.Dataset code path for fit, evaluate, and predict with multi-input data.

    // First conv tower: two 3x3 conv + 2x2 pool stages.
    var inputs = keras.Input((28, 28, 1));
    var conv1 = keras.layers.Conv2D(16, (3, 3), activation: "relu", padding: "same").Apply(inputs);
    var pool1 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv1);
    var conv2 = keras.layers.Conv2D(32, (3, 3), activation: "relu", padding: "same").Apply(pool1);
    var pool2 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv2);
    var flat1 = keras.layers.Flatten().Apply(pool2);

    // Second conv tower: coarser 4x4 pooling followed by a 1x1 conv.
    var inputs_2 = keras.Input((28, 28, 1));
    var conv1_2 = keras.layers.Conv2D(16, (3, 3), activation: "relu", padding: "same").Apply(inputs_2);
    var pool1_2 = keras.layers.MaxPooling2D((4, 4), 4).Apply(conv1_2);
    var conv2_2 = keras.layers.Conv2D(32, (1, 1), activation: "relu", padding: "same").Apply(pool1_2);
    var pool2_2 = keras.layers.MaxPooling2D((2, 2), 2).Apply(conv2_2);
    var flat1_2 = keras.layers.Flatten().Apply(pool2_2);

    // Merge the two towers and classify into 10 classes.
    var concat = keras.layers.Concatenate().Apply((flat1, flat1_2));
    var dense1 = keras.layers.Dense(512, activation: "relu").Apply(concat);
    var dense2 = keras.layers.Dense(128, activation: "relu").Apply(dense1);
    var dense3 = keras.layers.Dense(10, activation: "relu").Apply(dense2);
    var output = keras.layers.Softmax(-1).Apply(dense3);

    var model = keras.Model((inputs, inputs_2), output);
    model.summary();

    var data_loader = new MnistModelLoader();

    // NOTE(review): .Result blocks on the async loader; acceptable in a
    // synchronous MSTest method but would deadlock under a sync context.
    // The large ValidationSize leaves only a small training split, which
    // presumably keeps this test fast — confirm against the MNIST set size.
    var dataset = data_loader.LoadAsync(new ModelLoadSetting
    {
        TrainDir = "mnist",
        OneHot = false,
        ValidationSize = 59900,
    }).Result;

    var loss = keras.losses.SparseCategoricalCrossentropy();
    var optimizer = new Adam(0.001f);
    model.compile(optimizer, loss, new string[] { "accuracy" });

    // Reshape flat MNIST rows into NHWC images.
    NDArray x1 = np.reshape(dataset.Train.Data, (dataset.Train.Data.shape[0], 28, 28, 1));

    // Zip (input1, input2, labels) into one dataset; FirstInputTensorCount = 2
    // presumably tells fit() the first two tensors are model inputs and the
    // rest are labels — TODO confirm against DatasetV2's handling.
    var multiInputDataset = tf.data.Dataset.zip(
        tf.data.Dataset.from_tensor_slices(x1),
        tf.data.Dataset.from_tensor_slices(x1),
        tf.data.Dataset.from_tensor_slices(dataset.Train.Labels)
    ).batch(8);
    multiInputDataset.FirstInputTensorCount = 2;

    model.fit(multiInputDataset, epochs: 3);

    // Evaluate on just the first 8 samples.
    x1 = x1["0:8"];

    multiInputDataset = tf.data.Dataset.zip(
        tf.data.Dataset.from_tensor_slices(x1),
        tf.data.Dataset.from_tensor_slices(x1),
        tf.data.Dataset.from_tensor_slices(dataset.Train.Labels["0:8"])
    ).batch(8);
    multiInputDataset.FirstInputTensorCount = 2;

    // Cast needed because evaluate is exposed on the concrete Engine.Model type.
    (model as Engine.Model).evaluate(multiInputDataset);

    // Predict from a labels-free dataset: one all-ones and one all-zeros image.
    x1 = np.ones((1, 28, 28, 1), TF_DataType.TF_FLOAT);
    var x2 = np.zeros((1, 28, 28, 1), TF_DataType.TF_FLOAT);

    multiInputDataset = tf.data.Dataset.zip(
        tf.data.Dataset.from_tensor_slices(x1),
        tf.data.Dataset.from_tensor_slices(x2)
    ).batch(8);
    multiInputDataset.FirstInputTensorCount = 2;

    var pred = model.predict(multiInputDataset);
    Console.WriteLine(pred);
}
69 | 144 | } |
70 | 145 | } |