 Github: 245885195@qq.com
 Date: 2017.9.20
 - - - - - -- - - - - - - - - - - - - - - - - - - - - - -
-'''
+'''
 from __future__ import print_function
 
+import pickle
 import numpy as np
 import matplotlib.pyplot as plt
 
 class CNN():
 
-    def __init__(self, conv1_get, size_p1, bp_num1, bp_num2, bp_num3, rate_w=0.2, rate_t=0.2):
+    def __init__(self, conv1_get, size_p1, bp_num1, bp_num2, bp_num3, rate_w=0.2, rate_t=0.2):
         '''
         :param conv1_get: [a,c,d], size, number, step of the convolution kernel
         :param size_p1: pooling size
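
For context when reviewing the constructor, a minimal construction sketch; the shapes are my own illustration (assuming the usual valid-convolution size (input - kernel) / step + 1), not values taken from this PR:

from convolution_neural_network import CNN   # assumed module/location

# Illustrative shapes: a 10x10 input with 3x3 kernels, 2 feature maps and
# step 1 gives 8x8 maps ((10 - 3) / 1 + 1 = 8); 2x2 pooling gives 4x4,
# so the first BP layer sees 2 * 4 * 4 = 32 inputs.
cnn = CNN(conv1_get=[3, 2, 1],   # kernel size, number of kernels, step
          size_p1=2,             # pooling window size
          bp_num1=32,            # flattened pooled feature size
          bp_num2=20,            # hidden BP layer size (arbitrary)
          bp_num3=10,            # output layer size (arbitrary)
          rate_w=0.2, rate_t=0.2)
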
@@ -48,32 +49,30 @@ def __init__(self,conv1_get,size_p1,bp_num1,bp_num2,bp_num3,rate_w=0.2,rate_t=0.
         self.thre_bp3 = -2 * np.random.rand(self.num_bp3) + 1
 
 
-    def save_model(self, save_path):
+    def save_model(self, save_path):
         # save model dict with pickle
-        import pickle
         model_dic = {'num_bp1': self.num_bp1,
-                     'num_bp2': self.num_bp2,
-                     'num_bp3': self.num_bp3,
-                     'conv1': self.conv1,
-                     'step_conv1': self.step_conv1,
-                     'size_pooling1': self.size_pooling1,
-                     'rate_weight': self.rate_weight,
-                     'rate_thre': self.rate_thre,
-                     'w_conv1': self.w_conv1,
-                     'wkj': self.wkj,
-                     'vji': self.vji,
-                     'thre_conv1': self.thre_conv1,
-                     'thre_bp2': self.thre_bp2,
-                     'thre_bp3': self.thre_bp3}
+                     'num_bp2': self.num_bp2,
+                     'num_bp3': self.num_bp3,
+                     'conv1': self.conv1,
+                     'step_conv1': self.step_conv1,
+                     'size_pooling1': self.size_pooling1,
+                     'rate_weight': self.rate_weight,
+                     'rate_thre': self.rate_thre,
+                     'w_conv1': self.w_conv1,
+                     'wkj': self.wkj,
+                     'vji': self.vji,
+                     'thre_conv1': self.thre_conv1,
+                     'thre_bp2': self.thre_bp2,
+                     'thre_bp3': self.thre_bp3}
         with open(save_path, 'wb') as f:
             pickle.dump(model_dic, f)
 
         print('Model saved: %s' % save_path)
 
     @classmethod
-    def ReadModel(cls, model_path):
+    def ReadModel(cls, model_path):
         # read saved model
-        import pickle
         with open(model_path, 'rb') as f:
             model_dic = pickle.load(f)
 
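
A hedged round-trip sketch for the save_model / ReadModel pair above; the module path and constructor values are assumptions, not part of this PR:

from convolution_neural_network import CNN   # assumed module/location

cnn = CNN(conv1_get=[3, 2, 1], size_p1=2, bp_num1=32, bp_num2=20, bp_num3=10)
cnn.save_model('model.pkl')            # prints 'Model saved: model.pkl'
restored = CNN.ReadModel('model.pkl')  # classmethod: rebuilds an instance
# restored carries the pickled weights and thresholds, independent of cnn.
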
@@ -97,13 +96,13 @@ def ReadModel(cls,model_path):
         return conv_ins
 
 
-    def sig(self, x):
+    def sig(self, x):
         return 1 / (1 + np.exp(-1 * x))
 
-    def do_round(self, x):
+    def do_round(self, x):
         return round(x, 3)
 
-    def convolute(self, data, convs, w_convs, thre_convs, conv_step):
+    def convolute(self, data, convs, w_convs, thre_convs, conv_step):
         # convolution process
         size_conv = convs[0]
         num_conv = convs[1]
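
A quick numeric check of the sig / do_round helpers above, using only the formulas shown:

import numpy as np

# sig(x) = 1 / (1 + exp(-x)); do_round keeps three decimal places.
x = 1.0
s = 1 / (1 + np.exp(-1 * x))    # same formula as CNN.sig -> 0.7310585786...
print(round(s, 3))              # 0.731, what CNN.do_round(s) would return
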
@@ -132,7 +131,7 @@ def convolute(self,data,convs,w_convs,thre_convs,conv_step):
         focus_list = np.asarray(focus1_list)
         return focus_list, data_featuremap
 
-    def pooling(self, featuremaps, size_pooling, type='average_pool'):
+    def pooling(self, featuremaps, size_pooling, type='average_pool'):
         # pooling process
         size_map = len(featuremaps[0])
         size_pooled = int(size_map / size_pooling)
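
A standalone sketch of 2x2 average pooling that mirrors what pooling() computes for type='average_pool'; this is my own illustration, not the class method itself:

import numpy as np

feature_map = np.arange(16).reshape(4, 4)   # toy 4x4 feature map
size_pooling = 2
size_pooled = feature_map.shape[0] // size_pooling
pooled = np.zeros((size_pooled, size_pooled))
for i in range(size_pooled):
    for j in range(size_pooled):
        block = feature_map[i * size_pooling:(i + 1) * size_pooling,
                            j * size_pooling:(j + 1) * size_pooling]
        pooled[i, j] = block.mean()          # average over each 2x2 block
print(pooled)   # [[ 2.5  4.5] [10.5 12.5]]
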
@@ -153,7 +152,7 @@ def pooling(self,featuremaps,size_pooling,type='average_pool'):
             featuremap_pooled.append(map_pooled)
         return featuremap_pooled
 
-    def _expand(self, datas):
+    def _expand(self, datas):
         # expand three-dimensional data into a one-dimensional list
         data_expanded = []
         for i in range(len(datas)):
@@ -164,14 +163,14 @@ def _expand(self,datas):
         data_expanded = np.asarray(data_expanded)
         return data_expanded
 
-    def _expand_mat(self, data_mat):
+    def _expand_mat(self, data_mat):
         # expand a matrix into a one-dimensional list
         data_mat = np.asarray(data_mat)
         shapes = np.shape(data_mat)
         data_expanded = data_mat.reshape(1, shapes[0] * shapes[1])
         return data_expanded
 
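
The reshape in _expand_mat flattens an (m, n) matrix row by row into shape (1, m * n); a small worked example:

import numpy as np

mat = np.array([[1, 2, 3],
                [4, 5, 6]])
flat = mat.reshape(1, mat.shape[0] * mat.shape[1])
print(flat)   # [[1 2 3 4 5 6]]
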
-    def _calculate_gradient_from_pool(self, out_map, pd_pool, num_map, size_map, size_pooling):
+    def _calculate_gradient_from_pool(self, out_map, pd_pool, num_map, size_map, size_pooling):
         '''
         calculate the gradient from the data slice of the pool layer
         pd_pool: list of matrices
@@ -190,7 +189,7 @@ def _calculate_gradient_from_pool(self,out_map,pd_pool,num_map,size_map,size_poo
             pd_all.append(pd_conv2)
         return pd_all
 
-    def trian(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e=bool):
+    def train(self, patterns, datas_train, datas_teach, n_repeat, error_accuracy, draw_e=bool):
         # model training
         print('----------------------Start Training-------------------------')
         print((' - - Shape: Train_Data ', np.shape(datas_train)))
@@ -206,7 +205,7 @@ def trian(self,patterns,datas_train, datas_teach, n_repeat, error_accuracy,draw_
             data_train = np.asmatrix(datas_train[p])
             data_teach = np.asarray(datas_teach[p])
             data_focus1, data_conved1 = self.convolute(data_train, self.conv1, self.w_conv1,
-                                                       self.thre_conv1, conv_step=self.step_conv1)
+                                                       self.thre_conv1, conv_step=self.step_conv1)
             data_pooled1 = self.pooling(data_conved1, self.size_pooling1)
             shape_featuremap1 = np.shape(data_conved1)
             '''
@@ -231,7 +230,7 @@ def trian(self,patterns,datas_train, datas_teach, n_repeat, error_accuracy,draw_
             pd_conv1_pooled = pd_i_all / (self.size_pooling1 * self.size_pooling1)
             pd_conv1_pooled = pd_conv1_pooled.T.getA().tolist()
             pd_conv1_all = self._calculate_gradient_from_pool(data_conved1, pd_conv1_pooled, shape_featuremap1[0],
-                                                              shape_featuremap1[1], self.size_pooling1)
+                                                              shape_featuremap1[1], self.size_pooling1)
             # weight and threshold learning process---------
             # convolution layer
             for k_conv in range(self.conv1[1]):
@@ -268,15 +267,15 @@ def draw_error():
             draw_error()
         return mse
 
-    def predict(self, datas_test):
+    def predict(self, datas_test):
         # model predict
         produce_out = []
         print('-------------------Start Testing-------------------------')
         print((' - - Shape: Test_Data ', np.shape(datas_test)))
         for p in range(len(datas_test)):
             data_test = np.asmatrix(datas_test[p])
             data_focus1, data_conved1 = self.convolute(data_test, self.conv1, self.w_conv1,
-                                                       self.thre_conv1, conv_step=self.step_conv1)
+                                                       self.thre_conv1, conv_step=self.step_conv1)
             data_pooled1 = self.pooling(data_conved1, self.size_pooling1)
             data_bp_input = self._expand(data_pooled1)
 
@@ -289,11 +288,11 @@ def predict(self,datas_test):
         res = [list(map(self.do_round, each)) for each in produce_out]
         return np.asarray(res)
 
-    def convolution(self, data):
+    def convolution(self, data):
         # return the image data after convolution and pooling so we can inspect it
         data_test = np.asmatrix(data)
         data_focus1, data_conved1 = self.convolute(data_test, self.conv1, self.w_conv1,
-                                                   self.thre_conv1, conv_step=self.step_conv1)
+                                                   self.thre_conv1, conv_step=self.step_conv1)
         data_pooled1 = self.pooling(data_conved1, self.size_pooling1)
 
         return data_conved1, data_pooled1
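
A hedged sketch of using convolution() to eyeball the feature maps; the module path, constructor values and input image are assumptions for illustration:

import numpy as np
import matplotlib.pyplot as plt
from convolution_neural_network import CNN   # assumed module/location

cnn = CNN(conv1_get=[3, 2, 1], size_p1=2, bp_num1=32, bp_num2=20, bp_num3=10)
img = np.random.rand(10, 10)              # placeholder input "image"
conved, pooled = cnn.convolution(img)     # per-kernel maps before and after pooling
plt.imshow(np.asarray(pooled[0]), cmap='gray')
plt.title('first pooled feature map')
plt.show()
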
@@ -303,4 +302,4 @@ def convolution(self,data):
     pass
 '''
 I will put the example in another file
-'''
+'''
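
Since the closing docstring defers the example to another file, here is a hedged end-to-end sketch of driving the renamed train() together with predict(); every shape, hyper-parameter and the random data are illustrative assumptions, not part of this PR:

import numpy as np
from convolution_neural_network import CNN   # assumed module/location

# 4 fake 10x10 "images" and 4 one-hot style targets of length 10.
train_x = [np.random.rand(10, 10) for _ in range(4)]
train_y = [np.eye(10)[i % 10] for i in range(4)]

cnn = CNN(conv1_get=[3, 2, 1], size_p1=2, bp_num1=32, bp_num2=20, bp_num3=10)

# patterns is passed as an empty placeholder here; n_repeat and
# error_accuracy control when training stops; draw_e=True requests the
# error-curve plot via draw_error().
mse = cnn.train(patterns=[], datas_train=train_x, datas_teach=train_y,
                n_repeat=50, error_accuracy=0.05, draw_e=True)
predictions = cnn.predict(train_x)   # rounded network outputs, one row per sample
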