CNN Variants: LeNet-5

LeNet-5 is one of the classic CNN variants.

I learned about CNNs from Andrew Ng's course.

I don't yet fully understand why LeNet-5 works as well as it does, but I do understand its structure, so I implemented it with TensorFlow 2.0 and applied it to a hyperspectral image. (One thing puzzled me: the number of trainable parameters in the pooling layers, and how it is calculated.)
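As it turns out, the puzzle comes from a difference between the paper and Keras: in the original LeNet-5 paper, each subsampling (pooling) map multiplies the pooled average by one trainable coefficient and adds one trainable bias, so S2 has 6 × 2 = 12 parameters and S4 has 16 × 2 = 32, whereas the Keras pooling layers used below learn nothing at all. A minimal sketch to check this:

import tensorflow.keras as keras
from tensorflow.keras import layers

# Keras average pooling simply averages; it has no weights, so count_params() is 0.
# (In LeCun et al. 1998, by contrast, each subsampling map has a trainable
# coefficient plus a trainable bias: 2 parameters per feature map.)
m = keras.Sequential([layers.AveragePooling2D(pool_size=(2, 2), input_shape=(28, 28, 6))])
print(m.count_params())  # 0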

The materials I studied are listed below:
https://my.oschina.net/u/876354/blog/1632862
https://blog.csdn.net/d5224/article/details/68928083
https://blog.csdn.net/yanzi6969/article/details/78019683
https://blog.csdn.net/dcxhun3/article/details/46878999

While studying I ran into a discrepancy: the LeNet-5 paper clearly states that the activation function is tanh (strictly, a scaled tanh), yet the code in every blog post I found online uses relu, which confused me. Fortunately I found a repository on GitHub that matches the LeNet-5 paper; otherwise I would have assumed I was the one who was wrong.
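For reference, the difference between the two camps is a single keyword argument (a minimal sketch; the paper's actual squashing function is the scaled tanh 1.7159 * tanh(2a/3)):

from tensorflow.keras import layers

paper_style = layers.Conv2D(6, (5, 5), activation='tanh')  # as in the LeNet-5 paper
blog_style = layers.Conv2D(6, (5, 5), activation='relu')   # what most reimplementations use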

The GitHub repository I studied: https://github.com/olramde/LeNet-keras

LeNet-5 Network Architecture

[Figure: LeNet-5 network architecture]

The code is as follows:

  • Import the libraries
from sklearn import preprocessing
import numpy as np
from sklearn.model_selection import train_test_split
from tensorflow.keras import layers
import tensorflow.keras as keras
import matplotlib.pyplot as plt
from scipy.io import loadmat
  • Define the functions that slice the hyperspectral data
def get_coordinates_labels(y_hsi):
    # Collect the (row, col) coordinates of every labeled pixel, class by class.
    # Label 0 is treated as unlabeled background, so class labels are shifted
    # down by one to start from 0.
    max_label = np.max(y_hsi)
    row_coords = []
    col_coords = []
    labels = []
    for lbl in range(1, max_label+1):
        real_label = lbl - 1
        lbl_locs = np.where(y_hsi == lbl)
        row_coords.append(lbl_locs[0])
        col_coords.append(lbl_locs[1])
        length = len(lbl_locs[0])
        labels.append(np.array([real_label]*length))
    row_coords = np.expand_dims(np.concatenate(row_coords), axis=-1)
    col_coords = np.expand_dims(np.concatenate(col_coords), axis=-1)
    return np.concatenate([row_coords, col_coords], axis=-1), np.concatenate(labels)
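
# Quick toy check (hypothetical 3x3 ground-truth map): label 0 is skipped as
# background and the remaining labels are shifted to start from 0.
y_toy = np.array([[0, 1, 1],
                  [2, 0, 1],
                  [2, 2, 0]])
toy_coords, toy_labels = get_coordinates_labels(y_toy)
print(toy_coords)  # the (row, col) pair of every labeled pixel, grouped by class
print(toy_labels)  # [0 0 0 1 1 1]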

def standartizeData(X):
    # Standardize each spectral band to zero mean and unit variance.
    # (A MinMaxScaler could be dropped in here instead.)
    newX = np.reshape(X, (-1, X.shape[2]))
    scaler = preprocessing.StandardScaler().fit(newX)
    newX = scaler.transform(newX)
    newX = np.reshape(newX, (X.shape[0], X.shape[1], X.shape[2]))
    return newX, scaler
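
# Quick check on a toy 4x4x3 cube: the cube is flattened to (n_pixels, n_bands)
# so each band is scaled independently; per-band mean ~0 and std ~1 afterwards.
X_toy = np.random.rand(4, 4, 3)
X_toy_std, _ = standartizeData(X_toy)
print(X_toy_std.mean(axis=(0, 1)))  # ~[0. 0. 0.]
print(X_toy_std.std(axis=(0, 1)))   # ~[1. 1. 1.]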

def padWithZeros(X, margin=2):
    # Zero-pad the two spatial axes by `margin`; the spectral axis is unchanged.
    newX = np.zeros((X.shape[0] + 2 * margin, X.shape[1] + 2 * margin,
                     X.shape[2]))
    x_offset = margin
    y_offset = margin
    newX[x_offset:X.shape[0] + x_offset, y_offset:X.shape[1] +
         y_offset, :] = X
    return newX
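
# Quick check: only the two spatial axes grow, never the band axis.
print(padWithZeros(np.ones((5, 5, 3)), margin=2).shape)  # (9, 9, 3)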

def create_pitches(hsi, target_coords, pitch_size=3):
    # Cut a pitch_size x pitch_size window (a patch) out of the zero-padded cube
    # around every target pixel; also return the (row, col) index grid that each
    # window covers in the padded cube.
    if len(target_coords.shape) == 1:
        target_coords = np.expand_dims(target_coords, axis=0)
    margin = int((pitch_size - 1) / 2)
    zeroPaddedX = padWithZeros(hsi, margin=margin)
    pitches = [zeroPaddedX[target_coords[i, 0]:target_coords[i, 0] + 2*margin + 1,
                           target_coords[i, 1]:target_coords[i, 1] + 2*margin + 1]
               for i in range(len(target_coords))]
    pitches = [np.expand_dims(pitch, axis=0) for pitch in pitches]

    pitches_coords_0 = [np.expand_dims(
        np.tile(np.expand_dims(np.arange(
            target_coords[i, 0], target_coords[i, 0] + 2*margin + 1), axis=0),
            reps=(pitch_size, 1)), axis=0)
        for i in range(len(target_coords))]

    pitches_coords_1 = [np.expand_dims(
        np.tile(np.expand_dims(np.arange(
            target_coords[i, 1], target_coords[i, 1] + 2*margin + 1), axis=0),
            reps=(pitch_size, 1)), axis=0)
        for i in range(len(target_coords))]

    pitches_coords_0 = np.expand_dims(np.concatenate(pitches_coords_0, axis=0), axis=-1)
    pitches_coords_1 = np.expand_dims(np.concatenate(pitches_coords_1, axis=0), axis=-1)
    pitch_coords = np.concatenate([pitches_coords_0, pitches_coords_1], axis=3)

    return np.squeeze(np.concatenate(pitches, axis=0)), pitch_coords
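
To see what create_pitches produces, here is a minimal sketch on a hypothetical 5x5 cube with 2 bands (hsi_toy, toy_patches and toy_patch_coords are names made up for illustration):

hsi_toy = np.arange(50, dtype=float).reshape(5, 5, 2)
toy_patches, toy_patch_coords = create_pitches(hsi_toy, target_coords=np.array([[0, 0], [2, 2]]), pitch_size=3)
print(toy_patches.shape)       # (2, 3, 3, 2): one 3x3 window per target pixel
print(toy_patch_coords.shape)  # (2, 3, 3, 2): the index grid covered by each window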
  • LeNet-5 (note: run the "Prepare the data" section below first, since input_shape is read from X_train)
model = keras.Sequential()
model.add(layers.Conv2D(input_shape = (X_train.shape[1], X_train.shape[2], X_train.shape[3]),
                        strides = (1, 1), filters = 6, kernel_size = (5, 5), padding = 'same',
                        activation = 'tanh'))
model.add(layers.AveragePooling2D(pool_size = (2, 2)))
model.add(layers.Conv2D(filters = 16, kernel_size = (5, 5), strides = (1, 1), padding = 'valid', activation = 'tanh'))
model.add(layers.AveragePooling2D(pool_size = (2, 2)))
model.add(layers.Conv2D(filters = 120, kernel_size = (5, 5), strides = (1, 1), padding = 'valid', activation = 'tanh'))
model.add(layers.Flatten())
model.add(layers.Dense(84, activation = 'tanh'))
model.add(layers.Dense(y_train.shape[1], activation = 'softmax'))
model.compile(loss = keras.losses.CategoricalCrossentropy(),optimizer = keras.optimizers.SGD(),
             metrics = ['accuracy'])
model.summary()
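
As a sanity check on the summary, the trainable-parameter count can be worked out by hand. The arithmetic below assumes the 29x29x200 input patches and 16 classes produced by the data-preparation step that follows; note that the two pooling layers contribute nothing, which is exactly the puzzle mentioned at the top:

conv1 = (5 * 5 * 200) * 6 + 6       # 30,006: 'same' padding keeps 29x29
conv2 = (5 * 5 * 6) * 16 + 16       # 2,416: input pooled to 14x14, output 10x10
conv3 = (5 * 5 * 16) * 120 + 120    # 48,120: input pooled to 5x5, output 1x1
fc1 = 120 * 84 + 84                 # 10,164: the 1x1x120 map flattens to 120
fc2 = 84 * 16 + 16                  # 1,360: 16 output classes
print(conv1 + conv2 + conv3 + fc1 + fc2)  # 92,066 trainable parameters in total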
  • Prepare the data
X = loadmat('D:\\data\\Indian_pines_corrected.mat')['indian_pines_corrected']
y = loadmat('D:\\data\\Indian_pines_gt.mat')['indian_pines_gt']
X, _ = standartizeData(X)
coords, labels = get_coordinates_labels(y)
train_coords, test_coords, train_labels, test_labels = train_test_split(coords, labels, test_size=0.9)
X_train, X_train_coords = create_pitches(hsi=X, target_coords=train_coords, pitch_size=29)
X_test, X_test_coords = create_pitches(hsi=X, target_coords=test_coords, pitch_size=29)
y_train = train_labels
y_test = test_labels
y_train = keras.utils.to_categorical(y_train)
y_test = keras.utils.to_categorical(y_test)
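
A quick shape check before training (illustrative: the 29x29 patches and 200 bands come from the steps above; n_train and n_test depend on the random split):

print(X_train.shape, y_train.shape)  # (n_train, 29, 29, 200) (n_train, 16)
print(X_test.shape, y_test.shape)    # (n_test, 29, 29, 200) (n_test, 16)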
  • Run
n_epochs = 200
batch_size = 128
# The original post omits the call to fit(), but `history` is needed for the
# plots below; validating on the held-out test split here is my assumption.
history = model.fit(X_train, y_train, epochs=n_epochs, batch_size=batch_size,
                    validation_data=(X_test, y_test))
plt.figure()
plt.plot(history.history['loss'], label='train_loss')
plt.plot(history.history['val_loss'], label='val_loss')
plt.legend()
plt.xlabel('epochs')
plt.ylabel('loss')
plt.title('LeNet-5_train')

plt.figure()
plt.plot(history.history['accuracy'], label='train_accuracy')
plt.plot(history.history['val_accuracy'], label='val_accuracy')
plt.legend()
plt.xlabel('epochs')
plt.ylabel('accuracy')
plt.title('LeNet-5_accuracy')

plt.show()

test = model.evaluate(X_test, y_test)
print(test)

Results

  • Network structure
    [Figure: model.summary() output]
  • loss
    [Figure: training vs. validation loss curves]
  • accuracy
    [Figure: training vs. validation accuracy curves]
  • Result on the test set
    10000/10000 [==============================] - 1s 104us/sample - loss: 0.1650 - accuracy: 0.9510
    [0.16500180927813052, 0.951]

