python_deeplearning02 - Make a Neural Network with Python

20180421 qzd

ch02 - Make a Neural Network with Python


  1. Build the framework
  • Initialisation -- set the number of input-layer, hidden-layer, and output-layer nodes.
  • Training -- after learning from a given training-set example, refine the weights (the weights are the core of the network).
  • Query -- given an input, return an answer from the output nodes.
#neural network class definition
class neuralNetwork:
    
    #initialise the neural network
    def __init__(self):
        pass
    
    #train the neural network
    def train(self):
        pass
    
    #query the neural network
    def query(self):
        pass
  2. Initialise the network
def __init__(self,inputnodes,hiddennodes,outputnodes,learningrate):
  3. The weights -- the core of the network
  • The link weight matrix between the input and hidden layers, W_input_hidden, of size hidden_nodes by input_nodes.
  • The link weight matrix between the hidden and output layers, W_hidden_output, of size output_nodes by hidden_nodes.
  • Initialise the weights
#self.wih = (np.random.rand(self.hnodes,self.inodes) - 0.5)
#self.who = (np.random.rand(self.onodes,self.hnodes) - 0.5)
self.wih = np.random.normal(0.0,pow(self.hnodes,-0.5),(self.hnodes,self.inodes))
self.who = np.random.normal(0.0,pow(self.onodes,-0.5),(self.onodes,self.hnodes))
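As a quick sanity check on the two shapes described above, the same two calls can be run on their own. A minimal sketch; the node counts 3/5/2 are made up, chosen only so the two shapes are easy to tell apart:

import numpy as np

#hypothetical node counts, for illustration only
input_nodes, hidden_nodes, output_nodes = 3, 5, 2

#W_input_hidden is hidden_nodes x input_nodes
wih = np.random.normal(0.0, pow(hidden_nodes, -0.5), (hidden_nodes, input_nodes))
#W_hidden_output is output_nodes x hidden_nodes
who = np.random.normal(0.0, pow(output_nodes, -0.5), (output_nodes, hidden_nodes))

print(wih.shape)   #(5, 3)
print(who.shape)   #(2, 5)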
  4. Query the network
def query(self,inputs_list):
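The body of query() (given in full in the complete code below) is just two matrix products, each passed through the sigmoid. A standalone numpy sketch of the same signal flow, using made-up weights and an input purely for illustration:

import numpy as np
import scipy.special

#made-up 3x3 weight matrices and a 3-value input, for illustration only
wih = np.array([[0.9, 0.3, 0.4],
                [0.2, 0.8, 0.2],
                [0.1, 0.5, 0.6]])
who = np.array([[0.3, 0.7, 0.5],
                [0.6, 0.5, 0.2],
                [0.8, 0.1, 0.9]])
inputs = np.array([0.9, 0.1, 0.8], ndmin=2).T

#signals emerging from the hidden layer, then from the output layer
hidden_outputs = scipy.special.expit(np.dot(wih, inputs))
final_outputs = scipy.special.expit(np.dot(who, hidden_outputs))
print(final_outputs)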
  5. Train the network (updating the weights)
  • Part one: calculate the output for the given training example. This is no different from what we just did in the query() function.
  • Part two: compare the calculated output with the desired output, and use the difference (the error) to guide the update of the network weights (see the sketch below).
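Concretely, the hidden-layer error is the output error propagated back through the links, hidden_errors = who.T · output_errors, and each weight matrix is then nudged by learning_rate * (error * output * (1 - output)) · previous_layer_outputs.T. A minimal numpy sketch of just this update step, with made-up values (a 2-node hidden layer and a 2-node output layer, for illustration only):

import numpy as np

lr = 0.3   #learning rate, same value the demo below uses

#made-up signals from a single training pass
hidden_outputs = np.array([[0.6], [0.7]])
final_outputs = np.array([[0.4], [0.9]])
targets = np.array([[1.0], [0.0]])
who = np.array([[0.2, 0.5],
                [0.8, 0.1]])

#output layer error, then the error split back across the links
output_errors = targets - final_outputs
hidden_errors = np.dot(who.T, output_errors)

#gradient-descent step for the hidden-to-output weights
who += lr * np.dot(output_errors * final_outputs * (1.0 - final_outputs), hidden_outputs.T)
print(hidden_errors)
print(who)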
  6. The complete neural network code
import numpy as np
#scipy.special for the sigmoid function expit()
import scipy.special

#neural network class definition
class neuralNetwork:
    
    #initialise the neural network
    def __init__(self,inputnodes,hiddennodes,outputnodes,learningrate ):
        #set number of nodes in each input, hidden, output layer
        self.inodes = inputnodes
        self.hnodes = hiddennodes
        self.onodes = outputnodes
        
        #link weight matrices, wih and who
        #weights inside the arrays are w_i_j, where link is from node i to node j in the next layer
        # w11 w21
        # w12 w22 etc
        #self.wih = (np.random.rand(self.hnodes,self.inodes) - 0.5)
        #self.who = (np.random.rand(self.onodes,self.hnodes) - 0.5)
        self.wih = np.random.normal(0.0,pow(self.hnodes,-0.5),(self.hnodes,self.inodes))
        self.who = np.random.normal(0.0,pow(self.onodes,-0.5),(self.onodes,self.hnodes))
        
        #learning rate
        self.lr = learningrate
        
        #activation function is the sigmoid function
        self.activation_function = lambda x:scipy.special.expit(x)
        
        pass
    
    #train the neural network
    def train(self,inputs_list,targets_list):
        #convert inputs list to 2d array
        inputs = np.array(inputs_list,ndmin=2).T
        targets = np.array(targets_list,ndmin=2).T
        
        #calculate signals into hidden layer
        hidden_inputs = np.dot(self.wih,inputs)
        #calculate the signals emerging from hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)
        
        #calculate signals into final output layer
        final_inputs = np.dot(self.who,hidden_outputs)
        #calculate the signals emerging from final output layer
        final_outputs = self.activation_function(final_inputs)
        
        #output layer error is the (target - actual)
        output_errors = targets - final_outputs
        #hidden layer error is the output_errors, split by weights, recombined at hidden nodes
        hidden_errors = np.dot(self.who.T,output_errors)
        
        #update the weights for the links between the hidden and output layers
        self.who += self.lr *np.dot((output_errors * final_outputs*(1.0-final_outputs)), np.transpose(hidden_outputs))
        #update the weights for the links between the input and hidden layers
        self.wih += self.lr *np.dot((hidden_errors * hidden_outputs*(1.0 - hidden_outputs)),np.transpose(inputs))
        
        pass
    
    #query the neural network
    def query(self,inputs_list):
        #convert inputs list to 2d array
        inputs = np.array(inputs_list,ndmin=2).T
        
        #calculate signals into hidden layer
        hidden_inputs = np.dot(self.wih,inputs)
        #calculate the signals emerging from hidden layer
        hidden_outputs = self.activation_function(hidden_inputs)
        
        #calculate signals into final output layer
        final_inputs = np.dot(self.who,hidden_outputs)
        #calculate the signals emerging from final output layer
        final_outputs = self.activation_function(final_inputs)
        
        return final_outputs
#number of input, hidden and output nodes
input_nodes = 3
hidden_nodes = 3
output_nodes = 3

#learning rate is 0.3
learning_rate = 0.3

#create instance of neural network
n = neuralNetwork(input_nodes,hidden_nodes,output_nodes,learning_rate)

n.query([1.0,0.5,-1.5])
  • Output

array([[ 0.46356062],
       [ 0.58538226],
       [ 0.5905356 ]])
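
The demo above only queries an untrained network, which is why all three outputs sit near 0.5. Training is driven the same way, by calling train() once per example; the input and target lists below are made up purely to show the calling convention:

#hypothetical training loop: one made-up example, repeated a few times
for i in range(5):
    n.train([1.0,0.5,-1.5], [0.9,0.1,0.1])

#query again to see the effect of the updated weights
print(n.query([1.0,0.5,-1.5]))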
