• Size: 10KB
    File type: .rar
    Coins: 2
    Downloads: 1
    Date posted: 2021-06-10
  • Language: Python
  • Tags: Deep learning

Resource Description

Deep learning code written in Python, including models such as DBN and SdA; a solid codebase for learning both deep neural networks and Python.
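
The models mentioned above (DBN, SdA) are stacks of simple pretraining blocks. As a rough, self-contained illustration of the denoising-autoencoder block behind SdA, the numpy sketch below trains one tied-weight denoising autoencoder with a cross-entropy reconstruction cost; it is not code from this archive, and the layer sizes, corruption level, and learning rate are arbitrary assumptions.

import numpy as np

rng = np.random.RandomState(0)

def sigmoid(a):
    return 1.0 / (1.0 + np.exp(-a))

# toy binary data: 8 samples, 6 visible units (sizes are arbitrary assumptions)
x = (rng.rand(8, 6) > 0.5).astype(float)

n_visible, n_hidden = 6, 4
W = rng.uniform(-0.1, 0.1, (n_visible, n_hidden))   # tied encoder/decoder weights
bh = np.zeros(n_hidden)                              # hidden bias
bv = np.zeros(n_visible)                             # reconstruction bias
lr, corruption = 0.1, 0.3

for epoch in range(200):
    # corrupt the input by randomly zeroing a fraction of its entries
    tilde_x = x * rng.binomial(1, 1.0 - corruption, x.shape)

    y = sigmoid(np.dot(tilde_x, W) + bh)    # encode
    z = sigmoid(np.dot(y, W.T) + bv)        # decode with tied weights

    # gradients of the cross-entropy reconstruction error
    l_vis = x - z
    l_hid = np.dot(l_vis, W) * y * (1.0 - y)

    W += lr * (np.dot(tilde_x.T, l_hid) + np.dot(l_vis.T, y)) / len(x)
    bh += lr * l_hid.mean(axis=0)
    bv += lr * l_vis.mean(axis=0)

# reconstruction cross-entropy should decrease as training proceeds
print(-np.mean(np.sum(x * np.log(z) + (1 - x) * np.log(1 - z), axis=1)))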

Resource Screenshot

Code Snippet and File Information

#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
 DBN w/ continuous-valued inputs (Linear Energy)

 References:
   - Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy Layer-Wise
     Training of Deep Networks, Advances in Neural Information Processing
     Systems 19, 2007

'''

import sys
import numpy
from Hiddenlayer import Hiddenlayer
from LogisticRegression import LogisticRegression
from RBM import RBM
from CRBM import CRBM
from DBN import DBN
from utils import *



 
class CDBN(DBN):
    def __init__(self, input=None, label=None,\
                 n_ins=2, hidden_layer_sizes=[3, 3], n_outs=2,\
                 numpy_rng=None):
        
        self.x = input
        self.y = label

        self.sigmoid_layers = []
        self.rbm_layers = []
        self.n_layers = len(hidden_layer_sizes)  # = len(self.rbm_layers)

        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)

        
        assert self.n_layers > 0


        # construct multi-layer
        for i in xrange(self.n_layers):
            # layer_size
            if i == 0:
                input_size = n_ins
            else:
                input_size = hidden_layer_sizes[i - 1]

            # layer_input
            if i == 0:
                layer_input = self.x
            else:
                layer_input = self.sigmoid_layers[-1].sample_h_given_v()
                
            # construct sigmoid_layer
            sigmoid_layer = Hiddenlayer(input=layer_input,
                                        n_in=input_size,
                                        n_out=hidden_layer_sizes[i],
                                        numpy_rng=numpy_rng,
                                        activation=sigmoid)
            self.sigmoid_layers.append(sigmoid_layer)

            # construct rbm_layer
            if i == 0:
                rbm_layer = CRBM(input=layer_input,    # continuous-valued inputs
                                 n_visible=input_size,
                                 n_hidden=hidden_layer_sizes[i],
                                 W=sigmoid_layer.W,    # W, b are shared
                                 hbias=sigmoid_layer.b)
            else:
                rbm_layer = RBM(input=layer_input,
                                n_visible=input_size,
                                n_hidden=hidden_layer_sizes[i],
                                W=sigmoid_layer.W,     # W, b are shared
                                hbias=sigmoid_layer.b)
                
            self.rbm_layers.append(rbm_layer)


        # layer for output using Logistic Regression
        self.log_layer = LogisticRegression(input=self.sigmoid_layers[-1].sample_h_given_v(),
                                            label=self.y,
                                            n_in=hidden_layer_sizes[-1],
                                            n_out=n_outs)

        # finetune cost: the negative log likelihood of the logistic regression layer
        self.finetune_cost = self.log_layer.negative_log_likelihood()
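
For orientation, a minimal usage sketch of this class follows. The pretrain/finetune/predict calls are assumed to be provided by the parent DBN class (defined in DBN.py, which is not reproduced in the snippet above), so their names and arguments are assumptions rather than code from the archive; the toy data is likewise made up.

# Hypothetical usage sketch; pretrain/finetune/predict are assumed to come
# from the parent DBN class in DBN.py and are not shown in this snippet.
x = numpy.array([[0.4, 0.5, 0.5, 0.0, 0.0, 0.0],
                 [0.5, 0.3, 0.5, 0.0, 0.0, 0.0],
                 [0.0, 0.0, 0.5, 0.3, 0.5, 0.0],
                 [0.0, 0.0, 0.5, 0.4, 0.5, 0.0]])
y = numpy.array([[1, 0],
                 [1, 0],
                 [0, 1],
                 [0, 1]])

dbn = CDBN(input=x, label=y, n_ins=6, hidden_layer_sizes=[5, 5], n_outs=2,
           numpy_rng=numpy.random.RandomState(123))
dbn.pretrain(lr=0.1, k=1, epochs=200)   # greedy layer-wise CD-k pretraining
dbn.finetune(lr=0.1, epochs=200)        # supervised fine-tuning of the output layer
print(dbn.predict(x))                    # predictions for the training inputs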

 Attribute       Size  Date        Time   Name
---------  ----------  ----------  -----  ----
 File            4286  2013-03-27  08:13  python\CDBN.py
 File            1853  2013-03-27  08:13  python\CRBM.py
 File            4868  2013-03-27  08:13  python\dA.py
 File            5882  2013-03-27  08:13  python\DBN.py
 File            1558  2013-03-27  08:13  python\Hiddenlayer.py
 File            2690  2013-03-27  08:13  python\LogisticRegression.py
 File            5113  2013-03-27  08:13  python\RBM.py
 File            5877  2013-03-27  08:13  python\SdA.py
 File             545  2013-03-27  08:13  python\utils.py
 Directory          0  2013-03-27  08:13  python
---------  ----------  ----------  -----  ----
 Total          32672                     10 entries
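
The archive's RBM.py and CRBM.py implement the building blocks that the CDBN snippet above stacks. As a rough, self-contained illustration of how a single binary RBM is trained with one step of contrastive divergence (CD-1), here is a minimal numpy sketch; it is not code from the archive, and the layer sizes, learning rate, and epoch count are arbitrary assumptions.

import numpy as np

rng = np.random.RandomState(0)

def sigmoid(a):
    return 1.0 / (1.0 + np.exp(-a))

n_visible, n_hidden, lr = 6, 3, 0.1               # arbitrary toy sizes
W = rng.uniform(-0.1, 0.1, (n_visible, n_hidden))
hbias = np.zeros(n_hidden)
vbias = np.zeros(n_visible)

v0 = (rng.rand(10, n_visible) > 0.5).astype(float)   # toy binary batch

for epoch in range(100):
    # positive phase: hidden activations driven by the data
    ph0 = sigmoid(np.dot(v0, W) + hbias)      # P(h=1 | v0)
    h0 = rng.binomial(1, ph0)                 # sampled hidden states
    # negative phase: one Gibbs step h0 -> v1 -> h1
    pv1 = sigmoid(np.dot(h0, W.T) + vbias)    # P(v=1 | h0)
    v1 = rng.binomial(1, pv1)
    ph1 = sigmoid(np.dot(v1, W) + hbias)      # P(h=1 | v1)
    # CD-1 parameter updates, averaged over the batch
    W += lr * (np.dot(v0.T, ph0) - np.dot(v1.T, ph1)) / len(v0)
    hbias += lr * (ph0 - ph1).mean(axis=0)
    vbias += lr * (v0 - v1).mean(axis=0)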

