• 大小: 22KB
    文件类型: .rar
    金币: 2
    下载: 2 次
    发布日期: 2021-06-04
  • 语言: Python
  • 标签: dbn  python  

资源简介

很多网上的DBN都调不通,我这个一定能调通

资源截图

代码片段和文件信息

#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
 DBN w/ continuous-valued inputs (Linear Energy)

 References :
   - Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy Layer-Wise
   Training of Deep Networks, Advances in Neural Information Processing
   Systems 19, 2007

'''

import sys
import numpy
from Hiddenlayer import Hiddenlayer
from LogisticRegression import LogisticRegression
from RBM import RBM
from CRBM import CRBM
from DBN import DBN
from utils import *



 
class CDBN(DBN):
    """Deep Belief Network whose bottom layer is a CRBM, so it accepts
    continuous-valued inputs (linear energy); every upper layer is a
    standard binary RBM.

    Identical to DBN except for the layer-0 RBM type.

    Reference:
      Y. Bengio, P. Lamblin, D. Popovici, H. Larochelle: Greedy
      Layer-Wise Training of Deep Networks, NIPS 19, 2007.
    """

    def __init__(self, input=None, label=None,
                 n_ins=2, hidden_layer_sizes=[3, 3], n_outs=2,
                 numpy_rng=None):
        # NOTE: hidden_layer_sizes uses a mutable default, but it is only
        # read (never mutated), so it is harmless; kept for signature
        # compatibility with the parent DBN class.
        self.x = input
        self.y = label

        self.sigmoid_layers = []  # deterministic feed-forward layers
        self.rbm_layers = []      # generative pre-training layers
        self.n_layers = len(hidden_layer_sizes)  # = len(self.rbm_layers)

        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)

        assert self.n_layers > 0

        # construct the stacked layers, bottom-up
        for i in xrange(self.n_layers):
            # layer input width: raw inputs for layer 0, otherwise the
            # previous hidden layer's width
            if i == 0:
                input_size = n_ins
            else:
                input_size = hidden_layer_sizes[i - 1]

            # layer input: raw data for layer 0, otherwise a sample of
            # the previous sigmoid layer's hidden activations
            if i == 0:
                layer_input = self.x
            else:
                layer_input = self.sigmoid_layers[-1].sample_h_given_v()

            # construct sigmoid (feed-forward) layer
            sigmoid_layer = Hiddenlayer(input=layer_input,
                                        n_in=input_size,
                                        n_out=hidden_layer_sizes[i],
                                        numpy_rng=numpy_rng,
                                        activation=sigmoid)
            self.sigmoid_layers.append(sigmoid_layer)

            # construct the matching RBM.  Layer 0 is a CRBM so it can
            # model continuous-valued visible units.  W and hbias are
            # shared with the sigmoid layer, so RBM pre-training directly
            # initializes the feed-forward weights.
            if i == 0:
                rbm_layer = CRBM(input=layer_input,  # continuous-valued inputs
                                 n_visible=input_size,
                                 n_hidden=hidden_layer_sizes[i],
                                 W=sigmoid_layer.W,   # W, b are shared
                                 hbias=sigmoid_layer.b)
            else:
                rbm_layer = RBM(input=layer_input,
                                n_visible=input_size,
                                n_hidden=hidden_layer_sizes[i],
                                W=sigmoid_layer.W,    # W, b are shared
                                hbias=sigmoid_layer.b)

            self.rbm_layers.append(rbm_layer)

        # output layer: logistic regression on top of the last hidden layer
        self.log_layer = LogisticRegression(
            input=self.sigmoid_layers[-1].sample_h_given_v(),
            label=self.y,
            n_in=hidden_layer_sizes[-1],
            n_out=n_outs)

        # finetune cost: the negative log likelihood of the logistic
        # regression layer.
        # NOTE(review): the scraped source was truncated at "self.f";
        # this line is reconstructed from the comment above — confirm
        # against the upstream CDBN.py.
        self.finetune_cost = self.log_layer.negative_log_likelihood()

 属性            大小     日期    时间   名称
----------- ---------  ---------- -----  ----

     文件       2269  2016-11-07 16:34  CRBM.pyc

     文件       4868  2013-03-27 08:13  dA.py

     文件       4901  2016-11-09 14:51  dA.pyc

     文件       6637  2016-11-09 16:08  DBN.py

     文件       4090  2016-11-09 16:14  DBN.pyc

     文件       2080  2016-11-09 14:50  Hiddenlayer.py

     文件       1853  2016-11-09 14:51  Hiddenlayer.pyc

     文件       3477  2016-11-09 15:45  LogisticRegression.py

     文件       2951  2016-11-09 15:46  LogisticRegression.pyc

     文件       6102  2016-11-09 11:41  RBM.py

     文件       5019  2016-11-09 11:43  RBM.pyc

     文件       5877  2013-03-27 08:13  SdA.py

     文件        545  2013-03-27 08:13  utils.py

     文件        760  2016-11-07 16:34  utils.pyc

     文件       4286  2013-03-27 08:13  CDBN.py

     文件       1853  2013-03-27 08:13  CRBM.py

----------- ---------  ---------- -----  ----

                57568                    16


评论

共有 条评论