• Size: 18.19MB
    File type: .rar
    Coins: 1
    Downloads: 0
    Published: 2023-06-15
  • Language: Matlab
  • Tags:

Resource description

Simple Matlab code for deep learning, verified to work. It performs supervised (labeled) learning on small images of handwritten digits with 10 classes; after training on 5,000 images on a single machine, it can recognize the handwritten digits 0-9.
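
A minimal run sketch, assuming the raw MNIST image/label files have been downloaded and unzipped into the DeepLearning-master folder from the file list further down, and that this folder is on the Matlab path (see README.txt in the archive for the exact steps; the script names below are taken from the file list):

% Hypothetical top-level usage of the bundled scripts.
cd DeepLearning-master
mnistclassify    % pretrains the RBM stack and fine-tunes a 10-way digit classifier
% or, for the deep autoencoder whose fine-tuning code is excerpted below:
% mnistdeepauto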

Code snippet and file information

% Version 1.000
%
% Code provided by Ruslan Salakhutdinov and Geoff Hinton
%
% Permission is granted for anyone to copy, use, modify, or distribute this
% program and accompanying programs and documents for any purpose, provided
% this copyright notice is retained and prominently displayed, along with
% a note saying that the original programs are available from our
% web page.
% The programs and documents are distributed without any warranty, express or
% implied.  As the programs were written for research purposes only, they have
% not been tested to the degree that would be advisable in any important
% application.  All use of these programs is entirely at the user's own risk.

% This program fine-tunes an autoencoder with backpropagation.
% Weights of the autoencoder are going to be saved in mnist_weights.mat
% and training and test reconstruction errors in mnist_error.mat
% You can also set maxepoch, default value is 200 as in our paper.  

maxepoch=200;
fprintf(1,'\nFine-tuning deep autoencoder by minimizing cross entropy error. \n');
fprintf(1,'60 batches of 1000 cases each. \n');

load mnistvh
load mnisthp
load mnisthp2
load mnistpo 

makebatches;
[numcases numdims numbatches]=size(batchdata);
N=numcases; 

%%%% PREINITIALIZE WEIGHTS OF THE AUTOENCODER %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
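% Each wk below stacks an RBM weight matrix with its bias vector appended as the
% last row; the forward pass later appends a column of ones to the activations,
% so the biases are applied by the same matrix multiply.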
w1=[vishid; hidrecbiases];
w2=[hidpen; penrecbiases];
w3=[hidpen2; penrecbiases2];
w4=[hidtop; toprecbiases];
w5=[hidtop'; topgenbiases]; 
w6=[hidpen2'; hidgenbiases2]; 
w7=[hidpen'; hidgenbiases]; 
w8=[vishid'; visbiases];

%%%%%%%%%% END OF PREINITIALIZATION OF WEIGHTS  %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

l1=size(w1,1)-1;
l2=size(w2,1)-1;
l3=size(w3,1)-1;
l4=size(w4,1)-1;
l5=size(w5,1)-1;
l6=size(w6,1)-1;
l7=size(w7,1)-1;
l8=size(w8,1)-1;
l9=l1; 
test_err=[];
train_err=[];


for epoch = 1:maxepoch

%%%%%%%%%%%%%%%%%%%% COMPUTE TRAINING RECONSTRUCTION ERROR %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
err=0; 
[numcases numdims numbatches]=size(batchdata);
N=numcases;
 for batch = 1:numbatches
  data = [batchdata(:,:,batch)];
  data = [data ones(N,1)];
  w1probs = 1./(1 + exp(-data*w1)); w1probs = [w1probs  ones(N,1)];
  w2probs = 1./(1 + exp(-w1probs*w2)); w2probs = [w2probs ones(N,1)];
  w3probs = 1./(1 + exp(-w2probs*w3)); w3probs = [w3probs  ones(N,1)];
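  % The code layer (w4probs) is linear; the other layers use logistic units.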
  w4probs = w3probs*w4; w4probs = [w4probs  ones(N,1)];
  w5probs = 1./(1 + exp(-w4probs*w5)); w5probs = [w5probs  ones(N,1)];
  w6probs = 1./(1 + exp(-w5probs*w6)); w6probs = [w6probs  ones(N,1)];
  w7probs = 1./(1 + exp(-w6probs*w7)); w7probs = [w7probs  ones(N,1)];
  dataout = 1./(1 + exp(-w7probs*w8));
  err= err +  1/N*sum(sum( (data(:,1:end-1)-dataout).^2 )); 
  end
 train_err(epoch)=err/numbatches;

%%%%%%%%%%%%%% END OF COMPUTING TRAINING RECONSTRUCTION ERROR %%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%%%% DISPLAY FIGURE TOP ROW REAL DATA BOTTOM ROW RECONSTRUCTIONS %%%%%%%%%%%%%%%%%%%%%%%%%
fprintf(1,'Displaying in figure 1: Top row - real data, Bottom row -- reconstructions \n');
output=[];
 for i

 Attributes        Size        Date   Time   Name
----------- ---------  ---------- -----  ----

    .......        22  2012-06-12 08:49  DeepLearning-master\.gitignore
    .......      5594  2012-06-12 08:49  DeepLearning-master\backprop.m
    .......      5474  2012-06-12 08:49  DeepLearning-master\backpropclassify.m
    .......      1853  2012-06-12 08:49  DeepLearning-master\CG_CLASSIFY.m
    .......      1136  2012-06-12 08:49  DeepLearning-master\CG_CLASSIFY_INIT.m
    .......      2727  2012-06-12 08:49  DeepLearning-master\CG_MNIST.m
    .......      3011  2012-06-12 08:49  DeepLearning-master\converter.m
    .......      4169  2012-06-12 08:49  DeepLearning-master\makebatches.m
    .......      8995  2012-06-12 08:49  DeepLearning-master\minimize.m
    .......      1902  2012-06-12 08:49  DeepLearning-master\mnistclassify.m
    .......      2199  2012-06-12 08:49  DeepLearning-master\mnistdeepauto.m
    .......      1084  2012-06-12 08:49  DeepLearning-master\mnistdisp.m
    .......      3914  2012-06-12 08:49  DeepLearning-master\rbm.m
    .......      3964  2012-06-12 08:49  DeepLearning-master\rbmhidlinear.m
    .......        55  2012-06-12 08:49  DeepLearning-master\README.md
    .......      2934  2012-06-12 08:49  DeepLearning-master\README.txt
    .......        52  2015-02-11 11:03  neural-networks-and-deep-learning-master\.gitignore
       File      3207  2015-07-21 20:13  neural-networks-and-deep-learning-master\data\1.py
    .......  17051982  2015-02-11 11:03  neural-networks-and-deep-learning-master\data\mnist.pkl.gz
    .......     29523  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\backprop_magnitude_nabla.png
    .......      2790  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\backprop_magnitude_nabla.py
    .......   5375943  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\data_1000.json
    .......      8414  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\digits.png
    .......      8218  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\digits_separate.png
    .......    150522  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\false_minima.png
    .......      1066  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\false_minima.py
       File      3846  2015-07-21 19:35  neural-networks-and-deep-learning-master\fig\generate_gradient.py
       File       270  2015-07-21 19:35  neural-networks-and-deep-learning-master\fig\initial_gradient.json
    .......    190268  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\misleading_gradient.png
    .......      1207  2015-02-11 11:03  neural-networks-and-deep-learning-master\fig\misleading_gradient.py

............ 102 more file entries omitted here
