资源简介
adaboost 演示demo(基于Matlab,学习算法包括决策树、神经网络、线性回归、在线贝叶斯分类器等),动态GUI显示学习过程、vote过程等
代码片段和文件信息
classdef Adaboost < handle
%Adaboost  AdaBoost ensemble meta-learner over a user-supplied base model.
%Trains up to k_max base learners on weighted resamples of the data and
%combines their predictions by a weighted vote (see train/test below).
properties
k_max;            % maximum number of base learners in the ensemble
k_next;           % learner index to train up to on the next train() call
k_cur;            % number of learners trained so far
part_trains;      % NOTE(review): assigned in the constructor but never read in the visible code
base_learner;     % constructor handle for the base model, e.g. @()CART()
learners;         % k_max-by-1 cell array holding the trained base learners
learner_weights;  % k_max-by-1 vector of vote weights (the AdaBoost alphas)
early_terminate;  % if true, stop training when a learner's weighted error exceeds 0.5
d_weights;        % per-sample distribution weights (kept normalised to sum to 1)
d_size;           % number of training samples, set on the first train() call
end
methods
function a = Adaboost(k_max, base_learner, early_terminate)
%Adaboost  Construct an AdaBoost ensemble.
%k_max: size of the model, integer >= 1.
%base_learner: base-model constructor handle, e.g. @()CART(). The model
%must be an object with methods a.train(inputs, outputs) and
%outputs = a.test(inputs).
%early_terminate: optional; determines whether to terminate if a base
%learner's weighted error is greater than 0.5 (default is to continue,
%and such a learner simply receives a negative vote weight).
    if (nargin < 3)
        early_terminate = false;
    end
    a.k_max = k_max;
    a.part_trains = a.k_max;
    a.k_next = a.k_max;                      % by default, train() fits all k_max learners
    a.learners = cell(a.k_max, 1);           % commas restored (stripped by the scrape)
    a.learner_weights = zeros(a.k_max, 1);
    a.base_learner = base_learner;
    a.early_terminate = early_terminate;
    a.k_cur = 0;                             % no learners trained yet
end
function part_train(a, inputs, outputs, k_next)
%part_train  Train the ensemble only up to learner k_next, then restore
%the default target (k_max) so later train() calls finish the remainder.
    a.k_next = k_next;
    a.train(inputs, outputs);
    a.k_next = a.k_max;
end
function train(a, inputs, outputs)
%train  Fit base learners k_cur+1 .. k_next on weighted resamples of the data.
%inputs: N-by-D sample matrix. outputs: N-by-1 labels, presumably in
%{-1,+1} given the ssign-based weight update -- TODO confirm against ssign.m.
    if (a.k_cur == 0)
        % First call: initialise a uniform distribution over the samples.
        a.d_size = size(inputs, 1);
        a.d_weights = ones(a.d_size, 1) ./ a.d_size;
    end
    while (a.k_cur < a.k_next)
        a.k_cur = a.k_cur + 1;
        %Create an NxN square of cumulatively summed weights (i.e. N
        %rows containing the weight cumsum). Create an NxN square of random
        %numbers (N columns containing the same N random numbers). Compare
        %them and sum the rows. The number of weights that are less than
        %each random number indicates the index sampled by that row.
        indices = sum(repmat(cumsum(a.d_weights)', a.d_size, 1) <= ...
            repmat(rand(a.d_size, 1), 1, a.d_size), 2) + 1;
        a.learners{a.k_cur} = a.base_learner();
        a.learners{a.k_cur}.train(inputs(indices, :), outputs(indices));
        predictions = a.learners{a.k_cur}.test(inputs) == outputs;
        weighted_error = sum(~predictions .* a.d_weights);
        if (a.early_terminate && weighted_error > 0.5)
            % Roll back the failing learner and shrink the ensemble.
            % NOTE(review): learners{} is not truncated here, only the
            % weights -- harmless as long as k_max bounds all later access.
            a.k_max = a.k_cur - 1;
            a.k_cur = a.k_cur - 1;
            a.learner_weights = a.learner_weights(1:a.k_max);
            break;
        end
        % Clamp the error away from 0 and 1 so the log below stays finite.
        weighted_error = min(max(weighted_error, 0.01), 0.99);
        a.learner_weights(a.k_cur) = 0.5 * log((1 - weighted_error) / weighted_error);
        % Reweight: boost the weight of misclassified samples, then renormalise.
        a.d_weights = feval(@(x)x./sum(x), a.d_weights .* ...
            exp(-a.learner_weights(a.k_cur) * ssign(predictions)));
    end
end
function outputs = test(a, inputs)
%test  Predict labels for inputs as the sign of the weighted vote margin.
    outputs = ssign(a.margins(inputs));
end
function margins = margins(a inputs)
margins = cell2mat(arrayfun(@(x)a.learners{x}.test(i
属性 大小 日期 时间 名称
----------- --------- ---------- ----- ----
文件 323 2010-11-02 15:09 boosting_demo\ssign.m
文件 1189 2010-11-02 15:10 boosting_demo\NeuralNetwork.m
文件 2619 2010-11-02 15:06 boosting_demo\OnlineNaiveBayes.m
文件 18577 2010-11-02 15:02 boosting_demo\boosting_demo.m
文件 692 2010-11-02 15:10 boosting_demo\SVM.m
文件 5002 2010-11-02 15:06 boosting_demo\LinearRegression.m
文件 784 2010-11-02 15:03 boosting_demo\CART.m
文件 3095 2010-11-02 15:05 boosting_demo\DataGen.m
文件 3166 2010-11-02 15:02 boosting_demo\Adaboost.m
文件 1395 2010-11-02 15:10 boosting_demo\Stump.m
文件 1319 2014-02-12 13:18 license.txt
相关资源
- logistic回归matlab
- 随机森林matlab代码分类RF/回归RF
- 支持向量机SVM机器学习方法
- 机器学习、人工智能、数据挖掘中经
- 马尔科夫链蒙特卡洛MCMC仿真带MATLAB代
- 增广拉格朗日乘子法ALM算法matlab代码
- MATLAB 2016b 安装包
- RBF and svm matlab code matlab回归预测的源
- BP神经网络进行多分类matlab代码 (c
- Fisher-Score机器学习
- RVM2 基于稀疏贝叶斯框架的机器学习算
- MATLAB 神经网络43个案例分析
- popular-UCI-datasets 一些非常有用的数据
- Machine-Learning-exercises_finished andrew NG上
- Machine-Learning 《机器学习》
- spider 机器学习matlab源代码
- psoSVMcgForClass.m
- HOG+SVM图像分类算法
- 遗传算法求解0-1背包问题matlab代码.
- BP神经网络matlab程序
- 基于形态学的权重自适应图像去噪
- 粒子群算法的彩色图像分割(聚类)
- matlab实现决策
- 单类支持向量机
- 山东大学软件学院机器学习实验四
- 粒子群改进蝙蝠算法matlab代码
- HHT 希尔伯特黄变换 Hilbert-Huang transf
- 奇异值阈值SVT算法的matlab代码
- 经典降维算法局部保持投影LPP算法代
评论
共有 0 条评论