function model = SMOforSVM(X, y, C)
% SMOforSVM  Train a linear soft-margin SVM by Sequential Minimal Optimization.
%
%   model = SMOforSVM(X, y, C)
%
%   Inputs:
%     X - m-by-n data matrix, one sample per row.
%     y - m-by-1 label vector with entries in {-1, +1}.
%     C - box constraint (soft-margin penalty), scalar > 0.
%
%   Output:
%     model - struct with fields:
%       .alpha - m-by-1 Lagrange multipliers.
%       .w     - 1-by-n primal weight vector (linear kernel only).
%       .b     - scalar bias, averaged over the free support vectors.
%
%   Communicates with selectWorkSet/solveOptimization through globals,
%   as in the original design.
tol = 0.001;        % KKT violation tolerance for the stopping test
maxIters = 3000;    % hard cap on outer iterations
global i1 i2 K Alpha M1 m1 w b
m = size(X, 1);
K = X * X';         % linear kernel Gram matrix, precomputed once
Alpha = zeros(m, 1);
w = 0;
b = 0;
iters = 1;
% NOTE: the original also carried a 'flag' loop guard that was never
% updated inside the loop; it has been dropped as dead code.
while iters < maxIters
    [i1, i2, m1, M1] = selectWorkSet(y, C);
    if m1 - M1 <= tol
        break;  % maximal violating pair satisfies KKT within tol
    end
    solveOptimization(X, y, C);
    iters = iters + 1;
end
model.alpha = Alpha;
% Bias from the free support vectors (0 < alpha < C); averaging over all
% of them is more stable than picking a single one, and matches the
% author's commented-out intent in the original source.
id = find(Alpha < C & Alpha > 0);
b = mean(y(id)' - (y .* Alpha)' * K(:, id));
w = (y .* Alpha)' * X;
model.w = w;
model.b = b;
end
function [i1, i2, m1, M1] = selectWorkSet(y, C)
% selectWorkSet  Pick the maximal-violating pair (working set B) for SMO.
%
%   [i1, i2, m1, M1] = selectWorkSet(y, C)
%
%   Reads globals K (Gram matrix) and Alpha (current multipliers).
%   I_up / I_low are the index sets whose alphas can still move up/down
%   without violating the box constraint [0, C].
%
%   Returns:
%     i1, i2 - indices of the two variables to optimize next.
%     m1, M1 - max over I_up and min over I_low of -y .* gradient;
%              m1 - M1 <= tol is the caller's stopping criterion.
%
% NOTE: the scraped original fused "I_up = find(...)" onto the global
% declaration line, which is a syntax error (and would have declared
% I_up itself global); the two statements are separated here.
global K Alpha
I_up  = find((Alpha < C & y ==  1) | (Alpha > 0 & y == -1));
I_low = find((Alpha < C & y == -1) | (Alpha > 0 & y ==  1));
% Gradient of the dual objective w.r.t. Alpha is ((y*y').*K)*Alpha - 1;
% yGradient is -y times that, as used by the violating-pair criterion.
yGradient = -y .* (((y * y') .* K) * Alpha - 1);
[m1, i1] = max(yGradient(I_up));
[M1, i2] = min(yGradient(I_low));
% max/min returned positions within the subsets; map back to full indices.
i1 = I_up(i1);
i2 = I_low(i2);
end
function solveOptimization(X, y, C) %#ok<INUSL>  (X kept for interface compatibility)
% solveOptimization  Analytically solve the two-variable QP subproblem.
%
%   solveOptimization(X, y, C)
%
%   Reads globals i1, i2 (working-set indices chosen by selectWorkSet)
%   and K; updates global Alpha in place. E is also global in the
%   original design and is left so.
%
%   Clips the unconstrained optimum of alpha(i2) to the feasible
%   segment [L, H] implied by 0 <= alpha <= C and the linear equality
%   constraint, then recovers alpha(i1) from that constraint.
global Alpha K i1 i2 E
alpha1_old = Alpha(i1);
alpha2_old = Alpha(i2);
y1 = y(i1);
y2 = y(i2);
beta11 = K(i1, i1);
beta22 = K(i2, i2);
beta12 = K(i1, i2);
% Contribution of all the OTHER alphas to the gradient terms.
id = 1:length(Alpha);
id([i1 i2]) = [];
beta1 = sum(y(id) .* Alpha(id) .* K(id, i1));
beta2 = sum(y(id) .* Alpha(id) .* K(id, i2));
E = beta1 - beta2 + alpha1_old * y1 * (beta11 - beta12) ...
    + alpha2_old * y2 * (beta12 - beta22) - y1 + y2;
% Second derivative along the constraint line. For a linear kernel this
% is ||x1 - x2||^2 >= 0; it is exactly 0 for duplicate points, and the
% original divided by it unguarded, producing Inf/NaN alphas.
kk = beta11 + beta22 - 2 * beta12;
if kk <= 0
    return;  % degenerate direction: skip this pair, leave Alpha unchanged
end
alpha2_new_unc = alpha2_old + (y2 * E) / kk;
% Feasible segment [L, H] for alpha(i2) given the equality constraint.
if y1 ~= y2
    L = max(0, alpha2_old - alpha1_old);
    H = min(C, C - alpha1_old + alpha2_old);
else
    L = max(0, alpha1_old + alpha2_old - C);
    H = min(C, alpha1_old + alpha2_old);
end
alpha2_new = min(max(alpha2_new_unc, L), H);  % clip to [L, H]
alpha1_new = alpha1_old + y1 * y2 * (alpha2_old - alpha2_new);
Alpha(i1) = alpha1_new;
Alpha(i2) = alpha2_new;
end

  

% Interactive demo: click training points for two classes, train a linear
% SVM with SMOforSVM, and draw the separating line plus the two margins.
% NOTE: the scraped original had "end end C = 10;" fused onto one line,
% which is a syntax error in MATLAB; reconstructed here.
clear
X = []; Y = [];
figure;
% Initialize training data to empty; will get points from the user.
trainPoints = X;
trainLabels = Y;
clf;
axis([-5 5 -5 5]);
if isempty(trainPoints)
    % Plot symbols for the two classes and their label values.
    symbols = {'o', 'x'};
    classvals = [-1 1];
    trainLabels = [];
    hold on;  % allow overwriting existing plots
    xlim([-5 5]); ylim([-5 5]);
    for c = 1:2
        title(sprintf('Click to create points from class %d. Press enter when finished.', c));
        [x, y] = getpts;  % requires Image Processing Toolbox
        plot(x, y, symbols{c}, 'LineWidth', 2, 'Color', 'black');
        % Grow the data and label matrices.
        trainPoints = vertcat(trainPoints, [x y]);
        trainLabels = vertcat(trainLabels, repmat(classvals(c), numel(x), 1));
    end
end
C = 10;
par = SMOforSVM(trainPoints, trainLabels, C);
p = length(par.b);
m = size(trainPoints, 2);
if m == 2
    % Decision boundary w(1)*x + w(2)*y + b = 0 rewritten as y = k*x + b0,
    % with the margins at w'*x + b = +/-1.
    k = -par.w(1) / par.w(2);
    b0 = -par.b / par.w(2);
    bdown = (-par.b - 1) / par.w(2);
    bup = (-par.b + 1) / par.w(2);
    for i = 1:p
        hold on
        h = refline(k, b0(i));
        set(h, 'Color', 'r')
        hdown = refline(k, bdown(i));
        set(hdown, 'Color', 'b')
        hup = refline(k, bup(i));
        set(hup, 'Color', 'b')
    end
end
xlim([-5 5]); ylim([-5 5]);

以上代码写得比较粗糙,结果可能不稳定,我重新贴了一个新的代码:

http://www.cnblogs.com/huadongw/p/4994657.html

sequential minimal optimization,SMO for SVM, (MATLAB code)的更多相关文章

  1. Sequential Minimal Optimization (SMO) 算法

    SVM 最终关于 $a$ 目标函数为凸优化问题,该问题具有全局最优解,许多最优化算法都可以解决该问题,但当样本容量相对很大时,通常采用 SMO 算法(比如 LIBSVM),该算法为启发式算法,考虑在约 ...

  2. 支持向量机的smo算法(MATLAB code)

    建立smo.m % function [alpha,bias] = smo(X, y, C, tol) function model = smo(X, y, C, tol) % SMO: SMO al ...

  3. SMO优化算法(Sequential minimal optimization)

    原文:http://www.cnblogs.com/jerrylead/archive/2011/03/18/1988419.html SMO算法由Microsoft Research的John C. ...

  4. Jordan Lecture Note-8: The Sequential Minimal Optimization Algorithm (SMO).

    The Sequential Minimal Optimization Algorithm (SMO) 本文主要介绍用于解决SVM对偶模型的算法,它于1998年由John Platt在论文“Seque ...

  5. Support Vector Machine (2) : Sequential Minimal Optimization

    目录 Support Vector Machine (1) : 简单SVM原理 Support Vector Machine (2) : Sequential Minimal Optimization ...

  6. Sequential Minimal Optimization: A Fast Algorithm for Training Support Vector Machines 论文研读

    摘要 本文提出了一种用于训练支持向量机的新算法:序列最小优化算法(SMO).训练支持向量机需要解决非常大的二 次规划(QP)优化问题.SMO 将这个大的 QP 问题分解为一系列最小的 QP 问题.这些 ...

  7. Sequential Minimal Optimization(SMO,序列最小优化算法)初探

    什么是SVM SVM是Support Vector Machine(支持向量机)的英文缩写,是上世纪九十年代兴起的一种机器学习算法,在目前神经网络大行其道的情况下依然保持着生命力.有人说现在是神经网络 ...

  8. SMO(Sequential Minimal Optimization) 伪代码(注释)

    Algorithm: Simplified SMO 这个版本是简化版的,并没有采用启发式选择,但是比较容易理解. 输入: C: 调和系数 tol: 容差 (tolerance) max passes: ...

  9. 借One-Class-SVM回顾SMO在SVM中的数学推导--记录毕业论文5

    上篇记录了一些决策树算法,这篇是借OC-SVM填回SMO在SVM中的数学推导这个坑. 参考文献: http://research.microsoft.com/pubs/69644/tr-98-14.p ...

随机推荐

  1. [CVE:2013-4810]Apache Tomcat/JBoss远程命令执行

    <?php $host=gethostbyname($argv[1]); $port=$argv[2]; $cmd=$argv[3]; //small jsp shell //change th ...

  2. python介绍(转载)

    Python简介 python的创始人为吉多·范罗苏姆(Guido van Rossum).1989年的圣诞节期间,吉多·范罗苏姆为了在阿姆斯特丹打发时间,决心开发一个新的脚本解释程序,作为ABC语言 ...

  3. hdu 4828 Grids 卡特兰数+逆元

    Grids Time Limit: 10000/5000 MS (Java/Others)    Memory Limit: 65535/65535 K (Java/Others) Problem D ...

  4. Linux添加新硬盘自动挂载硬盘

    Linux添加新硬盘自动挂载硬盘的具体步骤 1.插入新硬盘,启动Linux服务器,使用fdisk -l 查看硬盘 #fdisk -l Disk /dev/sdb: 1000.2 GB, 1000204 ...

  5. MonkeyRunner学习(2)常用命令

    目录: 1.截图 2.暂停 (时延秒) 3.屏幕操作 4.打印 5.字符串发送到键盘输入(登录输入) 6.唤醒设备屏幕 7.重起手机 8.按键(系统键) 9.回车键 10.for 循环 11.循环截图 ...

  6. Android 开源项目分类汇总(转)

    Android 开源项目分类汇总(转) ## 第一部分 个性化控件(View)主要介绍那些不错个性化的 View,包括 ListView.ActionBar.Menu.ViewPager.Galler ...

  7. ajax获取城市和相应的地区

    <!DOCTYPE html><html lang="zh-CN"><head> <meta charset="UTF-8&qu ...

  8. 如何设置DIV水平、垂直居中

    一.水平居中 需要设置两点: 1  设置DIV 的width属性即宽度. 2  设置div的margin-left和margin-right属性即可 代码: <div style="w ...

  9. Bootstrap强调相关的类

    在Bootstrap中除了使用标签<strong>.<em>等说明正文某些字词.句子的重要性,Bootstrap还定义了一套类名,这里称其为强调类名(类似前面说的“.lead” ...

  10. python语法笔记(二)

    1. 循环对象 循环对象是一类特殊的对象,它包含一个next()方法(在python3中是 __next__()方法),该方法的目的是进行到下一个结果,而在结束一系列结果之后,举出 StopItera ...