Changeset 830


Timestamp: 09/19/15 11:26:58
Author: mmckerns
Message: cleaning and documentation for SVR and SVC examples; rename StandardInnerProduct to InnerProduct

Location: mystic
Files: 6 edited

  • mystic/examples/test_svc1.py

    r827 → r830
     from mystic.svctools import *
     
    -# a common objective function for solving a QP problem
    -# (see http://www.mathworks.com/help/optim/ug/quadprog.html)
    -def objective(x, H, f):
    -    return 0.5 * dot(dot(x,H),x) + dot(f,x)
    +# define the objective function to match standard QP solver
    +# (see: http://www.mathworks.com/help/optim/ug/quadprog.html)
    +# the objective function is very similar to the dual for SVC
    +# (see: http://scikit-learn.org/stable/modules/svm.html#svc)
    +def objective(x, Q, b):
    +    return 0.5 * dot(dot(x,Q),x) + dot(b,x)
     
    +# define the data points for each class
     c1 = array([[0., 0.],[1., 0.],[ 0.2, 0.2],[0.,1.]])
     c2 = array([[0, 1.1], [1.1, 0.],[0, 1.5],[0.5,1.2],[0.8, 1.7]])
     
    -# the Kernel Matrix (with the linear kernel)
    -# Q = multiply.outer(X,X)   # NOTE: requires X is a list of scalars
    +# define the full data set
    +X = concatenate([c1,c2]); nx = X.shape[0]
    +# define the labels (+1 for c1; -1 for c2)
    +y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
     
    +# build the Kernel Matrix (with the linear kernel)
    +# get the QP quadratic and linear terms
     XX = concatenate([c1,-c2])
    -nx = XX.shape[0]
    +Q = KernelMatrix(XX)  # Q_ij = K(x_i, x_j)
    +b = -ones(nx)         # b_i = -1 (i.e. +1 in the maximization form of the dual)
     
    -# quadratic and linear terms of QP
    -Q = KernelMatrix(XX)
    -b = -1 * ones(nx)
    -
    -H = Q
    -f = b
    -Aeq = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    +# build the constraints (y.T * x = 0.0)
    +# 1.0*x0 + 1.0*x1 + ... - 1.0*xN = 0.0
    +Aeq = y
     Beq = array([0.])
    +# set the bounds
     lb = zeros(nx)
     ub = 99999 * ones(nx)
     
    +# build the constraints operator
     from mystic.symbolic import linear_symbolic, solve, \
          generate_solvers as solvers, generate_constraint as constraint
    …
     #mon = VerboseMonitor(1)
     
    +# solve the dual for alpha
     from mystic.solvers import diffev
    -alpha = diffev(objective, zip(lb,ub), args=(H,f), npop=nx*3, gtol=200, \
    +alpha = diffev(objective, zip(lb,ub), args=(Q,b), npop=nx*3, gtol=200, \
     #              itermon=mon, \
                    ftol=1e-8, bounds=zip(lb,ub), constraints=conserve, disp=1)
    …
     print 'solved x: ', alpha
     print "constraint A*x == 0: ", inner(Aeq, alpha)
    -print "minimum 0.5*x'Hx + f'*x: ", objective(alpha, H, f)
    +print "minimum 0.5*x'Qx + b'*x: ", objective(alpha, Q, b)
     
    -# the labels and the points
    -X = concatenate([c1,c2])
    -y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    -
    +# calculate weight vectors, support vectors, and bias
     wv = WeightVector(alpha, X, y)
     sv1, sv2 = SupportVectors(alpha,y,eps=1e-6)
    …
     
     # plot data
    -pylab.plot(c1[:,0], c1[:,1], 'bo')
    -pylab.plot(c2[:,0], c2[:,1], 'yo')
    +pylab.plot(c1[:,0], c1[:,1], 'bo', markersize=5)
    +pylab.plot(c2[:,0], c2[:,1], 'yo', markersize=5)
     
     # plot hyperplane: wv[0] x + wv[1] y + bias = 0
    …
     pylab.plot(hx, hy, 'k-')
     #pylab.axis([xmin,xmax,ymin,ymax])
    +
    +# plot the support points
    +pylab.plot(XX[sv1,0], XX[sv1,1], 'bo', markersize=8)
    +pylab.plot(-XX[sv2,0], -XX[sv2,1], 'yo', markersize=8)
    +#pylab.axis('equal')
     pylab.show()
     
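    Note: the elided lines above include the construction of the 'conserve'
    operator from the symbolic equality constraint. A minimal sketch of that
    step, assuming the mystic.symbolic API named in the imports shown above
    (not the exact committed lines):

        # hedged sketch: build an equality-constraint operator with mystic.symbolic
        from mystic.symbolic import linear_symbolic, solve, \
             generate_solvers as solvers, generate_constraint as constraint

        # render 'Aeq * x = Beq' as a symbolic string like '1.0*x0 + ... = 0.0'
        constrain = linear_symbolic(Aeq, Beq)
        # solve for one variable symbolically, then generate the operator that
        # maps a candidate x onto the constraint surface before evaluation
        conserve = constraint(solvers(solve(constrain, target=['x0'])))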
  • mystic/examples/test_svc2.py

    r827 → r830
     import os.path
     
    -# a common objective function for solving a QP problem
    -# (see http://www.mathworks.com/help/optim/ug/quadprog.html)
    -def objective(x, H, f):
    -    return 0.5 * dot(dot(x,H),x) + dot(f,x)
    +# define the objective function as the dual for SVC
    +# (see: http://scikit-learn.org/stable/modules/svm.html#svc)
    +def objective(x, Q, b):
    +    return 0.5 * dot(dot(x,Q),x) + dot(b,x)
     
     # SETTINGS
    …
     overlap = False # reduce the distance between the datasets
     
    +# define the data points for each class
     c1 = loadtxt(os.path.join('DATA','g1.pts'))
     c2 = loadtxt(os.path.join('DATA','g2.pts'))
    …
     if overlap: c1[:,0] += 3 # make the datasets overlap a little
     
    -# the Kernel Matrix (with the linear kernel)
    -# Q = multiply.outer(X,X)   # NOTE: requires X is a list of scalars
    +# define the full data set
    +X = concatenate([c1,c2]); nx = X.shape[0]
    +# define the labels (+1 for c1; -1 for c2)
    +y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
     
    +# build the Kernel Matrix (with the linear kernel)
    +# get the QP quadratic and linear terms
     XX = concatenate([c1,-c2])
    -nx = XX.shape[0]
    +Q = KernelMatrix(XX)  # Q_ij = K(x_i, x_j)
    +b = -ones(nx)         # b_i = -1 (i.e. +1 in the maximization form of the dual)
     
    -# quadratic and linear terms of QP
    -Q = KernelMatrix(XX)
    -b = -1 * ones(nx)
    -
    -H = Q
    -f = b
    -Aeq = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    +# build the constraints (y.T * x = 0.0)
    +# 1.0*x0 + 1.0*x1 + ... - 1.0*xN = 0.0
    +Aeq = y
     Beq = array([0.])
    +# set the bounds
     lb = zeros(nx)
     ub = 1 * ones(nx)
     _b = .1 * ones(nx) # good starting value if most solved xi should be zero
     
    +# build the constraints operator
     from mystic.symbolic import linear_symbolic, solve, \
          generate_solvers as solvers, generate_constraint as constraint
    …
     mon = VerboseMonitor(10)
     
    +# solve the dual for alpha
     from mystic.solvers import diffev
    -alpha = diffev(objective, zip(lb,_b), args=(H,f), npop=nx*3, gtol=200,\
    +alpha = diffev(objective, zip(lb,_b), args=(Q,b), npop=nx*3, gtol=200,\
                    itermon=mon, \
                    ftol=1e-8, bounds=zip(lb,ub), constraints=conserve, disp=1)
    …
     print 'solved x: ', alpha
     print "constraint A*x == 0: ", inner(Aeq, alpha)
    -print "minimum 0.5*x'Hx + f'*x: ", objective(alpha, H, f)
    +print "minimum 0.5*x'Qx + b'*x: ", objective(alpha, Q, b)
     
    -# the labels and the points
    -X = concatenate([c1,c2])
    -y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    -
    +# calculate weight vectors, support vectors, and bias
     wv = WeightVector(alpha, X, y)
     sv1, sv2 = SupportVectors(alpha,y,eps=1e-6)
    …
     
     # plot the support points
    -pylab.plot(XX[sv1,0], XX[sv1,1], 'b^',markersize=10)
    -pylab.plot(-XX[sv2,0], -XX[sv2,1], 'y^',markersize=10)
    +pylab.plot(XX[sv1,0], XX[sv1,1], 'bo', markersize=8)
    +pylab.plot(-XX[sv2,0], -XX[sv2,1], 'yo', markersize=8)
     #pylab.axis('equal')
     pylab.show()
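    A quick post-solve sanity check for either SVC example: verify the box
    bounds and the equality constraint directly, then report the dual
    objective. The helper below is hypothetical, not part of this changeset:

        # hedged sketch: verify bounds and the y.T * alpha == 0 constraint
        from numpy import dot, inner, all

        def check_solution(alpha, Q, b, Aeq, lb, ub, tol=1e-6):
            assert all(alpha >= lb - tol) and all(alpha <= ub + tol)  # box bounds
            assert abs(inner(Aeq, alpha)) < tol     # equality constraint holds
            return 0.5 * dot(dot(alpha, Q), alpha) + dot(b, alpha)  # dual objective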
  • mystic/examples/test_svr1.py

    r828 → r830
     from mystic.svmtools import *
     
    -# a common objective function for solving a QP problem
    -# (see http://www.mathworks.com/help/optim/ug/quadprog.html)
    -def objective(x, H, f):
    -    return 0.5 * dot(dot(x,H),x) + dot(f,x)
    +# define the objective function to match standard QP solver
    +# (see: http://www.mathworks.com/help/optim/ug/quadprog.html)
    +def objective(x, Q, b):
    +    return 0.5 * dot(dot(x,Q),x) + dot(b,x)
     
    -# linear data with scatter
    -x = arange(-5, 5.001)
    -y = x + 7*random.rand(x.size)
    +# define the data points (linear data with uniform scatter)
    +x = arange(-5, 5.001); nx = x.size
    +y = x + 7*random.rand(nx)
    +N = 2*nx
     
    -l = x.size
    -N = 2*l
    -
    -# the Kernel Matrix (with the linear kernel)
    +# build the Kernel Matrix (with linear kernel)
    +# get the QP quadratic term
     X = concatenate([x,-x])
    -Q = multiply.outer(X,X)+1
    -
    -# the linear term for the QP
    +Q = 1 + multiply.outer(X,X)
    +# get the QP linear term
     Y = concatenate([y,-y])
     svr_epsilon = 3
     b = Y + svr_epsilon * ones(Y.size)
     
    -H = Q
    -f = b
    -Aeq = concatenate([ones(l), -ones(l)]).reshape(1,N)
    +# build the constraints (y.T * x = 0.0)
    +# 1.0*x0 + 1.0*x1 + ... - 1.0*xN = 0.0
    +Aeq = concatenate([ones(nx), -ones(nx)]).reshape(1,N)
     Beq = array([0.])
    +# set the bounds
     lb = zeros(N)
     ub = zeros(N) + 0.5
     
    +# build the constraints operator
     from mystic.symbolic import linear_symbolic, solve, \
          generate_solvers as solvers, generate_constraint as constraint
    …
     mon = VerboseMonitor(10)
     
    +# solve for alpha
     from mystic.solvers import diffev
    -alpha = diffev(objective, zip(lb,.1*ub), args=(H,f), npop=N*3, gtol=200, \
    +alpha = diffev(objective, zip(lb,.1*ub), args=(Q,b), npop=N*3, gtol=200, \
                    itermon=mon, \
                    ftol=1e-5, bounds=zip(lb,ub), constraints=conserve, disp=1)
    …
     print 'solved x: ', alpha
     print "constraint A*x == 0: ", inner(Aeq, alpha)
    -print "minimum 0.5*x'Hx + f'*x: ", objective(alpha, H, f)
    +print "minimum 0.5*x'Qx + b'*x: ", objective(alpha, Q, b)
     
    -sv1 = SupportVectors(alpha[:l])
    -sv2 = SupportVectors(alpha[l:])
    -
    +# calculate support vectors and regression function
    +sv1 = SupportVectors(alpha[:nx])
    +sv2 = SupportVectors(alpha[nx:])
     R = RegressionFunction(x, y, alpha, svr_epsilon, LinearKernel)
     
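    The docstring for RegressionFunction (see mystic/mystic/svmtools.py below)
    names the support vector expansion f(x) = Sum (ap - am) K(xi, x) + b. A
    standalone sketch of that expansion for scalar data, with the linear
    kernel used in this example ('svr_predict' is an illustrative name, not
    part of mystic):

        # hedged sketch of the support vector expansion from the docstring
        def svr_predict(xnew, x, alpha, bias, k=lambda a, b: 1. + a * b):
            nx = len(x)
            ap, am = alpha[:nx], alpha[nx:]  # the doubled alpha vector, split
            return sum((ap[i] - am[i]) * k(x[i], xnew) for i in range(nx)) + bias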
  • mystic/examples/test_svr2.py

    r828 → r830
     from mystic.svmtools import *
     
    -# a common objective function for solving a QP problem
    -# (see http://www.mathworks.com/help/optim/ug/quadprog.html)
    -def objective(x, H, f):
    -    return 0.5 * dot(dot(x,H),x) + dot(f,x)
    +# define the objective function to match standard QP solver
    +# (see: http://www.mathworks.com/help/optim/ug/quadprog.html)
    +def objective(x, Q, b):
    +    return 0.5 * dot(dot(x,Q),x) + dot(b,x)
     
    -# quadratic data plus scatter
    -x = arange(-5, 5.001)
    -y = x*x + 8*random.rand(x.size)
    +# define the data points (quadratic data with uniform scatter)
    +x = arange(-5, 5.001); nx = x.size
    +y = x*x + 8*random.rand(nx)
    +N = 2*nx
     
    -l = x.size
    -N = 2*l
    -
    -# the Kernel Matrix
    +# build the Kernel Matrix (with custom k)
    +# get the QP quadratic term
     X = concatenate([x,-x])
     def pk(a1,a2):
         return (1+a1*a2)*(1+a1*a2)
     Q = KernelMatrix(X, pk)
    -
    -# the linear term for the QP
    +# get the QP linear term
     Y = concatenate([y,-y])
    -svr_epsilon = 4;
    +svr_epsilon = 4
     b = Y + svr_epsilon * ones(Y.size)
     
    -H = Q
    -f = b
    -Aeq = concatenate([ones(l), -ones(l)]).reshape(1,N)
    +# build the constraints (y.T * x = 0.0)
    +# 1.0*x0 + 1.0*x1 + ... - 1.0*xN = 0.0
    +Aeq = concatenate([ones(nx), -ones(nx)]).reshape(1,N)
     Beq = array([0.])
    +# set the bounds
     lb = zeros(N)
     ub = zeros(N) + 0.5
     
    +# build the constraints operator
     from mystic.symbolic import linear_symbolic, solve, \
          generate_solvers as solvers, generate_constraint as constraint
    …
     mon = VerboseMonitor(10)
     
    +# solve for alpha
     from mystic.solvers import diffev
    -alpha = diffev(objective, zip(lb,.1*ub), args=(H,f), npop=N*3, gtol=200, \
    +alpha = diffev(objective, zip(lb,.1*ub), args=(Q,b), npop=N*3, gtol=400, \
                    itermon=mon, \
                    ftol=1e-5, bounds=zip(lb,ub), constraints=conserve, disp=1)
    …
     print 'solved x: ', alpha
     print "constraint A*x == 0: ", inner(Aeq, alpha)
    -print "minimum 0.5*x'Hx + f'*x: ", objective(alpha, H, f)
    +print "minimum 0.5*x'Qx + b'*x: ", objective(alpha, Q, b)
     
    -sv1 = SupportVectors(alpha[:l])
    -sv2 = SupportVectors(alpha[l:])
    -
    +# calculate support vectors and regression function
    +sv1 = SupportVectors(alpha[:nx])
    +sv2 = SupportVectors(alpha[nx:])
     R = RegressionFunction(x, y, alpha, svr_epsilon, pk)
     
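    The custom kernel pk used here is just the square of the LinearKernel
    introduced in mystic.svmtools below, i.e. a degree-2 polynomial kernel;
    a small self-contained check:

        def LinearKernel(i1, i2):   # as defined in this changeset
            return 1. + i1 * i2

        def pk(a1, a2):             # the custom kernel from this example
            return (1 + a1*a2) * (1 + a1*a2)

        assert pk(2., 3.) == LinearKernel(2., 3.)**2   # (1 + 6)**2 == 49.0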
  • mystic/mystic/svctools.py

    r826 → r830
     
     
    -def SupportVectors(alpha, y=None, eps = 0):
    +def SupportVectors(alpha, y=None, eps=0):
    +    """indices of nonzero alphas (at tolerance eps)
    +
    +If labels y are provided, then group indices by label
    +    """
         import mystic.svmtools as svm
         sv = svm.SupportVectors(alpha,eps)
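    The grouping described by the new docstring is elided from this hunk. A
    plausible sketch of grouping support indices by label, assuming numpy
    arrays (not the committed body):

        # hedged sketch: split nonzero-alpha indices by class label; mirrors
        # svmtools.SupportVectors for the index selection
        from numpy import asarray

        def support_vectors_by_label(alpha, y, eps=0):
            sv = (abs(asarray(alpha)) > eps).nonzero()[0]  # all support indices
            y = asarray(y).ravel()
            return sv[y[sv] > 0], sv[y[sv] < 0]  # (class +1, class -1)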
  • mystic/mystic/svmtools.py

    r776 → r830
     from numpy import zeros, multiply, ndarray, vectorize, array
     
    -def KernelMatrix(X, k):
    +def InnerProduct(i1, i2):
    +    return i1 * i2
    +
    +def LinearKernel(i1, i2):
    +    return 1. + i1 * i2
    +
    +def KernelMatrix(X, k=InnerProduct):
         n = X.size
         Q = zeros((n,n))
    …
     
     def SupportVectors(alpha, eps=0):
    -    # return index of nonzero alphas (at a tolerance of epsilon)
    +    """indices of nonzero alphas (at tolerance eps)"""
         return (abs(alpha)>eps).nonzero()[0]
     
    -def StandardInnerProduct(i1, i2):
    -    return i1 * i2
    -
    -def LinearKernel(i1, i2):
    -    return 1. + i1 * i2
    -
    -def Bias(x, y, alpha, eps, kernel=StandardInnerProduct):
    +def Bias(x, y, alpha, eps, kernel=InnerProduct):
         """ Compute regression bias for epsilon insensitive loss regression """
         N = len(alpha)/2
    …
         return b
     
    -def RegressionFunction(x, y, alpha, eps, kernel=StandardInnerProduct):
    +def RegressionFunction(x, y, alpha, eps, kernel=InnerProduct):
         """ The Support Vector expansion. f(x) = Sum (ap - am) K(xi, x) + b """
         bias = Bias(x, y, alpha, eps, kernel)
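    The loop body of KernelMatrix is elided from this hunk. A minimal
    construction consistent with the KernelMatrix(X, k=InnerProduct)
    signature above (an assumption, not the committed code):

        # hedged sketch: Q_ij = k(X_i, X_j) for a 1-D array X of scalars
        from numpy import zeros

        def kernel_matrix(X, k=lambda i1, i2: i1 * i2):
            n = X.size
            Q = zeros((n, n))
            for i in range(n):
                for j in range(n):
                    Q[i, j] = k(X[i], X[j])
            return Q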