Timestamp: 09/19/15 11:26:58
Author: mmckerns
Message: cleaning and documentation for SVR and SVC examples; rename StandardLinearKernel
File: 1 edited
  • mystic/examples/test_svc2.py

    r827 → r830

      import os.path

    - # a common objective function for solving a QP problem
    - # (see http://www.mathworks.com/help/optim/ug/quadprog.html)
    - def objective(x, H, f):
    -     return 0.5 * dot(dot(x,H),x) + dot(f,x)
    + # define the objective function as the dual for SVC
    + # (see: http://scikit-learn.org/stable/modules/svm.html#svc)
    + def objective(x, Q, b):
    +     return 0.5 * dot(dot(x,Q),x) + dot(b,x)

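For orientation, the new objective encodes the soft-margin SVC dual (per the scikit-learn page cited above) written as a minimization; a sketch of that QP, where x stands in for the dual coefficients alpha, Q_ij = y_i y_j K(x_i, x_j), and C is the box bound (ub = 1 below):

    % the SVC dual as a minimization (a sketch; x ~ alpha, C = ub in the example)
    \begin{aligned}
      \min_{x}\quad   & \tfrac{1}{2}\, x^{T} Q\, x - \mathbf{1}^{T} x
                        \quad\text{(i.e. } 0.5\,x^{T}Q\,x + b^{T}x \text{ with } b = -\mathbf{1}\text{)}\\
      \text{s.t.}\quad & y^{T} x = 0, \qquad 0 \le x_{i} \le C
    \end{aligned}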
      # SETTINGS
      ...
      overlap = False # reduce the distance between the datasets

    + # define the data points for each class
      c1 = loadtxt(os.path.join('DATA','g1.pts'))
      c2 = loadtxt(os.path.join('DATA','g2.pts'))
      ...
      if overlap: c1[:,0] += 3 # make the datasets overlap a little

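As an aside, if the DATA/g1.pts and DATA/g2.pts files are not at hand, two synthetic point clouds can stand in for them; a minimal sketch, assuming the real files hold 2-D points (their actual shape and spread are not shown here):

    # hypothetical stand-in for DATA/g1.pts and DATA/g2.pts (assumed 2-D points)
    from numpy.random import multivariate_normal as mvn
    c1 = mvn([0., 0.], [[1., 0.], [0., 1.]], 50)   # class +1 cloud
    c2 = mvn([4., 4.], [[1., 0.], [0., 1.]], 50)   # class -1 cloud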
    - # the Kernel Matrix (with the linear kernel)
    - # Q = multiply.outer(X,X)   # NOTE: requires X is a list of scalars
    + # define the full data set
    + X = concatenate([c1,c2]); nx = X.shape[0]
    + # define the labels (+1 for c1; -1 for c2)
    + y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)

    + # build the Kernel Matrix (with the linear kernel)
    + # get the QP quadratic and linear terms
      XX = concatenate([c1,-c2])
    - nx = XX.shape[0]
    + Q = KernelMatrix(XX)  # Q_ij = K(x_i, x_j)
    + b = -ones(nx)         # b_i = -1 (the dual maximizes sum(x), so it is negated here)

    - # quadratic and linear terms of QP
    - Q = KernelMatrix(XX)
    - b = -1 * ones(nx)
    -
    - H = Q
    - f = b
    - Aeq = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    + # build the constraints (y.T * x = 0.0)
    + # 1.0*x0 + 1.0*x1 + ... - 1.0*xN = 0.0
    + Aeq = y
      Beq = array([0.])
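Why XX = concatenate([c1,-c2])? With a linear kernel, negating the class-2 points folds the labels into the Gram matrix, since y_i y_j <x_i, x_j> = <y_i x_i, y_j x_j>. A numpy-only sketch of an equivalent construction (this is not mystic's KernelMatrix, whose internals are not shown here):

    # hedged sketch: a linear-kernel matrix with the labels folded in,
    # equivalent in spirit to KernelMatrix(concatenate([c1,-c2]))
    import numpy as np

    def linear_kernel_matrix(X, y):
        """Q_ij = y_i * y_j * <x_i, x_j> for the linear kernel"""
        Xy = X * np.asarray(y, dtype=float).reshape(-1, 1)  # scale each x_i by its label
        return np.dot(Xy, Xy.T)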
    + # set the bounds
      lb = zeros(nx)
      ub = 1 * ones(nx)
      _b = .1 * ones(nx) # good starting value if most solved xi should be zero

    + # build the constraints operator
      from mystic.symbolic import linear_symbolic, solve, \
           generate_solvers as solvers, generate_constraint as constraint
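The elided lines presumably use these symbolic tools to turn Aeq and Beq into the `conserve` operator passed to the solver below. As a mystic-free illustration of the role such an operator plays, a candidate x can be mapped onto the equality constraint y.T * x = 0 by orthogonal projection (a sketch only; the constraint generated from the symbolic form typically enforces the equation by solving it for one variable instead):

    # hedged, mystic-free sketch of what a constraints operator does here:
    # return the point nearest to x that satisfies dot(y, x) == 0
    import numpy as np

    def project_onto_equality(x, y):
        y = np.asarray(y, dtype=float).ravel()
        x = np.asarray(x, dtype=float)
        return x - y * (np.dot(y, x) / np.dot(y, y))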
     
      ...
      mon = VerboseMonitor(10)

    + # solve the dual for alpha
      from mystic.solvers import diffev
    - alpha = diffev(objective, zip(lb,_b), args=(H,f), npop=nx*3, gtol=200,\
    + alpha = diffev(objective, zip(lb,_b), args=(Q,b), npop=nx*3, gtol=200,\
                     itermon=mon, \
                     ftol=1e-8, bounds=zip(lb,ub), constraints=conserve, disp=1)
      ...
      print 'solved x: ', alpha
      print "constraint A*x == 0: ", inner(Aeq, alpha)
    - print "minimum 0.5*x'Hx + f'*x: ", objective(alpha, H, f)
    + print "minimum 0.5*x'Qx + b'*x: ", objective(alpha, Q, b)

    - # the labels and the points
    - X = concatenate([c1,c2])
    - y = concatenate([ones(c1.shape[0]), -ones(c2.shape[0])]).reshape(1,nx)
    -
    + # calculate weight vectors, support vectors, and bias
      wv = WeightVector(alpha, X, y)
      sv1, sv2 = SupportVectors(alpha,y,eps=1e-6)
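WeightVector and SupportVectors (imported elsewhere in the example) recover the primal quantities from the solved alpha. For the linear kernel these are the textbook formulas; a hedged numpy sketch, not mystic's implementation:

    # hedged sketch of recovering the linear-SVM weights and bias from the dual solution
    import numpy as np

    def weights_and_bias(alpha, X, y, eps=1e-6):
        """w = sum_i alpha_i y_i x_i; bias averaged over the support vectors (alpha_i > eps)"""
        y = np.asarray(y, dtype=float).ravel()
        w = np.dot(alpha * y, X)                  # weight vector
        sv = np.where(alpha > eps)[0]             # indices of the support vectors
        bias = np.mean(y[sv] - np.dot(X[sv], w))  # strictly, only margin support vectors belong here
        return w, bias

    # a new point z is then classified by sign(dot(w, z) + bias)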
     
      ...

      # plot the support points
    - pylab.plot(XX[sv1,0], XX[sv1,1], 'b^',markersize=10)
    - pylab.plot(-XX[sv2,0], -XX[sv2,1], 'y^',markersize=10)
    + pylab.plot(XX[sv1,0], XX[sv1,1], 'bo', markersize=8)
    + pylab.plot(-XX[sv2,0], -XX[sv2,1], 'yo', markersize=8)
      #pylab.axis('equal')
      pylab.show()