
I'd like to implement my own Gaussian kernel in Python, just as an exercise. I'm using sklearn.svm.SVC(kernel=my_kernel), but I really don't understand what is going on.

I expected the function my_kernel to be called with the columns of the X matrix as parameters; instead, it gets called with X, X as its arguments. Looking at the examples doesn't make things any clearer.

What am I missing?

This is my code:

import scipy.io
import numpy as np
from sklearn import svm
import matplotlib.pyplot as plt

def svm_class(fileName):
    data = scipy.io.loadmat(fileName)
    X = data['X']
    y = data['y']
    f = svm.SVC(kernel='rbf', gamma=50, C=1.0)
    f.fit(X, y.flatten())
    plotData(np.hstack((X, y)), X, f)
    return

def plotData(arr, X, f):
    ax = plt.subplot(111)
    ax.scatter(arr[arr[:,2]==0][:,0], arr[arr[:,2]==0][:,1], c='r', marker='o', label='Zero')
    ax.scatter(arr[arr[:,2]==1][:,0], arr[arr[:,2]==1][:,1], c='g', marker='+', label='One')

    # create a mesh to plot in
    h = .02  # step size in the mesh
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))

    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    Z = f.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.contour(xx, yy, Z)
    plt.xlim(np.min(arr[:,0]), np.max(arr[:,0]))
    plt.ylim(np.min(arr[:,1]), np.max(arr[:,1]))
    plt.show()
    return

def gaussian_kernel(x1, x2):
    sigma = 0.5
    return np.exp(-np.sum((x1 - x2)**2) / (2 * sigma**2))

if __name__ == '__main__':
    fileName = 'ex6data2.mat'
    svm_class(fileName)

1 Answer


You should use svm.SVC() with kernel="precomputed", then compute the Gram matrix (a.k.a. kernel matrix) yourself and pass it as the first argument to svm.SVC().fit().

For example:

C = 0.1
model = svmTrain(X, y, C, "gaussian")

which calls sklearn.svm.SVC() inside svmTrain(), and then sklearn.svm.SVC().fit():

from sklearn import svm

# excerpt from svmTrain(); C, X, y and kernelFunction are its parameters
if kernelFunction == "gaussian":
    clf = svm.SVC(C=C, kernel="precomputed")
    return clf.fit(gaussianKernelGramMatrix(X, X), y)

The Gram matrix that is passed to sklearn.svm.SVC().fit() is computed in gaussianKernelGramMatrix():

import numpy as np

def gaussianKernelGramMatrix(X1, X2, K_function=gaussianKernel):
    # Gram matrix K where K[i, j] = K_function(X1[i], X2[j])
    gram_matrix = np.zeros((X1.shape[0], X2.shape[0]))
    for i, x1 in enumerate(X1):
        for j, x2 in enumerate(X2):
            gram_matrix[i, j] = K_function(x1, x2)
    return gram_matrix
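The double loop makes one kernel evaluation per pair of rows, which can get slow on larger datasets. If SciPy is available, a vectorized equivalent is possible (my sketch, not part of the original answer; gaussianKernelGramMatrixVectorized is a name I made up):

import numpy as np
from scipy.spatial.distance import cdist

def gaussianKernelGramMatrixVectorized(X1, X2, sigma=0.1):
    # squared Euclidean distance between every row of X1 and every row of X2
    sq_dists = cdist(X1, X2, 'sqeuclidean')    # shape: (X1.shape[0], X2.shape[0])
    return np.exp(-sq_dists / (2.0 * sigma**2))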

which uses gaussianKernel() to compute a radial basis function (RBF) kernel between x1 and x2:

def gaussianKernel(x1, x2, sigma=0.1):
    x1 = x1.flatten()
    x2 = x2.flatten()
    sim = np.exp(-np.sum(np.power((x1 - x2), 2)) / float(2 * sigma**2))
    return sim
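As a quick sanity check, you can evaluate the kernel on two small vectors (the numbers below are just an illustrative example, not from the original answer; it assumes the gaussianKernel() above is in scope):

import numpy as np

x1 = np.array([1.0, 2.0, 1.0])
x2 = np.array([0.0, 4.0, -1.0])
# squared distance = 1 + 4 + 4 = 9, so exp(-9 / (2 * 2**2)) = exp(-1.125) ~ 0.3247
print(gaussianKernel(x1, x2, sigma=2.0))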

Once the model is trained with this custom kernel, we predict by computing the kernel between the test data and the training data:

predictions = model.predict( gaussianKernelGramMatrix(Xval, X) )
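Note the argument order here (a point worth stating explicitly, though the original answer doesn't spell it out): the matrix passed to predict() must have shape (n_test_samples, n_train_samples), i.e. rows indexed by the test points and columns by the original training points:

K_test = gaussianKernelGramMatrix(Xval, X)   # rows: test points, columns: training points
assert K_test.shape == (Xval.shape[0], X.shape[0])
predictions = model.predict(K_test)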

Putting it all together, you can use this snippet for a custom SVM Gaussian kernel:

import numpy as np
from sklearn import svm

def gaussianKernelGramMatrixFull(X1, X2, sigma=0.1):
    gram_matrix = np.zeros((X1.shape[0], X2.shape[0]))
    for i, x1 in enumerate(X1):
        for j, x2 in enumerate(X2):
            x1 = x1.flatten()
            x2 = x2.flatten()
            gram_matrix[i, j] = np.exp(-np.sum(np.power((x1 - x2), 2)) / float(2 * sigma**2))
    return gram_matrix

# X, y are the training data and labels, Xval the test/validation data
# (e.g. loaded from a .mat file as in the question)
C = 0.1
clf = svm.SVC(C=C, kernel="precomputed")
model = clf.fit(gaussianKernelGramMatrixFull(X, X), y)
p = model.predict(gaussianKernelGramMatrixFull(Xval, X))
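This also explains what you saw in your own code: when you pass a callable as kernel=, scikit-learn does not call it column by column; it calls it with two whole sample matrices (X, X during fit, and test-vs-training data during predict) and expects the full Gram matrix back, with shape (n_samples_1, n_samples_2). So the same function works directly as a callable kernel too (a sketch, reusing gaussianKernelGramMatrixFull and the variables from the snippet above):

clf = svm.SVC(C=0.1, kernel=gaussianKernelGramMatrixFull)  # invoked as kernel(X1, X2)
model = clf.fit(X, y)        # internally computes kernel(X, X)
p = model.predict(Xval)      # internally computes kernel(Xval, X_train)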

Hope this answer helps!
