Attachment 'sheet06.m'
function sheet06
%SHEET06  Exercise sheet 6: one-class-SVM outlier detection on stud-data.
%   Loads the data, builds the kernel matrices, trains a one-class SVM and
%   prints the indices of the test points predicted as attacks.
%
%   NOTE(review): the handout named this function sheet05 inside the file
%   sheet06.m; MATLAB dispatches on the FILE name, so the function is
%   renamed here to match and avoid the name-mismatch warning.

fprintf('loading data...\n')
load('stud-data.mat')  % provides Xtr (training) and Xts (test) -- data points are columns

% compute kernel matrices (linear kernel: inner products of the columns)
fprintf('computing kernel matrices...\n')
KR  = full(Xtr' * Xtr);   % training  x training
KS  = full(Xts' * Xts);   % test      x test
KSR = full(Xts' * Xtr);   % test      x training

% compute the alphas
fprintf('learning one-class-SVM...')
C = ?; % <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<< adjust C accordingly!
[a, alpha] = oneclass(KR, C);

% compute predicted outlier scores; points scoring above 1 are flagged
as = compute_scores(KS, KSR, KR, alpha);

Ap = (as > 1);

% no semicolon: deliberately echo the predicted attack indices
predicted_attacks = find(Ap)'
% ...
23
function [x, y] = pr_loqo2(c, H, A, b, l, u)
%PR_LOQO2  Interior-point solver for a box/equality constrained QP.
%
%   [X, Y] = PR_LOQO2(c, H, A, b, l, u) solves
%
%       minimize    c'*x + 1/2 * x'*H*x
%       subject to  A*x = b,   l <= x <= u
%
%   Dimensions:  c : N-column vector       H : NxN matrix
%                A : MxN matrix (a 1xN row vector in the SVM use, M = 1)
%                b : M-column vector       l : N-column vector
%                u : N-column vector
%
%   Returns the primal solution x and y, the dual variables (Lagrange
%   multipliers) of the equality constraint A*x = b.
%
%   For documentation of the algorithm see R. Vanderbei, "LOQO: an
%   Interior Point Code for Quadratic Programming".

% solver parameters: boundary margin, initial slack bound, target number
% of significant figures of the duality gap, iteration cap
margin = 0.05;
bound = 100;
sigfig_max = 8;
counter_max = 50;

[m, n] = size(A);
H_x = H;
H_diag = diag(H);
b_plus_1 = 1;
c_plus_1 = norm(c) + 1;
one_x = -ones(n, 1);
one_y = -ones(m, 1);

% initial point: solve with a unit-regularized Hessian via Cholesky
for i = 1:n
    H_x(i, i) = H_diag(i) + 1;
end
H_y = eye(m);
c_x = c;
c_y = 0;
R = chol(H_x);
H_Ac = R \ ([A; c_x'] / R)';
H_A = H_Ac(:, 1:m);
H_c = H_Ac(:, (m + 1):(m + 1));
A_H_A = A * H_A;
A_H_c = A * H_c;
H_y_tmp = (A_H_A + H_y);
y = H_y_tmp \ (c_y + A_H_c);
x = H_A * y - H_c;

% slack/dual variables for the box constraints, clipped away from zero
g = max(abs(x - l), bound);
z = max(abs(x), bound);
t = max(abs(u - x), bound);
s = max(abs(x), bound);
mu = (z' * g + s' * t) / (2 * n);

sigfig = 0;
counter = 0;
alfa = 1;
while (sigfig < sigfig_max) && (counter < counter_max)
    counter = counter + 1;

    % residuals of the KKT conditions
    H_dot_x = H * x;
    rho = -A * x + b;
    nu = l - x + g;
    tau = u - x - t;
    sigma = c - A' * y - z + s + H_dot_x;
    gamma_z = -z;
    gamma_s = -s;
    x_dot_H_dot_x = x' * H_dot_x;
    primal_infeasibility = norm([tau; nu]) / b_plus_1;
    dual_infeasibility = norm([sigma]) / c_plus_1;

    % duality gap, expressed as significant figures of agreement
    primal_obj = c' * x + 0.5 * x_dot_H_dot_x;
    dual_obj = -0.5 * x_dot_H_dot_x + l' * z - u' * s + b' * y; %%%
    old_sigfig = sigfig;
    sigfig = max(-log10(abs(primal_obj - dual_obj) / (abs(primal_obj) + 1)), 0);

    % predictor step: solve the reduced KKT system
    hat_nu = nu + g .* gamma_z ./ z;
    hat_tau = tau - t .* gamma_s ./ s;
    d = z ./ g + s ./ t;
    for i = 1:n
        H_x(i, i) = H_diag(i) + d(i);
    end
    H_y = 0;
    c_x = sigma - z .* hat_nu ./ g - s .* hat_tau ./ t;
    c_y = rho;
    R = chol(H_x);
    H_Ac = R \ ([A; c_x'] / R)';
    H_A = H_Ac(:, 1:m);
    H_c = H_Ac(:, (m + 1):(m + 1));
    A_H_A = A * H_A;
    A_H_c = A * H_c;
    H_y_tmp = (A_H_A + H_y);
    delta_y = H_y_tmp \ (c_y + A_H_c);
    delta_x = H_A * delta_y - H_c;
    delta_s = s .* (delta_x - hat_tau) ./ t;
    delta_z = z .* (hat_nu - delta_x) ./ g;
    delta_g = g .* (gamma_z - delta_z) ./ z;
    delta_t = t .* (gamma_s - delta_s) ./ s;

    % corrector step: re-solve with centered right-hand side (same R)
    gamma_z = mu ./ g - z - delta_z .* delta_g ./ g;
    gamma_s = mu ./ t - s - delta_s .* delta_t ./ t;
    hat_nu = nu + g .* gamma_z ./ z;
    hat_tau = tau - t .* gamma_s ./ s;
    c_x = sigma - z .* hat_nu ./ g - s .* hat_tau ./ t;
    c_y = rho;
    H_Ac = R \ ([A; c_x'] / R)';
    H_A = H_Ac(:, 1:m);
    H_c = H_Ac(:, (m + 1):(m + 1));
    A_H_A = A * H_A;
    A_H_c = A * H_c;
    H_y_tmp = (A_H_A + H_y);
    delta_y = H_y_tmp \ (c_y + A_H_c);
    delta_x = H_A * delta_y - H_c;
    delta_s = s .* (delta_x - hat_tau) ./ t;
    delta_z = z .* (hat_nu - delta_x) ./ g;
    delta_g = g .* (gamma_z - delta_z) ./ z;
    delta_t = t .* (gamma_s - delta_s) ./ s;

    % step length keeping all slacks strictly positive, then update mu
    alfa = -0.95 / min([delta_g ./ g; delta_t ./ t; ...
                        delta_z ./ z; delta_s ./ s; -1]);
    mu = (z' * g + s' * t) / (2 * n);
    mu = mu * ((alfa - 1) / (alfa + 10))^2;

    x = x + delta_x * alfa;
    g = g + delta_g * alfa;
    t = t + delta_t * alfa;
    y = y + delta_y * alfa;
    z = z + delta_z * alfa;
    s = s + delta_s * alfa;
end
104
105 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
106 %
107 % Your solutions below!
108 %
109
110 % 3. Train a one-class SVM given the kernel matrix K and the
111 % regularization constant C.
function [a, alpha] = oneclass(K, C)
% TODO (exercise 3): train the one-class SVM here, e.g. by setting up the
% dual QP over the coefficients and solving it with pr_loqo2 above.
%   K     : n x n kernel matrix on the training data
%   C     : regularization constant bounding the coefficients
%   alpha : n x 1 learned kernel-expansion coefficients
%   a     : scalar returned alongside alpha (threshold/offset --
%           NOTE(review): semantics to be fixed by your implementation;
%           it is not consumed by compute_scores below)
% ...
114
115 % 4. Compute the outlier scores given
116 %
117 % KR: kernel matrix on training data
118 % KS: kernel matrix on test data
119 % KSR: kernel matrix on test data / training data
120 % alpha: learned kernel coefficients
function scores = compute_scores(KS, KSR, KR, alpha)
% TODO (exercise 4): compute the outlier score of every test point.
%   KS     : kernel matrix on test data (test x test)
%   KSR    : kernel matrix between test and training data (test x train)
%   KR     : kernel matrix on training data (train x train)
%   alpha  : learned kernel coefficients from oneclass
%   scores : one score per test point; the caller flags scores > 1 as
%            attacks, so normalize the scores accordingly
% ...
Attached Files
To refer to attachments on a page, use attachment:filename, as shown below in the list of files. Do NOT use the URL of the [get] link, since this is subject to change and can break easily. You are not allowed to attach a file to this page.