1. Using loops:

1.1 Original (unregularized) logistic regression:

function g = sigmoid(z)
g = zeros(size(z));
g = 1 ./ (1 + exp(-z));   % element-wise, so z may be a scalar, vector, or matrix
end
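For reference, this computes the logistic (sigmoid) function element-wise:

$$g(z) = \frac{1}{1 + e^{-z}}$$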

 

function [J, grad] = costFunction(theta, X, y)

% Initialize some useful values
m = length(y); % number of training examples

J = 0;
grad = zeros(size(theta));

n = length(theta); % number of parameters (size(theta) would return a vector, not a scalar)
gradtemp = 0;

% compute the cost by summing over all training examples
for i = 1:m
  temp1 = y(i) * log(sigmoid(X(i,:) * theta)) + (1 - y(i)) * log(1 - sigmoid(X(i,:) * theta));
  J = J + temp1;
endfor

J = -(J / m);

% compute the gradient, one parameter at a time
for j = 1:n
  for i = 1:m
    temp2 = (sigmoid(X(i,:) * theta) - y(i)) * X(i,j);
    gradtemp = gradtemp + temp2;
  endfor
  gradtemp = gradtemp / m;
  grad(j) = gradtemp;
  gradtemp = 0;
endfor

end
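The two loops above accumulate the cross-entropy cost and its partial derivatives, which in closed form are:

$$J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\left[y^{(i)}\log h_\theta(x^{(i)}) + \left(1 - y^{(i)}\right)\log\left(1 - h_\theta(x^{(i)})\right)\right]$$

$$\frac{\partial J(\theta)}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)x_j^{(i)}$$

where $h_\theta(x) = g(\theta^T x)$ is the sigmoid hypothesis.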

 

function p = predict(theta, X)

m = size(X, 1); % Number of training examples

p = zeros(m, 1);

% threshold the hypothesis at 0.5 for each example
for i = 1:m
  h = sigmoid(X(i,:) * theta);
  if h >= 0.5
    y = 1;
  else
    y = 0;
  endif
  p(i) = y;
endfor

end
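A quick way to evaluate the classifier on the training set; this is a hypothetical usage sketch, assuming theta has already been learned and X includes the intercept column:

% hypothetical usage: fraction of training examples classified correctly
p = predict(theta, X);
fprintf('Train Accuracy: %f\n', mean(double(p == y)) * 100);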

 

1.2 Regularized logistic regression:

function [J, grad] = costFunctionReg(theta, X, y, lambda)

% Initialize some useful values
m = length(y); % number of training examples

J = 0;
grad = zeros(size(theta));

n = length(theta); % number of parameters

% compute the cost: cross-entropy term over all examples...
result1 = 0;
for i = 1:m
  h = sigmoid(X(i,:) * theta);
  temp1 = y(i) * log(h) + (1 - y(i)) * log(1 - h);
  result1 = result1 + temp1;
endfor

% ...plus the L2 penalty, which skips theta(1) (the bias term)
result2 = 0;
for j = 2:n
  temp2 = theta(j) * theta(j);
  result2 = result2 + temp2;
endfor

J = (-1 / m) * result1 + lambda / (2 * m) * result2;

% gradient for theta(1): the bias is not regularized
result3 = 0;
for i = 1:m
  h = sigmoid(X(i,:) * theta);
  temp3 = (h - y(i)) * X(i,1);
  result3 = result3 + temp3;
endfor
grad(1) = (1 / m) * result3;

% gradient for the remaining parameters, with the regularization term added
for j = 2:n
  result4 = 0;
  for i = 1:m
    h = sigmoid(X(i,:) * theta);
    temp4 = (h - y(i)) * X(i,j);
    result4 = result4 + temp4;
  endfor
  grad(j) = (1 / m) * result4 + (lambda / m) * theta(j);
endfor

end
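These loops implement the regularized cost and gradient. Note that theta(1), the bias, is excluded from the penalty; since the code is 1-indexed, the regularization sums start at j = 2:

$$J(\theta) = -\frac{1}{m}\sum_{i=1}^{m}\left[y^{(i)}\log h_\theta(x^{(i)}) + \left(1 - y^{(i)}\right)\log\left(1 - h_\theta(x^{(i)})\right)\right] + \frac{\lambda}{2m}\sum_{j=2}^{n}\theta_j^2$$

$$\frac{\partial J(\theta)}{\partial \theta_j} = \frac{1}{m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)x_j^{(i)} + \frac{\lambda}{m}\theta_j \qquad (j \ge 2)$$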

 

2. Vectorized matrix-vector implementation:

2.1 Original (unregularized) logistic regression:

 

function g = sigmoid(z)
% identical to section 1.1: exp() works element-wise, so sigmoid is already vectorized
g = zeros(size(z));
g = 1 ./ (1 + exp(-z));
end

function [J, grad] = costFunction(theta, X, y)
% Initialize some useful values
m = length(y); % number of training examples

J = 0;
grad = zeros(size(theta));

h = sigmoid(X * theta);   % m x 1 vector of predictions for all examples at once

J = sum(- y .* log(h) - (1 - y) .* log(1 - h)) / m;

% (h - y) .* X broadcasts the m x 1 residual across the columns of X;
% the result is equivalent to grad = X' * (h - y) / m
grad = (1/m * sum((h - y) .* X))';

end
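Because the function returns both J and grad, it plugs directly into a gradient-based optimizer. A minimal driver sketch, assuming Octave's built-in fminunc and an X that already carries the intercept column:

% hypothetical driver: minimize the cost with a quasi-Newton method
options = optimset('GradObj', 'on', 'MaxIter', 400);   % use our analytic gradient
initial_theta = zeros(size(X, 2), 1);
[theta, cost] = fminunc(@(t) costFunction(t, X, y), initial_theta, options);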

 

function p = predict(theta, X)

m = size(X, 1); % Number of training examples

p = zeros(m, 1);

h = sigmoid(X * theta);

p(h >= 0.5) = 1;   % logical indexing: set p to 1 wherever h crosses the 0.5 threshold

end

2.2 Regularized logistic regression:

 

function [J, grad] = costFunctionReg(theta, X, y, lambda)

% Initialize some useful values
m = length(y); % number of training examples

J = 0;
grad = zeros(size(theta));

n = size(theta, 1);

h = sigmoid(X * theta);

% cross-entropy term plus the L2 penalty; theta(1) (the bias) is excluded from the penalty
J = sum(- y .* log(h) - (1 - y) .* log(1 - h)) / m + lambda / (2 * m) * sum(theta(2:n) .^ 2);

% unregularized gradient first, then add the penalty term to everything but theta(1)
grad = (1/m * sum((h - y) .* X))';

grad(2:n) = grad(2:n) + lambda / m * theta(2:n);

end
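Either implementation can be checked numerically: a centered finite difference around theta should match the analytic gradient. A small self-contained sketch with synthetic data (all names and values here are illustrative):

% hypothetical gradient check on random data
m = 20; n = 4;
X = [ones(m, 1), randn(m, n - 1)];      % design matrix with intercept column
y = double(rand(m, 1) > 0.5);           % random binary labels
theta = randn(n, 1);
lambda = 1;

[J, grad] = costFunctionReg(theta, X, y, lambda);

epsilon = 1e-4;
numgrad = zeros(size(theta));
for j = 1:n
  e = zeros(n, 1); e(j) = epsilon;
  numgrad(j) = (costFunctionReg(theta + e, X, y, lambda) - ...
                costFunctionReg(theta - e, X, y, lambda)) / (2 * epsilon);
endfor

disp(max(abs(numgrad - grad)));         % should be vanishingly small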
