forked from Schuture/Convex-optimization
-
Notifications
You must be signed in to change notification settings - Fork 0
/
projected_vs_nesterov_second_fixed_step.m
79 lines (73 loc) · 1.48 KB
/
projected_vs_nesterov_second_fixed_step.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
% Compare projected gradient descent against Nesterov's second (accelerated)
% method, both with fixed step size t = 1/L, on
%     f(x) = g(x) + h(x),  g(x) = 0.5*x'*Q*x - b'*x,  h(x) = IC(x),
% where IC is the indicator of the box C = [1,2]^n (enforced via projection).
%%%%%%%%%%%%%%%%%% generate data %%%%%%%%%%%%%%%%%%
global n Q b
n = 500;
xbar = randn(n,1);
Q = randn(n,n);
Q = Q+Q'+eye(n); % symmetric; NOTE(review): not guaranteed positive definite
                 % (eigenvalues of Q+Q' may lie below -1) -- confirm intent
b = Q*xbar;      % chosen so xbar is the unconstrained stationary point
L = norm(Q);     % spectral norm of Q = Lipschitz constant of grad g
t = 1/L;         % fixed step size 1/L
%%%%%%%%%%%%% projected gradient method, fixed step %%%%%%
gra = zeros(50,1);   % objective-value history g(x_k)
x = zeros(n,1);
for k = 1:50
    grad1 = grad(x);
    y = x - t*grad1; % gradient step on the smooth part
    x = proj(y);     % project back onto C = [1,2]^n
    gra(k) = g(x);
    disp(k)
end
semilogy(gra(1:50))
hold on
%%%%%%%%% Nesterov's second method, fixed step %%%%%%%%%%
gra = zeros(50,1);
x = zeros(n,1);
v = x;
for k = 1:50
    theta = 2/(k+1);                % standard acceleration weight
    y = (1-theta)*x + theta*v;      % extrapolation point
    v_next = prox(y,v,t,theta);     % projected step on the v-sequence
    x = (1-theta)*x + theta*v_next; % convex combination keeps x feasible
    gra(k) = g(x);
    disp(k)
end
semilogy(gra(1:50))
title('Projected gradient vs Nesterov''s second method with 1/L step')
legend('Projected gradient','Nesterov''s second method')
xlabel('Iteration')
ylabel('function value')
%%%%%%%%%%%%%%define functions%%%%%%%%%%%%%%
function [val] = g(x)
% Smooth part of the objective: g(x) = 0.5*x'*Q*x - b'*x.
% Q and b are shared with the driver script through globals.
global Q b
val = 0.5*x'*Q*x - b'*x;
end
function [gval] = grad(x)
% Gradient of the smooth part g at x: grad g(x) = Q*x - b.
% Q and b are shared with the driver script through globals.
global Q b
gval = Q*x - b;
end
function [ret] = prox(y,v,t,theta)
% Accelerated-method update: take the step v - t*grad(y)/theta and project
% it onto the box C = [1,2]^n, i.e. clamp each component into [1,2].
% The original per-element loop (and its "global n" dependency) is replaced
% by a vectorized min/max clamp; the interface and result are unchanged.
ret = min(max(v - t*grad(y)/theta, 1), 2);
end
function [ret] = proj(y)
% Euclidean projection of y onto the box C = [1,2]^n, elementwise:
%   ret(i) = min(max(y(i), 1), 2).
% The original per-element loop (and its "global n" dependency) is replaced
% by a vectorized min/max clamp; the interface and result are unchanged.
ret = min(max(y, 1), 2);
end