A Brief Discussion of Compressed Sensing (28): Generalized Orthogonal Matching Pursuit (gOMP) for Compressed Sensing Reconstruction

Main contents:

  1. The gOMP algorithm flow
  2. MATLAB implementation of gOMP
  3. Experiment and results with a one-dimensional signal
  4. Experiment and results on the relationship between sparsity K and the probability of successful reconstruction

1. The gOMP Algorithm Flow

The Generalized Orthogonal Matching Pursuit (Generalized OMP, gOMP) algorithm can be regarded as a generalization of OMP. In each iteration, OMP selects only the single column most correlated with the residual, whereas gOMP simply selects the S most correlated columns. The phrase "simply selects" is used here in contrast to algorithms such as ROMP: no further processing is applied, the S largest are selected and nothing more.

The gOMP algorithm flow follows the same steps as OMP (correlation with the residual, support update, least-squares estimate, residual update), differing only in how many atoms are selected per iteration; see the sketch below.
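
A minimal toy sketch of that selection step (assumed small dimensions; variable names match the MATLAB implementation in the next section):

% Toy illustration of the gOMP selection step
A   = randn(8,16);           % sensing matrix (toy size)
r_n = randn(8,1);            % current residual
Pos_theta = [];              % support selected so far
S   = 3;                     % number of atoms picked per iteration
product = A' * r_n;                        % correlations of all columns of A with the residual
[~, pos] = sort(abs(product), 'descend');  % sort correlations by magnitude
% OMP would take only the single best match:    pos(1)
% gOMP simply takes the S best matches at once: pos(1:S)
Sk = union(Pos_theta, pos(1:S));           % new support estimate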

2. MATLAB Implementation of gOMP (CS_gOMP.m)

function [ theta ] = CS_gOMP( y,A,K,S )
%   CS_gOMP
%   Detailed explanation goes here
%   y = Phi * x
%   x = Psi * theta
%    y = Phi*Psi * theta
%   Let A = Phi*Psi, then y = A*theta
%   Given y and A, recover theta
%   Reference: Jian Wang, Seokbeop Kwon, Byonghyo Shim.  Generalized 
%   orthogonal matching pursuit, IEEE Transactions on Signal Processing, 
%   vol. 60, no. 12, pp. 6202-6216, Dec. 2012. 
%   Available at: http://islab.snu.ac.kr/paper/tsp_gOMP.pdf
    if nargin < 4
        S = round(max(K/4, 1));
    end
    [y_rows,y_columns] = size(y);
    if y_rows<y_columns
        y = y';%y should be a column vector
    end
    [M,N] = size(A);%the sensing matrix A is M*N
    theta = zeros(N,1);%stores the recovered theta (column vector)
    Pos_theta = [];%stores the indices of the columns of A selected during the iterations
    r_n = y;%initialize the residual to y
    for ii=1:K%iterate K times, where K is the sparsity level
        product = A'*r_n;%inner products of the columns of A with the residual
        [val,pos]=sort(abs(product),'descend');%sort in descending order
        Sk = union(Pos_theta,pos(1:S));%select the S largest and merge with the current support
        if length(Sk)==length(Pos_theta)
            if ii == 1
                theta_ls = 0;
            end
            break;
        end
        if length(Sk)>M
            if ii == 1
                theta_ls = 0;
            end
            break;
        end
        At = A(:,Sk);%form the matrix At from these columns of A
        %y=At*theta; solve the least-squares problem for theta below
        theta_ls = (At'*At)^(-1)*At'*y;%least-squares solution
        %At*theta_ls is the orthogonal projection of y onto the column space of At
        r_n = y - At*theta_ls;%update the residual
        Pos_theta = Sk;
        if norm(r_n)<1e-6
            break;%quit the iteration
        end
    end
    theta(Pos_theta)=theta_ls;%the recovered theta
end
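
A small implementation note: the explicit inverse (At'*At)^(-1)*At'*y used in the least-squares step can be numerically fragile when At is ill-conditioned. If preferred, the same least-squares solution can be obtained with MATLAB's backslash operator:

% equivalent, numerically more robust least-squares solve for the line above
theta_ls = At \ y;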

3. Experiment and Results with a One-Dimensional Signal

%Compressed sensing reconstruction algorithm test
clear all;close all;clc;
M = 128;%number of measurements
N = 256;%length of the signal x
K = 30;%sparsity level of the signal x
Index_K = randperm(N);
x = zeros(N,1);
x(Index_K(1:K)) = 5*randn(K,1);%x is K-sparse with random support
Psi = eye(N);%x is sparse in the canonical basis, so the sparsifying matrix is the identity: x=Psi*theta
Phi = randn(M,N)/sqrt(M);%measurement matrix: Gaussian
A = Phi * Psi;%sensing matrix
y = Phi * x;%obtain the measurement vector y

%% Recover the signal x
tic
theta = CS_gOMP( y,A,K);
x_r = Psi * theta;% x=Psi * theta
toc

%% Plotting
figure;
plot(x_r,'k.-');%plot the recovered signal
hold on;
plot(x,'r');%plot the original signal x
hold off;
legend('Recovery','Original')
fprintf('\nRecovery residual:');
norm(x_r-x)%recovery residual

4. Experiment and Results on the Relationship between Sparsity K and the Probability of Successful Reconstruction

%   Compressed sensing reconstruction algorithm test: CS_Reconstuction_KtoPercentagegOMP.m
%   Reference: Jian Wang, Seokbeop Kwon, Byonghyo Shim.  Generalized 
%   orthogonal matching pursuit, IEEE Transactions on Signal Processing, 
%   vol. 60, no. 12, pp. 6202-6216, Dec. 2012. 
%   Available at: http://islab.snu.ac.kr/paper/tsp_gOMP.pdf

clear all;close all;clc;
addpath(genpath('../../OMP/'))

%% Parameter configuration and initialization
CNT = 1000; %number of repeated trials for each (K,M,N)
N = 256; %length of the signal x
Psi = eye(N); %x is sparse in the canonical basis, so the sparsifying matrix is the identity: x=Psi*theta
M_set = [128]; %set of numbers of measurements
KIND = ['OMP      ';'ROMP     ';'StOMP    ';'SP       ';'CoSaMP   ';...
    'gOMP(s=3)';'gOMP(s=6)';'gOMP(s=9)'];
Percentage = zeros(N,length(M_set),size(KIND,1)); %stores the probability of successful recovery

%% Main loop: iterate over each combination (K,M,N)
tic
for mm = 1:length(M_set)
    M = M_set(mm); %number of measurements for this run
    K_set = 5:5:70; %no need to sweep every sparsity level K; testing every 5th value is enough
    %stores the recovery success probability for different K at this M
    PercentageM = zeros(size(KIND,1),length(K_set));
    for kk = 1:length(K_set)
       K = K_set(kk); %sparsity level K of the signal x for this run
       P = zeros(1,size(KIND,1));
       fprintf('M=%d,K=%d\n',M,K);
       for cnt = 1:CNT  %run CNT trials for each number of measurements
            Index_K = randperm(N);
            x = zeros(N,1);
            x(Index_K(1:K)) = 5*randn(K,1); %x is K-sparse with random support
            Phi = randn(M,N)/sqrt(M); %measurement matrix: Gaussian
            A = Phi * Psi; %sensing matrix
            y = Phi * x; %obtain the measurement vector y
            %(1)OMP
            theta = CS_OMP(y,A,K); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(1) = P(1) + 1;
            end
            %(2)ROMP
            theta = CS_ROMP(y,A,K); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(2) = P(2) + 1;
            end
            %(3)StOMP
            theta = CS_StOMP(y,A); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(3) = P(3) + 1;
            end
            %(4)SP
            theta = CS_SP(y,A,K); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(4) = P(4) + 1;
            end
            %(5)CoSaMP
            theta = CS_CoSaMP(y,A,K); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(5) = P(5) + 1;
            end
            %(6)gOMP,S=3
            theta = CS_gOMP(y,A,K,3); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(6) = P(6) + 1;
            end
            %(7)gOMP,S=6
            theta = CS_gOMP(y,A,K,6); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(7) = P(7) + 1;
            end
            %(8)gOMP,S=9
            theta = CS_gOMP(y,A,K,9); %reconstruct theta
            x_r = Psi * theta; % x=Psi * theta
            if norm(x_r-x)<1e-6 %recovery is considered successful if the residual is below 1e-6
                P(8) = P(8) + 1;
            end
       end
       for iii = 1:size(KIND,1)
           PercentageM(iii,kk) = P(iii)/CNT*100; %compute the recovery probability
       end
    end
    for jjj = 1:size(KIND,1)
        Percentage(1:length(K_set),mm,jjj) = PercentageM(jjj,:);
    end
end
toc
save KtoPercentage1000gOMP %a full run takes a long time, so save all workspace variables

%% 繪圖
S = ['-ks';'-ko';'-yd';'-gv';'-b*';'-r.';'-rx';'-r+'];
figure;
for mm = 1:length(M_set)
    M = M_set(mm);
    K_set = 5:5:70;
    L_Kset = length(K_set);
    for ii = 1:size(KIND,1)
        plot(K_set,Percentage(1:L_Kset,mm,ii),S(ii,:)); %plot the success-probability curve for this algorithm
        hold on;
    end
end
hold off;
xlim([5 70]);
legend('OMP','ROMP','StOMP','SP','CoSaMP',...
    'gOMP(s=3)','gOMP(s=6)','gOMP(s=9)');
xlabel('Sparsity level K');
ylabel('The Probability of Exact Reconstruction');
title('Prob. of exact recovery vs. the signal sparsity K(M=128,N=256)(Gaussian)');
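
Since a full sweep with CNT = 1000 trials per sparsity level takes a long time, the workspace saved above can be reloaded later to redraw the figure without re-running the simulation (assuming KtoPercentage1000gOMP.mat is in the current folder):

load KtoPercentage1000gOMP   % restores Percentage, M_set, K_set, KIND, etc.
% the plotting block above can then be re-run as-is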

Conclusion: compared with OMP, gOMP merely changes the number of atoms selected in each iteration, yet the performance improves considerably.

5. Reference

http://blog.csdn.net/jbb0523/article/details/45693027
