David83
Advanced Member level 1
Hi,
what is wrong with my MATLAB code?
% Gray-mapped QPSK over AWGN: Monte Carlo BER vs. theoretical BER.
% Each quadrature carries an independent BPSK bit; per-dimension SNR is
% swept over SNRdB and the averaged simulated BER is plotted against
% the analytic curve 0.5*erfc(sqrt(SNR)) = Q(sqrt(2*SNR)).
clear all;
clc
N = 1000000;                      % symbols per SNR point
SNRdB = [0:2:10];                 % SNR sweep in dB
SNR = 10.^(SNRdB./10);            % linear SNR
b1 = rand(1,N) > 0.5;             % in-phase bit stream (0/1)
b2 = rand(1,N) > 0.5;             % quadrature bit stream (0/1)
d1 = 2.*b1 - 1;                   % map {0,1} -> {-1,+1}
d2 = 2.*b2 - 1;
d = d1 + 1i.*d2;                  % QPSK symbol (unit energy per dimension)
berAWGN = zeros(1, length(SNR));  % preallocate (avoids growing in the loop)
for ii = 1:length(SNR)
    % Unit-variance complex Gaussian noise (variance 1/2 per dimension),
    % scaled by 1/sqrt(SNR) so each dimension sees SNR = 2*SNR(ii)/1... i.e.
    % per-bit error probability Q(sqrt(2*SNR(ii))) = 0.5*erfc(sqrt(SNR(ii))).
    n = (1/sqrt(2))*(randn(1,N) + 1i*randn(1,N));
    yAWGN = d + (1/sqrt(SNR(ii))).*n;
    % BUG FIX: transmitted amplitudes are +/-1, so the ML decision
    % threshold is 0, NOT 0.5. A 0.5 threshold biases every decision
    % and inflates the simulated BER far above the theoretical curve.
    bHatAWGN1 = real(yAWGN) > 0;
    bHatAWGN2 = imag(yAWGN) > 0;
    berAWGN1 = sum(xor(bHatAWGN1, b1))/N;  % bit errors on I rail
    berAWGN2 = sum(xor(bHatAWGN2, b2))/N;  % bit errors on Q rail
    berAWGN(ii) = mean([berAWGN1 berAWGN2]);
end
% Theoretical per-bit error rate for this normalization.
BERAWGNth = 0.5.*erfc(sqrt(SNR));
semilogy(SNRdB, berAWGN, SNRdB, BERAWGNth)
xlabel('SNR (dB)'); ylabel('BER'); grid on
legend('AWGN','AWGN Th')
What is wrong with my MATLAB code?