/***********************************************/
/**
* @file instrument2PowerSpectralDensity.cpp
*
* @brief Compute PSD from instrument files.
*
* @author Andreas Kvas
* @date 2016-02-02
*
*/
/***********************************************/
// Latex documentation
#define DOCSTRING docstring
static const char *docstring = R"(
This program computes the power spectral density (PSD) for all data fields in an \file{instrument file}{instrument}.
The PSD is computed using Lomb's method. For each arc and each frequency $f$, a sinusoid is fitted to the data by least squares,
\begin{equation}
l_i = a \cos(2\pi f t_i) + b \sin(2\pi f t_i) + e_i.
\end{equation}
The PSD value at this frequency is then formed as the square sum of the adjusted observations:
\begin{equation}
P(f) = \sum_i \hat{l}^2_i.
\end{equation}
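Here the adjusted observations are obtained from the least-squares estimates of the coefficients,
\begin{equation}
\hat{l}_i = \hat{a} \cos(2\pi f t_i) + \hat{b} \sin(2\pi f t_i),
\end{equation}
where at zero and at the Nyquist frequency only the cosine term is estimated.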
The resulting PSD is the average over all arcs. For regularly sampled time series,
this method yields the same results as FFT-based PSD estimates.
A regular frequency grid is computed from the longest arc and the median sampling of the input data.
The maximum number of epochs per arc is determined by
\begin{equation}
N = \frac{t_{\text{end}} - t_{\text{start}}}{\Delta t_{\text{median}} } + 1,
\end{equation}
and the Nyquist frequency is given by
\begin{equation}
f_{\text{nyq}} = \frac{1}{2\Delta t_{\text{median}}}.
\end{equation}
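For example, an arc spanning one day with a median sampling of 5~s gives $N = 17281$ epochs and $f_{\text{nyq}} = 0.1$~Hz.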
If it is suspected that \configFile{inputfileInstrument}{instrument} contains secular variations,
the input should be detrended using \program{InstrumentDetrend}.
See also \program{Instrument2CovarianceFunctionVCE},
\program{CovarianceFunction2PowerSpectralDensity}, \program{PowerSpectralDensity2CovarianceFunction}.
)";
/***********************************************/
#include "programs/program.h"
#include "base/fourier.h"
#include "files/fileMatrix.h"
#include "files/fileInstrument.h"
/***** CLASS ***********************************/
/** @brief Compute PSD from instrument files.
* @ingroup programsGroup */
class Instrument2PowerSpectralDensity
{
public:
void run(Config &config, Parallel::CommunicatorPtr comm);
};
GROOPS_REGISTER_PROGRAM(Instrument2PowerSpectralDensity, PARALLEL, "Compute PSD from instrument files.", Instrument, Covariance)
GROOPS_RENAMED_PROGRAM(InstrumentComputePSD, Instrument2PowerSpectralDensity, date2time(2018, 7, 18))
GROOPS_RENAMED_PROGRAM(InstrumentComputePsd, Instrument2PowerSpectralDensity, date2time(2020, 7, 7))
/***********************************************/
void Instrument2PowerSpectralDensity::run(Config &config, Parallel::CommunicatorPtr comm)
{
try
{
FileName fileNameInstrument, fileNamePSD;
readConfig(config, "outputfilePSD", fileNamePSD, Config::MUSTSET, "", "estimated PSD: column 0: frequency vector, column 1-(n-1): PSD estimate for each channel");
readConfig(config, "inputfileInstrument", fileNameInstrument, Config::MUSTSET, "", "");
if(isCreateSchema(config)) return;
logStatus<<"read instrument data"<<Log::endl;
InstrumentFile instrumentFile(fileNameInstrument);
const UInt arcCount = instrumentFile.arcCount();
const UInt dataCount = instrumentFile.dataCount(TRUE/*mustDefined*/);
Vector freqs;
UInt arcEpochCount = 0;
Double sampling = 1.0;
if(Parallel::isMaster(comm))
{
std::vector<Time> times;
Time maxArcLen;
for(UInt arcNo=0; arcNo<arcCount; arcNo++)
{
Arc arc = instrumentFile.readArc(arcNo);
if(arc.size() == 0)
continue;
std::vector<Time> arcTimes = arc.times();
maxArcLen = std::max(arcTimes.back() - arcTimes.front(), maxArcLen);
times.insert(times.end(), arcTimes.begin(), arcTimes.end());
}
sampling = medianSampling(times).seconds();
arcEpochCount = static_cast<UInt>(std::round(maxArcLen.seconds()/sampling)+1);
logInfo<<" maximum arc length: "<<arcEpochCount<<" epochs"<<Log::endl;
logInfo<<" median sampling: "<<sampling<<" seconds"<<Log::endl;
freqs = Fourier::frequencies(arcEpochCount, sampling);
}
Parallel::broadCast(freqs, 0, comm);
Parallel::broadCast(arcEpochCount, 0, comm);
Parallel::broadCast(sampling, 0, comm); // needed on all processes for the Nyquist check in the fit below
logStatus<<"compute PSD"<<Log::endl;
Matrix PSD(freqs.rows(), dataCount+1);
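// accumulate the power of the adjusted observations for each frequency and data column over all arcs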
Parallel::forEach(arcCount, [&](UInt arcNo)
{
Arc arc = instrumentFile.readArc(arcNo);
Matrix data = arc.matrix();
// time vector
Vector t(arc.size());
for(UInt i=0; i<t.rows(); i++)
t(i) = (arc.at(i).time-arc.at(0).time).seconds();
// square sum of observations
Vector lPl(data.columns()-1);
for(UInt i=0; i<lPl.rows(); i++)
lPl(i) = quadsum(data.column(i+1));
// estimate the power of each frequency
for(UInt k=0; k<freqs.size(); k++)
{
Matrix l = data.column(1, data.columns()-1);
Matrix A(l.rows(), 2);
const Double f = 2*PI*freqs.at(k);
for(UInt i=0; i<A.rows(); i++)
{
A(i, 0) = std::cos(f*t(i));
A(i, 1) = std::sin(f*t(i));
}
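// at f = 0 the sine column is identically zero and at the Nyquist frequency it vanishes for regular sampling,
// so only the cosine term is estimated there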
if((freqs.at(k) == 0) || (std::fabs(freqs.at(k)*sampling-0.5) < 1e-5)) // zero or nyquist freq?
A = A.column(0);
reduceLeastSquaresFit(A, l); // l = e_hat
for(UInt i=0; i<l.columns(); i++)
PSD(k, i+1) += lPl(i) - quadsum(l.column(i));
}
}, comm);
Parallel::reduceSum(PSD, 0, comm);
if(Parallel::isMaster(comm))
{
PSD *= sampling/arcCount; // average over arcs; PSD unit: input^2/Hz
copy(freqs, PSD.column(0));
logStatus<<"write PSD to file <"<<fileNamePSD<<">"<<Log::endl;
writeFileMatrix(fileNamePSD, PSD);
}
}
catch(std::exception &e)
{
GROOPS_RETHROW(e)
}
}
/***********************************************/