ifw-daq  3.0.0-pre2
IFW Data Acquisition modules
merge.cpp
/**
 * @file
 * @ingroup daq_dpm_merge
 * @copyright ESO - European Southern Observatory
 */
#include "merge.hpp"

#include <fmt/format.h>
#include <log4cplus/loggingmacros.h>

#include <daq/fits/cfitsio.hpp>

namespace daq::dpm::merge {

fits::KeywordVector CompileKeywords(Operations ops, FitsKeywordsSource const& source) {
    try {
        return source.GetKeywordRuleProcessor().Process(
            source.GetKeywords(),
            source.GetInitialKeywords().value_or(KeywordRuleProcessor::DefaultRule::User));
    } catch (...) {
        auto msg = fmt::format("Processing keyword rules failed for JSON keywords source '{}'",
                               source.GetName());
        LOG4CPLUS_ERROR(ops.logger, msg);
        std::throw_with_nested(std::runtime_error(msg));
    }
}

fits::KeywordVector CompileKeywords(Operations ops, FitsFileSource const& source, int hdu_num) {
    auto logger = log4cplus::Logger::getInstance("daq.dpmmerge");
    auto literal_kws = fits::ReadKeywords(source.GetFitsFile(), hdu_num);
    fits::KeywordVector result;
    result.reserve(literal_kws.size());
    std::copy(std::make_move_iterator(literal_kws.begin()),
              std::make_move_iterator(literal_kws.end()),
              std::back_inserter(result));

    try {
        return source.GetKeywordRuleProcessor().Process(
            result, source.GetInitialKeywords().value_or(KeywordRuleProcessor::DefaultRule::User));
    } catch (...) {
        auto msg = fmt::format("Processing keyword rules failed for FITS file keywords source '{}'",
                               source.GetFilePath().string());
        LOG4CPLUS_ERROR(logger, msg);
        std::throw_with_nested(std::runtime_error(msg));
    }
}

/**
 * Compiles unique keywords and formats primary HDU keywords
 */
fits::KeywordVector CompilePrimaryHduKeywords(Operations ops,
                                              TargetSource& target,
                                              std::vector<SourceTypes> const& sources) {
    auto logger = log4cplus::Logger::getInstance("daq.dpmmerge");
    constexpr const int primary_hdu_num = 1;
    fits::KeywordVector result;
    auto literal_kws = fits::ReadKeywords(target.GetFitsFile(), primary_hdu_num);
    LOG4CPLUS_DEBUG(logger, "Read keywords from " << target.GetFilePath());
    std::for_each(std::begin(literal_kws), std::end(literal_kws), [&](auto const& kw) {
        LOG4CPLUS_DEBUG(logger, kw);
    });
    result.reserve(literal_kws.size());
    std::copy(std::begin(literal_kws), std::end(literal_kws), std::back_inserter(result));
    // Filter
    try {
        // note: By default we keep all keywords in the target
        result = target.GetKeywordRuleProcessor().Process(
            result, target.GetInitialKeywords().value_or(KeywordRuleProcessor::DefaultRule::All));
        LOG4CPLUS_DEBUG(logger, "Result after keyword processing: " << target.GetFilePath());
        std::for_each(std::begin(result), std::end(result), [&](auto const& kw) {
            LOG4CPLUS_DEBUG(logger, kw);
        });
    } catch (...) {
        auto msg = fmt::format("Processing keyword rules failed for target FITS file '{}'",
                               target.GetFilePath().string());
        LOG4CPLUS_ERROR(logger, msg);
        std::throw_with_nested(std::runtime_error(msg));
    }

    for (auto const& source : sources) {
        std::string path_or_name;
        auto kws = std::visit(
            [&](auto const& source) -> fits::KeywordVector {
                using T = std::decay_t<decltype(source)>;
                if constexpr (std::is_same_v<T, FitsKeywordsSource>) {
                    path_or_name = fmt::format("{}: (keyword list)", source.GetName());
                    return CompileKeywords(ops, source);
                } else if constexpr (std::is_same_v<T, FitsFileSource>) {
                    path_or_name =
                        fmt::format("{}: {}", source.GetName(), source.GetFilePath().native());
                    return CompileKeywords(ops, source, 1);
                }
            },
            source);
        LOG4CPLUS_DEBUG(logger, "Updating with keywords from : " << path_or_name);
        std::for_each(
            std::begin(kws), std::end(kws), [&](auto const& kw) { LOG4CPLUS_DEBUG(logger, kw); });
        // Don't overwrite conflicting keywords
        fits::UpdateKeywords(result, kws, fits::ConflictPolicy::Skip);
    }

    return result;
}

std::vector<fits::LiteralKeyword> FormatKeywords(fits::KeywordVector::const_iterator begin,
                                                 fits::KeywordVector::const_iterator end,
                                                 KeywordFormatter& fmt) {
    auto const& logger = log4cplus::Logger::getInstance("daq.dpmmerge");
    std::vector<fits::LiteralKeyword> result;
    LOG4CPLUS_DEBUG(logger, "Formatting keywords ...");
    std::transform(
        begin, end, std::back_inserter(result), [&](auto const& kw) -> fits::LiteralKeyword {
            LOG4CPLUS_DEBUG(logger, "Formatting keyword input: \"" << kw << "\"");
            auto formatted = fmt.FormatKeyword(kw);
            LOG4CPLUS_DEBUG(logger, "Formatting keyword result: \"" << formatted << "\"");
            return formatted;
        });
    LOG4CPLUS_DEBUG(logger, "Formatting keywords done.");
    return result;
}

template <class Container>
void LogKeywords(log4cplus::Logger const& logger, Container const& keywords) {
    std::for_each(std::begin(keywords), std::end(keywords), [&](auto const& kw) {
        LOG4CPLUS_DEBUG(logger, kw);
    });
}

void MergePrimaryHduKeywords(Operations ops,
                             Params const& params,
                             TargetSource& target,
                             std::vector<SourceTypes> const& sources,
                             bool dry_run) {
    auto const& logger = ops.logger;
    LOG4CPLUS_INFO(logger, "Merge primary HDU keywords");
    LOG4CPLUS_INFO(logger, "Compile primary keywords");
    constexpr const int primary_hdu_num = 1;
    auto primary_hdu_keywords = CompilePrimaryHduKeywords(ops, target, sources);

    // Add ORIGFILE and ARCFILE last (will be sorted to be last of value keywords)
    fits::KeywordVector mandatory;
    mandatory.emplace_back(std::in_place_type<fits::ValueKeyword>, "ORIGFILE", params.origfile);
    mandatory.emplace_back(std::in_place_type<fits::ValueKeyword>, "ARCFILE", params.arcfile);
    fits::InsertKeywords(
        primary_hdu_keywords, primary_hdu_keywords.end(), mandatory.begin(), mandatory.end());

    LOG4CPLUS_INFO(logger, "Format keywords");
    auto formatted = FormatKeywords(
        std::begin(primary_hdu_keywords), std::end(primary_hdu_keywords), ops.keyword_formatter);
    {
        LOG4CPLUS_INFO(logger, "Sort keywords");
        ops.keyword_sorter.SortKeywords(formatted);
        LOG4CPLUS_DEBUG(logger, "Sorted keywords");
        LogKeywords(logger, formatted);
    }

    // Write keywords back
    // Write keywords will make room as necessary.
    if (!dry_run) {
        LOG4CPLUS_INFO(logger, "Clear keywords to make room for writing back sorted keywords.");
        fits::DeleteAllKeywords(target.GetFitsFile(), primary_hdu_num);
        LOG4CPLUS_INFO(logger, "Writing keywords");
        std::optional<ssize_t> remaining_size;
        fits::WriteKeywords(target.GetFitsFile(), primary_hdu_num, formatted, &remaining_size);
        if (remaining_size) {
            if (*remaining_size < 0) {
                auto needed = -*remaining_size;
                // Reallocation occurred
                auto msg = fmt::format(
                    "Writing keywords required resizing of primary HDU: Add space for at least {} "
                    "keywords to avoid resize",
                    needed);
                ops.status_reporter.PostAlert("primary_hdu_resize", msg);
            } else {
                auto msg = fmt::format("Primary HDU keyword space remaining: {} ", *remaining_size);
                LOG4CPLUS_INFO(logger, msg);
            }
        }
    } else {
        LOG4CPLUS_INFO(logger, "Writing keywords SKIPPED (dry-run)");
    }
}

/**
 * Copy all extensions from source to target
 */
void CopyExtensions(Operations ops,
                    TargetSource& target,
                    FitsFileSource const& source,
                    bool dry_run) {
    auto const& logger = ops.logger;
    LOG4CPLUS_INFO(
        logger,
        "Merging HDU extensions from " << source.GetName() << "(" << source.GetFilePath() << ")");

    int status = 0;
    fitsfile* source_fits = source.GetFitsFile();
    fitsfile* target_fits = target.GetFitsFile();
    int num_hdus = 0;
    fits_get_num_hdus(source_fits, &num_hdus, &status);
    if (status != 0) {
        auto msg =
            fmt::format("Failed to get number of HDUs from '{}'", source.GetFilePath().c_str());
        LOG4CPLUS_ERROR(logger, msg);
        throw fits::CfitsioError(status, msg);
    }

    if (num_hdus == 1) {
        // @todo There should be a proper check of the first HDU to see that it does not contain
        // any data!
        LOG4CPLUS_INFO(ops.logger,
                       "Note: No HDU extensions to merge from " << source.GetFilePath());
        return;
    }

    // Select HDU 2 to copy
    fits::SelectHduNum(source_fits, 2);
    if (!dry_run) {
        // @todo Execute HDU by HDU to allow aborting in-between?
        int previous = false;
        int current = true;
        int following = true;
        fits_copy_file(source_fits, target_fits, previous, current, following, &status);
        if (status != 0) {
            auto const msg = "FITS function fits_copy_file failed";
            LOG4CPLUS_ERROR(logger, msg);
            throw fits::CfitsioError(status, msg);
        }
    } else {
        LOG4CPLUS_INFO(logger,
                       "Merging HDU extensions from " << source.GetName() << "("
                                                      << source.GetFilePath()
                                                      << ") SKIPPED (dry-run)");
    }
}

void MergeHduExtensions(Operations ops,
                        TargetSource& target,
                        std::vector<SourceTypes> const& sources,
                        bool dry_run) {
    auto const& logger = ops.logger;
    LOG4CPLUS_INFO(logger, "Merging HDU extensions");

    for (auto const& source_var : sources) {
        if (!std::holds_alternative<FitsFileSource>(source_var)) {
            continue;
        }
        FitsFileSource const& source = std::get<FitsFileSource>(source_var);
        try {
            CopyExtensions(ops, target, source, dry_run);
        } catch (...) {
            std::throw_with_nested(
                std::runtime_error(fmt::format("Failed to copy HDU extensions from '{}' to '{}'",
                                               source.GetFilePath().c_str(),
                                               target.GetFilePath().c_str())));
        }
    }
    LOG4CPLUS_INFO(logger, "Merging HDU extensions completed successfully");
}

void UpdateChecksums(Operations ops, TargetSource& target, bool dry_run) {
    auto const& logger = ops.logger;
    LOG4CPLUS_INFO(logger, "Updating checksums for all HDUs");
    int status = 0;
    fitsfile* target_fits = target.GetFitsFile();
    int num_hdus = 0;
    fits_get_num_hdus(target_fits, &num_hdus, &status);
    if (status != 0) {
        auto msg =
            fmt::format("Failed to get number of HDUs from '{}'", target.GetFilePath().c_str());
        LOG4CPLUS_ERROR(logger, msg);
        throw fits::CfitsioError(status, msg);
    }

    if (!dry_run) {
        for (int hdu_num = 1; hdu_num <= num_hdus; ++hdu_num) {
            LOG4CPLUS_DEBUG(logger, "Updating checksum for HDU " << hdu_num);
            fits::WriteChecksum(target_fits, hdu_num);
        }
    } else {
        LOG4CPLUS_INFO(logger, "Updating checksum keywords SKIPPED (dry-run)");
    }
    LOG4CPLUS_INFO(logger, "Updating checksum keywords completed for all HDUs successfully");
}

void Merge(Operations ops,
           Params const& params,
           TargetSource& target,
           std::vector<SourceTypes> const& sources,
           bool dry_run) {
    auto const& logger = ops.logger;
    /*
     * 1. Merge target primary HDU keywords
     *    Since adding keywords to primary HDU may shift the data section we do this first, before
     *    copying any HDU extensions
     *
     *    1. Compile keywords to be merged to target primary HDU.
     *    2. Format and validate keywords.
     *       - Already formatted keywords (literal keywords) are compared with expected format.
     *       - Unformatted keywords are formatted using dictionary format string.
     *    3. Sort keywords
     *    4. Delete all existing keywords.
     *    5. Write the formatted keywords back.
     * 2. Copy HDU extensions to target.
     */
    // @todo Validate that there is no primary HDU data in any of the sources before starting the
    // process.
    LOG4CPLUS_INFO(logger, "Starting merge operation");
    try {
        MergePrimaryHduKeywords(ops, params, target, sources, dry_run);
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to merge primary HDU keywords"));
    }

    try {
        MergeHduExtensions(ops, target, sources, dry_run);
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to merge HDU extensions"));
    }
    try {
        UpdateChecksums(ops, target, dry_run);
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to update checksums"));
    }

    LOG4CPLUS_INFO(logger, "Completed successfully");
}

}  // namespace daq::dpm::merge
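
The functions in this file wrap low-level failures with std::throw_with_nested, so a caller normally unwinds the nested chain when reporting an error. The sketch below is illustrative only and not part of merge.cpp: it assumes that the Operations, Params, TargetSource and SourceTypes instances have already been prepared elsewhere in the DPM workflow, and it uses only Merge() from this file plus the standard library. RunMerge and PrintExceptionChain are hypothetical names.

// Hypothetical caller sketch (not part of merge.cpp); ops/params/target/sources
// are assumed to be set up elsewhere in the DPM code.
#include <exception>
#include <iostream>
#include <string>
#include <vector>

#include "merge.hpp"

namespace {

// Print an exception and every exception nested into it via std::throw_with_nested.
void PrintExceptionChain(std::exception const& e, int level = 0) {
    std::cerr << std::string(level * 2, ' ') << e.what() << '\n';
    try {
        std::rethrow_if_nested(e);
    } catch (std::exception const& nested) {
        PrintExceptionChain(nested, level + 1);
    } catch (...) {
        // Nested exception not derived from std::exception: nothing more to report.
    }
}

}  // namespace

void RunMerge(daq::dpm::merge::Operations ops,
              daq::dpm::merge::Params const& params,
              daq::dpm::merge::TargetSource& target,
              std::vector<daq::dpm::merge::SourceTypes> const& sources) {
    try {
        // First pass as dry-run: compiles, formats and sorts keywords but skips all writes.
        daq::dpm::merge::Merge(ops, params, target, sources, /*dry_run=*/true);
        // Second pass performs the actual merge.
        daq::dpm::merge::Merge(ops, params, target, sources, /*dry_run=*/false);
    } catch (std::exception const& e) {
        // Prints e.g. "Failed to merge primary HDU keywords" followed by the root cause.
        PrintExceptionChain(e);
        throw;
    }
}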