ifw-daq  3.0.0-pre2
IFW Data Acquisition modules
testMakeDpSpec.cpp
/**
 * @file
 * @ingroup daq_libdaq_test
 * @copyright
 * (c) Copyright ESO 2022
 * All Rights Reserved
 * ESO (eso.org) is an Intergovernmental Organisation, and therefore special legal conditions apply.
 */
#include <daq/makeDpSpec.hpp>

#include <gtest/gtest.h>
#include <log4cplus/loggingmacros.h>
namespace daq {
struct MakeDpSpecV1 : ::testing::Test {
    void SetUp() override {
        m_ctx.id = "id";
        m_ctx.file_id = "file_id";
        m_ctx.dp_name_prefix = "prefix";
        {
            auto& s = m_ctx.meta_sources.emplace_back();
            s.name = "meta";
            s.rr_uri = "zpb.rr://127.0.0.1/daq";
        }
        {
            m_ctx.results.emplace_back("location", "path");
            fits::KeywordVector keywords;
            keywords.push_back(fits::EsoKeyword("FOO BAR", "value"));
            m_ctx.results.emplace_back("location", keywords);
        }
    }
    log4cplus::Logger logger = log4cplus::Logger::getInstance("test");
    DaqContext m_ctx;
};

struct MakeDpSpecV2 : MakeDpSpecV1 {
    void SetUp() override {
        m_ctx.id = "id";
        m_ctx.file_id = "file_id";
        m_ctx.dp_name_prefix = "prefix";

        // Add specification that mirrors duplicated state as well as user configurable elements
        auto& spec = m_ctx.specification.emplace();
        spec.file_prefix = m_ctx.dp_name_prefix;
        spec.id = m_ctx.id;
        spec.file_prefix = "prefix";

        // Add 3 data sources and corresponding results
        // Merge order should be:
        // - primary
        // - meta1
        // - meta2
        {
            ds.source_name = "primary";
            ds.rr_uri = "zpb.rr://127.0.0.1/daq";
            spec.sources.emplace_back(ds);
            m_ctx.results.emplace_back(ds.source_name, "host:/path.fits");
        }
        {
            ds.source_name = "meta1";
            ds.rr_uri = "zpb.rr://127.0.0.1/daq";
            // Add keyword rules
            auto& rule = ds.keyword_rules.emplace_back(std::in_place_type<json::KeywordFilter>);
            std::get<json::KeywordFilter>(rule).selection_patterns.push_back("+e *");

            spec.sources.emplace_back(ds);
            // Insert first to exercise sorting
            m_ctx.results.emplace(std::begin(m_ctx.results), ds.source_name, "host:/path.fits");
        }
        {
            ds.source_name = "meta2";
            ds.rr_uri = "zpb.rr://127.0.0.1/daq";
            spec.sources.emplace_back(ds);
            // Insert first to exercise sorting
            m_ctx.results.emplace(std::begin(m_ctx.results), ds.source_name, "host:/path.fits");
        }
        {
            ds.source_name = "meta3";
            ds.rr_uri = "zpb.rr://127.0.0.1/daq";
            // Add keyword rules
            auto& rule = ds.keyword_rules.emplace_back(std::in_place_type<json::KeywordFilter>);
            std::get<json::KeywordFilter>(rule).selection_patterns.push_back("+e FOO BAR");

            spec.sources.emplace_back(ds);

            fits::KeywordVector v;
            v.emplace_back(std::in_place_type<fits::EsoKeyword>, "FOO BAR", true);
            m_ctx.results.emplace(std::begin(m_ctx.results), ds.source_name, v);
        }

        spec.merge_target.emplace().source_name = "primary";
    }
};

TEST_F(MakeDpSpecV1, DefaultDaqContextFails) {
    // Test
    EXPECT_THROW(MakeDataProductSpecification(DaqContext(), logger), std::invalid_argument);
}

TEST_F(MakeDpSpecV1, Success) {
    // Test
    auto original = MakeDataProductSpecification(m_ctx, logger);
    nlohmann::json j;
    to_json(j, original);

    // Check that it can be parsed by DPM
    json::DpSpec dpspec = json::ParseDpSpec(j);
}

TEST_F(MakeDpSpecV2, DefaultDaqContextFails) {
    // Test
    EXPECT_THROW(MakeDataProductSpecification(DaqContext(), logger), std::invalid_argument);
}

TEST_F(MakeDpSpecV2, SuccessWithMergeTarget) {
    // Setup
    ASSERT_EQ(m_ctx.id, "id");
    ASSERT_EQ(m_ctx.file_id, "file_id");
    ASSERT_EQ(m_ctx.dp_name_prefix, "prefix");

    // Test
    auto original = MakeDataProductSpecification(m_ctx, logger);
    nlohmann::json j;
    to_json(j, original);
    LOG4CPLUS_DEBUG("test", "Serialized DpSpec: " << j.dump(2));

    EXPECT_EQ(j["id"], "id");
    EXPECT_EQ(j["target"]["fileId"], "file_id");
    EXPECT_EQ(j["target"]["filePrefix"], "prefix");
    EXPECT_EQ(j["target"]["source"]["sourceName"], "primary") << "Merge target not as expected: ";

    EXPECT_EQ(j["sources"].size(), 3u);

    {
        EXPECT_EQ(j["sources"][0]["sourceName"], "meta1");
        EXPECT_EQ(j["sources"][0]["type"], "fitsFile");

        EXPECT_TRUE(j["sources"][0].contains("keywordRules"));
        EXPECT_EQ(j["sources"][0]["keywordRules"].size(), 1u);
        EXPECT_EQ(j["sources"][0]["keywordRules"][0]["selectionPatterns"][0], "+e *");
    }

    {
        EXPECT_EQ(j["sources"][1]["sourceName"], "meta2");
        EXPECT_EQ(j["sources"][1]["type"], "fitsFile");
        EXPECT_FALSE(j["sources"][1].contains("keywordRules"));
    }

    {
        EXPECT_EQ(j["sources"][2]["sourceName"], "meta3");
        EXPECT_EQ(j["sources"][2]["type"], "fitsKeywords");
        EXPECT_EQ(j["sources"][2]["keywords"].size(), 1u);
        EXPECT_EQ(j["sources"][2]["keywords"][0]["name"], "FOO BAR");

        EXPECT_TRUE(j["sources"][2].contains("keywordRules"));
        EXPECT_EQ(j["sources"][2]["keywordRules"].size(), 1u);
        EXPECT_EQ(j["sources"][2]["keywordRules"][0]["selectionPatterns"][0], "+e FOO BAR");
    }

    // Check that it can be parsed by DPM
    json::DpSpec dpspec = json::ParseDpSpec(j);
}

TEST_F(MakeDpSpecV2, SuccessWithoutMergeTarget) {
    // Test
    // Remove merge-target
    m_ctx.specification->merge_target = std::nullopt;

    auto original = MakeDataProductSpecification(m_ctx, logger);
    nlohmann::json j;
    to_json(j, original);

    LOG4CPLUS_DEBUG("test", "Serialized DpSpec: " << j.dump(2));

    EXPECT_EQ(j["id"], "id");
    EXPECT_EQ(j["target"]["fileId"], "file_id");
    EXPECT_EQ(j["target"]["filePrefix"], "prefix");
    EXPECT_FALSE(j["target"].contains("source"));

    EXPECT_EQ(j["sources"].size(), 4u);

    {
        EXPECT_EQ(j["sources"][0]["sourceName"], "primary");
        EXPECT_EQ(j["sources"][0]["type"], "fitsFile");
        EXPECT_FALSE(j["sources"][0].contains("keywordRules"));
    }
    {
        EXPECT_EQ(j["sources"][1]["sourceName"], "meta1");
        EXPECT_EQ(j["sources"][1]["type"], "fitsFile");

        EXPECT_TRUE(j["sources"][1].contains("keywordRules"));
        EXPECT_EQ(j["sources"][1]["keywordRules"].size(), 1u);
        EXPECT_EQ(j["sources"][1]["keywordRules"][0]["selectionPatterns"][0], "+e *");
    }

    {
        EXPECT_EQ(j["sources"][2]["sourceName"], "meta2");
        EXPECT_EQ(j["sources"][2]["type"], "fitsFile");
        EXPECT_FALSE(j["sources"][2].contains("keywordRules"));
    }

    {
        EXPECT_EQ(j["sources"][3]["sourceName"], "meta3");
        EXPECT_EQ(j["sources"][3]["type"], "fitsKeywords");
        EXPECT_EQ(j["sources"][3]["keywords"].size(), 1u);
        EXPECT_EQ(j["sources"][3]["keywords"][0]["name"], "FOO BAR");

        EXPECT_TRUE(j["sources"][3].contains("keywordRules"));
        EXPECT_EQ(j["sources"][3]["keywordRules"].size(), 1u);
        EXPECT_EQ(j["sources"][3]["keywordRules"][0]["selectionPatterns"][0], "+e FOO BAR");
    }

    // Check that it can be parsed by DPM
    json::DpSpec dpspec = json::ParseDpSpec(j);
}
} // namespace daq
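
Taken together, the fixtures and tests above exercise a single call pattern: populate a DaqContext, build the specification with MakeDataProductSpecification(), serialize it via to_json(), and check that json::ParseDpSpec() accepts the result. The following is a minimal sketch of that pattern only, not code from this file; the MakeAndParseExample function name and the placeholder values are invented for illustration, and a real context would also carry the data sources and results the fixtures add.

// Minimal usage sketch, assuming the same headers and namespaces as the
// listing above. Values and the helper name are illustrative only.
#include <daq/makeDpSpec.hpp>

#include <log4cplus/logger.h>

namespace daq {
json::DpSpec MakeAndParseExample(log4cplus::Logger& logger) {
    DaqContext ctx;
    ctx.id = "id";
    ctx.file_id = "file_id";
    ctx.dp_name_prefix = "prefix";
    // ... data sources and results would be populated here, as in the fixtures above.

    // Build the Data Product Specification and serialize it to JSON.
    auto spec = MakeDataProductSpecification(ctx, logger);
    nlohmann::json j;
    to_json(j, spec);

    // Round trip: DPM parses the same JSON back into the typed structure.
    return json::ParseDpSpec(j);
}
} // namespace daq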