Coverage for pesummary/tests/meta_file_test.py: 15.7%
178 statements
« prev ^ index » next coverage.py v7.4.4, created at 2024-12-09 22:34 +0000
1# Licensed under an MIT style license -- see LICENSE.md
3import json
4import os
5import shutil
7import h5py
8import numpy as np
10from pesummary.core.file import meta_file as core_meta_file
11from pesummary.gw.file import meta_file
12from pesummary.gw.file.meta_file import _GWMetaFile
13from pesummary.gw.cli.inputs import MetaFileInput
14from pesummary.utils.samples_dict import SamplesDict
15from pesummary.utils.array import Array
16from .base import data_dir
17import tempfile
# Name of a hidden scratch directory shared by the TestMetaFile class below.
# Only the *name* is kept — the TemporaryDirectory object itself is discarded,
# so the directory is (re)created in setup_method and removed in teardown_method.
tmpdir_main = tempfile.TemporaryDirectory(prefix=".", dir=".").name

__author__ = ["Charlie Hoy <charlie.hoy@ligo.org>"]
def test_recursively_save_dictionary_to_hdf5_file():
    """Test that a nested dictionary of posterior samples is correctly
    written to an hdf5 file by
    `core_meta_file.recursively_save_dictionary_to_hdf5_file`
    """
    tmpdir = tempfile.TemporaryDirectory(prefix=".", dir=".").name
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    os.makedirs(tmpdir)

    data = {
        "H1_L1_IMRPhenomPv2": {
            "posterior_samples": {
                "parameters": ["mass_1", "mass_2"],
                "samples": [[10, 2], [50, 5], [100, 90]]
            },
        },
        "H1_L1_IMRPhenomP": {
            "posterior_samples": {
                "parameters": ["ra", "dec"],
                "samples": [[0.5, 0.8], [1.2, 0.4], [0.9, 1.5]]
            },
        },
        "H1_SEOBNRv4": {
            "posterior_samples": {
                "parameters": ["psi", "phi"],
                "samples": [[1.2, 0.2], [3.14, 0.1], [0.5, 0.3]]
            }
        },
    }

    with h5py.File("{}/test.h5".format(tmpdir), "w") as f:
        core_meta_file.recursively_save_dictionary_to_hdf5_file(
            f, data, extra_keys=list(data.keys()))

    # open read-only inside a context manager so the handle is always
    # closed (previously the file was opened and never closed)
    with h5py.File("{}/test.h5".format(tmpdir), "r") as f:
        assert sorted(list(f.keys())) == sorted(list(data.keys()))
        for label, group in data.items():
            expected = group["posterior_samples"]
            stored = f["{}/posterior_samples".format(label)]
            assert sorted(list(stored.keys())) == sorted(
                ["parameters", "samples"]
            )
            # h5py returns the parameter names as bytes; decode before
            # comparing with the input strings
            assert [
                p.decode("utf-8") for p in stored["parameters"]
            ] == expected["parameters"]
            # each stored row must match the corresponding input row
            for row, expected_row in zip(stored["samples"], expected["samples"]):
                np.testing.assert_array_equal(row, expected_row)
def test_softlinks():
    """Test that duplicated entries in a dictionary are replaced with
    'softlink' placeholders by `_GWMetaFile._create_softlinks` and that
    this reduces the size of the resulting hdf5 file
    """
    tmpdir = tempfile.TemporaryDirectory(prefix=".", dir=".").name
    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    os.makedirs(tmpdir)

    data = {
        "label1": {
            "psds": {
                "H1": [[10, 20], [30, 40]],
                "L1": [[10, 20], [30, 40]]
            },
            "config_file": {
                "paths": {
                    "webdir": "example/webdir"
                },
                "condor": {
                    "lalsuite-install": "/example/install",
                    "executable": "%(lalsuite-install)s/executable",
                    "memory": 1000
                },
            },
        },
        "label2": {
            "psds": {
                "H1": [[10, 22], [30, 40]],
                "L1": [[10, 20], [30, 45]]
            },
            "config_file": {
                "paths": {
                    "webdir": "example/webdir2"
                },
                "condor": {
                    "lalsuite-install": "/example/install2",
                    "executable": "%(lalsuite-install)s/executable",
                    "memory": 1000.0
                }
            }
        }
    }

    simlinked_dict = _GWMetaFile._create_softlinks(data)
    repeated_entries = [
        {
            "label1/psds/H1": [
                data["label1"]["psds"]["H1"],
                simlinked_dict["label1"]["psds"]["H1"]
            ],
            "label1/psds/L1": [
                data["label1"]["psds"]["L1"],
                simlinked_dict["label1"]["psds"]["L1"]
            ]
        },
        {
            "label1/config_file/condor/executable": [
                data["label1"]["config_file"]["condor"]["executable"],
                simlinked_dict["label1"]["config_file"]["condor"]["executable"]
            ],
            "label2/config_file/condor/executable": [
                data["label2"]["config_file"]["condor"]["executable"],
                simlinked_dict["label2"]["config_file"]["condor"]["executable"]
            ]
        },
        {
            "label1/config_file/condor/memory": [
                data["label1"]["config_file"]["condor"]["memory"],
                simlinked_dict["label1"]["config_file"]["condor"]["memory"]
            ],
            "label2/config_file/condor/memory": [
                data["label2"]["config_file"]["condor"]["memory"],
                simlinked_dict["label2"]["config_file"]["condor"]["memory"]
            ]
        }
    ]
    for repeat in repeated_entries:
        keys = list(repeat.keys())
        # exactly one entry of each duplicated pair should be replaced by a
        # softlink pointing at the other; the other keeps its original value
        assert (
            repeat[keys[0]][1] == "softlink:/{}".format(keys[1])
            and repeat[keys[1]][1] == repeat[keys[1]][0]
        ) or (
            repeat[keys[1]][1] == "softlink:/{}".format(keys[0])
            and repeat[keys[0]][1] == repeat[keys[0]][0]
        )

    with h5py.File("{}/test.h5".format(tmpdir), "w") as f:
        core_meta_file.recursively_save_dictionary_to_hdf5_file(
            f, simlinked_dict,
            extra_keys=meta_file.DEFAULT_HDF5_KEYS + ["label1", "label2"])

    with h5py.File("{}/no_softlink.h5".format(tmpdir), "w") as f:
        core_meta_file.recursively_save_dictionary_to_hdf5_file(
            f, data,
            extra_keys=meta_file.DEFAULT_HDF5_KEYS + ["label1", "label2"])

    # the file written with softlinks should be strictly smaller on disk
    softlink_size = os.stat("{}/test.h5".format(tmpdir)).st_size
    no_softlink_size = os.stat('{}/no_softlink.h5'.format(tmpdir)).st_size
    assert softlink_size < no_softlink_size

    with h5py.File("{}/test.h5".format(tmpdir), "r") as f:
        # softlinked entries must resolve to identical stored data
        assert \
            f["label2"]["config_file"]["condor"]["executable"][0] == \
            f["label1"]["config_file"]["condor"]["executable"][0]
        np.testing.assert_array_equal(
            f["label1"]["psds"]["H1"][0], f["label1"]["psds"]["L1"][0]
        )
        np.testing.assert_array_equal(
            f["label1"]["psds"]["H1"][1], f["label1"]["psds"]["L1"][1]
        )
class TestMetaFile(object):
    """Class to test the pesummary.gw.file.meta_file._GWMetaFile class
    """
    def setup_method(self):
        """Setup the Test class: build random posterior samples plus psd,
        calibration and config inputs, write them to both a json and an
        hdf5 metafile, and load both files back for the tests to inspect
        """
        if not os.path.isdir("{}/samples".format(tmpdir_main)):
            os.makedirs("{}/samples".format(tmpdir_main))

        self.samples = np.array([np.random.random(10) for i in range(15)])
        self.input_parameters = [
            "mass_1", "mass_2", "a_1", "a_2", "tilt_1", "tilt_2", "phi_jl",
            "phi_12", "psi", "theta_jn", "ra", "dec", "luminosity_distance",
            "geocent_time", "log_likelihood"]
        self.input_data = {"EXP1": SamplesDict(self.input_parameters, self.samples)}
        # overwrite the distance samples so they lie in a physical range
        distance = np.random.random(10) * 500
        self.input_data["EXP1"]["luminosity_distance"] = Array(distance)
        self.input_labels = ["EXP1"]
        self.input_file_version = {"EXP1": "3.0"}
        self.input_injection_data = np.random.random(15)
        self.input_injection = {"EXP1": {
            i: j for i, j in zip(self.input_parameters, self.input_injection_data)}}
        self.input_file_kwargs = {"EXP1": {
            "sampler": {"flow": 10}, "meta_data": {"samplerate": 10}
        }}
        self.input_config = [data_dir + "/config_lalinference.ini"]
        psd_data = MetaFileInput.extract_psd_data_from_file(data_dir + "/psd_file.txt")
        self.psds = {"EXP1": {"H1": psd_data}}
        calibration_data = MetaFileInput.extract_calibration_data_from_file(
            data_dir + "/calibration_envelope.txt")
        self.calibration = {"EXP1": {"H1": calibration_data}}

        # named 'metafile' rather than 'object' to avoid shadowing the builtin
        metafile = _GWMetaFile(
            self.input_data, self.input_labels, self.input_config,
            self.input_injection, self.input_file_version, self.input_file_kwargs,
            webdir=tmpdir_main, psd=self.psds, calibration=self.calibration)
        metafile.make_dictionary()
        metafile.save_to_json(metafile.data, metafile.meta_file)
        metafile = _GWMetaFile(
            self.input_data, self.input_labels, self.input_config,
            self.input_injection, self.input_file_version, self.input_file_kwargs,
            webdir=tmpdir_main, psd=self.psds, calibration=self.calibration,
            hdf5=True)
        metafile.make_dictionary()
        metafile.save_to_hdf5(
            metafile.data, metafile.labels, metafile.samples, metafile.meta_file
        )

        with open("{}/samples/posterior_samples.json".format(tmpdir_main), "r") as f:
            self.json_file = json.load(f)
        # kept open for the duration of the test; closed in teardown_method
        self.hdf5_file = h5py.File(
            "{}/samples/posterior_samples.h5".format(tmpdir_main), "r"
        )

    def teardown_method(self):
        """Remove all files and directories created from this class
        """
        self.hdf5_file.close()
        if os.path.isdir(tmpdir_main):
            shutil.rmtree(tmpdir_main)

    def test_parameters(self):
        """Test the parameters stored in the metafile
        """
        for num, data in enumerate([self.json_file, self.hdf5_file]):
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            if num == 0:
                # json storage: parameters live in a 'parameter_names' list
                assert list(
                    sorted(data["EXP1"]["posterior_samples"].keys())) == [
                    "parameter_names", "samples"]
                stored = sorted(data["EXP1"]["posterior_samples"]["parameter_names"])
            else:
                # hdf5 storage: parameters are the field names of a
                # structured array
                stored = sorted(data["EXP1"]["posterior_samples"].dtype.names)
            try:
                # names may come back as bytes depending on the h5py version
                assert all(
                    i.decode("utf-8") == j for i, j in zip(
                        stored, sorted(self.input_parameters)))
            except AttributeError:
                assert all(
                    i == j for i, j in zip(
                        stored, sorted(self.input_parameters)))

    def test_samples(self):
        """Test the samples stored in the metafile
        """
        for num, data in enumerate([self.json_file, self.hdf5_file]):
            if num == 0:
                parameters = data["EXP1"]["posterior_samples"]["parameter_names"]
                samples = np.array(data["EXP1"]["posterior_samples"]["samples"]).T
            else:
                parameters = [j for j in data["EXP1"]["posterior_samples"].dtype.names]
                samples = np.array([j.tolist() for j in data["EXP1"]["posterior_samples"]]).T
            posterior_data = {"EXP1": {i: j for i, j in zip(parameters, samples)}}
            for param, samp in posterior_data["EXP1"].items():
                if isinstance(param, bytes):
                    param = param.decode("utf-8")
                # each stored sample must match the input to float precision
                for ind in np.arange(len(samp)):
                    np.testing.assert_almost_equal(
                        samp[ind], self.input_data["EXP1"][param][ind]
                    )

    def test_file_version(self):
        """Test the file version stored in the metafile
        """
        for data in [self.json_file, self.hdf5_file]:
            for i, j in zip(data["EXP1"]["version"], [self.input_file_version["EXP1"]]):
                version = i
                if isinstance(i, bytes):
                    version = version.decode("utf-8")
                assert version == j

    def test_meta_data(self):
        """Test the meta data stored in the metafile
        """
        for num, data in enumerate([self.json_file, self.hdf5_file]):
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            assert sorted(
                list(data["EXP1"]["meta_data"].keys())) == ["meta_data", "sampler"]
            # the keys of each meta_data sub-dictionary must agree
            assert all(
                all(
                    k == l for k, l in zip(
                        self.input_file_kwargs["EXP1"][i],
                        data["EXP1"]["meta_data"][j]
                    )
                ) for i, j in zip(
                    sorted(self.input_file_kwargs["EXP1"].keys()),
                    sorted(data["EXP1"]["meta_data"].keys())
                )
            )

            # the values may be stored either as scalars (json) or as
            # length-1 datasets (hdf5); try the scalar comparison first
            try:
                assert all(
                    all(
                        self.input_file_kwargs["EXP1"][i][k] == data["EXP1"]["meta_data"][j][l]
                        for k, l in zip(
                            self.input_file_kwargs["EXP1"][i],
                            data["EXP1"]["meta_data"][j]
                        )
                    ) for i, j in zip(
                        sorted(self.input_file_kwargs["EXP1"].keys()),
                        sorted(data["EXP1"]["meta_data"].keys())
                    )
                )
            except Exception:
                assert all(
                    all(
                        self.input_file_kwargs["EXP1"][i][k] == data["EXP1"]["meta_data"][j][l][0]
                        for k, l in zip(
                            self.input_file_kwargs["EXP1"][i],
                            data["EXP1"]["meta_data"][j]
                        )
                    ) for i, j in zip(
                        sorted(self.input_file_kwargs["EXP1"].keys()),
                        sorted(data["EXP1"]["meta_data"].keys())
                    )
                )

    def test_psd(self):
        """Test the psd is stored in the metafile
        """
        for data in [self.json_file, self.hdf5_file]:
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            assert list(
                data["EXP1"]["psds"].keys()) == ["H1"]
            for i, j in zip(self.psds["EXP1"]["H1"], data["EXP1"]["psds"]["H1"]):
                for k, l in zip(i, j):
                    assert k == l

    def test_calibration(self):
        """Test the calibration envelope is stored in the metafile
        """
        for data in [self.json_file, self.hdf5_file]:
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            assert list(
                data["EXP1"]["calibration_envelope"].keys()) == ["H1"]
            for i, j in zip(self.calibration["EXP1"]["H1"], data["EXP1"]["calibration_envelope"]["H1"]):
                for k, l in zip(i, j):
                    assert k == l

    def test_config(self):
        """Test the configuration file is stored in the metafile
        """
        import configparser

        # re-read the input config with case-sensitive option names so it
        # can be compared against what was stored in the metafile
        config_data = []
        for i in self.input_config:
            config = configparser.ConfigParser()
            config.optionxform = str
            config.read(i)
            config_data.append(config)

        for num, data in enumerate([self.json_file, self.hdf5_file]):
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            assert all(
                i == j for i, j in zip(
                    sorted(list(config_data[0].sections())),
                    sorted(list(data["EXP1"]["config_file"].keys()))))
            all_options = {
                i: {
                    j: k for j, k in config_data[0][i].items()
                } for i in config_data[0].sections()
            }

            # option names must agree section by section
            assert all(
                all(
                    k == l for k, l in zip(
                        sorted(all_options[i]),
                        sorted(data["EXP1"]["config_file"][j])
                    )
                ) for i, j in zip(
                    sorted(list(all_options.keys())),
                    sorted(list(data["EXP1"]["config_file"].keys()))
                )
            )

            if num == 0:
                # json stores option values as plain strings
                assert all(
                    all(
                        all_options[i][k] == data["EXP1"]["config_file"][j][l]
                        for k, l in zip(
                            sorted(all_options[i]),
                            sorted(data["EXP1"]["config_file"][j])
                        )
                    ) for i, j in zip(
                        sorted(list(all_options.keys())),
                        sorted(list(data["EXP1"]["config_file"].keys()))
                    )
                )
            if num == 1:
                # hdf5 stores option values as length-1 byte datasets
                assert all(
                    all(
                        all_options[i][k] == data["EXP1"]["config_file"][j][l][0].decode("utf-8")
                        for k, l in zip(
                            sorted(all_options[i]),
                            sorted(data["EXP1"]["config_file"][j])
                        )
                    ) for i, j in zip(
                        sorted(list(all_options.keys())),
                        sorted(list(data["EXP1"]["config_file"].keys()))
                    )
                )

    def test_injection_data(self):
        """Test the injection data stored in the metafile
        """
        for data in [self.json_file, self.hdf5_file]:
            assert sorted(list(data.keys())) == sorted(
                self.input_labels + ["version", "history"]
            )
            if data == self.json_file:
                # json stores the injection values positionally
                for num, i in enumerate(list(self.input_injection["EXP1"].keys())):
                    assert self.input_injection["EXP1"][i] == data["EXP1"]["injection_data"]["samples"][num]
            else:
                # hdf5 stores the injection values keyed by parameter name
                for num, i in enumerate(list(self.input_injection["EXP1"].keys())):
                    assert self.input_injection["EXP1"][i] == data["EXP1"]["injection_data"][i]