LALBurst 2.0.7.1-eeff03c
lalburst_power_calc_likelihood.py
Go to the documentation of this file.
1##python
2#
3# Copyright (C) 2006--2010 Kipp Cannon
4#
5# This program is free software; you can redistribute it and/or modify it
6# under the terms of the GNU General Public License as published by the
7# Free Software Foundation; either version 2 of the License, or (at your
8# option) any later version.
9#
10# This program is distributed in the hope that it will be useful, but
11# WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
13# Public License for more details.
14#
15# You should have received a copy of the GNU General Public License along
16# with this program; if not, write to the Free Software Foundation, Inc.,
17# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18
19
20#
21# =============================================================================
22#
23# Preamble
24#
25# =============================================================================
26#
27
28
29from optparse import OptionParser
30import sys
31import sqlite3
32
33
34from igwn_ligolw import dbtables
35from lal.utils import CacheEntry
36from lalburst import git_version
37from lalburst import burca_tailor
38from lalburst import calc_likelihood
39from lalburst import SnglBurstUtils
40
41
42__author__ = "Kipp Cannon <kipp.cannon@ligo.org>"
43__version__ = "git id %s" % git_version.id
44__date__ = git_version.date
45
46
47#
48# =============================================================================
49#
50# Command Line
51#
52# =============================================================================
53#
54
55
def parse_command_line():
	"""
	Parse the command line, returning an (options, filenames) tuple.

	filenames is the list of positional arguments; if none were given
	it is replaced with [None] so the main loop still runs once.

	Raises ValueError if neither or both of --likelihood-data and
	--likelihood-data-cache are given, or if --program is missing.
	"""
	parser = OptionParser(
		version = "Name: %%prog\n%s" % git_version.verbose_msg,
		usage = "%prog [options] [file ...]",
		description = "%prog uses likelihood ratio data stored in LIGO Light-Weight XML files to compute likelihood ratio values for excess power coincs in SQLite databases."
	)
	parser.add_option("-c", "--comment", metavar = "text", help = "Set comment string in process table (default = None).")
	parser.add_option("-p", "--program", metavar = "name", help = "Set the name of the program that generated the events as it appears in the process table (required). The program name is used to extract live time information from the search summary tables in the input files.")
	parser.add_option("--likelihood-data", metavar = "filename", default = [], action = "append", help = "Read likelihood data from this XML file. (use lalburst_power_meas_likelihood to generate these files)")
	parser.add_option("--likelihood-data-cache", metavar = "filename", help = "Read likelihood data from the XML files described by this LAL cache. For each trigger file, the live time of the trigger file is established and all likelihood data files whose segments intersect the trigger file's live time are loaded and merged into a single distribution data set. (use lalburst_power_meas_likelihood to generate these files)")
	parser.add_option("--tmp-space", metavar = "path", help = "Path to a directory suitable for use as a work area while manipulating the database file. The database file will be worked on in this directory, and then moved to the final location when complete. This option is intended to improve performance when running in a networked environment, where there might be a local disk with higher bandwidth than is available to the filesystem on which the final output will reside.")
	parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.")
	options, filenames = parser.parse_args()

	#
	# check and convert a bunch of arguments
	#

	options.likelihood_data = set(options.likelihood_data)
	if (not options.likelihood_data) and (options.likelihood_data_cache is None):
		raise ValueError("must set one of --likelihood-data or --likelihood-data-cache")
	if options.likelihood_data and (options.likelihood_data_cache is not None):
		raise ValueError("cannot set both --likelihood-data and --likelihood-data-cache")
	if options.likelihood_data_cache:
		# bug fix:  the Python 2 file() builtin no longer exists in
		# Python 3 (this script otherwise targets Python 3 --- see
		# the print(..., file=...) calls below);  use open() in a
		# context manager so the cache file is also closed promptly.
		with open(options.likelihood_data_cache) as cachefile:
			options.likelihood_data_cache = set(CacheEntry(line) for line in cachefile)
	else:
		options.likelihood_data_cache = set()
	if options.program is None:
		raise ValueError("missing required argument --program")

	#
	# done
	#

	return options, (filenames or [None])
91
92
93#
94# =============================================================================
95#
96# Main
97#
98# =============================================================================
99#
100
101
#
# Parse the command line
#


options, filenames = parse_command_line()
109
110#
111# How to load likelihood data
112#
113
114
def load_likelihood_data(filenames, verbose = False):
	"""
	Load the parameter distribution data from the given XML files,
	merging them into a single
	EPGalacticCoreCoincParamsDistributions instance, then finish()
	it so it is ready for likelihood-ratio evaluation.
	"""
	distributions, _ = burca_tailor.EPGalacticCoreCoincParamsDistributions.from_filenames(
		filenames,
		"lalburst_power_meas_likelihood",
		verbose = verbose
	)
	distributions.finish()
	return distributions
119
120
121#
122# Iterate over files.
123#
124
125
#
# Track which likelihood data files are currently loaded so that
# consecutive databases requiring the same set don't trigger a reload.
#


loaded_likelihood_files = set()


for file_number, filename in enumerate(filenames):
	#
	# Open the database, moving it to scratch space if requested.
	#


	if options.verbose:
		print("%d/%d: %s" % (file_number + 1, len(filenames), filename), file=sys.stderr)
	working_filename = dbtables.get_connection_filename(filename, tmp_path = options.tmp_space, verbose = options.verbose)
	connection = sqlite3.connect(str(working_filename))
	connection.execute("PRAGMA temp_store_directory = '%s';" % dbtables.tempfile.gettempdir())
	coinc_database = SnglBurstUtils.CoincDatabase(connection, options.program)
	if options.verbose:
		SnglBurstUtils.summarize_coinc_database(coinc_database)


	#
	# Determine which likelihood data files this database needs, and
	# (re)load the distributions only if that set has changed.
	#


	if options.likelihood_data_cache:
		required_files = {entry.path for entry in options.likelihood_data_cache if entry.segmentlistdict.intersects(coinc_database.seglists)}
	else:
		required_files = options.likelihood_data
	if required_files != loaded_likelihood_files:
		distributions = load_likelihood_data(required_files, verbose = options.verbose)
		loaded_likelihood_files = required_files


	#
	# Assign likelihood ratios to the coincs in this database.
	#


	calc_likelihood.assign_likelihood_ratios(
		connection = coinc_database.connection,
		coinc_def_id = coinc_database.bb_definer_id,
		offset_vectors = coinc_database.time_slide_table.as_dict(),
		vetoseglists = coinc_database.vetoseglists,
		events_func = lambda cursor, coinc_event_id: calc_likelihood.sngl_burst_events_func(cursor, coinc_event_id, coinc_database.sngl_burst_table.row_from_cols),
		veto_func = calc_likelihood.sngl_burst_veto_func,
		ln_likelihood_ratio_func = distributions.ln_lr_from_triggers,
		verbose = options.verbose
	)


	#
	# Close the database and move it back to its final location.
	#


	connection.close()
	dbtables.put_connection_filename(filename, working_filename, verbose = options.verbose)
# (doxygen member-index artifact: load_likelihood_data(filenames, verbose=False) is defined above)