29from optparse
import OptionParser
34from igwn_ligolw
import dbtables
36from lalburst
import git_version
37from lalburst
import burca_tailor
38from lalburst
import calc_likelihood
39from lalburst
import SnglBurstUtils
#
# Module metadata: authorship and version strings taken from the lalburst
# git version information (reported by the --version command line option).
#

__author__ = "Kipp Cannon <kipp.cannon@ligo.org>"
__version__ = "git id %s" % git_version.id
__date__ = git_version.date
def parse_command_line():
	"""
	Parse and validate the command line.

	Returns the (options, filenames) pair from OptionParser, with
	filenames replaced by [None] when no files are named (meaning
	"operate on stdin" downstream).

	Raises ValueError unless exactly one of --likelihood-data /
	--likelihood-data-cache is provided and --program is set.
	"""
	parser = OptionParser(
		version = "Name: %%prog\n%s" % git_version.verbose_msg,
		usage = "%prog [options] [file ...]",
		description = "%prog uses likelihood ratio data stored in LIGO Light-Weight XML files to compute likelihood ratio values for excess power coincs in SQLite databases."
	)
	parser.add_option("-c", "--comment", metavar = "text", help = "Set comment string in process table (default = None).")
	parser.add_option("-p", "--program", metavar = "name", help = "Set the name of the program that generated the events as it appears in the process table (required). The program name is used to extract live time information from the search summary tables in the input files.")
	parser.add_option("--likelihood-data", metavar = "filename", default = [], action = "append", help = "Read likelihood data from this XML file. (use lalburst_power_meas_likelihood to generate these files)")
	parser.add_option("--likelihood-data-cache", metavar = "filename", help = "Read likelihood data from the XML files described by this LAL cache. For each trigger file, the live time of the trigger file is established and all likelihood data files whose segments intersect the trigger file's live time are loaded and merged into a single distribution data set. (use lalburst_power_meas_likelihood to generate these files)")
	parser.add_option("--tmp-space", metavar = "path", help = "Path to a directory suitable for use as a work area while manipulating the database file. The database file will be worked on in this directory, and then moved to the final location when complete. This option is intended to improve performance when running in a networked environment, where there might be a local disk with higher bandwidth than is available to the filesystem on which the final output will reside.")
	parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.")
	options, filenames = parser.parse_args()

	# de-duplicate explicitly listed likelihood data files
	options.likelihood_data = set(options.likelihood_data)

	# exactly one of the two likelihood data sources must be given
	if not options.likelihood_data and options.likelihood_data_cache is None:
		raise ValueError("must set one of --likelihood-data or --likelihood-data-cache")
	if options.likelihood_data and options.likelihood_data_cache is not None:
		raise ValueError("cannot set both --likelihood-data and --likelihood-data-cache")

	# parse the LAL cache file into a set of CacheEntry objects, or
	# normalize to an empty set when the option is unused.
	# FIX: was the Python 2 file() builtin; this is a Python 3 script
	# (it uses print(..., file=sys.stderr)), so open() is required.
	if options.likelihood_data_cache:
		options.likelihood_data_cache = set(CacheEntry(line) for line in open(options.likelihood_data_cache))
	else:
		# NOTE(review): the else: line was lost in this mangled copy;
		# without it the cache set parsed above is clobbered.
		options.likelihood_data_cache = set()

	if options.program is None:
		raise ValueError("missing required argument --program")

	return options, (filenames or [None])
def load_likelihood_data(filenames, verbose = False):
	"""
	Load the parameter distribution data from the named likelihood
	data XML files, merge them, compute the derived (finished) data
	products, and return the distributions object.
	"""
	distributions, ignored = burca_tailor.EPGalacticCoreCoincParamsDistributions.from_filenames(filenames, "lalburst_power_meas_likelihood", verbose = verbose)
	# convert the raw counts into the form needed by the likelihood
	# ratio calculation
	distributions.finish()
	# FIX: the mangled fragment fell off the end without returning;
	# the caller needs the distributions object (its
	# .ln_lr_from_triggers method is used in the main loop).
	return distributions
#
# Main loop: for each input database, select the appropriate likelihood
# data, load it (only when the file set changes), and assign likelihood
# ratio values to the burst-burst coincs.
#

cached_likelihood_files = set()
distributions = None	# loaded lazily inside the loop below

for n, filename in enumerate(filenames):
	#
	# open the database, working on a local copy if --tmp-space was given
	#

	# NOTE(review): the verbose guard on this progress report was lost in
	# this mangled copy; restored here — confirm against upstream.
	if options.verbose:
		print("%d/%d: %s" % (n + 1, len(filenames), filename), file=sys.stderr)
	working_filename = dbtables.get_connection_filename(filename, tmp_path = options.tmp_space, verbose = options.verbose)
	connection = sqlite3.connect(str(working_filename))
	connection.execute("PRAGMA temp_store_directory = '%s';" % dbtables.tempfile.gettempdir())
	database = SnglBurstUtils.CoincDatabase(connection, options.program)
	if options.verbose:
		SnglBurstUtils.summarize_coinc_database(database)

	#
	# select the likelihood data files for this database
	#

	if options.likelihood_data_cache:
		# only cache entries whose segments intersect this trigger
		# file's live time contribute
		likelihood_files = set(c.path for c in options.likelihood_data_cache if c.segmentlistdict.intersects(database.seglists))
	else:
		# FIX: the else: line was lost in this mangled copy; without
		# it the cache-derived file set is always overwritten.
		likelihood_files = options.likelihood_data

	#
	# (re)load and merge the likelihood data, but only when the file
	# set differs from the one already loaded
	#

	if likelihood_files != cached_likelihood_files:
		# NOTE(review): the load call was lost in this mangled copy;
		# without it `distributions` is undefined when used below.
		distributions = load_likelihood_data(likelihood_files, verbose = options.verbose)
		cached_likelihood_files = likelihood_files

	#
	# assign likelihood ratios to the coincs
	#

	calc_likelihood.assign_likelihood_ratios(
		connection = database.connection,
		coinc_def_id = database.bb_definer_id,
		offset_vectors = database.time_slide_table.as_dict(),
		vetoseglists = database.vetoseglists,
		events_func = lambda cursor, coinc_event_id: calc_likelihood.sngl_burst_events_func(cursor, coinc_event_id, database.sngl_burst_table.row_from_cols),
		veto_func = calc_likelihood.sngl_burst_veto_func,
		ln_likelihood_ratio_func = distributions.ln_lr_from_triggers,
		verbose = options.verbose
	)

	#
	# close the connection before moving the working copy back to its
	# final location (put_connection_filename relocates the file, so
	# the connection must not still hold it open)
	#

	connection.commit()
	connection.close()
	dbtables.put_connection_filename(filename, working_filename, verbose = options.verbose)