package Subs::BAT2FITS; ############################################################################## # # DESCRIPTION: This subroutine Converts BAT telemetry to FITS files. # DESCRIPTION: It then merges event and rate data files. # # HISTORY: # HISTORY: $Log: BAT2FITS.pm,v $ # HISTORY: Revision 1.51 2009/02/17 18:10:33 apsop # HISTORY: Fix bug in adding time offset to event files. # HISTORY: # HISTORY: Revision 1.50 2008/05/15 19:24:21 apsop # HISTORY: Add in a small time offset to event times to aviod binning artifacts # HISTORY: # HISTORY: Revision 1.49 2007/08/20 20:38:11 apsop # HISTORY: Another scaled map bug fix. # HISTORY: # HISTORY: Revision 1.48 2007/08/20 17:17:09 apsop # HISTORY: Fix bugs in new scaled map code. # HISTORY: # HISTORY: Revision 1.47 2007/07/06 20:00:36 apsop # HISTORY: Allow for scaled maps with non-zero indexes. # HISTORY: # HISTORY: Revision 1.46 2007/06/28 21:00:36 apsop # HISTORY: Merge scaled maps across observations using the shared repository. # HISTORY: # HISTORY: Revision 1.45 2007/04/02 19:06:46 apsop # HISTORY: Remove gzipped bcat files after use. # HISTORY: # HISTORY: Revision 1.44 2007/04/02 13:43:36 apsop # HISTORY: Remove bcat files gotten from repository after use. # HISTORY: # HISTORY: Revision 1.43 2006/08/01 20:08:35 apsop # HISTORY: Remove any zero exposure americium DPH files # HISTORY: # HISTORY: Revision 1.42 2006/04/28 18:38:29 apsop # HISTORY: Do not try and fetch a catalogue if there is no time info. # HISTORY: # HISTORY: Revision 1.41 2006/01/20 19:40:30 apsop # HISTORY: No longer bother to fetch from repos cat files of type rcatalog. # HISTORY: # HISTORY: Revision 1.40 2006/01/18 16:38:11 apsop # HISTORY: Fix bug in event file renaming. # HISTORY: # HISTORY: Revision 1.39 2006/01/11 21:02:34 apsop # HISTORY: Corrections to bat event list file names. # HISTORY: # HISTORY: Revision 1.38 2005/12/19 16:08:38 apsop # HISTORY: Append tdrss lc attitude data to tdrss lightcure. 
Move exporting of catalogues to SW0Wrapup # HISTORY: # HISTORY: Revision 1.37 2005/11/29 17:38:30 apsop # HISTORY: Fix up file names for bat failed trigger HK files. # HISTORY: # HISTORY: Revision 1.36 2005/11/08 16:59:51 apsop # HISTORY: Comment out the production of the trimmed bat catalog. # HISTORY: # HISTORY: Revision 1.35 2005/09/26 21:30:40 apsop # HISTORY: Fetch and export bat catalogs in the repository. Make restricted bat catalogue file for housekeeping. # HISTORY: # HISTORY: Revision 1.34 2005/08/30 13:51:33 apsop # HISTORY: Mods for newest version of bat2fits, which takes .queue file as input. # HISTORY: # HISTORY: Revision 1.33 2005/06/01 17:36:34 apsop # HISTORY: Handle failed BAT triggers in second pass, but only if this is the final processing. # HISTORY: # HISTORY: Revision 1.32 2005/05/04 21:30:03 apsop # HISTORY: Updated type of raw mask tagged light curves. # HISTORY: # HISTORY: Revision 1.31 2005/05/04 13:17:00 apsop # HISTORY: Fix bug in renaming of files for mission=st # HISTORY: # HISTORY: Revision 1.30 2005/04/19 16:03:10 apsop # HISTORY: Do not use override option for st mission, jsut rename files. Change names of rate files. # HISTORY: # HISTORY: Revision 1.29 2005/03/25 20:16:39 apsop # HISTORY: Fix up file names for hk event files. # HISTORY: # HISTORY: Revision 1.28 2004/12/10 02:18:11 apsop # HISTORY: Set override to yes in bat2fits only if mission is not sw. # HISTORY: # HISTORY: Revision 1.27 2004/12/07 15:31:01 apsop # HISTORY: Set lpdspan=no in bat2fits; rename tdrss messages is mission is not sw. # HISTORY: # HISTORY: Revision 1.26 2004/12/05 23:22:34 apsop # HISTORY: Add override option to bat2fits. # HISTORY: # HISTORY: Revision 1.25 2004/09/15 22:30:27 apsop # HISTORY: Changes to account for different convention for naming mask tagged weights and lightcurves. 
# HISTORY:
# HISTORY: Revision 1.24  2004/09/03 12:36:05  apsop
# HISTORY: Give mask tagged lc their own type and file class
# HISTORY:
# HISTORY: Revision 1.23  2004/09/03 00:21:45  apsop
# HISTORY: Give mask tagged lc their own type and file class
# HISTORY:
# HISTORY: Revision 1.22  2004/07/23 19:47:41  apsop
# HISTORY: Time sort bat lpd header file
# HISTORY:
# HISTORY: Revision 1.21  2004/07/11 20:36:17  apsop
# HISTORY: Allow for BAT rate files to not have GTIs.
# HISTORY:
# HISTORY: Revision 1.20  2004/06/18 16:13:08  apsop
# HISTORY: Use bat2fits6.
# HISTORY:
# HISTORY: Revision 1.19  2004/05/06 20:02:33  dah
# HISTORY: Add version number back into the header comments.
# HISTORY:
# HISTORY: Revision 1.18  2004/04/28 13:47:35  dah
# HISTORY: Make one method for extracting hk, and put it in Swift2FTIS superclass.
# HISTORY:
# HISTORY: Revision 1.17  2004/04/16 20:21:18  dah
# HISTORY: Begin using embedded history records
# HISTORY:
#
# VERSION: 0.0
#
#
##############################################################################

use Subs::Swift2FITS;
use Util::PseudoFtool;
use Util::GTIlist;

# NOTE(review): several other Util::* classes used below (LDPlist, FileList,
# FITSlist, FITSfile, Ftool, HEAdas, Date) are not use'd here - presumably
# loaded by the pipeline framework or by Subs::Swift2FITS; verify before
# running this module stand-alone.
@ISA = ('Subs::Swift2FITS');

use strict;

##############################################################################
# Constructor.  Builds on the generic Swift2FITS subroutine object and sets
# the human-readable description used by the pipeline logs.
##############################################################################
sub new {
    my $proto = shift;
    my $self = $proto->SUPER::new();

    $self->{DESCRIPTION} = "Decoding BAT Telemetry";

    return $self;
}

##################
# METHODS:
##################

##############################################################################
# Main entry point, called by the pipeline framework.  Converts the raw BAT
# CCSDS telemetry for this sequence to FITS with the external bat2fits6 tool,
# then post-processes the output: failed-trigger (TDRSS) products, file
# renaming for non-'sw' missions, event sorting, HK file renaming, rate-file
# merging, mask-tagged lightcurve combination, americium DPH cleanup, HK
# extraction, and scaled-map merging via the shared repository.
# Returns early (doing nothing) if there are no telemetry files.
##############################################################################
sub body {
    my $self = shift;

    my $log      = $self->log();
    my $filename = $self->filename();
    my $procpar  = $self->procpar();
    my $jobpar   = $self->jobpar();

    #######################################
    # get a time-sorted list of LDP files
    #######################################
    my $ldps = Util::LDPlist->new($filename->get("telemetry", "bat",
                                                 "ldp", "*") )
                            ->sort();

    ##################################################
    # append that to the rest of the telemetry files
    ##################################################
    my $list = Util::FileList->new($filename->get("telemetry", "bat",
                                                  "head[23]", "*"),
                                   $ldps->files() );

    #############################
    # check if we got anything
    #############################
    if($list->count() == 0) {
        $log->entry("No BAT CCSDS files to process");
        return;
    } else {
        ########################
        # log what we are doing
        ########################
        $log->entry("Running bat2fits on the following files: ".
                    join " ", $list->files() );
    }

    ################################
    # Get latest bat catalogue file
    ################################
    # Only attempt the repository fetch if we actually have time info for
    # this sequence (see HISTORY rev 1.42).
    my $late = $jobpar->{TIMELIST}->{stop};
    my @bcat;
    @bcat = ($filename->fetch_from_repository('bcatalog', 'b', '', $late))
        if $late;

    ###################################
    # set up and run BAT2FITS
    ###################################
    my $bin = $procpar->read("bat2fits");
    my $bat2fits = Util::PseudoFtool->new("$bin/bat2fits6");

    my $mission  = $jobpar->read("mission");
    my $sequence = $jobpar->read("sequence");

    # Output file name stem, e.g. "sw" + 11-digit sequence.
    my $filename_base = $mission . $sequence;

    $bat2fits->params({fitsname  => $filename_base,
                       override  => 'no',
                       ldpspan   => 'no',
                       align     => $filename->fetch_cal('alignment'),
                       runas     => 'pipeline',
                       path      => "./",
                       telemetry => $list->as_param(),
                       clobber   => "yes",
                       chatter   => 4});

    # Newer bat2fits takes the .queue timeline file as input when present
    # (see HISTORY rev 1.34).
    $bat2fits->params({'timeline' => "$sequence.queue"})
        if -f "$sequence.queue";

    $bat2fits->params({'catalog' => $bcat[0]}) if $bcat[0];

    $bat2fits->run();

    #############################################################
    # Handle event and aux data from failed triggers that may be
    # in this sequence. Only do this on the last go around
    #############################################################
    my $fail_trig_file = 'bat2fits_failed_trigger.list';
    if( $jobpar->{TIMELIST}->{final} && -f $fail_trig_file ){

        # Second bat2fits pass over just the failed-trigger telemetry list.
        $bat2fits->params({fitsname  => 'sw',
                           override  => 'no',
                           ldpspan   => 'no',
                           align     => $filename->fetch_cal('alignment'),
                           path      => "./",
                           telemetry => '@'.$fail_trig_file,
                           clobber   => "yes",
                           chatter   => 4 });
        $bat2fits->run();

        ##################################################################
        # Append the TDRSS lightcurve attitude data to the corresponding
        # TDRSS lightcurve, matched on the 11-character file-name root.
        ##################################################################
        my @tdatt = $filename->get('tdrsslcatt', 'bat', '', '*');
        my @tdlc  = $filename->get('tdlcurve',   'bat', '', '*');

        foreach my $att (@tdatt){
            my $root = ( $att =~ /^(s.\d{11})/ )[0];
            my $lc   = ( grep /^${root}/, @tdlc )[0];
            if( $lc ){
                Util::FITSfile->new($att)
                              ->append_to($lc);
            }
        }

        ############################################################
        # Rename failed trigger files to be trdss, ie with the 'ms'
        # pneumonic
        ############################################################
        # bat2fits reports each output file on a "STATS: FILE ..." stdout
        # line; insert "ms" after the 11-character root of each such name.
        my $output = $bat2fits->stdout();
        while($output =~ /\nSTATS: FILE (\S+)\s*\n/g){
            my ($name, $newname) = ($1) x 2;
            # Only rename when the substitution actually matches.
            rename $name, $newname
                if $newname =~ s/^(s.[t\d]\d{10})b/${1}msb/;
        }
    }
    unlink $fail_trig_file if -f $fail_trig_file;

    ##################################################################
    # For non-Swift ("st" test) missions, the tool writes sw* names;
    # just rename the output files instead of using the override
    # option (see HISTORY rev 1.30).
    ##################################################################
    if( $mission ne 'sw' ){
        foreach my $file ( glob('sw[0-9t]??????????ms[sbf]*'),
                           glob('sw[0-9t]??????????[sbf]*') ){
            my $newfile = $file;
            $newfile =~ s/^sw/${mission}/;
            rename $file, $newfile;
        }
    }

    # Time-sort/unique the short event files and apply the small time
    # offset (see sort_evts below).
    $self->sort_evts();

    #################################
    # Fix up names of event hk files
    #################################
    # Event-mode HK files come out as "ev??to"; rename them to "ev??sp"
    # hk files.  Short event (evsh) files are real science data - skip.
    foreach my $hkevfile ($filename->get('unfiltered', 'bat', 'ev??to', '*')){
        my ($inst, $mode, $index) = $filename->parse($hkevfile, 'unfiltered');
        next if $mode =~ /evsh/;
        $mode =~ s/^(.*)to$/$1sp/;
        rename $hkevfile, $filename->get('hk', 'bat', $mode, $index);
    }

    foreach my $hkevfile ($filename->get('hk', 'bat', 'ev??to', '*')){
        my ($inst, $mode, $index) = $filename->parse($hkevfile, 'hk');
        $mode =~ s/^(.*)to$/$1sp/;
        rename $hkevfile, $filename->get('hk', 'bat', $mode, $index);
    }

    # Same "to" -> "sp" fix for the TDRSS (failed-trigger) HK event files.
    foreach my $hkevfile ( $filename->get('tdhk',       'bat', 'ev??to', '*'),
                           $filename->get('tdunfilter', 'bat', 'ev??to', '*') ){
        my $new = $hkevfile;
        $new =~ s/(msbev..)to/$1sp/;
        rename $hkevfile, $new;
    }

    ############################################
    # merge the event files and the rate files
    ############################################
    # merge_rates fills @targ_ids with the distinct 8-digit target ids
    # seen in the mask-tagged lightcurve names.
    my @targ_ids;
    $self->merge_rates(\@targ_ids);

    $self->combine_masktag();

    # Remove fetched catalogue copies: anything not named for this
    # sequence, plus any gzipped copy (see HISTORY revs 1.44/1.45).
    @bcat = grep !/${filename_base}/ || /\.gz$/ , @bcat;
    unlink @bcat if @bcat;

    ##########################################################
    # Make a catalogue file of just the sources from this obs
    # This involves making a FITS row filtering expression of
    # the catalogue ids
    ##########################################################
    # NOTE(review): intentionally disabled (see HISTORY rev 1.36) -
    # kept for reference.
##    @bcat = ($filename->fetch_from_repository('bcatalog', 'b', '', $late),
##             $filename->fetch_from_repository('rcatalog', 'b', '', $late));
##    my $bcat_name = shift @bcat;
##    unlink @bcat if @bcat;
##
##    my $fcat = Util::FITSfile->new($bcat_name);
##    push @targ_ids, $jobpar->read('target');
##    @targ_ids = map "CATNUM==".int, @targ_ids;
##
##    $fcat->specs('['. join('||',@targ_ids) .']');
##    $fcat->copy('batcat.tmp');
##    unlink $bcat_name;
##
##    $bcat_name =~ s/(s[wt])[\dt]\d{10}/$1${sequence}/;
##    rename 'batcat.tmp', $bcat_name;
##

    ###############################################
    # Remove any zero exposure americium DPH files
    ###############################################
    foreach my $amfile ($filename->get('bamdph', 'bat', '*', '*')){
        my $amfits = Util::FITSfile->new($amfile);
        if( $amfits->keyword('EXPOSURE') == 0 ){
            $log->error(1, "Deleting file $amfile because it has zero exposure.");
            unlink $amfile;
        }
    }

    ##############################################
    # extract HK files using the unpacket tool
    ##############################################
    # Both methods are inherited from the Swift2FITS superclass.
    $self->hk_extract('bat');
    $self->hk_combine('bat');

    ########################################
    # handle combining of scaled map files.
    ########################################
    # Temporarily hide the repository entry so that get() lists only the
    # local scaled-map files produced by this run, then restore it.
    my $repos_val = $filename->{INFO}->{b}->{bscalemap}->{repository};
    delete $filename->{INFO}->{b}->{bscalemap}->{repository};
    my @bsmfiles = $filename->get('bscalemap', 'bat', '*', '*');
    $filename->{INFO}->{b}->{bscalemap}->{repository} = $repos_val;

##    my %bsmhash;
##    @bsmhash{@bsmfiles} = (1) x @bsmfiles;
##    @bsmfiles = keys %bsmhash;
    @bsmfiles = sort @bsmfiles;

    # fdiff configured to ignore processing-version keywords, so only
    # genuine data differences trigger a merge.
    my $diff = Util::Ftool->new('fdiff')
                          ->params({exclude => 'CALDBVER,PROCVER,SEQPNUM,SOFTVER',
                                    verbose => 'yes'});
    $diff->verbose(0);

    my $tmpfile  = 'bsmfile.tmp';
    my $tmpfile2 = 'bsmfile2.tmp';

    foreach my $smfile (@bsmfiles){
        $log->entry("Checking for updates to $smfile");

        my $smfits = Util::FITSfile->new($smfile);

        # Repository index is taken from the OBS_ID keyword, allowing for
        # scaled maps with non-zero indexes (HISTORY rev 1.47); strip
        # leading zeros.
        my $smobs = $smfits->keyword('OBS_ID');
        my $index = substr($smobs, 2, 8);
        $index =~ s/^0+//;

        rename $smfile, $tmpfile;
        my $repos = $filename->fetch_from_repository('bscalemap', 'bat', '',
                                                     $index);

        unless($repos){
            ###########################################
            # No current scaled map, so export this one
            ###########################################
            rename $tmpfile, $smfile;

            # Derive TSTART/TSTOP from the min/max of the TIME column in
            # the SCALED extension.
            my $stat = Util::HEAdas->new('ftstat')
                          ->params({infile => $smfile .'[SCALED][col TIME]',
                                    centroid => 'no'});
            $stat->run();

            my $par = $stat->parfile();
            my $min = $par->read('min');
            my $max = $par->read('max');

            my $start_date = Util::Date->new($min);
            my $stop_date  = Util::Date->new($max);

            # Stamp the time keywords into the primary HDU and both table
            # extensions (ext 0..2).
            for(my $ext=0; $ext<=2; $ext++){
                $smfits->ext($ext);
                $smfits->keyword('TSTART', $min);
                $smfits->keyword('TSTOP',  $max);
                $smfits->keyword('DATE-OBS',
                                 $start_date->date().'T'.$start_date->time() );
                $smfits->keyword('DATE-END',
                                 $stop_date->date().'T'.$stop_date->time() );
            }

            $filename->export_to_repository('bscalemap', 'bat', $smfile);
            unlink $smfile;
            next;
        }

        ###########################################################
        # Compare the new map against the repository copy; if they
        # are identical (ignoring version keywords) do nothing.
        ###########################################################
        $diff->params({file1 => $repos,
                       file2 => $tmpfile})
             ->run();

        if( $diff->parfile->read('numdiffs') == 0 ){
            $log->entry("File $smfile has not changed, do nothing.");
            unlink $tmpfile, $repos;
            next;
        }

        $log->entry("Merging information from scaled map files.");

        # Merge the SCALED extensions into a scratch file, then merge the
        # ATTITUDE extensions back into the (un-gzipped) repository name.
        Util::HEAdas->new('ftmerge')
                    ->params({infile => "$repos\[SCALED\], $tmpfile\[SCALED\]",
                              outfile => $tmpfile2,
                              clobber => 'yes'})
                    ->run();

        $repos =~ s/\.gz$//;

        Util::HEAdas->new('ftmerge')
                    ->params({infile => "$tmpfile2\[ATTITUDE\], $tmpfile\[ATTITUDE\]",
                              outfile => $repos,
                              clobber => 'yes'})
                    ->run();

        # Time-sort both merged extensions and drop duplicate rows.
        Util::HEAdas->new('fsort')
                    ->params({infile => "$repos\[1\]",
                              columns => 'TIME',
                              unique => 'yes'})
                    ->run();

        Util::HEAdas->new('fsort')
                    ->params({infile => "$repos\[2\]",
                              columns => 'TIME QPARAM',
                              unique => 'yes'})
                    ->run();

        # Recompute the time-range keywords for the merged file.
        my $stat = Util::HEAdas->new('ftstat')
                      ->params({infile => $repos .'[SCALED][col TIME]',
                                centroid => 'no'});
        $stat->run();

        my $par = $stat->parfile();
        my $min = $par->read('min');
        my $max = $par->read('max');

        my $start_date = Util::Date->new($min);
        my $stop_date  = Util::Date->new($max);

        my $rpfits = Util::FITSfile->new($repos);
        for(my $ext=0; $ext<=2; $ext++){
            $rpfits->ext($ext);
            $rpfits->keyword('TSTART', $min);
            $rpfits->keyword('TSTOP',  $max);
            $rpfits->keyword('DATE-OBS',
                             $start_date->date().'T'.$start_date->time() );
            $rpfits->keyword('DATE-END',
                             $stop_date->date().'T'.$stop_date->time() );
        }

        $filename->export_to_repository('bscalemap', 'bat', $repos);
        unlink $repos;
    }

    unlink $tmpfile, $tmpfile2;

} # end of body method

##############################################################################
# Sort and duplicate remove short event file
#
# Sorts each short-event (evsh*) file by TIME/DET_ID, removing duplicates,
# and applies a one-time +5e-6 s offset to the event times to avoid binning
# artifacts.  The BTOFFAPP keyword marks files already corrected so the
# offset is never applied twice (see HISTORY revs 1.50/1.51).  Also
# time-sorts the LDP_HEADER extension of the BAT 'hd' housekeeping file.
##############################################################################
sub sort_evts {
    my $self = shift;

    my $filename = $self->filename();

    foreach my $file ($filename->get('unfiltered', 'bat', 'evsh*', '*')) {

        # NOTE(review): indirect object syntax; Util::FITSfile->new(...) is
        # the preferred modern spelling.
        my $fits = new Util::FITSfile($file, 'EVENTS');
        $fits->cols('TIME', 'DET_ID')->sort('shell', 'unique');

        #######################################################################
        # Add in a small time offset to event times to avoid binning artifacts
        #######################################################################
        # Skip files whose BTOFFAPP keyword already reads 'T' (offset applied).
        my $is_offset = $fits->keyword('BTOFFAPP');
        unless( $is_offset && $is_offset =~ /^T\s*/ ){

            my $tempfile = 'event_offset.tmp';
            my $copy = Util::HEAdas->new('fcopy')
                ->params({infile => $file . '[EVENTS][col *; TIME = TIME + 5E-6]',
                          outfile => $tempfile});
            $copy->run();

            # Only replace the original and record the correction keywords
            # when fcopy succeeded; otherwise discard the temp file.
            unless( $copy->had_error() ){
                rename $tempfile, $file;
                $fits->keyword('BTOFFAPP', 'T',
                               'BAT time offset correction was applied');
                $fits->keyword('BTOFFVAL', 5E-6,
                               '[s] BAT time offset correction value');
            }else{
                unlink $tempfile;
            }
        }
    }

    # Time sort bat lpd header file (see HISTORY rev 1.22).
    # NOTE(review): get() is called in scalar context here - assumes it
    # returns a single matching file name (or false); confirm against the
    # Util::Filename API.
    my $hd_file = $filename->get('hk', 'bat', 'hd', '*');
    if($hd_file){
        my $fits = new Util::FITSfile($hd_file, 'LDP_HEADER');
        $fits->cols('TIME')->sort('shell', 'unique');
    }
}

###############################################################################
# merge the rate files
#
# For each BAT rate mode (1-s, ms, crab-cal, quadrant, mask-tagged) merge the
# per-LDP raw rate files into a single lightcurve per mode (and per 8-digit
# target id for mask-tagged data).  On exit, @$targets holds the distinct
# non-empty target ids encountered - needed later by combine_masktag and the
# (disabled) per-observation catalogue trimming.
###############################################################################
sub merge_rates{
    my $self    = shift;
    my $targets = shift;   # array ref, filled with target ids on return

    my $log      = $self->log();
    my $filename = $self->filename();
    my $procpar  = $self->procpar();
    my $jobpar   = $self->jobpar();

    $log->entry("Merging rate files");

    my @modes = ('rt1s', 'rtms', 'rtmc', 'rtqd', 'mt' );
    my ($mode, %all_targs);

    foreach $mode (@modes) {
        $log->entry("Mode = $mode");

        # Mask-tagged lightcurves have their own raw type/file class
        # (see HISTORY rev 1.24).
        my $type = 'lightcurve';
        $type = 'rawlc' if $mode eq 'mt';

        #####################################################################
        # Get a list of targets. Needed for mt lightcurves.  The 8-digit
        # target id is embedded in the parsed mode string; non-mt modes end
        # up under the single '' key.
        #####################################################################
        my %targ_list;
        my $targ;
        foreach my $file ($filename->get($type, 'b', $mode.'*', '*')){
            my $tot_mode = ( $filename->parse($file, $type) )[1];
            if( $tot_mode =~ /(\d{8})/ ){
                $targ_list{$1} = 1;
            }else{
                $targ_list{''} = 1;
            }
        }
        %all_targs = (%all_targs, %targ_list);

        foreach $targ (keys %targ_list){
            ###############################################################
            # get a list of all the raw rate files for this mode and index
            ###############################################################
            my $list = Util::FITSlist->new($filename->get($type, 'b',
                                                          "${mode}??${targ}",
                                                          '*') );
            $list->extension(1);

            #########################################
            # make sure there are some files and log
            # what we are doing
            #########################################
            if($list->count() == 0 ) {
                $log->entry("No raw rate files for mode $mode");
                next;
            } else {
                $log->entry("Merging ". join ' ', $list->files() );
            }

            ################################
            # merge the files
            ################################
            # Merged-file name: first raw file with the 2-char LDP index
            # squeezed out of the mode field.
            my $rate = ($list->files())[0];
            $rate =~ s/${mode}..${targ}/${mode}${targ}/;

            my $merged = $list->merge($rate);

            if($merged ne $rate ) {
                ##########################################
                # there was only one file, so we rename it
                ##########################################
                rename $merged, $rate;
            } else {
                ###########################################
                # there were multiple raw files,
                # first we need to sort and unique by time
                ###########################################
                $log->entry("Sorting $merged");
                Util::FITSfile->new($merged, 1)
                              ->sort("shell", "unique");

                ######################################
                # ...and now merge the GTIs
                ######################################
                # Only if every raw file has a GTI extension - some BAT
                # rate files legitimately lack GTIs (HISTORY rev 1.21).
                if( $list->extension('GTI') == $list->files() ){
                    $self->merge_gtis($list, $rate);
                }

                unlink $list->files();
            }
        }
    }

    # Report only the real (non-empty) target ids.
    @$targets = grep $_, keys %all_targs;

} # end of merge_rates method

###########################################################################
# Merge the GTI extensions in a list of FITS files. The merged GTIs are
# appended to the given target file.
###########################################################################
sub merge_gtis {
    my $self   = shift;
    my $list   = shift;   # Util::FITSlist of files whose GTIs are OR'd
    my $target = shift;   # merged rate file to receive the combined GTI

    my $log      = $self->log();
    my $filename = $self->filename();
    my $procpar  = $self->procpar();
    my $jobpar   = $self->jobpar();

    ##################################
    # now we have to "or" the GTIs
    ##################################
    $log->entry("Merging GTIs for $target");

    Util::GTIlist->new($list->files())
                 ->extension("GTI")
                 ->merge_and_append_to($target);

} # end of merge_gtis method

##############################################################################
# Put in the mask tagged weights as extensions to the mask tagged lightcurves
#
# For each raw mask-tagged lightcurve, find the mask-weight file for the same
# 8-digit target id, append its first extension to the lightcurve with
# fappend, and delete the now-redundant weight file.
#############################################################################
sub combine_masktag {
    my $self = shift;

    my $log      = $self->log();
    my $filename = $self->filename();
    my $procpar  = $self->procpar();
    my $jobpar   = $self->jobpar();

    my @mwfiles = $filename->get('maskwt', 'b', '', '*');

    foreach my $mtfile ($filename->get('rawmtlc', 'b', '', '*')){
        my ($inst, $mode, $index) = $filename->parse($mtfile, 'rawlc');

        if( $mode =~ /(\d{8})/ ){
            my $targ = $1;

            # First weight file whose name carries this target id.
            my $mwfile = (grep /mw${targ}/, @mwfiles)[0];
            if($mwfile){
                Util::Ftool->new('fappend')
                           ->params({infile  => $mwfile.'[1]',
                                     outfile => $mtfile})
                           ->run();
                unlink $mwfile;
            }
        }
    }
}

1;