diff --git a/configs/default_inject_neutrino.yaml b/configs/default_inject_neutrino.yaml new file mode 100644 index 0000000..0660d48 --- /dev/null +++ b/configs/default_inject_neutrino.yaml @@ -0,0 +1,221 @@ +# Number of the dataset +# dataset_number: +# Number of total runs/file +#n_runs: 1000 + +# Pattern for the outputfile +output_pattern: '{run_folder}/Level0.{step}_{generator}_IC86.2012_pass2.{dataset_number:6d}.{run_number}.i3.bz2' +# sub-dict to indicate need resources +resources: + # Indication which steps need GPUs, default is 0 + gpus: + 0: 0 + 1: 0 + 2: 1 + 3: 0 + 4: 0 + 5: 0 + # Indication of needed memory for each step if nothing is set 1GB is assumed as default + memory: + 0: 4gb + 1: 5gb + 2: 8gb + 3: 6gb + 4: 6gb + 5: 6gb + # Indication of the walltime (in hours) for each step, if nothing is set the system default for dagman and 1h for pbs are assumed + walltime: + # Indication of the number of cores for each step, default is 1 + cpus: + +# Dagman options +dagman_max_jobs: 5000 +dagman_submits_interval: 500 +dagman_scan_interval: 1 +dagman_submit_delay: 0 + +# Options used in the steps +# Options that are expected to be set to generate the scripts +seed: 1337 +# Whether file after IceTray should be kept +keep_crashed_files: 0 +# If True: use I3GSLRandomService, otherwise use I3SPRNGRandomService +# (Newer cmvfs python and icecube builds do not have I3SPRNGRandomService) +random_service_use_gslrng: True + +# PATH to the GCD File +gcd: /cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2020.Run134142.Pass2_V0.i3.gz +gcd_pass2: /cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2020.Run134142.Pass2_V0.i3.gz + +# Name of the generator; only used for naming of the output folder +generator: corsika + +# ------------------------------- +# Step 0: Select Neutrino +# ------------------------------- + +# SelectNeutrino specific options + +inject_neutrino_cfg: + #TreeKey: "I3MCTree" + # The maximum energy the generated neutrinos can have + MaxNeutrinoEnergy: 1_000_000 + # The minimum energy the generated neutrinos can have + MinNeutrinoEnergy: 1_000 + # The minimal ratio to the muon bundle energy at entry the generated neutrinos can have + MinEnergyRatio: 1 + # The generation ratios of the different neutrino types + # [nu_e, nu_e_bar, nu_mu, nu_mu_bar, nu_tau, nu_tau_bar] + TypeRatios: [1,1,0,0,0,0] + # Removes all frames with lower bundle energy at detector entry than this value + MinMuonBundleEnergy: 100 + SpectralIndex: -2 + + +# NeutrinoGenerator specific options +neutrino_generator_config: + earth: ["PREM_mmc"] + material: ["Standard"] + icecapmodel: "IceSheet" + simmode: "Detector" + xsecmodel: "csms_differential_v1.0" + xsecdir : "" + propmode: "AutoDetect" + CylinderRadiusInM: 550 + CylinderHeightInM: 1000 + muonrangeextension: False + ccfactor: 1.0 + ncfactor: 0.0 + grfactor: 0.0 + +# MuonPropagation specific options +muon_propagation_config : {} + +# oversampling to use +# Note: this isn't used here, but enables us to use the get_pulses scripts +oversampling_factor: + +# ----------------------------- +# Step 1: Snowstorm Propagation +# ----------------------------- +NumEventsPerModel: 100 +DOMOversizeFactor: 5. +UseI3PropagatorService: False +UseGPUs: True +SummaryFile: + +# These arguments will be passed on to the CLSIM Client Module +ExtraArgumentsToI3CLSimClientModule: + # Sources with no DOMs within this distance (meter) are *not* simulated + ClosestDOMDistanceCutoff: 500. 
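+
+# Note on the snowstorm_config block that follows: each entry under
+# "Perturbations" is dispatched by the snowstorm propagation step on its
+# "type" field. Besides "uniform" (uniform: {limits: [[lo, hi], ...]}) the
+# step also understands "delta" (delta: {x0: ...}) and "gauss"
+# (gauss: {mu: ..., sigma: ...}), plus the special IceWavePlusModes entry
+# with its own "apply"/"type" switches.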
+ +# Snowstorm Configuration +snowstorm_config: + # Config for the full-scale systematics dataset + # (after alignment with the calibration group) + # baseline ice-model and hole-ice parametrization + IceModelLocation: "$I3_BUILD/ice-models/resources/models/ICEMODEL/spice_3.2.1" + HoleIceParameterization: "$I3_BUILD/ice-models/resources/models/ANGSENS/angsens/as.flasher_p1_0.30_p2_-1" + # Control ice model perturbations: + Perturbations: + # IceWavePlusModes for depth dependent absorption/scattering scaling + IceWavePlusModes: + apply: False + type: default + # Global ice scattering scaling + Scattering: + type: uniform + uniform: + limits: [[0.9, 1.1]] + # Global ice absorption scaling + Absorption: + type: uniform + uniform: + limits: [[0.9, 1.1]] + # Ice anisotropy scaling + AnisotropyScale: + type: uniform + uniform: + limits: [[0., 2.0]] + # DOM efficiency scaling + DOMEfficiency: + type: uniform + uniform: + limits: [[0.9, 1.1]] + + # DOM angular acceptance according to the Unified HoleIce model + # see: https://github.com/philippeller/angular_acceptance + HoleIceForward_Unified: + type: uniform + uniform: + limits: [[-0.65, 0.60], [-0.08, 0.08]] + + +# ----------------------------- +# Step 2: Detector Simulation +# ----------------------------- +# Remove this list of keys from the M-Frames +det_remove_keys_from_m_frame: [ + 'AngularAcceptance', + 'MediumProperties', + 'WavelengthAcceptance', + 'WavelengthGenerationBias', +] +# keep MCPEs in frame +det_keep_mc_hits: False +# keep I3MCPulseSeriesMap in frame. +det_keep_mc_pulses: False +# keep MCTree with all in-ice propagated secondaries. These take a lot of space compared un propagated tree. +det_keep_propagated_mc_tree: True +# Keep everything upto run X +det_keep_all_upto: -1 +# add beacon lauches. +det_add_beacon_launches: True +# reduce peak memory use by repeatedly merging hits as they are generated. WARNING: Use of this option may slightly reduce precision and drastically increase running time. It is potentially useful for very bright events, and probably harmful for very long events. +det_low_mem: False +# remove events that don't pass any trigger. +det_filter_trigger: True +# do not run Vuvuzela. +det_skip_noise_generation: False +# convert I3MCTree to linearized version if True +det_convert_to_linear_tree: True +# If this is a Genie simulation, then this needs to be set to True +det_is_genie_simulation: False +# If this is an IceTop simulation, then this needs to be set to True +det_is_icetop_simulation: False +# DOM Efficiency Resampling +#(If det_dom_eff_resmapling_sample_efficiency is set to 0, +# no resampling will be performed ) +det_dom_eff_resmapling_sample_efficiency: 0. +det_dom_eff_resmapling_generated_efficiency: 0. + + + +# ---------- +# Step 3: L1 +# ---------- +# Set the Min Bias prescale to something other than default +L1_min_bias_prescale: !!null +# MC is produced by DOMSimulator (default=False) +L1_2012_dom_simulator: False +# Apply QConverter, use if file is P frame only +L1_2012_qify: False +# Retrigger +L1_2012_retrigger: False +# Run GFU +L1_pass2_run_gfu: true +# Keep untriggered events substream and keys +L1_keep_untriggered: False + +additional_keep_keys: + - I3MCTree_preSampling + - NuGPrimary + +# ---------- +# Step 4: L2 +# ---------- + +# Keep all online L2 events or just GFU? 
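+# (GFU refers to the gamma-ray follow-up selection that L1 runs when
+# L1_pass2_run_gfu is enabled above.)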
+OnlineL2_keep_all_L2: True +# Keep all time residuals per event +OnlineL2_keep_time_residuals: False diff --git a/configs/default_select_neutrino.yaml b/configs/default_select_neutrino.yaml new file mode 100644 index 0000000..9afbff6 --- /dev/null +++ b/configs/default_select_neutrino.yaml @@ -0,0 +1,213 @@ +# Number of the dataset +# dataset_number: +# Number of total runs/file +n_runs: 1000 + +# Pattern for the outputfile +output_pattern: '{run_folder}/Level0.{step}_{generator}_IC86.2012_pass2.{dataset_number:6d}.{run_number}.i3.bz2' +# sub-dict to indicate need resources +resources: + # Indication which steps need GPUs, default is 0 + gpus: + 0: 0 + 1: 0 + 2: 1 + 3: 0 + 4: 0 + 5: 0 + # Indication of needed memory for each step if nothing is set 1GB is assumed as default + memory: + 0: 4gb + 1: 5gb + 2: 8gb + 3: 6gb + 4: 6gb + 5: 6gb + # Indication of the walltime (in hours) for each step, if nothing is set the system default for dagman and 1h for pbs are assumed + walltime: + # Indication of the number of cores for each step, default is 1 + cpus: + +# Dagman options +dagman_max_jobs: 5000 +dagman_submits_interval: 500 +dagman_scan_interval: 1 +dagman_submit_delay: 0 + +# Options used in the steps +# Options that are expected to be set to generate the scripts +seed: 1337 +# Whether file after IceTray should be kept +keep_crashed_files: 0 +# If True: use I3GSLRandomService, otherwise use I3SPRNGRandomService +# (Newer cmvfs python and icecube builds do not have I3SPRNGRandomService) +random_service_use_gslrng: True + +# PATH to the GCD File +gcd: /cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2020.Run134142.Pass2_V0.i3.gz +gcd_pass2: /cvmfs/icecube.opensciencegrid.org/data/GCD/GeoCalibDetectorStatus_2020.Run134142.Pass2_V0.i3.gz + +# Name of the generator; only used for naming of the output folder +generator: corsika + +# ------------------------------- +# Step 0: Select Neutrino +# ------------------------------- + +# SelectNeutrino specific options + +select_neutrino_cfg: + #Neutrino is selected with pow(energy [GeV], index)", + EnergyBiasPowerIndex: 1 + # "Bias ratio for NuE:NuEBar:NuMu:NuMuBar:NuTau:NuTauBar. " + # ParticleBiases: [1, 1, 1, 1, 1, 1] + #KeepDarkNeutrinos: false + #UseZeVForm: false + +# NeutrinoGenerator specific options +neutrino_generator_config: + earth: ["PREM_mmc"] + material: ["Standard"] + icecapmodel: "IceSheet" + simmode: "Detector" + xsecmodel: "csms_differential_v1.0" + xsecdir : "" + propmode: "AutoDetect" + muonrangeextension: False + #CylinderRadiusInM: 950 + #CylinderHeightInM: 1900 + ccfactor: 1.0 + ncfactor: 0.0 + grfactor: 0.0 + +# MuonPropagation specific options +muon_propagation_config : {} + +# oversampling to use +# Note: this isn't used here, but enables us to use the get_pulses scripts +oversampling_factor: + +# ----------------------------- +# Step 1: Snowstorm Propagation +# ----------------------------- +NumEventsPerModel: 100 +DOMOversizeFactor: 1. +UseI3PropagatorService: False +UseGPUs: True +SummaryFile: + +# These arguments will be passed on to the CLSIM Client Module +ExtraArgumentsToI3CLSimClientModule: + # Sources with no DOMs within this distance (meter) are *not* simulated + ClosestDOMDistanceCutoff: 500. 
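+
+# The Perturbations entries in the snowstorm_config block below all use the
+# "uniform" type. As a purely illustrative sketch, a fixed (non-sampled)
+# value could instead be written with the "delta" type that the propagation
+# step also accepts, e.g.:
+#   DOMEfficiency:
+#     type: delta
+#     delta:
+#       x0: [1.0]
+# (the exact shape of x0 depends on the dimensionality of the parametrization)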
+ +# Snowstorm Configuration +snowstorm_config: + # Config for the full-scale systematics dataset + # (after alignment with the calibration group) + # baseline ice-model and hole-ice parametrization + IceModelLocation: "$I3_BUILD/ice-models/resources/models/ICEMODEL/spice_3.2.1" + HoleIceParameterization: "$I3_BUILD/ice-models/resources/models/ANGSENS/angsens/as.flasher_p1_0.30_p2_-1" + # Control ice model perturbations: + Perturbations: + # IceWavePlusModes for depth dependent absorption/scattering scaling + IceWavePlusModes: + apply: False + type: default + # Global ice scattering scaling + Scattering: + type: uniform + uniform: + limits: [[0.9, 1.1]] + # Global ice absorption scaling + Absorption: + type: uniform + uniform: + limits: [[0.9, 1.1]] + # Ice anisotropy scaling + AnisotropyScale: + type: uniform + uniform: + limits: [[0., 2.0]] + # DOM efficiency scaling + DOMEfficiency: + type: uniform + uniform: + limits: [[0.9, 1.1]] + + # DOM angular acceptance according to the Unified HoleIce model + # see: https://github.com/philippeller/angular_acceptance + HoleIceForward_Unified: + type: uniform + uniform: + limits: [[-0.65, 0.60], [-0.08, 0.08]] + + +# ----------------------------- +# Step 2: Detector Simulation +# ----------------------------- +# Remove this list of keys from the M-Frames +det_remove_keys_from_m_frame: [ + 'AngularAcceptance', + 'MediumProperties', + 'WavelengthAcceptance', + 'WavelengthGenerationBias', +] +# keep MCPEs in frame +det_keep_mc_hits: False +# keep I3MCPulseSeriesMap in frame. +det_keep_mc_pulses: False +# keep MCTree with all in-ice propagated secondaries. These take a lot of space compared un propagated tree. +det_keep_propagated_mc_tree: True +# Keep everything upto run X +det_keep_all_upto: -1 +# add beacon lauches. +det_add_beacon_launches: True +# reduce peak memory use by repeatedly merging hits as they are generated. WARNING: Use of this option may slightly reduce precision and drastically increase running time. It is potentially useful for very bright events, and probably harmful for very long events. +det_low_mem: False +# remove events that don't pass any trigger. +det_filter_trigger: True +# do not run Vuvuzela. +det_skip_noise_generation: False +# convert I3MCTree to linearized version if True +det_convert_to_linear_tree: True +# If this is a Genie simulation, then this needs to be set to True +det_is_genie_simulation: False +# If this is an IceTop simulation, then this needs to be set to True +det_is_icetop_simulation: False +# DOM Efficiency Resampling +#(If det_dom_eff_resmapling_sample_efficiency is set to 0, +# no resampling will be performed ) +det_dom_eff_resmapling_sample_efficiency: 0. +det_dom_eff_resmapling_generated_efficiency: 0. + + + +# ---------- +# Step 3: L1 +# ---------- +# Set the Min Bias prescale to something other than default +L1_min_bias_prescale: !!null +# MC is produced by DOMSimulator (default=False) +L1_2012_dom_simulator: False +# Apply QConverter, use if file is P frame only +L1_2012_qify: False +# Retrigger +L1_2012_retrigger: False +# Run GFU +L1_pass2_run_gfu: true +# Keep untriggered events substream and keys +L1_keep_untriggered: False + +additional_keep_keys: + - I3MCTree_preSampling + - NuGPrimary + +# ---------- +# Step 4: L2 +# ---------- + +# Keep all online L2 events or just GFU? 
+OnlineL2_keep_all_L2: True +# Keep all time residuals per event +OnlineL2_keep_time_residuals: False diff --git a/processing_chains.yaml b/processing_chains.yaml index bf6bdc8..288eb15 100644 --- a/processing_chains.yaml +++ b/processing_chains.yaml @@ -287,3 +287,25 @@ nancy_2012_muon_l3: 42: level2 43: step_0_2012_muon_L3_nancy_mc + +select_neutrino: + default_config: configs/default_select_neutrino.yaml + job_template: job_templates/py3-v4.3.0.sh + steps: + 0: step_-1_select_neutrino + 1: step_0_neutrino_generator + 2: step_1_snowstorm_propagation_py3_v4_3_0_icetray_v1_8_1 + 3: step_2_snowstorm_detector_simulation_py3_v4_3_0_icetray_v1_8_1 + 4: step_3_pass2_L1_py3_v4_3_0_icetray_v1_8_1 + 5: step_4_pass2_L2_py3_v4_3_0_icetray_v1_8_1 + +inject_neutrino: + default_config: configs/default_inject_neutrino.yaml + job_template: job_templates/py3-v4.3.0.sh + steps: + 0: step_0_inject_neutrino_into_shower + 1: step_0_neutrino_generator + 2: step_1_snowstorm_propagation_py3_v4_3_0_icetray_v1_8_1 + 3: step_2_snowstorm_detector_simulation_py3_v4_3_0_icetray_v1_8_1 + 4: step_3_pass2_L1_py3_v4_3_0_icetray_v1_8_1 + 5: step_4_pass2_L2_py3_v4_3_0_icetray_v1_8_1 \ No newline at end of file diff --git a/steps/step_-1_select_neutrino.py b/steps/step_-1_select_neutrino.py new file mode 100755 index 0000000..b4e5ea1 --- /dev/null +++ b/steps/step_-1_select_neutrino.py @@ -0,0 +1,136 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.8.1 +from __future__ import division +import click +import yaml +import numpy as np +import glob + +from icecube.simprod import segments +from icecube.sim_services.propagation import get_propagators +from icecube.icetray import I3Tray +from icecube import icetray, dataclasses, neutrino_generator, simclasses,dataio + +from utils import create_random_services, get_run_folder +from resources import geometry +from resources.oversampling import DAQFrameMultiplier +from resources.import_events import ImportEvents + + +@click.command() +@click.argument('cfg', type=click.Path(exists=True)) +@click.argument('run_number', type=int) +@click.option('--scratch/--no-scratch', default=True) +def main(cfg, run_number, scratch): + with open(cfg, 'r') as stream: + cfg = yaml.full_load(stream) + cfg['run_number'] = run_number + cfg['run_folder'] = get_run_folder(run_number) + if scratch: + outfile = cfg['scratchfile_pattern'].format(**cfg) + else: + outfile = cfg['outfile_pattern'].format(**cfg) + outfile = outfile.replace(' ', '0') + + # ------------------------------ + # get list of files for this run + # ------------------------------ + import_cfg = cfg['event_import_settings'] + glob_files = import_cfg['input_file_glob_list'] + if isinstance(glob_files, str): + # single string provided + files = glob.glob(glob_files.format(run_number=run_number)) + else: + # list of file globs provided + files = [] + for file_pattern in glob_files: + files.extend(glob.glob(file_pattern.format(run_number=run_number))) + # sort files + files = sorted(files) + # ------------------------------ + + click.echo('Run: {}'.format(run_number)) + click.echo('Outfile: {}'.format(outfile)) + click.echo('input Files:') + for file in files: + click.echo('\t{}'.format(file)) + + # crate random services + if 'random_service_use_gslrng' not in cfg: + cfg['random_service_use_gslrng'] = False + random_services, _ = create_random_services( + dataset_number=cfg['dataset_number'], + run_number=cfg['run_number'], + seed=cfg['seed'], + n_services=2, + 
use_gslrng=cfg['random_service_use_gslrng']) + + # -------------------------------------- + # Build IceTray + # -------------------------------------- + tray = I3Tray() + tray.context['I3RandomService'] = random_services[0] + + # import events from another I3-file + tray.AddModule("I3Reader", "reader", FilenameList=files) + + # TODO: Oversampling + # tray.AddModule(DAQFrameMultiplier, 'DAQFrameMultiplier', + # oversampling_factor=cfg['oversampling_factor'], + # mctree_keys=[import_cfg['mctree_name']]) + + #mctree_keys=[import_cfg['mctree_name']] + + tray.Add('Delete', Keys=["I3MCTree", "MMCTrackList","I3MCTree_preMuonProp_RNGState"]) + tray.Add('Rename', Keys=['I3MCTree_preMuonProp','I3MCTree']) + + nugen_config = cfg["neutrino_generator_config"] + propmode = neutrino_generator.to_propagation_mode(nugen_config["propmode"]) + + + if "CylinderRadiusInM" in nugen_config: + detcylrad = nugen_config["CylinderRadiusInM"]*icetray.I3Units.m + else: + detcylrad = 950*icetray.I3Units.m + if "CylinderHeightInM" in nugen_config: + detcyllen = nugen_config["CylinderHeightInM"]*icetray.I3Units.m + else: + detcyllen = 1900*icetray.I3Units.m + + origin_x = 0.*icetray.I3Units.m + origin_y = 0.*icetray.I3Units.m + origin_z = 0.*icetray.I3Units.m + cylinderparams = [detcylrad,detcyllen,origin_x,origin_y,origin_z] + + tray.AddService("I3EarthModelServiceFactory", "EarthModelService", + EarthModels = nugen_config["earth"], + MaterialModels = nugen_config["material"], + IceCapType = nugen_config["icecapmodel"]) + + tray.AddService("I3NuGSteeringFactory", "NuGSteer", + EarthModelName = "EarthModelService", + NEvents = -1, + CylinderParams = cylinderparams + ) + + selectNeutrinoCfg = cfg["select_neutrino_cfg"] + + tray.Add('I3NuGSourceSelector', **selectNeutrinoCfg if selectNeutrinoCfg else {}) + + click.echo('Output: {}'.format(outfile)) + tray.AddModule("I3Writer", "writer", + Filename=outfile, + Streams=[icetray.I3Frame.DAQ, + icetray.I3Frame.Physics, + icetray.I3Frame.Stream('S'), + icetray.I3Frame.Stream('M')]) + # -------------------------------------- + + click.echo('Scratch: {}'.format(scratch)) + tray.AddModule("TrashCan", "the can") + tray.Execute() + tray.Finish() + + +if __name__ == '__main__': + main() diff --git a/steps/step_0_inject_neutrino_into_shower.py b/steps/step_0_inject_neutrino_into_shower.py new file mode 100755 index 0000000..bcf3830 --- /dev/null +++ b/steps/step_0_inject_neutrino_into_shower.py @@ -0,0 +1,288 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.10.0 + +from __future__ import division + +import sys + +sys.path.append("/data/user/lbollmann/repositories/ic3-labels") # >:( + +import click +import yaml +import numpy as np +import glob + +from icecube.simprod import segments +from icecube.sim_services.propagation import get_propagators +from icecube.icetray import I3Tray +from icecube import icetray, dataclasses, neutrino_generator, simclasses, dataio +from icecube.dataclasses import I3Particle + +# print("ASDASDASD") +# import os +# print(os.environ["PYTHONPATH"]) +# print(sys.path) + +from ic3_labels.labels.utils import muon as mu_utils +from ic3_labels.labels.utils import high_level as hl +from ic3_labels.labels.utils.detector import icecube_hull + +from utils import create_random_services, get_run_folder + + + +def p(x, x0, x1, n): + """Power law distribution.""" + """x0 and x1 are the minimum and maximum values of the distribution""" + """n is the spectral index""" + """x is the random number between 0 and 1""" + return 
(x1**(n+1)-x0**(n+1)*x+x0**(n+1))**(1/(n+1)) + +class InjectNeutrinoIntoShower(icetray.I3ConditionalModule): + def __init__(self, context): + icetray.I3ConditionalModule.__init__(self, context) + self.AddParameter("TreeKey", "The key of the I3MCTree", "I3MCTree") + self.AddParameter( + "MaxNeutrinoEnergy", + "The maximum energy that injected neutrinos can have", + 10e6, + ) + self.AddParameter( + "MinNeutrinoEnergy", + "The minimum energy that injected neutrinos can have", + 1e3, + ) + self.AddParameter( + "MinEnergyRatio", + "The minimum energy ratio between neutrino and muon bundle", + 1.0, + ) + self.AddParameter( + "TypeRatios", + """The ratios of all neutrino types. Must be an array with length 6 with values between 0 and 1. + The order is [NuE, NuEBar, NuMu, NuMuBar, NuTau, NuTauBar]. Default is [1, 1, 1, 1, 1, 1].""", + [1, 1, 1, 1, 1, 1], + ) + self.AddParameter("RandomService", "The random service", None) + self.AddParameter("MinMuonBundleEnergy", "The minimum energy of the muon bundle. If lower the frame is dropped.", 1e3) + self.AddParameter("SpectralIndex", "The spectral index of the power law distribution", -1) + + + def Configure(self): + self._tree_key = self.GetParameter("TreeKey") + self._max_neutrino_energy = self.GetParameter("MaxNeutrinoEnergy") + self._min_neutrino_energy = self.GetParameter("MinNeutrinoEnergy") + self._min_energy_ratio = self.GetParameter("MinEnergyRatio") + self._ratios = self.GetParameter("TypeRatios") + self._random_service = self.GetParameter("RandomService") + self._min_muon_bundle_energy = self.GetParameter("MinMuonBundleEnergy") + self._spectral_index = self.GetParameter("SpectralIndex") + + def DAQ(self, frame): + """Inject neutrino into I3MCtree. + + Parameters + ---------- + frame : icetray.I3Frame.DAQ + tree_key : str + The key of the I3MCTree + max_neutrino_energy : float + The maximum energy that injected neutrinos can have + min_energy_ratio : float + The minimum energy ratio between neutrino and muon bundle + """ + + track_cache, _ = mu_utils.get_muongun_track_cache(frame) + + labels = hl.get_muon_bundle_information( + frame=frame, + convex_hull=icecube_hull, + track_cache=track_cache, + ) + + tree = frame[self._tree_key] + + assert len(tree.primaries) == 1 + primary = tree.primaries[0] + + bundle_energy_at_entry = labels["bundle_energy_at_entry"] + + # If the muon bundle energy is too high or zero then drop frame + if ( + bundle_energy_at_entry <= self._min_muon_bundle_energy + or bundle_energy_at_entry * self._min_energy_ratio + >= self._max_neutrino_energy + ): + return + + ## Create neutrino parameters + particles = [ + dataclasses.I3Particle.NuE, + dataclasses.I3Particle.NuEBar, + dataclasses.I3Particle.NuMu, + dataclasses.I3Particle.NuMuBar, + dataclasses.I3Particle.NuTau, + dataclasses.I3Particle.NuTauBar, + ] + + if self._random_service is None: + raise ValueError( + "random_service parameter of InjectNeutrinoIntoShower is not set" + ) + + # Sample type according to rations + cdf = np.cumsum(self._ratios) + num = self._random_service.uniform(float(cdf[-1])) + + nu_type = particles[np.searchsorted(cdf, num)] + + e_min = max( + bundle_energy_at_entry * self._min_energy_ratio, self._min_neutrino_energy + ) + e_max = self._max_neutrino_energy + + + if self._spectral_index != -1: + uniform = self._random_service.uniform(0, 1) + energy = p(uniform, e_min, e_max, self._spectral_index) + else: + energy = self._random_service.uniform(e_min, e_max) + + pos = primary.pos + dir = primary.dir + time = primary.time + + # Create neutrino + 
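+        # (the injected neutrino is placed on top of the air shower: it
+        # inherits the primary's vertex position, direction and time set above)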
neutrino = I3Particle() + neutrino.type = nu_type + neutrino.energy = energy + neutrino.pos = pos + neutrino.dir = dir + neutrino.time = time + neutrino.shape = dataclasses.I3Particle.StartingTrack + + # Add neutrino to tree + tree_pre_muon_prop = frame["I3MCTree_preMuonProp"] + tree_pre_muon_prop.append_child(primary.id, neutrino) + frame.Delete("I3MCTree_preMuonProp") + frame.Put("I3MCTree_preMuonProp", tree_pre_muon_prop) + + frame["NuGPrimary"] = neutrino + + labels = dataclasses.I3MapStringDouble() + + labels["EnergyMin"] = e_min + labels["EnergyMax"] = e_max + labels["Energy"] = neutrino.energy + labels["EnergyRatio"] = neutrino.energy / bundle_energy_at_entry + labels["MinEnergyRatio"] = self._min_energy_ratio + labels["MinNeutrinoEnergy"] = self._min_neutrino_energy + labels["BundleEnergyAtEntry"] = bundle_energy_at_entry + + frame["InjectNeutrinoInfo"] = labels + frame["InjectNeutrinoRatios"] = dataclasses.I3VectorDouble(self._ratios) + self.PushFrame(frame) + + +@click.command() +@click.argument("cfg", type=click.Path(exists=True)) +@click.argument("run_number", type=int) +@click.option("--scratch/--no-scratch", default=True) +def main(cfg, run_number, scratch): + with open(cfg, "r") as stream: + cfg = yaml.full_load(stream) + cfg["run_number"] = run_number + cfg["run_folder"] = get_run_folder(run_number) + if scratch: + outfile = cfg["scratchfile_pattern"].format(**cfg) + else: + outfile = cfg["outfile_pattern"].format(**cfg) + outfile = outfile.replace(" ", "0") + + # ------------------------------ + # get list of files for this run + # ------------------------------ + import_cfg = cfg["event_import_settings"] + glob_files = import_cfg["input_file_glob_list"] + if isinstance(glob_files, str): + # single string provided + files = glob.glob(glob_files.format(run_number=run_number)) + else: + # list of file globs provided + files = [] + for file_pattern in glob_files: + files.extend(glob.glob(file_pattern.format(run_number=run_number))) + # sort files + files = sorted(files) + # ------------------------------ + + click.echo("Run: {}".format(run_number)) + click.echo("Outfile: {}".format(outfile)) + click.echo("input Files:") + for file in files: + click.echo("\t{}".format(file)) + + # crate random services + if "random_service_use_gslrng" not in cfg: + cfg["random_service_use_gslrng"] = False + random_services, _ = create_random_services( + dataset_number=cfg["dataset_number"], + run_number=cfg["run_number"], + seed=cfg["seed"], + n_services=2, + use_gslrng=cfg["random_service_use_gslrng"], + ) + + # -------------------------------------- + # Build IceTray + # -------------------------------------- + tray = I3Tray() + tray.context["I3RandomService"] = random_services[0] + + # import events from another I3-file + tray.AddModule("I3Reader", "reader", FilenameList=files) + + if "muon_propagation_config" in cfg: + tray.Add("Rename", Keys=["I3MCTree", "I3MCTree_preMuonProp"]) + tray.AddSegment( + segments.PropagateMuons, + "propagate_muons", + RandomService=random_services[1], + **cfg["muon_propagation_config"], + ) + + # inject neutrino into shower + tray.AddModule( + InjectNeutrinoIntoShower, + "inject_neutrino", + RandomService=random_services[0], + **cfg["inject_neutrino_cfg"], + ) + + ## Delete propagated tree and MMCTracklist + tray.Add("Delete", "delete_tree", Keys=["I3MCTree"]) + tray.Add("Rename", "rename_pre_prop_tree", Keys=["I3MCTree_preMuonProp", "I3MCTree"]) + tray.Add("Delete", "delete_mmctracklist", Keys=["MMCTrackList"]) + + click.echo("Output: {}".format(outfile)) + 
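+    # pass S and M frames through to the output alongside the Q and P frames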
tray.AddModule( + "I3Writer", + "writer", + Filename=outfile, + Streams=[ + icetray.I3Frame.DAQ, + icetray.I3Frame.Physics, + icetray.I3Frame.Stream("S"), + icetray.I3Frame.Stream("M"), + ], + ) + # -------------------------------------- + + click.echo("Scratch: {}".format(scratch)) + tray.AddModule("TrashCan", "the can") + tray.Execute() + tray.Finish() + + +if __name__ == "__main__": + main() diff --git a/steps/step_0_neutrino_generator.py b/steps/step_0_neutrino_generator.py new file mode 100755 index 0000000..ab70073 --- /dev/null +++ b/steps/step_0_neutrino_generator.py @@ -0,0 +1,151 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.10.0 +from __future__ import division +import click +import yaml +import numpy as np +import glob + +from icecube.simprod import segments +from icecube.sim_services.propagation import get_propagators +from icecube.icetray import I3Tray +from icecube import icetray, dataclasses, neutrino_generator, simclasses,dataio + +from utils import create_random_services, get_run_folder +from resources import geometry +from resources.oversampling import DAQFrameMultiplier +from resources.import_events import ImportEvents + + +@click.command() +@click.argument('cfg', type=click.Path(exists=True)) +@click.argument('run_number', type=int) +@click.option('--scratch/--no-scratch', default=True) +def main(cfg, run_number, scratch): + with open(cfg, 'r') as stream: + cfg = yaml.full_load(stream) + cfg['run_number'] = run_number + cfg['run_folder'] = get_run_folder(run_number) + if scratch: + outfile = cfg['scratchfile_pattern'].format(**cfg) + else: + outfile = cfg['outfile_pattern'].format(**cfg) + outfile = outfile.replace(' ', '0') + + infile = cfg['infile_pattern'].format(**cfg) + files = [infile.replace(' ', '0')] + click.echo('Run: {}'.format(run_number)) + click.echo('Outfile: {}'.format(outfile)) + click.echo('input Files:') + for file in files: + click.echo('\t{}'.format(file)) + + # crate random services + if 'random_service_use_gslrng' not in cfg: + cfg['random_service_use_gslrng'] = False + random_services, _ = create_random_services( + dataset_number=cfg['dataset_number'], + run_number=cfg['run_number'], + seed=cfg['seed'], + n_services=2, + use_gslrng=cfg['random_service_use_gslrng']) + + + + def get_frames(files): + for file in files: + for frame in dataio.I3File(file): + yield frame + + event_count = 0 + for frame in get_frames(files): + if frame.Stop == frame.DAQ: + if "NuGPrimary" in frame: + event_count += 1 + + if "NEvents" in cfg: + event_count = min(event_count, cfg["NEvents"]) + + # -------------------------------------- + # Build IceTray + # -------------------------------------- + tray = I3Tray() + tray.context['I3RandomService'] = random_services[0] + + # import events from another I3-file + tray.AddModule("I3Reader", "reader", FilenameList=files) + + nugen_config = cfg["neutrino_generator_config"] + + propmode = neutrino_generator.to_propagation_mode(nugen_config["propmode"]) + + + if "CylinderRadiusInM" in nugen_config: + detcylrad = nugen_config["CylinderRadiusInM"]*icetray.I3Units.m + else: + detcylrad = 950*icetray.I3Units.m + if "CylinderHeightInM" in nugen_config: + detcyllen = nugen_config["CylinderHeightInM"]*icetray.I3Units.m + else: + detcyllen = 1900*icetray.I3Units.m + + origin_x = 0.*icetray.I3Units.m + origin_y = 0.*icetray.I3Units.m + origin_z = 0.*icetray.I3Units.m + cylinderparams = [detcylrad,detcyllen,origin_x,origin_y,origin_z] + + tray.AddService("I3EarthModelServiceFactory", 
"EarthModelService", + EarthModels = nugen_config["earth"], + MaterialModels = nugen_config["material"], + IceCapType = nugen_config["icecapmodel"]) + + tray.AddService("I3NuGSteeringFactory", "NuGSteer", + EarthModelName = "EarthModelService", + NEvents = event_count, + CylinderParams = cylinderparams, + SimMode = nugen_config["simmode"], + DoMuonRangeExtension = nugen_config["muonrangeextension"], + ) + tray.AddService("I3NuGInteractionInfoDifferentialFactory", "interaction", + RandomService = random_services[1], + SteeringName = "NuGSteer", + CrossSectionModel = nugen_config["xsecmodel"], + ) + + tray.AddModule("I3NeutrinoGenerator","generator", + RandomService = random_services[1], + SteeringName = "NuGSteer", + InteractionInfoName = "interaction", + PropagationWeightMode = propmode, + InteractionCCFactor = nugen_config["ccfactor"], + InteractionNCFactor = nugen_config["ncfactor"], + InteractionGRFactor = nugen_config["grfactor"], + If = lambda frame: "NuGPrimary" in frame + ) + + #propagate muons if config exists in config + #Note: Snowstorm may perform muon propagation internally + if 'muon_propagation_config' in cfg: + tray.Add('Rename', Keys=['I3MCTree', 'I3MCTree_preMuonProp']) + tray.AddSegment(segments.PropagateMuons, + 'propagate_muons', + RandomService=random_services[1], + **cfg['muon_propagation_config']) + + click.echo('Output: {}'.format(outfile)) + tray.AddModule("I3Writer", "writer", + Filename=outfile, + Streams=[icetray.I3Frame.DAQ, + icetray.I3Frame.Physics, + icetray.I3Frame.Stream('S'), + icetray.I3Frame.Stream('M')]) + # -------------------------------------- + + click.echo('Scratch: {}'.format(scratch)) + tray.AddModule("TrashCan", "the can") + tray.Execute() + tray.Finish() + + +if __name__ == '__main__': + main() diff --git a/steps/step_1_snowstorm_propagation_py3_v4_3_0_icetray_v1_8_1.py b/steps/step_1_snowstorm_propagation_py3_v4_3_0_icetray_v1_8_1.py new file mode 100755 index 0000000..7850b3b --- /dev/null +++ b/steps/step_1_snowstorm_propagation_py3_v4_3_0_icetray_v1_8_1.py @@ -0,0 +1,599 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.8.1 + +""" Run Snowstorm Propagation + +Adopted from: + https://code.icecube.wisc.edu/projects/icecube/browser/IceCube/ + meta-projects/combo/stable/simprod-scripts/resources/scripts/ + SnowSuite/3-Snowstorm.py + +# Copyright (c) 2019 +# Jakob van Santen +# and the IceCube Collaboration +# +# Permission to use, copy, modify, and/or distribute this software for any +# purpose with or without fee is hereby granted, provided that the above +# copyright notice and this permission notice appear in all copies. +# +# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY +# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION +# OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+""" + +import os +import sys +import click +import yaml +import numpy as np +import time +import copy +import itertools +import tempfile + +from I3Tray import I3Tray +from icecube import icetray, dataclasses, dataio, phys_services, clsim +from icecube import sim_services +from icecube.clsim.traysegments.common import \ + setupPropagators, setupDetector, configureOpenCLDevices +from icecube.clsim.traysegments.I3CLSimMakePhotons import \ + I3CLSimMakePhotonsWithServer +from icecube.ice_models import icewave +from icecube.ice_models import angsens_unified +from icecube.snowstorm import \ + Perturber, MultivariateNormal, DeltaDistribution, UniformDistribution +from icecube.snowstorm import all_parametrizations + +from utils import create_random_services, get_run_folder +from resources import snowstorm_perturbers + + +# ---------------- +# Helper Functions +# ---------------- +class FrameSequenceReader(icetray.I3Module): + """ + Emit frames from an externally supplied dataio.I3FrameSequence, effectively + making a persistent I3Reader. + """ + def __init__(self, ctx): + super(FrameSequenceReader, self).__init__(ctx) + self.AddParameter("Sequence", "Iterable of frames to emit", None) + + def Configure(self): + self._frames = self.GetParameter("Sequence") + + def Process(self): + # this can run into issues if it's the last one + try: + frame = next(self._frames) + if frame is not None: + self.PushFrame(frame) + else: + self.RequestSuspension() + except StopIteration: + self.RequestSuspension() + + +class Bumper(icetray.I3Module): + """ + Stop the tray after N Q-frames + """ + def __init__(self, ctx): + super(Bumper, self).__init__(ctx) + self.AddParameter("NumFrames", "", 100) + + def Configure(self): + self._numframes = self.GetParameter("NumFrames") + self._count = 0 + + def DAQ(self, frame): + self._count += 1 + if self._count >= self._numframes: + self.PushFrame(frame) + self.RequestSuspension() + else: + self.PushFrame(frame) + + +class EnsureSFrame(icetray.I3Module): + """ + Inject an S frame if none present, and ensure that M frames come after S + """ + def __init__(self, ctx): + super(EnsureSFrame, self).__init__(ctx) + self.AddParameter("Enable", "", True) + + def Configure(self): + self._disabled = not self.GetParameter("Enable") + self._mframes = [] + + def Process(self): + frame = self.PopFrame() + if self._disabled: + self.PushFrame(frame) + return + elif frame.Stop.id == 'S': + # got an existing S frame, emit buffered M frames + self._disabled = True + self.PushFrame(frame) + for m in self._mframes: + self.PushFrame(m) + del self._mframes[:] + elif frame.Stop.id == 'M': + self._mframes.append(frame) + elif frame.Stop.id == 'Q': + # no S frame seen, emit SMQ + self._disabled = True + self.PushFrame(icetray.I3Frame('S')) + for m in self._mframes: + self.PushFrame(m) + del self._mframes[:] + self.PushFrame(frame) + else: + self.PushFrame(frame) + + +class GatherStatistics(icetray.I3Module): + """Mimick the summary stage of I3CLSimModule::Finish()""" + def Finish(self): + if 'I3SummaryService' not in self.context: + return + summary = self.context['I3SummaryService'] + server = self.context['CLSimServer'] + if "TotalNumGeneratedHits" not in summary.keys(): + summary["TotalNumGeneratedHits"] = 0 + for k, v in summary.items(): + if (k.startswith("I3PhotonToMCPEConverter") and + k.endswith("NumGeneratedHits")): + summary["TotalNumGeneratedHits"] += v + summary.pop(k) + for k, v in server.GetStatistics().items(): + if k in summary and (k.startswith('Total') or + k.startswith('Num')): + 
summary[k] += v + else: + summary[k] = v + + +def run_snowstorm_propagation(cfg, infile, outfile): + """Run SnowStorm Propagation. + + Adopted from: + https://code.icecube.wisc.edu/projects/icecube/browser/IceCube/ + meta-projects/combo/stable/simprod-scripts/resources/scripts/ + SnowSuite/3-Snowstorm.py + + + Parameters + ---------- + cfg : dict + Dictionary with configuration settings. + infile : str + Path to input file. + outfile : str + Path to output file. + """ + + start_time = time.time() + + # -------- + # Settings + # -------- + default_args = { + + # required + 'NumEventsPerModel': 100, + 'DOMOversizeFactor': 1., + 'UseI3PropagatorService': True, + + # optional + 'UseGPUs': True, + 'SummaryFile': 'summary_snowstorm.yaml', + + 'UseOnlyDeviceNumber': None, + 'MCTreeName': 'I3MCTree', + 'OutputMCTreeName': None, + 'FlasherInfoVectName': None, + 'FlasherPulseSeriesName': None, + 'PhotonSeriesName': None, + 'MCPESeriesName': "I3MCPESeriesMap", + 'DisableTilt': False, + 'UnWeightedPhotons': False, + 'UnWeightedPhotonsScalingFactor': None, + 'UseGeant4': False, + 'ParticleHistory': True, + 'ParticleHistoryGranularity': 1*icetray.I3Units.m, + 'CrossoverEnergyEM': None, + 'CrossoverEnergyHadron': None, + 'UseCascadeExtension': True, + 'PhotonHistoryEntries': 0, + 'DoNotParallelize': False, + 'UnshadowedFraction': 1.0, + 'WavelengthAcceptance': None, + 'DOMRadius': 0.16510*icetray.I3Units.m, + 'CableOrientation': None, + 'OverrideApproximateNumberOfWorkItems': None, + 'IgnoreSubdetectors': ["IceTop"], + 'ExtraArgumentsToI3PhotonPropagationClientModule': dict(), + } + + # overwrite default settings + default_args.update(cfg) + cfg = default_args + + snowstorm_config = cfg['snowstorm_config'] + if cfg['SummaryFile'] is not None: + cfg['SummaryFile'] = cfg['SummaryFile'].format(**cfg) + ice_model_location = \ + os.path.expandvars(snowstorm_config["IceModelLocation"]) + hole_ice_parameterization = \ + os.path.expandvars(snowstorm_config["HoleIceParameterization"]) + + # set units to meter + cfg['ParticleHistoryGranularity'] *= icetray.I3Units.m + cfg['DOMRadius'] *= icetray.I3Units.m + + # Print out most important settings + click.echo('\n---------------') + click.echo('Script Settigns') + click.echo('---------------') + click.echo('\tInput: {}'.format(infile)) + click.echo('\tGCDFile: {}'.format(cfg['gcd'])) + click.echo('\tOutput: {}'.format(outfile)) + for key in ['DOMOversizeFactor', 'UseI3PropagatorService', 'UseGPUs', + 'SummaryFile']: + click.echo('\t{}: {}'.format(key, cfg[key])) + click.echo('---------------\n') + + # get random service + random_services, _ = create_random_services( + dataset_number=cfg['dataset_number'], + run_number=cfg['run_number'], + seed=cfg['seed'], + n_services=1, + use_gslrng=cfg['random_service_use_gslrng']) + + random_service = random_services[0] + + """ + Setup and run Snowstorm (aka MultiSim) by running a series of short + trays, each with a different ice model. This works by front-loading as much + of the expensive initialization (reading the GCD file, setting up + PROPOSAL/Geant4, etc) as possible, so that only the propagation kernel + needs to be recompiled for every tray. + """ + # instantiate baseline detector setup. + # this will help construct the baseline characteristics before applying + # the perturbers + print("Setting up detector... 
", end="") + clsimParams = setupDetector( + GCDFile=cfg['gcd'], + SimulateFlashers=bool(cfg['FlasherInfoVectName'] or + cfg['FlasherPulseSeriesName']), + IceModelLocation=ice_model_location, + DisableTilt=cfg['DisableTilt'], + UnWeightedPhotons=cfg['UnWeightedPhotons'], + UnWeightedPhotonsScalingFactor=cfg['UnWeightedPhotonsScalingFactor'], + UseI3PropagatorService=cfg['UseI3PropagatorService'], + UseGeant4=cfg['UseGeant4'], + CrossoverEnergyEM=cfg['CrossoverEnergyEM'], + CrossoverEnergyHadron=cfg['CrossoverEnergyHadron'], + UseCascadeExtension=cfg['UseCascadeExtension'], + DOMOversizeFactor=cfg['DOMOversizeFactor'], + UnshadowedFraction=cfg['UnshadowedFraction'], + HoleIceParameterization=hole_ice_parameterization, + WavelengthAcceptance=cfg['WavelengthAcceptance'], + DOMRadius=cfg['DOMRadius'], + CableOrientation=cfg['CableOrientation'], + IgnoreSubdetectors=cfg['IgnoreSubdetectors'], + ) + print("done") + print("Setting up OpenCLDevices... ", end="") + openCLDevices = configureOpenCLDevices( + UseGPUs=cfg['UseGPUs'], + UseCPUs=not cfg['UseGPUs'], + OverrideApproximateNumberOfWorkItems=cfg[ + 'OverrideApproximateNumberOfWorkItems'], + DoNotParallelize=cfg['DoNotParallelize'], + UseOnlyDeviceNumber=cfg['UseOnlyDeviceNumber']) + print("done") + + # ------------------- + # Setup perturbations + # ------------------- + # create empty "perturber" object + perturber = Perturber() + # get perturbation_cfg dict to simplify calls + perturbation_cfg = snowstorm_config["Perturbations"] + # loop over all perturbations in the perturbation_cfg + print("Setting up perturbers... ") + for name, params in perturbation_cfg.items(): + # catch special case of IceWavePlusModes + if name == "IceWavePlusModes": + if not params["apply"]: + continue + if params["type"] == "default": + print("-> adding {} of type {}".format(name, params["type"])) + perturber.add('IceWavePlusModes', + *icewave.get_default_perturbation()) + continue + + elif hasattr(snowstorm_perturbers, params["type"]): + print("-> adding {} of type {}".format(name, params["type"])) + get_perturber = getattr(snowstorm_perturbers, params["type"]) + perturber.add('IceWavePlusModes', + *get_perturber(**params['settings'])) + continue + else: + msg = "IceWavePlusModes of type '{}' are not implemented(yet)." + raise NotImplementedError(msg.format(params["type"])) + # all other cases + if params["type"] == "delta": + print("-> adding {} of type {}".format(name, params["type"])) + params = params["delta"] + perturber.add(name, all_parametrizations[name], + DeltaDistribution(params["x0"])) + elif params["type"] == "gauss": + print("-> adding {} of type {}".format(name, params["type"])) + params = params["gauss"] + # Caution: MultivariateNormal expect the covariance matrix as + # first argument, so we need to use sigma**2 + perturber.add(name, all_parametrizations[name], + MultivariateNormal( + dataclasses.I3Matrix(np.diag(params["sigma"])**2), + params["mu"])) + elif params["type"] == "uniform": + print("-> adding {} of type {}".format(name, params["type"])) + params = params["uniform"] + perturber.add(name, all_parametrizations[name], + UniformDistribution( + [dataclasses.make_pair(*limits) + for limits in params["limits"]])) + else: + msg = "Perturbation '{}' of type '{}' not implemented." 
+ raise NotImplementedError(msg.format(name, params["type"])) + print("done") + + # Setting up some other things + gcdFrames = list(dataio.I3File(cfg['gcd'])) + inputStream = dataio.I3FrameSequence([infile]) + summary = dataclasses.I3MapStringDouble() + intermediateOutputFiles = [] + + # -------------- + # Run PhotonProp + # -------------- + + # start a model counter + model_counter = 0 + + # Execute photon propagation + print("Executing photon propagation...", end="") + while inputStream.more(): + # measure CLSimInit time + time_CLSimInit_start = time.time() + + tray = I3Tray() + tray.context['I3RandomService'] = random_service + tray.context['I3SummaryService'] = summary + # make a mutable copy of the config dict + config = dict(clsimParams) + # populate the M frame with I3FrameObjects from clsimParams + model = icetray.I3Frame('M') + for k, v in config.items(): + if isinstance(v, icetray.I3FrameObject): + model[k] = v + # apply perturbations in the order they were configured + perturber.perturb(random_service, model) + # check for items in the M-frame that were changed/added + # by the perturbers + for k in model.keys(): + if k.startswith('Snowstorm'): + # keep all Snowstorm keys + continue + if k not in config: + msg = "\n {} was put in the M frame, but does not appear in " + msg += "the CLSim configuration dict" + raise KeyError(msg.format(k)) + + if config[k] != model[k]: + # if an items was changed, copy it back to clsimParams + config[k] = model[k] + else: + # remove unmodified items from the M frame + del model[k] + + # add "persistent" I3Reader + tray.Add(FrameSequenceReader, + Sequence=itertools.chain(gcdFrames, [model], inputStream)) + + # inject an S frame if it doesn't exist + tray.Add(EnsureSFrame, Enable=len(intermediateOutputFiles) == 0) + + # write pertubations to frame + def populate_s_frame(frame): + perturber.to_frame(frame) + tray.Add(populate_s_frame, Streams=[icetray.I3Frame.Stream('S')]) + + # Add Bumper to stop the tray after NumEventsPerModel Q-frames + tray.Add(Bumper, NumFrames=cfg['NumEventsPerModel']) + + # initialize CLSim server and setup the propagators + server_location = tempfile.mkstemp(prefix='clsim-server-')[1] + address = 'ipc://'+server_location + converters = setupPropagators( + random_service, config, + UseGPUs=cfg['UseGPUs'], + UseCPUs=not cfg['UseGPUs'], + OverrideApproximateNumberOfWorkItems=cfg[ + 'OverrideApproximateNumberOfWorkItems'], + DoNotParallelize=cfg['DoNotParallelize'], + UseOnlyDeviceNumber=cfg['UseOnlyDeviceNumber'] + ) + server = sim_services.I3PhotonPropagationServer( + address, sim_services.I3StepToPhotonConverterSeries(converters)) + + # stash server instance in the context to keep it alive + tray.context['CLSimServer'] = server + + # recycle StepGenerator to prevent repeated, expensive initialization + if 'StepGenerator' in cfg[ + 'ExtraArgumentsToI3PhotonPropagationClientModule']: + stepGenerator = cfg[ + 'ExtraArgumentsToI3PhotonPropagationClientModule'][ + 'StepGenerator'] + stepGenerator.SetMediumProperties(config['MediumProperties']) + stepGenerator.SetWlenBias(config['WavelengthGenerationBias']) + + # add CLSim server to tray + module_config = \ + tray.Add( + I3CLSimMakePhotonsWithServer, + ServerAddress=address, + DetectorSettings=config, + MCTreeName=cfg['MCTreeName'], + OutputMCTreeName=cfg['OutputMCTreeName'], + FlasherInfoVectName=cfg['FlasherInfoVectName'], + FlasherPulseSeriesName=cfg['FlasherPulseSeriesName'], + PhotonSeriesName=cfg['PhotonSeriesName'], + MCPESeriesName=cfg['MCPESeriesName'], + 
RandomService=random_service, + ParticleHistory=cfg['ParticleHistory'], + ParticleHistoryGranularity=cfg['ParticleHistoryGranularity'], + ExtraArgumentsToI3PhotonPropagationClientModule=cfg[ + 'ExtraArgumentsToI3PhotonPropagationClientModule'], + ) + + # recycle StepGenerator to prevent repeated, expensive initialization + cfg['ExtraArgumentsToI3PhotonPropagationClientModule'][ + 'StepGenerator'] = module_config['StepGenerator'] + + # write to temporary output file + intermediateOutputFiles.append( + tempfile.mkstemp(suffix=(outfile.split("/"))[-1])[1]) + tray.Add("I3Writer", + Filename=intermediateOutputFiles[-1], + DropOrphanStreams=[icetray.I3Frame.TrayInfo], + Streams=[icetray.I3Frame.TrayInfo, + icetray.I3Frame.Simulation, + icetray.I3Frame.Stream('M'), + icetray.I3Frame.Stream('m'), + icetray.I3Frame.DAQ, + icetray.I3Frame.Physics]) + + # gather statistics in the "I3SummaryService" + tray.Add(GatherStatistics) + + # measure CLSimInit time + time_CLSimInit = time.time() - time_CLSimInit_start + summary["CLSimInitTime_{:03d}".format(model_counter)] = time_CLSimInit + if "TotalCLSimInitTime" not in summary: + summary["TotalCLSimInitTime"] = time_CLSimInit + else: + summary["TotalCLSimInitTime"] += time_CLSimInit + # measure CLSimTray time + time_CLSimTray_start = time.time() + + tray.Execute() + + # measure CLSimTray time + time_CLSimTray = time.time() - time_CLSimTray_start + summary["CLSimTrayTime_{:03d}".format(model_counter)] = time_CLSimTray + if "TotalCLSimTrayTime" not in summary: + summary["TotalCLSimTrayTime"] = time_CLSimTray + else: + summary["TotalCLSimTrayTime"] += time_CLSimTray + # remove the temp file made by the server location thingy + os.unlink(server_location) + + # increase model counter + model_counter += 1 + + print("done") + + # Add number of models to summary + summary["TotalNumberOfModels"] = model_counter + + # Concatenate intermediate files + print("Concatenating temporary files... ", end='') + tray = I3Tray() + tray.Add(dataio.I3Reader, "I3Reader", FilenameList=intermediateOutputFiles) + tray.Add("I3Writer", Filename=outfile, + DropOrphanStreams=[icetray.I3Frame.TrayInfo], + Streams=[icetray.I3Frame.TrayInfo, + icetray.I3Frame.Simulation, + icetray.I3Frame.Stream('M'), + icetray.I3Frame.Stream('m'), + icetray.I3Frame.DAQ, + icetray.I3Frame.Physics]) + tray.Execute() + tray.Finish() + print("done") + + print("Cleaning up Temporary files... ") + for fname in intermediateOutputFiles: + os.unlink(fname) + print("done") + + # Recalculate averages + print("Writing summary file... ", end='') + if cfg['UseGPUs']: + if summary['TotalHostTime'] > 0.0: + summary['DeviceUtilization'] = \ + summary['TotalDeviceTime']/summary['TotalHostTime'] + if summary['TotalNumPhotonsGenerated'] > 0.0: + summary['AverageDeviceTimePerPhoton'] = \ + summary['TotalDeviceTime']/summary['TotalNumPhotonsGenerated'] + if summary['TotalNumPhotonsGenerated'] > 0.0: + summary['AverageHostTimePerPhoton'] = \ + summary['TotalHostTime']/summary['TotalNumPhotonsGenerated'] + if cfg['SummaryFile']: + with open(cfg['SummaryFile'], 'w') as f: + yaml.dump(dict(summary), f) + print("done") + print('--------') + print('Summary:') + print('--------') + for key, value in summary.items(): + print('\t{}: {}'.format(key, value)) + print('--------\n') + + # Hurray! 
+ print("All finished!") + # say something about the runtime + end_time = time.time() + print("That took "+str(end_time - start_time)+" seconds.") + + +@click.command() +@click.argument('cfg', type=click.Path(exists=True)) +@click.argument('run_number', type=int) +@click.option('--scratch/--no-scratch', default=True) +def main(cfg, run_number, scratch): + with open(cfg, 'r') as stream: + cfg = yaml.full_load(stream) + cfg['run_number'] = run_number + cfg['run_folder'] = get_run_folder(run_number) + + infile = cfg['infile_pattern'].format(**cfg) + infile = infile.replace(' ', '0') + + if scratch: + outfile = cfg['scratchfile_pattern'].format(**cfg) + else: + outfile = cfg['outfile_pattern'].format(**cfg) + outfile = outfile.replace(' ', '0') + + if cfg.get('distance_splits', False): + raise NotImplementedError('Distance splits are not supported!') + else: + run_snowstorm_propagation(cfg, infile, outfile) + + +if __name__ == '__main__': + main() diff --git a/steps/step_2_snowstorm_detector_simulation_py3_v4_3_0_icetray_v1_8_1.py b/steps/step_2_snowstorm_detector_simulation_py3_v4_3_0_icetray_v1_8_1.py new file mode 100755 index 0000000..e24fff8 --- /dev/null +++ b/steps/step_2_snowstorm_detector_simulation_py3_v4_3_0_icetray_v1_8_1.py @@ -0,0 +1,6 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.8.1 +from step_2_snowstorm_detector_simulation import * + +if __name__ == '__main__': + main() diff --git a/steps/step_3_pass2_L1_py3_v4_3_0_icetray_v1_8_1.py b/steps/step_3_pass2_L1_py3_v4_3_0_icetray_v1_8_1.py new file mode 100755 index 0000000..d462d1b --- /dev/null +++ b/steps/step_3_pass2_L1_py3_v4_3_0_icetray_v1_8_1.py @@ -0,0 +1,333 @@ +#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start +#METAPROJECT icetray/v1.8.1 +import os + +import click +import yaml + +from I3Tray import I3Tray, I3Units +from icecube import icetray, dataclasses, dataio, filter_tools, trigger_sim +from icecube import phys_services +from icecube.filterscripts import filter_globals +from icecube.filterscripts.all_filters import OnlineFilter +from icecube.phys_services.which_split import which_split +import os +import sys +import time + +import subprocess +from math import log10, cos, radians +from optparse import OptionParser +from os.path import expandvars + + +from utils import get_run_folder, muongun_keys, create_random_services + + +SPLINE_TABLES = '/cvmfs/icecube.opensciencegrid.org/data/photon-tables/splines' + + +@click.command() +@click.argument('cfg', type=click.Path(exists=True)) +@click.argument('run_number', type=int) +@click.option('--scratch/--no-scratch', default=True) +def main(cfg, run_number, scratch): + with open(cfg, 'r') as stream: + if int(yaml.__version__[0]) < 5: + # backwards compatibility for yaml versions before version 5 + cfg = yaml.load(stream) + else: + cfg = yaml.full_load(stream) + cfg['run_number'] = run_number + cfg['run_folder'] = get_run_folder(run_number) + + infile = cfg['infile_pattern'].format(**cfg) + infile = infile.replace(' ', '0') + infile = infile.replace('Level0.{}'.format(cfg['previous_step']), + 'Level0.{}'.format(cfg['previous_step'] % 10)) + infile = infile.replace('2012_pass2', 'pass2') + + if scratch: + outfile = cfg['scratchfile_pattern'].format(**cfg) + else: + outfile = cfg['outfile_pattern'].format(**cfg) + outfile = outfile.replace('Level0.{}'.format(cfg['step']), + 'Level0.{}'.format(cfg['step'] % 10)) + outfile = outfile.replace(' ', '0') + outfile = outfile.replace('2012_pass2', 'pass2') + 
print('Outfile != $FINAL_OUT clean up for crashed scripts not possible!') + + # collect keys which are to be kept in a addition to the official keep keys + # of the standard L1 and L2 processing + if 'additional_keep_keys' in cfg: + additional_keep_keys = cfg['additional_keep_keys'] + muongun_keys + else: + additional_keep_keys = muongun_keys + additional_keep_keys += [ + 'BiasedMuonWeighter', 'BiasedMuonCorridorWeighter', + 'BiasedMESCHotspotWeighter', 'BiasedSimulationWeight', + 'PROPOSALStorm', 'PROPOSALStormUniformRanges', + 'MCVetoMuonInjectionInfo', 'MMCTrackListVetoMuon', + 'CombinedMuonVetoI3MCTree', 'I3MCTreeVetoMuon', + 'I3MCTreeVetoMuon_preMuonProp', + 'I3MCTreeVetoMuon_preMuonProp_RNGState', + ] + + tray = I3Tray() + """The main L1 script""" + tray.AddModule('I3Reader', + 'i3 reader', + FilenameList=[cfg['gcd_pass2'], infile]) + + # run online filters + online_kwargs = {} + if SPLINE_TABLES: + online_kwargs.update({ + 'SplineRecoAmplitudeTable': os.path.join( + SPLINE_TABLES, 'InfBareMu_mie_abs_z20a10.fits'), + 'SplineRecoTimingTable': os.path.join( + SPLINE_TABLES, 'InfBareMu_mie_prob_z20a10.fits'), + # 'alert_followup_base_GCD_filename': cfg['gcd_pass2'], + }) + if cfg['L1_pass2_run_gfu'] is not None: + online_kwargs['gfu_enabled'] = cfg['L1_pass2_run_gfu'] + if 'L1_needs_wavedeform_spe_corr' not in cfg: + cfg['L1_needs_wavedeform_spe_corr'] = False + tray.AddSegment(OnlineFilter, "OnlineFilter", + decode=False, simulation=True, + vemcal_enabled=False, + alert_followup=False, + needs_wavedeform_spe_corr=cfg[ + 'L1_needs_wavedeform_spe_corr'], + **online_kwargs + ) + + # make random service + _, seed = create_random_services( + dataset_number=cfg['dataset_number'], + run_number=cfg['run_number'], + seed=cfg['seed'], + n_services=0) + filter_mask_randoms = phys_services.I3GSLRandomService(seed) + + # override MinBias Prescale + filterconfigs = filter_globals.filter_pairs + filter_globals.sdst_pairs + print(cfg['L1_min_bias_prescale']) + if cfg['L1_min_bias_prescale']: + for i, filtertuple in enumerate(filterconfigs): + if filtertuple[0] == filter_globals.FilterMinBias: + del filterconfigs[i] + filterconfigs.append((filtertuple[0], + cfg['L1_min_bias_prescale'])) + break + print(filterconfigs) + + # Generate filter Masks for all P frames + tray.AddModule(filter_tools.FilterMaskMaker, + "MakeFilterMasks", + OutputMaskName=filter_globals.filter_mask, + FilterConfigs=filterconfigs, + RandomService=filter_mask_randoms) + + # Merge the FilterMasks + tray.AddModule("OrPframeFilterMasks", + "make_q_filtermask", + InputName=filter_globals.filter_mask, + OutputName=filter_globals.qfilter_mask) + + # Q+P frame specific keep module needs to go first, as KeepFromSubstram + # will rename things, let's rename post keep. 
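+    # helper used below to restrict the first Keep module to DAQ (Q) frames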
+    def is_Q(frame):
+        return frame.Stop == frame.DAQ
+
+    simulation_keeps = [
+        'BackgroundI3MCTree',
+        'BackgroundI3MCTreePEcounts',
+        'BackgroundI3MCPESeriesMap',
+        'BackgroundI3MCTree_preMuonProp',
+        'BackgroundI3MCTree_preMuonProp_RNGState',
+        'BackgroundMMCTrackList',
+        'BeaconLaunches',
+        'CorsikaInteractionHeight',
+        'CorsikaWeightMap',
+        'EventProperties',
+        'GenerationSpec',
+        'I3LinearizedMCTree',
+        'I3MCTree',
+        'I3MCTreePEcounts',
+        'I3MCTree_preMuonProp',
+        'I3MCTree_preMuonProp_RNGState',
+        'I3MCPESeriesMap',
+        'I3MCPESeriesMapWithoutNoise',
+        'I3MCPESeriesMapParticleIDMap',
+        'I3MCPulseSeriesMap',
+        'I3MCPulseSeriesMapParticleIDMap',
+        'I3MCPulseSeriesMapPrimaryIDMap',
+        'I3MCWeightDict',
+        'LeptonInjectorProperties',
+        'MCHitSeriesMap',
+        'MCPrimary',
+        'MCPrimaryInfo',
+        'MMCTrackList',
+        'PolyplopiaInfo',
+        'PolyplopiaPrimary',
+        'RNGState',
+        'SignalI3MCPEs',
+        'SimTrimmer',  # for SimTrimmer flag
+        'TimeShift',  # the time shift amount
+        'WIMP_params',  # Wimp-sim
+        'noise_weight',  # weights for noise-only vuvuzela simulations
+        'I3GENIEResultDict',  # weight information for GENIE simulations
+    ] + additional_keep_keys
+
+    keep_before_merge = filter_globals.q_frame_keeps + [
+        'InIceDSTPulses',  # keep DST pulse masks
+        'IceTopDSTPulses',
+        'CalibratedWaveformRange',  # keep calibration info
+        'UncleanedInIcePulsesTimeRange',
+        'SplitUncleanedInIcePulses',
+        'SplitUncleanedInIcePulsesTimeRange',
+        'SplitUncleanedInIceDSTPulsesTimeRange',
+        'CalibrationErrata',
+        'SaturationWindows',
+        'InIceRawData',  # keep raw data for now
+        'IceTopRawData',
+    ] + simulation_keeps
+
+    tray.AddModule("Keep", "keep_before_merge",
+                   keys=keep_before_merge,
+                   If=is_Q)
+
+    # Second set of pre-keeps, conditional on filter content, based on the
+    # newly created QFilterMask: determine whether to apply the harsh keep
+    # for events that failed to pass any filter.
+    # Note: excluding the sdst_streams entries.
+    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckAll",
+                   FilterNameList=filter_globals.filter_streams,
+                   FilterResultName=filter_globals.qfilter_mask,
+                   DecisionName="PassedAnyFilter",
+                   DiscardEvents=False,
+                   Streams=[icetray.I3Frame.DAQ])
+
+    def do_save_just_superdst(frame):
+        if frame.Has("PassedAnyFilter"):
+            if not frame["PassedAnyFilter"].value:
+                return True  # <- Event failed to pass any filter.
+            else:
+                return False  # <- Event passed some filter
+        else:
+            icetray.logging.log_error("Failed to find key frame Bool!!")
+            return False
+
+    keep_only_superdsts = filter_globals.keep_nofilterpass + [
+        'PassedAnyFilter',
+        'InIceDSTPulses',
+        'IceTopDSTPulses',
+        'SplitUncleanedInIcePulses',
+        'SplitUncleanedInIcePulsesTimeRange',
+        'SplitUncleanedInIceDSTPulsesTimeRange',
+        'RNGState'] + simulation_keeps
+    tray.AddModule("Keep", "KeepOnlySuperDSTs",
+                   keys=keep_only_superdsts,
+                   If=do_save_just_superdst)
+
+    # Now clean up the events that not even the SuperDST filters passed on
+    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheckSDST",
+                   FilterNameList=filter_globals.sdst_streams,
+                   FilterResultName=filter_globals.qfilter_mask,
+                   DecisionName="PassedKeepSuperDSTOnly",
+                   DiscardEvents=False,
+                   Streams=[icetray.I3Frame.DAQ])
+
+    def dont_save_superdst(frame):
+        if frame.Has("PassedKeepSuperDSTOnly") and \
+                frame.Has("PassedAnyFilter"):
+            if frame["PassedAnyFilter"].value:
+                return False  # <- these passed a regular filter, keep them
+            elif not frame["PassedKeepSuperDSTOnly"].value:
+                return True  # <- Event failed to pass the SDST filter.
+            else:
+                return False  # <- Event passed some SDST filter
+        else:
+            icetray.logging.log_error("Failed to find key frame Bool!!")
+            return False
+
+    # backward compatibility
+    if 'L1_keep_untriggered' in cfg and cfg['L1_keep_untriggered']:
+        discard_substream_and_keys = False
+    else:
+        discard_substream_and_keys = True
+
+    if discard_substream_and_keys:
+        tray.AddModule(
+            "Keep", "KeepOnlyDSTs",
+            keys=filter_globals.keep_dst_only + [
+                "PassedAnyFilter",
+                "PassedKeepSuperDSTOnly",
+                filter_globals.eventheader] + additional_keep_keys,
+            If=dont_save_superdst)
+
+    # Frames should now contain only what is needed. Now flatten and write:
+    # squish P frames back into a single Q frame, one for each split.
+    tray.AddModule("KeepFromSubstream", "null_stream",
+                   StreamName=filter_globals.NullSplitter,
+                   KeepKeys=filter_globals.null_split_keeps)
+
+    in_ice_keeps = filter_globals.inice_split_keeps + \
+        filter_globals.onlinel2filter_keeps
+    in_ice_keeps = in_ice_keeps + [
+        'I3EventHeader',
+        'SplitUncleanedInIcePulses',
+        'SplitUncleanedInIcePulsesTimeRange',
+        'TriggerSplitterLaunchWindow',
+        'I3TriggerHierarchy',
+        'GCFilter_GCFilterMJD'] + additional_keep_keys
+    tray.AddModule("Keep", "inice_keeps",
+                   keys=in_ice_keeps,
+                   If=which_split(split_name=filter_globals.InIceSplitter))
+
+    tray.AddModule("KeepFromSubstream", "icetop_split_stream",
+                   StreamName=filter_globals.IceTopSplitter,
+                   KeepKeys=filter_globals.icetop_split_keeps)
+
+    # Apply the small keep list (SuperDST/SmallTrig/DST/FilterMask) for
+    # non-filter passers and remove the I3DAQData object for events not
+    # passing one of the 'filters_keeping_allraw'.
+    tray.AddModule("I3IcePickModule<FilterMaskFilter>", "filterMaskCheck",
+                   FilterNameList=filter_globals.filters_keeping_allraw,
+                   FilterResultName=filter_globals.qfilter_mask,
+                   DecisionName="PassedConventional",
+                   DiscardEvents=False,
+                   Streams=[icetray.I3Frame.DAQ])
+
+    # Clean out the raw data when not passing the conventional filter
+    def I3RawDataCleaner(frame):
+        if not (('PassedConventional' in frame and
+                 frame['PassedConventional'].value) or
+                ('SimTrimmer' in frame and
+                 frame['SimTrimmer'].value)):
+            frame.Delete('InIceRawData')
+            frame.Delete('IceTopRawData')
+
+    tray.AddModule(I3RawDataCleaner,
+                   "CleanErrataForConventional",
+                   Streams=[icetray.I3Frame.DAQ])
+
+    tray.AddModule("I3Writer", "EventWriter",
+                   filename=outfile,
+                   Streams=[icetray.I3Frame.DAQ,
+                            icetray.I3Frame.Physics,
+                            icetray.I3Frame.TrayInfo,
+                            icetray.I3Frame.Simulation,
+                            icetray.I3Frame.Stream('m'),
+                            icetray.I3Frame.Stream('M')])
+    tray.AddModule("TrashCan", "the can")
+    tray.Execute()
+    tray.Finish()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/steps/step_4_pass2_L2_py3_v4_3_0_icetray_v1_8_1.py b/steps/step_4_pass2_L2_py3_v4_3_0_icetray_v1_8_1.py
new file mode 100755
index 0000000..71a12fa
--- /dev/null
+++ b/steps/step_4_pass2_L2_py3_v4_3_0_icetray_v1_8_1.py
@@ -0,0 +1,6 @@
+#!/bin/sh /cvmfs/icecube.opensciencegrid.org/py3-v4.3.0/icetray-start
+#METAPROJECT icetray/v1.8.1
+from step_4_pass2_L2 import *
+
+if __name__ == '__main__':
+    main()
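+
+# Note: like the step_2 and step_3 wrappers above, this thin shim appears to
+# exist only to pin the cvmfs py3-v4.3.0 / icetray v1.8.1 environment via the
+# icetray-start shebang before delegating to the shared step_4_pass2_L2
+# implementation.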