diff --git a/tools/crbeam/.shed.yml b/tools/crbeam/.shed.yml new file mode 100644 index 00000000..689fbd2d --- /dev/null +++ b/tools/crbeam/.shed.yml @@ -0,0 +1,9 @@ +categories: +- Astronomy +description: CRbeam +homepage_url: null +long_description: CRbeam +name: crbeam_astro_tool +owner: astroteam +remote_repository_url: https://github.com/esg-epfl-apc/tools-astro/tree/main/tools +type: unrestricted diff --git a/tools/crbeam/CRbeam.py b/tools/crbeam/CRbeam.py new file mode 100644 index 00000000..6be66796 --- /dev/null +++ b/tools/crbeam/CRbeam.py @@ -0,0 +1,696 @@ +#!/usr/bin/env python +# coding: utf-8 + +# flake8: noqa + +import json +import os +import shutil + +from oda_api.json import CustomJSONEncoder + +# oda:usesResource oda:CRBeamS3 . +# oda:CRBeamS3 a oda:S3 . +# oda:CRBeamS3 oda:resourceBindingEnvVarName "S3_CREDENTIALS" . + +src_name = "NGC 1365" # http://odahub.io/ontology#AstrophysicalObject + +z_start = 0 # http://odahub.io/ontology#Float +Npart = 2000 # http://odahub.io/ontology#Integer ; oda:lower_limit 1 ; oda:upper_limit 100000 +particle_type = "gamma" # http://odahub.io/ontology#String ; oda:allowed_value "gamma","electron","proton" +Emax = 30 # http://odahub.io/ontology#Energy_TeV +Emin = 0.01 # http://odahub.io/ontology#Energy_TeV +EminSource = 1.0 # http://odahub.io/ontology#Energy_TeV +Gamma = 2.0 # http://odahub.io/ontology#Float +EGMF_fG = 10 # http://odahub.io/ontology#Float +lmaxEGMF_Mpc = 5 # http://odahub.io/ontology#Float +jet_half_size = 5.0 # http://odahub.io/ontology#degree +jet_direction = 0.0 # http://odahub.io/ontology#degree +psf = 1.0 # http://odahub.io/ontology#degree +window_size_RA = 2.0 # http://odahub.io/ontology#degree +window_size_DEC = 1.0 # http://odahub.io/ontology#degree +EBL = "Franceschini 2017" # http://odahub.io/ontology#String ; oda:allowed_value "Franceschini 2017","Stecker 2016 lower limit","Stecker 2016 upper limit","Inoue 2012 Baseline","Inoue 2012 lower limit","Inoue 2012 upper limit" + +_galaxy_wd = os.getcwd() + +with open("inputs.json", "r") as fd: + inp_dic = json.load(fd) +if "_data_product" in inp_dic.keys(): + inp_pdic = inp_dic["_data_product"] +else: + inp_pdic = inp_dic + +for vn, vv in inp_pdic.items(): + if vn != "_selector": + globals()[vn] = type(globals()[vn])(vv) + +get_ipython().run_cell_magic( # noqa: F821 + "bash", + "", + "if [ ! 
-f utils.py ]\nthen\n git clone https://gitlab.renkulab.io/astronomy/mmoda/crbeam.git tmp_src\n cp tmp_src/*.sh tmp_src/*.py ./\nfi\n", +) + +from astropy import units as u +from astropy.coordinates import SkyCoord +from astropy.nddata import StdDevUncertainty +from astropy.table import Table +from gammapy.maps import Map, MapAxis +from oda_api.api import ProgressReporter +from oda_api.data_products import ( + LightCurveDataProduct, + NumpyDataProduct, + ODAAstropyTable, + PictureProduct, +) +from specutils import Spectrum1D +from utils import find_redshift + +if z_start <= 0: + z_start = find_redshift(src_name) + +source = SkyCoord.from_name(src_name, frame="icrs", parse=False, cache=True) + +pr = ProgressReporter() +pr.report_progress( + stage="Initialization", + progress=0, + substage="spectra", + subprogress=30, + message="some message", +) + +import subprocess + +import light_curve as lc +import numpy as np +from crbeam import CRbeam +from matplotlib import pyplot as plt +from spec_plot import plot_spec + +EGMF = EGMF_fG * 1e-15 + +# internal units are eV +Emax *= 1e12 +Emin *= 1e12 +EminSource *= 1e12 + +background = { + "Franceschini 2017": 12, + "Stecker 2016 lower limit": 10, + "Stecker 2016 upper limit": 11, + "Inoue 2012 Baseline": 3, + "Inoue 2012 lower limit": 4, + "Inoue 2012 upper limit": 5, +}[EBL] + +prog = CRbeam( + z=z_start, + nparticles=Npart, + primary=particle_type, + emax=Emax, + emin=Emin, + emin_source=EminSource, + EGMF=EGMF, + lmaxEGMF=lmaxEGMF_Mpc, + background=background, +) +cmd = prog.command +cmd + +# Here is one way to call CRbeam without global cache +# prog.run(force_overwrite=False) +# Here we call CRbeam with global cache +# prog.run_cached(overwrite_local_cache=True) +# to clear s3 cache run the following command +# prog.remove_cache() + +# To report the progress we will split running CRbeam into 10 parts +n_steps = 10 +# Initialize multistep simulation +data_exists = not prog.start_multistage_run( + overwrite_local_cache=True, n_steps=n_steps +) +proceed = not data_exists + +if proceed: + for step in range(n_steps): + pr.report_progress( + stage="Running Simulation", progress=100 * step // n_steps + ) + proceed = prog.simulation_step() + # todo: report progress using rest API + pr.report_progress(stage="Running Simulation", progress=100) + +assert not proceed, "must be completed before this cell" +if not data_exists: + prog.end_multistep_run() + +def adjust_weights(mc_file, power): + # converting weights to mimic required injection spectrum power + header = "" + with open(mc_file, "rt") as lines: + for line in lines: + if len(line) > 0 and line[0] == "#": + header += line[1:].strip() + "\n" + else: + break + weight_col = 2 + E_src_col = 12 + data = np.loadtxt(mc_file) + weight = data[:, weight_col - 1] + E_src = data[:, E_src_col - 1] + orig_power = ( + prog.power_law + ) # CRBeam is always called with fixed power=1 to optimize cache usage + weight *= np.power(E_src / Emax, -(power - orig_power)) + output_file = f"{mc_file}_p{power}" + np.savetxt(output_file, data, header=header.strip(), fmt="%.6g") + return output_file + +# this code will not execute program since files already exist on server +# prog.run(force_overwrite=False) + +# one can upload cache explicitely by call +# prog.upload_cache() + +pr.report_progress(stage="Building Plots and Tables", progress=0) + +print(prog.output_path) +get_ipython().system("ls {prog.output_path}") # noqa: F821 + +mc_file = prog.output_path + "/photon" + +# how the data looks like +get_ipython().system("cat 
{mc_file} | awk 'NR<=5'") # noqa: F821 + +if Emax != EminSource: + mc_file = adjust_weights(mc_file, Gamma) + +# how the data looks like +get_ipython().system("cat {mc_file} | awk 'NR<=5'") # noqa: F821 + +#! . ./makeSpecE2.sh {mc_file} +# the code above will not work on MMODA as of Sep 30 2023 +# here is workaround +subprocess.run(["bash", "makeSpecE2.sh", mc_file]) + +# rotating the beam + +if EGMF > 0: + from mc_rotate import mc_rotate + + mc_rotated_file = mc_rotate( + mc_file, jet_half_size, jet_direction, psf_deg=psf + ) +else: + mc_rotated_file = mc_file +mc_rotated_file + +# calculating the energy spectrum +#! . ./makeSpecE2.sh {mc_rotated_file} +subprocess.run(["bash", "makeSpecE2.sh", mc_rotated_file]) + +# how the rotated data looks like +get_ipython().system("cat {mc_rotated_file} | awk 'NR<=5'") # noqa: F821 + +# reading the source distance in Mpc from the data file +T_Mpc = lc.get_distance_Mpc(mc_file) +T_Mpc + +# building the spectrum figure for total flux +spec_file = mc_file + ".spec" +spec_fig = plot_spec( + spec_file, + title="spectrum", + Emin=Emin, + Emax=Emax, + ext="png", + show=False, + logscale=True, +) +spec_image = PictureProduct.from_file(spec_fig) + +# building the spectrum figure for the flux within PSF +spec_rotated_file = mc_rotated_file + ".spec" +spec_rotated_fig = plot_spec( + spec_rotated_file, + title="spectrum", + Emin=Emin, + Emax=Emax, + ext="png", + show=False, + logscale=True, +) +spec_rotated_image = PictureProduct.from_file(spec_rotated_fig) + +spec_rotated_file + +lc_params = dict( + logscale=True, + max_t=-99, # 8760000, + n_points=30, + psf=psf, + min_n_particles=10, + cut_0=True, +) + +# building the light curve figure +if EGMF > 0: + light_curve_fig = lc.make_plot(mc_rotated_file, **lc_params) + light_curve_image = PictureProduct.from_file(light_curve_fig) +else: + # to avoid possible problems with absent figure + light_curve_image = PictureProduct.from_file(spec_fig) + +l_curve = None +if EGMF > 0: + delay, weights = lc.get_counts(mc_rotated_file, **lc_params) + t, f, N = lc.light_curve(delay, weights, **lc_params) + l_curve = LightCurveDataProduct.from_arrays( + times=t * 3600.0, # t is in hours + fluxes=f, + errors=f / np.sqrt(N), + time_format="unix", + ) + +def convert_to_ICRS(phi: np.array, theta: np.array, source: SkyCoord): + # prime system has z-axes pointing the source centered at observer + # it is obtained by rotation of source system by 180 degrees about x-axis + # prime system coords have suffix "prime" (') + # icrs system coords has empty suffix + # TODO: add param ic_jet_plane_direction: SkyCoord - gives plane which contains jet + # by definition in prime system the jet is in y'-z' plane and z' axes points from the source towards the observer + # for simplicity we will first assume that prime frame is oriented as ICRS frame (use SkyOffsetFrame with rotation=0) + + # coordinates of unit direction vector in prime system + + # direction of event arrival at prime system + z_prime = np.cos( + theta / 180.0 * np.pi + ) # (-1)*(-1) = 1 (rotating system and tacking oposite vector) + r_xy_prime = np.sin(theta / 180.0 * np.pi) + x_prime = -r_xy_prime * np.cos( + phi / 180.0 * np.pi + ) # (1)*(-1) = -1 (rotating system and tacking oposite vector) + y_prime = r_xy_prime * np.sin( + phi / 180.0 * np.pi + ) # (-1)*(-1) = 1 (rotating system and tacking oposite vector) + + print("x',y',z' =", x_prime, y_prime, z_prime) + + # angle between z and z' axes + delta1 = 90 * u.deg - source.dec + + print("source:", source.cartesian) + print("delta1 
= ", delta1) + + # rotating about y-axes + + x1 = x_prime * np.cos(delta1) + z_prime * np.sin(delta1) + y1 = y_prime + z1 = -x_prime * np.sin(delta1) + z_prime * np.cos(delta1) + + print("step 1: x,y,z =", x1, y1, z1) + + # rotation to -RA about z axis + delta2 = source.ra + x = x1 * np.cos(delta2) - y1 * np.sin(delta2) + y = x1 * np.sin(delta2) + y1 * np.cos(delta2) + z = z1 + + print("x,y,z =", x, y, z) + + event_coords = SkyCoord( + x=x, y=y, z=z, frame="icrs", representation_type="cartesian" + ).spherical + # print(event_coords.spherical) + return event_coords + +def LoadCubeTemplate( + mc_file: str, + source: SkyCoord, + Emax=1e15, + Emin=1e9, + bins_per_decade=20, + binsz=0.02, + theta_mult=1, + sec_component_mult=1, + remove_straight=False, + symmetric_split=0, +): + pass + + mc_data = np.loadtxt(mc_file) + E = mc_data[:, 0] + print(f"dataset energy: {np.min(E)} <= E/eV <= {np.max(E)}") + Theta = mc_data[:, 2] * theta_mult # theta_mult is used for debug only + print(f"dataset Theta: {np.min(Theta)} <= Theta <= {np.max(Theta)}") + # idx = np.where((E >= Emin) & (E<=Emax) & (Theta= Emin) & (E <= Emax) + if remove_straight: + condition &= Theta > 0 + + idx = np.where(condition)[0] + + print("filtered dataset length:", len(idx)) + + E = E[idx] + Theta = Theta[idx] + w = mc_data[:, 1][idx] + Phi = mc_data[:, 3][idx] + # t = mc_data[:,5][idx] + # t *= time_scale_hours + if sec_component_mult != 1: # sec_component_mult is used for debug only + print("WARNING: Sec component multiplicity:", sec_component_mult) + w[Theta > 0] *= sec_component_mult + + if len(idx) > 0: + # print(f'{np.min(t)} <= t/h <= {np.max(t)}') + print(f"{np.min(E)} <= E <= {np.max(E)}") + + energy_axis_name = "energy_true" + + energy_axis = MapAxis.from_energy_bounds( + Emin * u.eV, + Emax * u.eV, + nbin=bins_per_decade, + name=energy_axis_name, + per_decade=True, + ) + + m_cube = Map.create( + binsz=binsz, + width=(window_size_RA * u.deg, window_size_DEC * u.deg), + frame="icrs", + axes=[energy_axis], + skydir=SkyCoord(source), + ) + + print(m_cube.geom) + + if len(idx) > 0: + if symmetric_split > 1: + Theta = np.repeat(Theta, symmetric_split) + E = np.repeat(E, symmetric_split) + w = np.repeat(w, symmetric_split) / symmetric_split + Phi = np.random.random(len(Theta)) * 360 + + sc = convert_to_ICRS(Phi, Theta, source) + m_cube.fill_by_coord( + {"lat": sc.lat, "lon": sc.lon, energy_axis_name: E * u.eV}, + weights=w, + ) + + return m_cube + +def LoadTemplate4D( + mc_file: str, + source: SkyCoord, + redshift, + Emax=1e15, + Emin=1e9, + bins_per_decade=20, + binsz=0.02, + theta_mult=1, + max_time_quantile=1.0, + n_time_bins=100, + symmetric_split=0, +): + from astropy.cosmology import FlatLambdaCDM + + cosmo = FlatLambdaCDM( + H0=67.8 * u.km / u.s / u.Mpc, Tcmb0=2.725 * u.K, Om0=(1 - 0.692) + ) + time_scale_hours = float( + ((cosmo.age(0) - cosmo.age(z_start)) / u.h).decompose() + ) + + mc_data = np.loadtxt(mc_file) + E = mc_data[:, 0] + + print("dataset length:", len(mc_data)) + print(f"dataset energy: {np.min(E)} <= E/eV <= {np.max(E)}") + Theta = mc_data[:, 2] * theta_mult # theta_mult is used for debug only + print(f"dataset Theta: {np.min(Theta)} <= Theta <= {np.max(Theta)}") + # idx = np.where((E >= Emin) & (E<=Emax) & (Theta= Emin) & (E <= Emax))[0] + + print(f"Filters: {Emin} <= E/eV <= {Emax}") + print("filtered dataset length:", len(idx)) + + E = E[idx] + Theta = Theta[idx] + w = mc_data[:, 1][idx] + Phi = mc_data[:, 3][idx] + t = mc_data[:, 5][idx] + t *= time_scale_hours + min_time = np.min(t) + if 
max_time_quantile >= 1.0: + max_time = np.max(t) + else: + max_time = np.quantile(t, max_time_quantile) + + if max_time == min_time: + max_time = min_time + 1e-10 + + idx = np.where(t <= max_time)[0] + print(f"Filters: {Emin} <= E/eV <= {Emax}, t/h <= {max_time}") + print("filtered dataset length:", len(idx)) + + E = E[idx] + Theta = Theta[idx] + w = w[idx] + Phi = Phi[idx] + t = t[idx] + + energy_axis_name = "energy_true" + + energy_axis = MapAxis.from_energy_bounds( + Emin * u.eV, + Emax * u.eV, + nbin=bins_per_decade, + name=energy_axis_name, + per_decade=True, + ) + + from astropy.time import Time + + reference_time = Time(0, format="unix") + + delta = max_time - min_time + (min_time + delta * np.linspace(0, 1, n_time_bins + 1)) * u.h + # time_axis = TimeMapAxis.from_time_edges( + # time_min=time_edges[:-1], + # time_max=time_edges[1:], + # interp="lin", + # unit=u.h, + # name='time', + # reference_time=reference_time + # ) + + time_axis = MapAxis.from_bounds( + min_time, max_time, n_time_bins, name="time", unit=u.h + ) + + # time_axis = TimeMapAxis(time_edges[:-1], time_edges[1:], reference_time, name='time', interp='lin') + + # time_axis = TimeMapAxis.from_time_bounds(min_time, max_time, n_time_bins, unit=u.h, name='time') + + map4d = Map.create( + binsz=binsz, + width=(window_size_RA * u.deg, window_size_DEC * u.deg), + frame="icrs", + axes=[energy_axis, time_axis], + skydir=SkyCoord(source), + ) + + print(map4d.geom) + + if len(idx) > 0: + if symmetric_split > 1: + Theta = np.repeat(Theta, symmetric_split) + E = np.repeat(E, symmetric_split) + t = np.repeat(t, symmetric_split) + w = np.repeat(w, symmetric_split) / symmetric_split + Phi = np.random.random(len(Theta)) * 360 + + sc = convert_to_ICRS(Phi, Theta, source) + map4d.fill_by_coord( + { + "lat": sc.lat, + "lon": sc.lon, + energy_axis_name: E * u.eV, + "time": t * u.h, + }, + weights=w, + ) + + return map4d + +bins_per_decade = 20 + +symmetric_split = 0 if jet_direction != 0 else 100 + +map3d = LoadCubeTemplate( + mc_rotated_file, + source=source, + Emin=Emin, + Emax=Emax, + bins_per_decade=bins_per_decade, + symmetric_split=symmetric_split, +) + +map3d.write("map3d.fits", overwrite=True) + +map4d = LoadTemplate4D( + mc_rotated_file, + source=source, + redshift=z_start, + Emin=Emin, + Emax=Emax, + bins_per_decade=bins_per_decade, + symmetric_split=symmetric_split, +) +map4d.write("map4d.fits", overwrite=True) + +d = np.genfromtxt(spec_file) +ee = d[:, 0] +ff = d[:, 1] +ff_err = ff / np.sqrt(d[:, 2]) +plt.errorbar(ee, ff, yerr=ff_err, label="total flux") +d = np.genfromtxt(spec_rotated_file) +ee1 = d[:, 0] +ff1 = d[:, 1] +ff_err1 = ff1 / np.sqrt(d[:, 2]) +plt.errorbar(ee1, ff1, yerr=ff_err1, label="PSF flux") +plt.xscale("log") +plt.yscale("log") +plt.xlabel("Energy, eV") +plt.ylabel("$E^2dN/dE$, arbitrary units") +plt.legend(loc="lower right") +plt.savefig("Spectrum.png", format="png", bbox_inches="tight") + +bin_image = PictureProduct.from_file("Spectrum.png") + +data = [ee, ff, ff_err] +names = ("Energy", "Total_flux", "Total_flux_err") +table = ODAAstropyTable(Table(data, names=names)) +data = [ee, ff, ff_err] +names = ("Energy", "PSF_flux", "PSF_flux_err") +table1 = ODAAstropyTable(Table(data, names=names)) + +flux_unit = u.eV / u.cm / u.cm / u.s / u.sr +spec = Spectrum1D( + spectral_axis=ee * u.eV, + flux=ff * flux_unit, + uncertainty=StdDevUncertainty(ff_err), +) +spec.write("spec.fits", overwrite=True) +dp = NumpyDataProduct.from_fits_file("spec.fits") +spec_rotated = Spectrum1D( + spectral_axis=ee1 * u.eV, + flux=ff1 * 
flux_unit, + uncertainty=StdDevUncertainty(ff_err1), +) +spec_rotated.write("spec_rotated.fits", overwrite=True) +dp_rotated = NumpyDataProduct.from_fits_file("spec_rotated.fits") +map3d = NumpyDataProduct.from_fits_file("map3d.fits") +map4d = NumpyDataProduct.from_fits_file("map4d.fits") + +pr.report_progress(stage="Building Plots and Tables", progress=100) + +spectrum_png = bin_image # http://odahub.io/ontology#ODAPictureProduct +light_curve_png = ( + light_curve_image # http://odahub.io/ontology#ODAPictureProduct +) +total_spectrum_table = table # http://odahub.io/ontology#ODAAstropyTable +psf_spectrum_table = table1 # http://odahub.io/ontology#ODAAstropyTable +lc_result = l_curve # http://odahub.io/ontology#LightCurve +spectrum = dp # https://odahub.io/ontology/#Spectrum +spectrum_rotated = dp_rotated # https://odahub.io/ontology/#Spectrum +map3d = map3d # https://odahub.io/ontology/#Spectrum +map4d = map4d # https://odahub.io/ontology/#Spectrum + +# spec_png = spec_image # http://odahub.io/ontology#ODAPictureProduct +# spec_rotated_png = spec_rotated_image # http://odahub.io/ontology#ODAPictureProduct + +# output gathering +_galaxy_meta_data = {} +_oda_outs = [] +_oda_outs.append( + ("out_CRbeam_spectrum_png", "spectrum_png_galaxy.output", spectrum_png) +) +_oda_outs.append( + ( + "out_CRbeam_light_curve_png", + "light_curve_png_galaxy.output", + light_curve_png, + ) +) +_oda_outs.append( + ( + "out_CRbeam_total_spectrum_table", + "total_spectrum_table_galaxy.output", + total_spectrum_table, + ) +) +_oda_outs.append( + ( + "out_CRbeam_psf_spectrum_table", + "psf_spectrum_table_galaxy.output", + psf_spectrum_table, + ) +) +_oda_outs.append( + ("out_CRbeam_lc_result", "lc_result_galaxy.output", lc_result) +) + +for _outn, _outfn, _outv in _oda_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif getattr(_outv, "write_fits_file", None): + _outv.write_fits_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "fits"} + elif getattr(_outv, "write_file", None): + _outv.write_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd, cls=CustomJSONEncoder) + _galaxy_meta_data[_outn] = {"ext": "json"} +_simple_outs = [] +_simple_outs.append( + ("out_CRbeam_spectrum", "spectrum_galaxy.output", spectrum) +) +_simple_outs.append( + ( + "out_CRbeam_spectrum_rotated", + "spectrum_rotated_galaxy.output", + spectrum_rotated, + ) +) +_simple_outs.append(("out_CRbeam_map3d", "map3d_galaxy.output", map3d)) +_simple_outs.append(("out_CRbeam_map4d", "map4d_galaxy.output", map4d)) +_numpy_available = True + +for _outn, _outfn, _outv in _simple_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif _numpy_available and isinstance(_outv, np.ndarray): + with open(_galaxy_outfile_name, "wb") as fd: + np.savez(fd, _outv) + _galaxy_meta_data[_outn] = {"ext": "npz"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd) + _galaxy_meta_data[_outn] = {"ext": "expression.json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + json.dump(_galaxy_meta_data, fd) +print("*** Job finished successfully ***") diff --git a/tools/crbeam/Generate_events.py 
b/tools/crbeam/Generate_events.py new file mode 100644 index 00000000..01aa8a6a --- /dev/null +++ b/tools/crbeam/Generate_events.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python +# coding: utf-8 + +# flake8: noqa + +import json +import os +import shutil + +from oda_api.json import CustomJSONEncoder + +# oda:usesResource oda:CRBeamS3 . +# oda:CRBeamS3 a oda:S3 . +# oda:CRBeamS3 oda:resourceBindingEnvVarName "S3_CREDENTIALS" . + +src_name = "1ES 1215+303" # http://odahub.io/ontology#AstrophysicalObject + +z_start = 0.13 # http://odahub.io/ontology#Float +Npart = 10000 # http://odahub.io/ontology#Integer ; oda:lower_limit 1 ; oda:upper_limit 100000 +particle_type = "gamma" # http://odahub.io/ontology#String ; oda:allowed_value "gamma","electron","proton" +Emax = 50 # http://odahub.io/ontology#Energy_TeV +Emin = 0.01 # http://odahub.io/ontology#Energy_TeV +EminSource = 0.01 # http://odahub.io/ontology#Energy_TeV +Gamma = 2.0 # http://odahub.io/ontology#Float +EGMF_fG = 100 # http://odahub.io/ontology#Float +lmaxEGMF_Mpc = 5 # http://odahub.io/ontology#Float +jet_half_size = 180.0 # http://odahub.io/ontology#degree +jet_direction = 0.0 # http://odahub.io/ontology#degree +psf = 180.0 # http://odahub.io/ontology#degree +window_size_RA = 4.0 # http://odahub.io/ontology#degree +window_size_DEC = 4.0 # http://odahub.io/ontology#degree +EBL = "Franceschini 2017" # http://odahub.io/ontology#String ; oda:allowed_value "Franceschini 2017","Stecker 2016 lower limit","Stecker 2016 upper limit","Inoue 2012 Baseline","Inoue 2012 lower limit","Inoue 2012 upper limit" + +_galaxy_wd = os.getcwd() + +with open("inputs.json", "r") as fd: + inp_dic = json.load(fd) +if "_data_product" in inp_dic.keys(): + inp_pdic = inp_dic["_data_product"] +else: + inp_pdic = inp_dic + +for vn, vv in inp_pdic.items(): + if vn != "_selector": + globals()[vn] = type(globals()[vn])(vv) + +get_ipython().run_cell_magic( # noqa: F821 + "bash", + "", + "if [ ! 
-f utils.py ]\nthen\n git clone https://gitlab.renkulab.io/astronomy/mmoda/crbeam.git tmp_src\n cp tmp_src/*.sh tmp_src/*.py ./\nfi\n", +) + +from astropy.coordinates import SkyCoord +from astropy.table import Table +from oda_api.api import ProgressReporter +from oda_api.data_products import ODAAstropyTable +from utils import find_redshift + +if z_start <= 0: + z_start = find_redshift(src_name) + +source = SkyCoord.from_name(src_name, frame="icrs", parse=False, cache=True) + +pr = ProgressReporter() +pr.report_progress( + stage="Initialization", + progress=0, + substage="spectra", + subprogress=30, + message="some message", +) + +import light_curve as lc +import numpy as np +from crbeam import CRbeam + +EGMF = EGMF_fG * 1e-15 + +# internal units are eV +Emax *= 1e12 +Emin *= 1e12 +EminSource *= 1e12 + +background = { + "Franceschini 2017": 12, + "Stecker 2016 lower limit": 10, + "Stecker 2016 upper limit": 11, + "Inoue 2012 Baseline": 3, + "Inoue 2012 lower limit": 4, + "Inoue 2012 upper limit": 5, +}[EBL] + +prog = CRbeam( + z=z_start, + nparticles=Npart, + primary=particle_type, + emax=Emax, + emin=Emin, + emin_source=EminSource, + EGMF=EGMF, + lmaxEGMF=lmaxEGMF_Mpc, + background=background, +) +cmd = prog.command +cmd + +# Here is one way to call CRbeam without global cache +# prog.run(force_overwrite=False) +# Here we call CRbeam with global cache +# prog.run_cached(overwrite_local_cache=True) +# to clear s3 cache run the following command +# prog.remove_cache() + +# To report the progress we will split running CRbeam into 10 parts +n_steps = 10 +# Initialize multistep simulation +data_exists = not prog.start_multistage_run( + overwrite_local_cache=True, n_steps=n_steps +) +proceed = not data_exists + +if proceed: + for step in range(n_steps): + pr.report_progress( + stage="Running Simulation", progress=100 * step // n_steps + ) + proceed = prog.simulation_step() + # todo: report progress using rest API + pr.report_progress(stage="Running Simulation", progress=100) + +assert not proceed, "must be completed before this cell" +if not data_exists: + prog.end_multistep_run() + +def adjust_weights(mc_file, power): + # converting weights to mimic required injection spectrum power + header = "" + with open(mc_file, "rt") as lines: + for line in lines: + if len(line) > 0 and line[0] == "#": + header += line[1:].strip() + "\n" + else: + break + weight_col = 2 + E_src_col = 12 + data = np.loadtxt(mc_file) + weight = data[:, weight_col - 1] + E_src = data[:, E_src_col - 1] + orig_power = ( + prog.power_law + ) # CRBeam is always called with fixed power=1 to optimize cache usage + weight *= np.power(E_src / Emax, -(power - orig_power)) + output_file = f"{mc_file}_p{power}" + np.savetxt(output_file, data, header=header.strip(), fmt="%.6g") + return output_file + +# this code will not execute program since files already exist on server +# prog.run(force_overwrite=False) + +# one can upload cache explicitely by call +# prog.upload_cache() + +pr.report_progress(stage="Building Plots and Tables", progress=0) + +print(prog.output_path) +get_ipython().system("ls {prog.output_path}") # noqa: F821 + +mc_file = prog.output_path + "/photon" + +if Emax != EminSource: + mc_file = adjust_weights(mc_file, Gamma) + +# rotating the beam + +if EGMF > 0: + from mc_rotate import mc_rotate + + mc_rotated_file = mc_rotate( + mc_file, jet_half_size, jet_direction, psf_deg=psf + ) +else: + mc_rotated_file = mc_file +mc_rotated_file + +# reading the source distance in Mpc from the data file +T_Mpc = 
lc.get_distance_Mpc(mc_file) +T_Mpc + +from oda_api.data_products import ODAAstropyTable + +d = np.genfromtxt(mc_rotated_file, skip_header=3) +# E/eV,weight,Theta,Phi,dT_raw/T dT_calc/T,z_cascade_production,N_interactions,z_src,E_src/eV,z_prod +names = ( + "E/eV", + "weight", + "Theta", + "Phi", + "dT_raw/T", + "dT_calc/T", + "z_cascade_production", + "N_interactions", + "z_src", + "E_src/eV", + "z_prod", +) +res = ODAAstropyTable(Table(d, names=names)) + +pr.report_progress(stage="Building Plots and Tables", progress=100) + +Event_file = res # http://odahub.io/ontology#ODAAstropyTable + +# output gathering +_galaxy_meta_data = {} +_oda_outs = [] +_oda_outs.append( + ("out_Generate_events_Event_file", "Event_file_galaxy.output", Event_file) +) + +for _outn, _outfn, _outv in _oda_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif getattr(_outv, "write_fits_file", None): + _outv.write_fits_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "fits"} + elif getattr(_outv, "write_file", None): + _outv.write_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd, cls=CustomJSONEncoder) + _galaxy_meta_data[_outn] = {"ext": "json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + json.dump(_galaxy_meta_data, fd) +print("*** Job finished successfully ***") diff --git a/tools/crbeam/compute_new_column.py b/tools/crbeam/compute_new_column.py new file mode 100644 index 00000000..cdfd9f61 --- /dev/null +++ b/tools/crbeam/compute_new_column.py @@ -0,0 +1,80 @@ +#!/usr/bin/env python +# coding: utf-8 + +# flake8: noqa + +import json +import os +import shutil + +fn = "testfile.tsv" # oda:POSIXPath +new_column = "sum" +expression = "c1 + c2" +sep = "comma" + +_galaxy_wd = os.getcwd() + +with open("inputs.json", "r") as fd: + inp_dic = json.load(fd) +if "_data_product" in inp_dic.keys(): + inp_pdic = inp_dic["_data_product"] +else: + inp_pdic = inp_dic + +for vn, vv in inp_pdic.items(): + if vn != "_selector": + globals()[vn] = type(globals()[vn])(vv) + +import numpy as np +import pandas as pd + +if sep == "tab": + sep = "\t" +elif sep == "comma": + sep = "," + +df = pd.read_csv(fn, sep=sep, index_col=False) + +def filter_df(row): + for i, c in enumerate(df.columns): + globals()[f"c{i}"] = row[c] + + return eval(expression) + +df[new_column] = df.apply(filter_df, axis=1) + +df + +df.to_csv("outfile.tsv", sep=sep, index=False) + +outputfile = "outfile.tsv" # http://odahub.io/ontology#test + +# output gathering +_galaxy_meta_data = {} +_simple_outs = [] +_simple_outs.append( + ( + "out_compute_new_column_outputfile", + "outputfile_galaxy.output", + outputfile, + ) +) +_numpy_available = True + +for _outn, _outfn, _outv in _simple_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif _numpy_available and isinstance(_outv, np.ndarray): + with open(_galaxy_outfile_name, "wb") as fd: + np.savez(fd, _outv) + _galaxy_meta_data[_outn] = {"ext": "npz"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd) + _galaxy_meta_data[_outn] = {"ext": "expression.json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + 
json.dump(_galaxy_meta_data, fd)
+print("*** Job finished successfully ***")
diff --git a/tools/crbeam/crbeam_astro_tool.xml b/tools/crbeam/crbeam_astro_tool.xml
new file mode 100644
index 00000000..5799aa40
--- /dev/null
+++ b/tools/crbeam/crbeam_astro_tool.xml
@@ -0,0 +1,314 @@
+<!-- The markup of this 314-line Galaxy tool wrapper was lost to tag
+     stripping; only its text content survives. Recoverable pieces: the
+     requirements (crbeam, pyarrow, unzip, ipython, oda-api, matplotlib,
+     nbconvert, seaborn, minio, specutils, scipy, gammapy, astroquery,
+     astropy), the command
+     ipython '$__tool_directory__/${_data_product._selector}.py'
+     and the output filters keyed on _data_product['_selector'] values
+     CRbeam, Generate_events, model_CTA_events, histogram_column,
+     filter_table and compute_new_column. -->
\ No newline at end of file
diff --git a/tools/crbeam/filter_table.py b/tools/crbeam/filter_table.py
new file mode 100644
index 00000000..45344319
--- /dev/null
+++ b/tools/crbeam/filter_table.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# coding: utf-8
+
+# flake8: noqa
+
+import json
+import os
+import shutil
+
+fn = "testfile.tsv"  # oda:POSIXPath
+expression = "c1 > 1e3"
+sep = "comma"
+
+_galaxy_wd = os.getcwd()
+
+with open("inputs.json", "r") as fd:
+    inp_dic = json.load(fd)
+if "_data_product" in inp_dic.keys():
+    inp_pdic = inp_dic["_data_product"]
+else:
+    inp_pdic = inp_dic
+
+for vn, vv in inp_pdic.items():
+    if vn != "_selector":
+        globals()[vn] = type(globals()[vn])(vv)
+
+import numpy as np
+import pandas as pd
+
+if sep == "tab":
+    sep = "\t"
+elif sep == "comma":
+    sep = ","
+
+df = pd.read_csv(fn, sep=sep, index_col=False)
+
+def filter_df(row):
+    # binds c0, c1, ... to the values of "row"; when called with the whole
+    # DataFrame (as below), these are full columns and eval() returns a
+    # vectorized boolean mask
+    for i, c in enumerate(df.columns):
+        globals()[f"c{i}"] = row[c]
+
+    return eval(expression)
+
+df = df[filter_df(df)]
+
+df.to_csv("outfile.tsv", sep=sep, index=False)
+
+outputfile = "outfile.tsv"  # http://odahub.io/ontology#test
+
+# output gathering
+_galaxy_meta_data = {}
+_simple_outs = []
+_simple_outs.append(
+    ("out_filter_table_outputfile", "outputfile_galaxy.output", outputfile)
+)
+_numpy_available = True
+
+for _outn, _outfn, _outv in _simple_outs:
+    _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn)
+    if isinstance(_outv, str) and os.path.isfile(_outv):
+        shutil.move(_outv, _galaxy_outfile_name)
+        _galaxy_meta_data[_outn] = {"ext": "_sniff_"}
+    elif _numpy_available and isinstance(_outv, np.ndarray):
+        with open(_galaxy_outfile_name, "wb") as fd:
+            np.savez(fd, _outv)
+        _galaxy_meta_data[_outn] = {"ext": "npz"}
+    else:
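+        # fallback: values that are neither existing files nor numpy arrays
+        # are serialized as JSON so Galaxy can still sniff the datatype
+        with open(_galaxy_outfile_name, "w") as fd:
+            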
json.dump(_outv, fd) + _galaxy_meta_data[_outn] = {"ext": "expression.json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + json.dump(_galaxy_meta_data, fd) +print("*** Job finished successfully ***") diff --git a/tools/crbeam/histogram_column.py b/tools/crbeam/histogram_column.py new file mode 100644 index 00000000..1c56a99d --- /dev/null +++ b/tools/crbeam/histogram_column.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python +# coding: utf-8 + +# flake8: noqa + +import json +import os +import shutil + +from oda_api.json import CustomJSONEncoder + +fn = "testfile.tsv" # oda:POSIXPath +sep = "comma" +column = "c0" +weights = "c1" +binning = "logarithmic" # http://odahub.io/ontology#String ; oda:allowed_value "linear","logarithmic" +minval = 1e10 +maxval = 1e13 +nbins = 15 +xlabel = "Energy, eV" +ylabel = "Ncounts" + +_galaxy_wd = os.getcwd() + +with open("inputs.json", "r") as fd: + inp_dic = json.load(fd) +if "_data_product" in inp_dic.keys(): + inp_pdic = inp_dic["_data_product"] +else: + inp_pdic = inp_dic + +for vn, vv in inp_pdic.items(): + if vn != "_selector": + globals()[vn] = type(globals()[vn])(vv) + +import numpy as np +import pandas as pd + +if sep == "tab": + sep = "\t" +elif sep == "comma": + sep = "," + +df = pd.read_csv(fn, sep=sep, index_col=False) + +from numpy import log10 + +if binning == "linear": + bins = np.linspace(minval, maxval, nbins + 1) +else: + bins = np.logspace(log10(minval), log10(maxval), nbins + 1) +bins + +import matplotlib.pyplot as plt + +for i, c in enumerate(df.columns): + if column == f"c{i}": + colname = c + elif weights == f"c{i}": + weightname = c +print(colname, weightname) + +plt.figure() +h = plt.hist(df[colname], weights=df[weightname], bins=bins) + +if binning == "logarithmic": + plt.xscale("log") + plt.yscale("log") +plt.xlabel(xlabel) +plt.ylabel(ylabel) +plt.savefig("Histogram.png", format="png", dpi=150) +hist_counts = h[0] +hist_bins = h[1] +hist_mins = hist_bins[:-1] +hist_maxs = hist_bins[1:] + +from astropy.table import Table +from oda_api.data_products import ODAAstropyTable, PictureProduct + +names = ("bins_min", "bins_max", "counts") +res = ODAAstropyTable(Table([hist_mins, hist_maxs, hist_counts], names=names)) + +plot = PictureProduct.from_file("Histogram.png") + +histogram_data = res # http://odahub.io/ontology#ODAAstropyTable +histogram_picture = plot # http://odahub.io/ontology#ODAPictureProduct + +# output gathering +_galaxy_meta_data = {} +_oda_outs = [] +_oda_outs.append( + ( + "out_histogram_column_histogram_data", + "histogram_data_galaxy.output", + histogram_data, + ) +) +_oda_outs.append( + ( + "out_histogram_column_histogram_picture", + "histogram_picture_galaxy.output", + histogram_picture, + ) +) + +for _outn, _outfn, _outv in _oda_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif getattr(_outv, "write_fits_file", None): + _outv.write_fits_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "fits"} + elif getattr(_outv, "write_file", None): + _outv.write_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd, cls=CustomJSONEncoder) + _galaxy_meta_data[_outn] = {"ext": "json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + json.dump(_galaxy_meta_data, fd) +print("*** Job finished successfully ***") diff --git 
a/tools/crbeam/model_CTA_events.py b/tools/crbeam/model_CTA_events.py new file mode 100644 index 00000000..0cba905c --- /dev/null +++ b/tools/crbeam/model_CTA_events.py @@ -0,0 +1,522 @@ +#!/usr/bin/env python +# coding: utf-8 + +# flake8: noqa + +import json +import os +import shutil +from pathlib import Path + +import astropy.units as u +import matplotlib.pyplot as plt +import numpy as np +from astropy import units as u +from astropy.coordinates import SkyCoord +from astropy.io import fits +from gammapy.data import ( + FixedPointingInfo, + Observation, + PointingMode, + observatory_locations, +) +from gammapy.datasets import MapDataset, MapDatasetEventSampler +from gammapy.irf import load_irf_dict_from_file +from gammapy.makers import MapDatasetMaker +from gammapy.maps import Map, MapAxis, WcsGeom +from gammapy.modeling.models import ( + FoVBackgroundModel, + Models, + SkyModel, + TemplateSpatialModel, +) +from oda_api.data_products import NumpyDataProduct, PictureProduct +from oda_api.json import CustomJSONEncoder +from utils import find_redshift + +src_name = "Mrk 501" # http://odahub.io/ontology#AstrophysicalObject +z_start = 0 # http://odahub.io/ontology#Float +Npart = 5000 # http://odahub.io/ontology#Integer ; oda:lower_limit 1 ; oda:upper_limit 100000 +particle_type = "gamma" # http://odahub.io/ontology#String ; oda:allowed_value "gamma","electron","proton" +Emax = 30 # http://odahub.io/ontology#Energy_TeV +Emin = 0.1 # http://odahub.io/ontology#Energy_TeV +EminSource = 1.0 # http://odahub.io/ontology#Energy_TeV +Gamma = 2.0 # http://odahub.io/ontology#Float +EGMF_fG = 10 # http://odahub.io/ontology#Float +lmaxEGMF_Mpc = 5 # http://odahub.io/ontology#Float +jet_half_size = 5.0 # http://odahub.io/ontology#degree +jet_direction = 0.0 # http://odahub.io/ontology#degree +window_size_RA = 2.0 # http://odahub.io/ontology#degree +window_size_DEC = 1.0 # http://odahub.io/ontology#degree +livetime = 0.1 # http://odahub.io/ontology#TimeIntervalDays +EBL = "Franceschini 2017" # http://odahub.io/ontology#String ; oda:allowed_value "Franceschini 2017","Stecker 2016 lower limit","Stecker 2016 upper limit","Inoue 2012 Baseline","Inoue 2012 lower limit","Inoue 2012 upper limit" + +_galaxy_wd = os.getcwd() + +with open("inputs.json", "r") as fd: + inp_dic = json.load(fd) +if "_data_product" in inp_dic.keys(): + inp_pdic = inp_dic["_data_product"] +else: + inp_pdic = inp_dic + +for vn, vv in inp_pdic.items(): + if vn != "_selector": + globals()[vn] = type(globals()[vn])(vv) + +livetime = livetime * u.hr * 24 + +if z_start <= 0: + z_start = find_redshift(src_name) + +source = SkyCoord.from_name(src_name, frame="icrs", parse=False, cache=True) + +z_start, source + +import subprocess + +import numpy as np +from crbeam import CRbeam +from matplotlib import pyplot as plt + +EGMF = EGMF_fG * 1e-15 + +background = { + "Franceschini 2017": 12, + "Stecker 2016 lower limit": 10, + "Stecker 2016 upper limit": 11, + "Inoue 2012 Baseline": 3, + "Inoue 2012 lower limit": 4, + "Inoue 2012 upper limit": 5, +}[EBL] + +prog = CRbeam( + z=z_start, + nparticles=Npart, + primary=particle_type, + emax=Emax * 1e12, + emin=Emin * 1e12, + emin_source=EminSource * 1e12, + EGMF=EGMF, + lmaxEGMF=lmaxEGMF_Mpc, + background=background, +) +cmd = prog.command +cmd + +n_steps = 10 +# Initialize multistep simulation +data_exists = not prog.start_multistage_run( + overwrite_local_cache=True, n_steps=n_steps +) +proceed = not data_exists + +if proceed: + for step in range(n_steps): + print(f"running simulation {100 * step // 
n_steps}%", prog.output_dir) + proceed = prog.simulation_step() + # todo: report progress using rest API + print(f"running simulation 100%", prog.output_dir) + +assert not proceed, "must be completed before this cell" +if not data_exists: + prog.end_multistep_run() + +def adjust_weights(mc_file, power): + # converting weights to mimic required injection spectrum power + header = "" + with open(mc_file, "rt") as lines: + for line in lines: + if len(line) > 0 and line[0] == "#": + header += line[1:].strip() + "\n" + else: + break + weight_col = 2 + E_src_col = 12 + data = np.loadtxt(mc_file) + weight = data[:, weight_col - 1] + E_src = data[:, E_src_col - 1] + orig_power = ( + prog.power_law + ) # CRBeam is always called with fixed power=1 to optimize cache usage + weight *= np.power(E_src / Emax, -(power - orig_power)) + output_file = f"{mc_file}_p{power}" + np.savetxt(output_file, data, header=header.strip(), fmt="%.6g") + return output_file + +mc_file = prog.output_path + "/photon" +if Emax != EminSource: + mc_file = adjust_weights(mc_file, Gamma) + +# rotating the beam + +if EGMF > 0: + from mc_rotate import mc_rotate + + mc_rotated_file = mc_rotate(mc_file, jet_half_size, jet_direction) +else: + mc_rotated_file = mc_file +mc_rotated_file + +selected_n_bins_per_decade = 20 # n bins per decade +n_events_reduction_factor = 1 # suppress flux factor +n_models_suppress_factor = ( + 1 # exclude only every n_models_suppress_factor-th energy bin +) +theta_mult = 1 # manually increase deflection angle +max_rel_energy_error = 3 + +def convert_to_ICRS(phi: np.array, theta: np.array, source: SkyCoord): + # prime system has z-axes pointing the source centered at observer + # it is obtained by rotation of source system by 180 degrees about x-axis + # prime system coords have suffix "prime" (') + # icrs system coords has empty suffix + # TODO: add param ic_jet_plane_direction: SkyCoord - gives plane which contains jet + # by definition in prime system the jet is in y'-z' plane and z' axes points from the source towards the observer + # for simplicity we will first assume that prime frame is oriented as ICRS frame (use SkyOffsetFrame with rotation=0) + + # coordinates of unit direction vector in prime system + + # direction of event arrival at prime system + z_prime = np.cos( + theta / 180.0 * np.pi + ) # (-1)*(-1) = 1 (rotating system and tacking oposite vector) + r_xy_prime = np.sin(theta / 180.0 * np.pi) + x_prime = -r_xy_prime * np.cos( + phi / 180.0 * np.pi + ) # (1)*(-1) = -1 (rotating system and tacking oposite vector) + y_prime = r_xy_prime * np.sin( + phi / 180.0 * np.pi + ) # (-1)*(-1) = 1 (rotating system and tacking oposite vector) + + print("x',y',z' =", x_prime, y_prime, z_prime) + + # angle between z and z' axes + delta1 = 90 * u.deg - source.dec + + print("source:", source.cartesian) + print("delta1 = ", delta1) + + # rotating about y-axes + + x1 = x_prime * np.cos(delta1) + z_prime * np.sin(delta1) + y1 = y_prime + z1 = -x_prime * np.sin(delta1) + z_prime * np.cos(delta1) + + print("step 1: x,y,z =", x1, y1, z1) + + # rotation to -RA about z axis + delta2 = source.ra + x = x1 * np.cos(delta2) - y1 * np.sin(delta2) + y = x1 * np.sin(delta2) + y1 * np.cos(delta2) + z = z1 + + print("x,y,z =", x, y, z) + + event_coords = SkyCoord( + x=x, y=y, z=z, frame="icrs", representation_type="cartesian" + ).spherical + # print(event_coords.spherical) + return event_coords + +def LoadCubeTemplate( + mc_file: str, + source: SkyCoord, + redshift, + Emax=1e3, + Emin=1e-3, + bins_per_decade=20, + 
binsz=0.02, + theta_mult=1, +): + from astropy.cosmology import FlatLambdaCDM + + cosmo = FlatLambdaCDM( + H0=67.8 * u.km / u.s / u.Mpc, Tcmb0=2.725 * u.K, Om0=(1 - 0.692) + ) + time_scale_hours = float( + ((cosmo.age(0) - cosmo.age(z_start)) / u.h).decompose() + ) + + mc_data = np.loadtxt(mc_file) + E = mc_data[:, 0] * 1e-12 + print(f"dataset energy: {np.min(E)} <= E <= {np.max(E)}") + Theta = mc_data[:, 2] * theta_mult # theta_mult is used for debug only + print(f"dataset Theta: {np.min(Theta)} <= Theta <= {np.max(Theta)}") + # idx = np.where((E >= Emin) & (E<=Emax) & (Theta= Emin) & (E <= Emax))[0] + + print("filtered dataset length:", len(idx)) + + E = E[idx] + Theta = Theta[idx] + w = mc_data[:, 1][idx] + Phi = mc_data[:, 3][idx] + t = mc_data[:, 5][idx] + t *= time_scale_hours + + if len(idx) > 0: + print(f"{np.min(t)} <= t/h <= {np.max(t)}") + print(f"{np.min(E)} <= E/TeV <= {np.max(E)}") + + energy_axis = MapAxis.from_energy_bounds( + Emin * u.TeV, + Emax * u.TeV, + nbin=bins_per_decade, + name="energy", + per_decade=True, + ) + + m_cube = Map.create( + binsz=binsz, + width=(window_size_RA * u.deg, window_size_DEC * u.deg), + frame="icrs", + axes=[energy_axis], + skydir=SkyCoord(source), + ) + + print(m_cube.geom) + + if len(idx) > 0: + sc = convert_to_ICRS(Phi, Theta, source) + m_cube.fill_by_coord( + {"lat": sc.lat, "lon": sc.lon, "energy": E * u.TeV}, weights=w + ) + + return m_cube + +source + +# telescope is pointing at a fixed position in ICRS for the observation +pointing = FixedPointingInfo(fixed_icrs=source, mode=PointingMode.POINTING) + +location = observatory_locations["cta_south"] + +# irfs = load_irf_dict_from_file(path / irf_filename) +filename = "data/Prod5-North-20deg-AverageAz-4LSTs09MSTs.180000s-v0.1.fits.gz" +irfs = load_irf_dict_from_file(filename) + +observation = Observation.create( + obs_id=1001, + pointing=pointing, + livetime=livetime, + irfs=irfs, + location=location, +) +print(observation) + +energy_axis = MapAxis.from_energy_bounds( + max_rel_energy_error * Emin * u.TeV, + Emax * u.TeV, + nbin=selected_n_bins_per_decade, + per_decade=True, +) +energy_axis_true = MapAxis.from_energy_bounds( + Emin * u.TeV, + max_rel_energy_error * Emax * u.TeV, + nbin=selected_n_bins_per_decade, + per_decade=True, + name="energy_true", +) +migra_axis = MapAxis.from_bounds( + 0.5, 2, nbin=150, node_type="edges", name="migra" +) + +geom = WcsGeom.create( + skydir=pointing.fixed_icrs, + width=(2, 2), + binsz=0.02, + frame="icrs", + axes=[energy_axis], +) + +cube_map = LoadCubeTemplate( + mc_rotated_file, + source=source, + redshift=z_start, + theta_mult=theta_mult, + Emin=Emin, + Emax=Emax, + bins_per_decade=selected_n_bins_per_decade, +) +cube_map.write("3d.fits", overwrite=True) + +mid_energy = cube_map.data.shape[0] // 2 +map0 = cube_map.slice_by_idx({"energy": mid_energy}) +map0.plot() +plt.show() +# print(map0) + +empty = MapDataset.create( + geom, + energy_axis_true=energy_axis_true, + migra_axis=migra_axis, + name="my-dataset", +) +maker = MapDatasetMaker(selection=["exposure", "background", "psf", "edisp"]) +dataset = maker.run(empty, observation) + +Path("event_sampling").mkdir(exist_ok=True) +dataset.write("./event_sampling/dataset.fits", overwrite=True) + +def GetBinSpectralModel(E, bins_per_decade=20, norm=1): + amplitude = 1e-12 * u.Unit("cm-2 s-1") * norm + from gammapy.modeling.models import GaussianSpectralModel + + sigma = (10 ** (1 / bins_per_decade) - 1) * E + return GaussianSpectralModel(mean=E, sigma=sigma, amplitude=amplitude) + +spec = 
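+# Each energy bin of the template cube becomes a quasi-monochromatic sky
+# model below: a narrow Gaussian line (GetBinSpectralModel) carrying the
+# spatial slice of the cube for that bin. A minimal sketch of what a single
+# bin model evaluates to (the 1 TeV test energy is only an illustration):
+#
+#   test_model = GetBinSpectralModel(1 * u.TeV, bins_per_decade=20, norm=1.0)
+#   print(test_model(1 * u.TeV))  # differential flux dN/dE at the line centre
+
+spec = 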
cube_map.get_spectrum() +spec + +spec.plot() # this plot shows dN/dE * E (below we check this) + +spec.data.shape, spec.data[spec.data.shape[0] // 2, 0, 0] + +# ### Let us build spectrum manually and compare with output of cube_map.get_spectrum() + +subprocess.run(["bash", "makeSpecE2.sh", mc_rotated_file]) + +spec_file = mc_rotated_file + ".spec" +spec_data = np.loadtxt(spec_file) +E = spec_data[:, 0] +fluxE = spec_data[:, 1] / E # convert dN/dE * E^2 to dN/dE * E +plt.scatter(np.log10(E) - 12, np.log10(fluxE)) +plt.xlabel("Log(E/TeV)") +plt.ylabel("Log(dN/dE * E)") + +energy_bins = cube_map.geom.axes["energy"].center +len(energy_bins), float(np.max(energy_bins) / u.TeV) + +int_bin_flux = ( + spec.data.flatten() +) # we don't have to multiply by energy_bins /u.TeV since spectrum is in already multiplied by E (see above) +int_bin_flux /= ( + Npart + / 200000 + * np.max(int_bin_flux) + * n_events_reduction_factor + * 20 + / len(energy_bins) +) # roughly 100 events +int_bin_flux + +bin_models = [] +for i, (flux, E) in enumerate(zip(int_bin_flux, energy_bins)): + if n_models_suppress_factor > 1 and i % n_models_suppress_factor != 0: + continue + if flux == 0: + continue + spectral_model_delta = GetBinSpectralModel( + E, norm=flux + ) # normalizing here + spacial_template_model = TemplateSpatialModel( + cube_map.slice_by_idx({"energy": i}), + filename=f"cube_bin{i}.fit", + normalize=True, + ) + sky_bin_model = SkyModel( + spectral_model=spectral_model_delta, + spatial_model=spacial_template_model, + name=f"bin_{i}", + ) + bin_models.append(sky_bin_model) + +bkg_model = FoVBackgroundModel(dataset_name="my-dataset") +models = Models(bin_models + [bkg_model]) +file_model = "./event_sampling/cube.yaml" +models.write(file_model, overwrite=True) + +dataset.models = models +print(dataset.models) + +sampler = MapDatasetEventSampler(random_state=0) +events = sampler.run(dataset, observation) + +print(f"Source events: {(events.table['MC_ID'] > 0).sum()}") +print(f"Background events: {(events.table['MC_ID'] == 0).sum()}") + +for i in range(1, len(bin_models) + 1): + n = (events.table["MC_ID"] == i).sum() + if n > 1: + print(f"\tmodel {i}: {n} events") + +events.select_offset([0, 1] * u.deg).peek() +plt.savefig("events.png", format="png", bbox_inches="tight") +plt.show() + +events.table + +print(f"Save events ...") +primary_hdu = fits.PrimaryHDU() +hdu_evt = fits.BinTableHDU(events.table) +hdu_gti = fits.BinTableHDU(dataset.gti.table, name="GTI") +hdu_all = fits.HDUList([primary_hdu, hdu_evt, hdu_gti]) +hdu_all.writeto(f"./events.fits", overwrite=True) +#################### +hdul = fits.open("events.fits") + +events.table.meta + +events_fits = NumpyDataProduct.from_fits_file("events.fits") +events_summary = PictureProduct.from_file("events.png") + +events_summary_png = ( + events_summary # http://odahub.io/ontology#ODAPictureProduct +) +events_fits_fits = events_fits # https://odahub.io/ontology/#Spectrum + +# output gathering +_galaxy_meta_data = {} +_oda_outs = [] +_oda_outs.append( + ( + "out_model_CTA_events_events_summary_png", + "events_summary_png_galaxy.output", + events_summary_png, + ) +) + +for _outn, _outfn, _outv in _oda_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif getattr(_outv, "write_fits_file", None): + _outv.write_fits_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "fits"} + elif getattr(_outv, 
"write_file", None): + _outv.write_file(_galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd, cls=CustomJSONEncoder) + _galaxy_meta_data[_outn] = {"ext": "json"} +_simple_outs = [] +_simple_outs.append( + ( + "out_model_CTA_events_events_fits_fits", + "events_fits_fits_galaxy.output", + events_fits_fits, + ) +) +_numpy_available = True + +for _outn, _outfn, _outv in _simple_outs: + _galaxy_outfile_name = os.path.join(_galaxy_wd, _outfn) + if isinstance(_outv, str) and os.path.isfile(_outv): + shutil.move(_outv, _galaxy_outfile_name) + _galaxy_meta_data[_outn] = {"ext": "_sniff_"} + elif _numpy_available and isinstance(_outv, np.ndarray): + with open(_galaxy_outfile_name, "wb") as fd: + np.savez(fd, _outv) + _galaxy_meta_data[_outn] = {"ext": "npz"} + else: + with open(_galaxy_outfile_name, "w") as fd: + json.dump(_outv, fd) + _galaxy_meta_data[_outn] = {"ext": "expression.json"} + +with open(os.path.join(_galaxy_wd, "galaxy.json"), "w") as fd: + json.dump(_galaxy_meta_data, fd) +print("*** Job finished successfully ***")