Skip to content
Permalink

Comparing changes

Choose two branches to see what’s changed or to start a new pull request. If you need to, you can also compare across forks or learn more about diff comparisons.

Open a pull request

Create a new pull request by comparing changes across two branches. If you need to, you can also compare across forks. Learn more about diff comparisons here.
base repository: threeML/threeML-analysis-workshop
Failed to load repositories. Confirm that selected base ref is valid, then try again.
Loading
base: master
Choose a base ref
...
head repository: henrikef/threeML-analysis-workshop
Failed to load repositories. Confirm that selected head ref is valid, then try again.
Loading
compare: master
Choose a head ref
Can’t automatically merge. Don’t worry, you can still create the pull request.
  • 10 commits
  • 5 files changed
  • 4 contributors

Commits on May 14, 2019

  1. Copy the full SHA
    f245ab4 View commit details
  2. install naima

    henrikef committed May 14, 2019
    Copy the full SHA
    8cdb2e2 View commit details

Commits on May 15, 2019

  1. Copy the full SHA
    3d236ec View commit details
  2. moved to own directory

    cbrisboi committed May 15, 2019
    Copy the full SHA
    7b6aba6 View commit details
  3. Merge pull request #1 from cbrisboi/master

    added crab fitting script for workshop
    henrikef authored May 15, 2019
    Copy the full SHA
    0781bfa View commit details
  4. Merge pull request #2 from henrikef/crab_fit

    Crab fit
    henrikef authored May 15, 2019
    Copy the full SHA
    6ed0c85 View commit details
  5. Update README.md

    instructions for scripts
    henrikef authored May 15, 2019
    Copy the full SHA
    e018301 View commit details
  6. Updated comments

    henrikef committed May 15, 2019
    Copy the full SHA
    6f3da7e View commit details
  7. Copy the full SHA
    dc7640b View commit details
  8. Update README.md

    henrikef authored May 15, 2019
    Copy the full SHA
    75b5e69 View commit details
Showing with 124 additions and 22 deletions.
  1. +18 −3 README.md
  2. +91 −0 hawc_fit/crab_fit_logparabola.py
  3. +2 −0 install_everything.sh
  4. +2 −0 install_from_conda.sh
  5. +11 −19 joint_fit_example/example_joint.py
21 changes: 18 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# About threeML

See https://threeml.readthedocs.io/ and https://astromodels.readthedocs.io/

# Prerequisites

Installation instructions here should work on MacOS and linux.
@@ -39,9 +43,20 @@ Inside your conda environment, call

cd ~

# Getting the data

HAWC data and detector response file can be downloaded into the `data` directory using the script provided: `get_hawc_data.sh`

You will get access to a google drive with VERITAS data, please download it manually and put it in the same directory.

Fermi-LAT data is downloaded automatically by the plugin.

# HAWC Crab example

# Crab example
cd hawc_fit
python crab_fit_logparabola.py

## Getting the data
# Joint Crab example

## Example analysis
cd joint_fit_example
python example_joint.py
91 changes: 91 additions & 0 deletions hawc_fit/crab_fit_logparabola.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
# Fit a log-parabola spectrum to the public HAWC Crab dataset using 3ML and
# the HAL (HAWC Accelerated Likelihood) plugin, then save diagnostic plots
# and the fit results to disk.
#
# NOTE(review): `u` (astropy.units) and the 3ML classes (Log_parabola,
# PointSource, Model, DataList, JointLikelihood, plot_spectra) come into
# scope via `from threeML import *`.
from hawc_hal import HAL, HealpixConeROI
import matplotlib.pyplot as plt
from threeML import *

# Define the ROI: a cone centered on the Crab Nebula (J2000 coordinates,
# degrees). Data are used within `data_radius`; the model is evaluated out
# to the larger `model_radius` so sources near the edge are handled correctly.
ra_crab, dec_crab = 83.63, 22.02
data_radius = 3.0
model_radius = 8.0

roi = HealpixConeROI(data_radius=data_radius,
                     model_radius=model_radius,
                     ra=ra_crab,
                     dec=dec_crab)

# Instance the plugin: 507 days of HAWC data binned into 9 analysis bins,
# with the matching detector-response file.
maptree = "../data/HAWC_9bin_507days_crab_data.hd5"
response = "../data/HAWC_9bin_507days_crab_response.hd5"

hawc = HAL("HAWC",
           maptree,
           response,
           roi,
           flat_sky_pixels_size=0.1)

# Use from bin 1 to bin 9 (all analysis bins).
hawc.set_active_measurements(1, 9)

# Display information about the data loaded and the ROI.
hawc.display()

# Look at the data (smoothed counts map stacked over the active bins).
fig = hawc.display_stacked_image(smoothing_kernel_sigma=0.17)

# Save to file.
fig.savefig("public_crab_logParabola_stacked_image.png")


# Define the model: a single point source at the Crab position with a
# log-parabola spectral shape.
spectrum = Log_parabola()
source = PointSource("crab", ra=ra_crab, dec=dec_crab, spectral_shape=spectrum)

# Pivot (reference) energy; fixed, since it is degenerate with the norm K.
spectrum.piv = 7 * u.TeV
spectrum.piv.fix = True

# Differential flux normalization at the pivot energy. Units are attached
# explicitly here (TeV^-1 cm^-2 s^-1); without units, 3ML assumes keV.
spectrum.K = 1e-14 / (u.TeV * u.cm ** 2 * u.s)
spectrum.K.bounds = (1e-35, 1e-10) / (u.TeV * u.cm ** 2 * u.s)  # wide, generous bounds


spectrum.alpha = -2.5  # log-parabola alpha (spectral index at the pivot)
spectrum.alpha.bounds = (-4., 2.)

spectrum.beta = 0  # log-parabola beta (curvature); 0 = pure power law
spectrum.beta.bounds = (-4., 2.)

model = Model(source)

data = DataList(hawc)

# Maximum-likelihood fit with MINUIT; returns best-fit parameters and the
# per-plugin log-likelihood values.
jl = JointLikelihood(model, data, verbose=False)
jl.set_minimizer("minuit")
param_df, like_df = jl.fit()

# Persist results: FITS file with the full fit results, YAML with the
# optimized model (reloadable by astromodels).
results=jl.results
results.write_to("crab_lp_public_results.fits", overwrite=True)
results.optimized_model.save("crab_fit.yml", overwrite=True)

# See the model in counts space and the residuals.
fig = hawc.display_spectrum()

# Save it to file.
fig.savefig("public_crab_logParabola_residuals.png")

# See the spectrum fit (E^2 dN/dE with uncertainty band from the fit).
fig = plot_spectra(jl.results,
                   ene_min=1.0,
                   ene_max=37,
                   num_ene=50,
                   energy_unit='TeV',
                   flux_unit='TeV/(s cm2)')
plt.xlim(0.8,100)
plt.ylabel(r"$E^2\,dN/dE$ [TeV cm$^{-2}$ s$^{-1}$]")
plt.xlabel("Energy [TeV]")
fig.savefig("public_crab_fit_spectrum.png")

# Look at the different energy planes (the columns are model, data, residuals).
fig = hawc.display_fit(smoothing_kernel_sigma=0.3,display_colorbar=True)
fig.savefig("public_crab_fit_planes.png")

# Compute TS (test statistic of the "crab" source against the null model).
TS = jl.compute_TS("crab", like_df)
print(TS)
2 changes: 2 additions & 0 deletions install_everything.sh
Original file line number Diff line number Diff line change
@@ -52,6 +52,8 @@ source activate $ENVIRONMENT_NAME
#This version of numpy may not be available via conda.
pip install numpy==1.15.3

pip install naima

# Install root_numpy making sure it is built against the installed version of ROOT
pip uninstall root_numpy
export MACOSX_DEPLOYMENT_TARGET=10.10
2 changes: 2 additions & 0 deletions install_from_conda.sh
Original file line number Diff line number Diff line change
@@ -23,6 +23,8 @@ source activate $ENVIRONMENT_NAME
#This version of numpy may not be available via conda.
pip install numpy==1.15.3

pip install naima

# Install root_numpy making sure it is built against the installed version of ROOT
pip uninstall root_numpy
export MACOSX_DEPLOYMENT_TARGET=10.10
30 changes: 11 additions & 19 deletions joint_fit_example/example_joint.py
Original file line number Diff line number Diff line change
@@ -9,6 +9,7 @@
from astropy import units as u
from hawc_hal import HAL, HealpixConeROI

#new version of the plugin from Udara
from VERITASLike import VERITASLike

import os
@@ -18,14 +19,11 @@ def find_and_delete(name, path):
if name in files:
os.remove(os.path.join(root, name))

def main(use_hal):
ROIra, ROIdec = 83.630, 22.020 #2HWC catalog position of Crab Nebula
ra, dec = 83.629, 22.014
ra, dec = ROIra, ROIdec
def main():
ra, dec = 83.630, 22.020 #2HWC catalog position of Crab Nebula
maptree = '../data/HAWC_9bin_507days_crab_data.hd5'
response = '../data/HAWC_9bin_507days_crab_response.hd5'
veritasdata = '../data/threemlVEGAS20hr2p45_run54809_run57993.root'
#veritasdata = 'data/threemlVEGAS20hr2p45.root'
latdirectory = '../data/lat_crab_data' # will put downloaded Fermi data there

data_radius = 3.0
@@ -34,8 +32,8 @@ def main(use_hal):
#set up HAWC dataset
roi = HealpixConeROI(data_radius=data_radius,
model_radius=model_radius,
ra=ROIra,
dec=ROIdec)
ra=ra,
dec=dec)

hawc = HAL("HAWC", maptree, response, roi)
hawc.set_active_measurements(1, 9) # Perform the fit only within bins 1 through 9
@@ -62,19 +60,22 @@ def main(use_hal):
lat_data = {"name":"Fermi_LAT", "data":[fermi_lat], "Emin":0.1*u.GeV, "Emax":300*u.GeV, "E0":10*u.GeV }

# Made up "Fermi-LAT" flux points
# Not used for now, these are just an example for how to set up XYLike data
# XYLike points are assumed in base units of 3ML: keV, and keV s-1 cm-2 (bug: even if you provide something else...).
x = [ 1.38e6, 2.57e6, 4.46e6, 7.76e6, 18.19e6, 58.88e6] # keV
y = [5.92e-14, 1.81e-14, 6.39e-15, 1.62e-15, 2.41e-16, 1.87e-17] # keV s-1 cm-2
yerr = [1.77e-15, 5.45e-16, 8.93e-17, 4.86e-17, 5.24e-18, 7.28e-19] # keV s-1 cm-2
# Just save a copy for later use (plot points). Will redefine similar objects with other "source_name"
xy_test = threeML.XYLike("xy_test", x, y, yerr, poisson_data=False, quiet=False, source_name='XY_Test')


joint_data = {"name":"Fermi_VERITAS_HAWC", "data":[fermi_lat, veritas, hawc], "Emin":0.1*u.GeV, "Emax": 37e3*u.GeV, "E0":1*u.TeV}

datasets = [veritas_data, hawc_data, lat_data, joint_data ]

fig, ax = plt.subplots()

#Loop through datasets and do the fit.
for dataset in datasets:

data = threeML.DataList(*dataset["data"])
@@ -124,24 +125,15 @@ def main(use_hal):
subplot=ax,
)

#workaround to get rid of gammapy temporary file
find_and_delete("ccube.fits", "." )


plotXYpoints = False
if plotXYpoints:
# Ad hoc plotting of the XYLike points. Something cleaner should be added to 3ML directly.
xx = xy_test.x * 1e-9 # 1e-9 for keV to TeV
yy = xy_test.y * 1e9 * xx * xx # 1e9 for the per keV to per TeV, xx*xx for E^2
yyerr = xy_test.yerr * 1e9 * xx * xx
#plt.errorbar(xx, yy, yerr=yyerr, fmt='o', label='XYTest points')
plt.legend()

plt.xlim(5e-2, 50e3)
plt.xlabel("Energy [GeV]" )
plt.ylabel(r"$E^2~dN/dE~[erg/cm^2/s]$" )
plt.ylabel(r"$E^2$ dN/dE [erg/$cm^2$/s]" )
plt.savefig("joint_spectrum.png" )


if __name__ == "__main__":

main(use_hal=True)
main()