diff --git a/docs/_templates/custom-footer.html b/docs/_templates/custom-footer.html
new file mode 100644
index 0000000000..84facf5f1b
--- /dev/null
+++ b/docs/_templates/custom-footer.html
@@ -0,0 +1,4 @@
+{% block extrafooter %}
+  {{ super() }}
+ Data Privacy.
+{% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 86a2edbc91..464bf8fa77 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -116,7 +116,7 @@ def setup(app):
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns.append("_templates")
+# exclude_patterns.append("_templates")
exclude_patterns.append("**.ipynb_checkpoints")
exclude_patterns.append("user-guide/model-gallery/*/*.ipynb")
exclude_patterns.append("user-guide/model-gallery/*/*.md5")
@@ -186,6 +186,11 @@ def setup(app):
# Static files to copy after template files
html_static_path = ["_static"]
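+# Custom style sheet, the Matomo tracker script and the custom footer template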
+html_css_files = ["custom.css"]
+html_js_files = ["matomo.js"]
+
+templates_path = ["_templates"]
+
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
@@ -237,10 +242,10 @@ def setup(app):
"json_url": "https://docs.gammapy.org/stable/switcher.json",
"version_match": switch_version,
},
- "navbar_end": ["version-switcher", "theme-switcher", "navbar-icon-links"],
+ "navbar_end": ["version-switcher", "navbar-icon-links"],
"navigation_with_keys": True,
# footers
- "footer_start": ["copyright"],
+    "footer_start": ["copyright", "custom-footer.html"],
"footer_center": ["last-updated"],
"footer_end": ["sphinx-version", "theme-version"]
}
@@ -336,13 +341,7 @@ def setup(app):
},
}
-html_static_path = ["_static"]
-html_css_files = ["custom.css"]
-html_js_files = ["matomo.js"]
-
html_context = {
"default_mode": "light",
}
-# Add-on to insert the Matomo tracker
-templates_path = ['_templates']
diff --git a/docs/user-guide/astro/darkmatter/index.rst b/docs/user-guide/astro/darkmatter/index.rst
index daf81c33cb..13808b86d9 100644
--- a/docs/user-guide/astro/darkmatter/index.rst
+++ b/docs/user-guide/astro/darkmatter/index.rst
@@ -89,11 +89,11 @@ module `DarkBit`_. DarkBit can be used for directly computing observables and
likelihoods, for any combination of parameter values in some underlying
particle model.
-Gammapy tutorial
-================
+Using gammapy.astro.darkmatter
+------------------------------
+
+.. minigallery:: gammapy.astro.darkmatter
-.. minigallery:: `gammapy.astro.darkmatter.DarkMatterAnnihilationSpectralModel`
- :add-heading:
.. _Cirelli et al. 2011: http://iopscience.iop.org/article/10.1088/1475-7516/2011/03/051/pdf
diff --git a/docs/user-guide/astro/index.rst b/docs/user-guide/astro/index.rst
index 9fcd287eed..3bc8c7184e 100644
--- a/docs/user-guide/astro/index.rst
+++ b/docs/user-guide/astro/index.rst
@@ -9,14 +9,14 @@ This module contains utility functions for some astrophysical scenarios:
* `~gammapy.astro.population` for astrophysical population models
* `~gammapy.astro.darkmatter` for dark matter spatial and spectral models
-The ``gammapy.astro`` sub-package is in a prototyping phase and its scope and future
+The `gammapy.astro` sub-package is in a prototyping phase and its scope and future
are currently being discussed. It is likely that some functionality will
be removed or split out into a separate package at some point.
Getting started
---------------
-The ``gammapy.astro`` namespace is empty. Use these import statements:
+The `gammapy.astro` namespace is empty. Use these import statements:
.. testcode::
diff --git a/docs/user-guide/catalog.rst b/docs/user-guide/catalog.rst
index 6a4225629e..0a426671d7 100644
--- a/docs/user-guide/catalog.rst
+++ b/docs/user-guide/catalog.rst
@@ -5,17 +5,17 @@ Source catalogs
`gammapy.catalog` provides convenient access to common gamma-ray source catalogs.
-* ``hgps`` / `SourceCatalogHGPS` - H.E.S.S. Galactic plane survey (HGPS)
-* ``gamma-cat`` / `SourceCatalogGammaCat` - An open catalog of gamma-ray sources
-* ``3fgl`` / `SourceCatalog3FGL` - LAT 4-year point source catalog
-* ``4fgl`` / `SourceCatalog4FGL` - LAT 8-year point source catalog
-* ``2fhl`` / `SourceCatalog2FHL` - LAT second high-energy source catalog
-* ``3fhl`` / `SourceCatalog3FHL` - LAT third high-energy source catalog
-* ``2hwc`` / `SourceCatalog2HWC` - 2HWC catalog from the HAWC observatory
-* ``3hwc`` / `SourceCatalog3HWC` - 3HWC catalog from the HAWC observatory
-
-For each catalog, a `SourceCatalog` class is provided to represent the catalog table,
-and a matching `SourceCatalogObject` class to represent one catalog source and table row.
+* ``hgps`` / `~gammapy.catalog.SourceCatalogHGPS` - H.E.S.S. Galactic plane survey (HGPS)
+* ``gamma-cat`` / `~gammapy.catalog.SourceCatalogGammaCat` - An open catalog of gamma-ray sources
+* ``3fgl`` / `~gammapy.catalog.SourceCatalog3FGL` - LAT 4-year point source catalog
+* ``4fgl`` / `~gammapy.catalog.SourceCatalog4FGL` - LAT 8-year point source catalog
+* ``2fhl`` / `~gammapy.catalog.SourceCatalog2FHL` - LAT second high-energy source catalog
+* ``3fhl`` / `~gammapy.catalog.SourceCatalog3FHL` - LAT third high-energy source catalog
+* ``2hwc`` / `~gammapy.catalog.SourceCatalog2HWC` - 2HWC catalog from the HAWC observatory
+* ``3hwc`` / `~gammapy.catalog.SourceCatalog3HWC` - 3HWC catalog from the HAWC observatory
+
+For each catalog, a `~gammapy.catalog.SourceCatalog` class is provided to represent the catalog table,
+and a matching `~gammapy.catalog.SourceCatalogObject` class to represent one catalog source and table row.
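+
+For example, a minimal sketch of loading a catalog and converting one source to a sky model
+(assuming the catalog files are available via ``$GAMMAPY_DATA``):
+
+.. code-block:: python
+
+    from gammapy.catalog import SourceCatalog4FGL
+
+    catalog = SourceCatalog4FGL()
+    source = catalog["PKS 2155-304"]
+    model = source.sky_model()
+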
The main functionality provided is methods that map catalog information to
`~gammapy.modeling.models.SkyModel`, `~gammapy.modeling.models.SpectralModel`,
@@ -29,9 +29,9 @@ or to create initial source models for certain energy bands and sky regions.
Using gammapy.catalog
---------------------
-.. minigallery:: gammapy.catalog.SourceCatalog3FHL
+.. minigallery:: `~gammapy.catalog.SourceCatalog3FHL`
:add-heading:
-.. minigallery:: gammapy.catalog.SourceCatalogGammaCat
+.. minigallery:: `~gammapy.catalog.SourceCatalogGammaCat`
:add-heading:
diff --git a/docs/user-guide/datasets/index.rst b/docs/user-guide/datasets/index.rst
index 19f73627b0..2bd4fdf715 100644
--- a/docs/user-guide/datasets/index.rst
+++ b/docs/user-guide/datasets/index.rst
@@ -8,7 +8,7 @@ Datasets (DL4)
The `gammapy.datasets` sub-package contains classes to handle reduced
gamma-ray data for modeling and fitting.
-The `Dataset` class bundles reduced data, IRFs and model to perform
+The `~gammapy.datasets.Dataset` class bundles reduced data, IRFs and model to perform
likelihood fitting and joint-likelihood fitting.
All datasets contain a `~gammapy.modeling.models.Models` container with one or more
`~gammapy.modeling.models.SkyModel` objects that represent additive emission
@@ -96,7 +96,7 @@ a joint fit across multiple datasets.
Predicted counts
----------------
-The total number of predicted counts from a `MapDataset` are computed per bin like:
+The total number of predicted counts from a `~gammapy.datasets.MapDataset` is computed per bin like:
.. math::
@@ -174,7 +174,7 @@ Here, :math:`k` denotes a bin in reconstructed energy,
For the model evaluation, an important factor that needs to be accounted for is
that the energy threshold changes between observations.
-With the above implementation using a `~gammapy.irf.EDispersionMap`,
+With the above implementation using a `~gammapy.irf.EDispMap`,
the `npred` is conserved,
ie, the predicted number of counts on the stacked
dataset is the sum expected by stacking the `npred` of the individual runs,
@@ -220,11 +220,13 @@ stack runs taken under similar conditions and then do a joint fit on the stacked
Serialisation of datasets
-------------------------
-The various `Map` objects contained in `~gammapy.datasets.MapDataset` and `~gammapy.datasets.MapDatasetOnOff` are serialised according to `GADF Sky Maps `__.
+The various `~gammapy.maps.Map` objects contained in `~gammapy.datasets.MapDataset` and
+`~gammapy.datasets.MapDatasetOnOff` are serialised according to
+`GADF Sky Maps `__.
A hdulist is created with the different attributes, and each of these are written with the data
-contained in a `BinTableHDU` with a `WcsGeom` and a `BANDS HDU` specifying the non-spatial dimensions.
-Optionally, a `meta_table` is also written as an `astropy.table.Table` containing various information
-about the observations which created the dataset. While the `meta_table` can contain useful information for
+contained in a ``BinTableHDU`` with a `~gammapy.maps.WcsGeom` and a ``BANDS HDU`` specifying the non-spatial dimensions.
+Optionally, a ``meta_table`` is also written as an `astropy.table.Table` containing various information
+about the observations which created the dataset. While the ``meta_table`` can contain useful information for
later stage analysis, it is not used anywhere internally within gammapy.
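+
+As a minimal sketch, a dataset can be written to and read back from disk with the standard
+``write``/``read`` methods (the output file name below is just an example):
+
+.. code-block:: python
+
+    from gammapy.datasets import MapDataset
+
+    dataset = MapDataset.read("$GAMMAPY_DATA/cta-1dc-gc/cta-1dc-gc.fits.gz")
+    dataset.write("my-dataset.fits.gz", overwrite=True)
+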
`~gammapy.datasets.SpectrumDataset` follows a similar convention as for `~gammapy.datasets.MapDataset`, but uses a
@@ -233,7 +235,7 @@ later stage analysis, it is not used anywhere internally within gammapy.
either according to the above specification, or (by default), according to the
`OGIP standards `__.
-`~gammapy.datasets.FluxPointsDatasets` are serialised as `gammapy.estimators.FluxPoints` objects, which contains
+A `~gammapy.datasets.FluxPointsDataset` is serialised as a `gammapy.estimators.FluxPoints` object, which contains
a set of `gammapy.maps.Map` objects storing the estimated flux as function of energy, and some optional quantities like
typically errors, upper limits, etc. It also contains a reference model,
serialised as a `~gammapy.modeling.models.TemplateSpectralModel`.
diff --git a/docs/user-guide/dl3.rst b/docs/user-guide/dl3.rst
index abea89a040..9eb8cb9dbc 100644
--- a/docs/user-guide/dl3.rst
+++ b/docs/user-guide/dl3.rst
@@ -32,7 +32,7 @@ You can use the `~gammapy.data.EventList` class to load IACT gamma-ray event lis
filename = '$GAMMAPY_DATA/hess-dl3-dr1/data/hess_dl3_dr1_obs_id_023523.fits.gz'
events = EventList.read(filename)
-To load Fermi-LAT event lists, use the `~gammapy.data.EventListLAT` class:
+To load Fermi-LAT event lists, you can also use the `~gammapy.data.EventList` class:
.. testcode::
@@ -139,7 +139,7 @@ Writing event lists and GTIs to file
To write the events or GTIs separately, one can just save the underlying
`astropy.table.Table`. To have an event file written in a correct DL3 format, it is
-necessary to utilise the ``write`` method available for`~gammapy.data.Observation`.
+necessary to utilise the ``write`` method available for `~gammapy.data.Observation`.
It will write the `~gammapy.data.EventList` and their associated GTIs together in the
same FITS file according to the format specifications. To avoid writing IRFs along the
``EventList`` one has to set ``include_irfs`` to ``False``. See the example below:
diff --git a/docs/user-guide/estimators.rst b/docs/user-guide/estimators.rst
index 76bc6843d0..5c87e5f901 100644
--- a/docs/user-guide/estimators.rst
+++ b/docs/user-guide/estimators.rst
@@ -35,7 +35,7 @@ In general the flux can be estimated using two methods:
not re-optimising other parameters, one can estimate the significance based on the
analytical solution by [LiMa1983]. In this case the "best fit" flux and significance
are given by the excess over the null hypothesis. This method is also named
- **backward folding**. This method is currently only exposed in the `ExcessMapEstimator`
+   **backward folding**. This method is currently only exposed in the `~gammapy.estimators.ExcessMapEstimator`.
Energy edges
@@ -91,7 +91,7 @@ arguments are translated into a TS difference assuming ``ts = n_sigma ** 2``.
.. _sedtypes:
SED types
-~~~~~~~~~
+^^^^^^^^^
In addition to the norm values a reference spectral model and energy ranges
are given. Using this reference spectral model the norm values can be converted
@@ -112,7 +112,7 @@ e2dnde Differential energy flux at ``e_ref``
The same can be applied for the error and upper limit information.
More information can be found on the `likelihood SED type page`_.
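+
+As a minimal sketch, flux points read in one SED type can be converted to another on the fly
+(the file used here is one of the Gammapy test datasets):
+
+.. code-block:: python
+
+    from gammapy.estimators import FluxPoints
+
+    path = "$GAMMAPY_DATA/tests/spectrum/flux_points/diff_flux_points.fits"
+    flux_points = FluxPoints.read(path, sed_type="dnde")
+    print(flux_points.to_table(sed_type="e2dnde", formatted=True))
+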
-The `FluxPoints` and `FluxMaps` objects can optionally define meta
+The `~gammapy.estimators.FluxPoints` and `~gammapy.estimators.FluxMaps` objects can optionally define meta
data with the following valid keywords:
================= =================================================
@@ -141,7 +141,7 @@ A note on negative flux and upper limit values:
Flux maps
---------
-This how to compute flux maps with the `ExcessMapEstimator`:
+This is how to compute flux maps with the `~gammapy.estimators.ExcessMapEstimator`:
.. testcode::
diff --git a/docs/user-guide/hli.rst b/docs/user-guide/hli.rst
index ad0c8a7e06..1fcd585da5 100644
--- a/docs/user-guide/hli.rst
+++ b/docs/user-guide/hli.rst
@@ -14,7 +14,7 @@ the functionality that is available in the sub-packages to support
standard analysis use case in a convenient way.
-Using gammapy.datasets
+Using gammapy.analysis
----------------------
.. minigallery:: gammapy.analysis.Analysis
diff --git a/docs/user-guide/howto.rst b/docs/user-guide/howto.rst
index a058d500af..f0ee808736 100644
--- a/docs/user-guide/howto.rst
+++ b/docs/user-guide/howto.rst
@@ -278,6 +278,18 @@ warning like so:
.. accordion-footer::
+.. accordion-header::
+ :id: HowToAvoidNaninFp
+ :title: Avoid NaN results in Flux Point estimation
+ :link: ../tutorials/api/estimators.html#a-fully-configured-flux-points-estimation
+
+Sometimes, upper limit values may show as ``nan`` while running a `~gammapy.estimators.FluxPointsEstimator`
+or a `~gammapy.estimators.LightCurveEstimator`. This often arises because the range of the norm parameter
+being scanned over is not sufficient. Increasing this range usually solves the problem. In some cases,
+you can also consider configuring the estimator with a different `~gammapy.modeling.Fit` backend.
+
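+A minimal sketch of widening the scan range (the values below are illustrative only):
+
+.. code-block:: python
+
+    import astropy.units as u
+    from gammapy.estimators import FluxPointsEstimator
+
+    estimator = FluxPointsEstimator(energy_edges=[1, 3, 10] * u.TeV, source="source-name")
+    # widen the range of norm values scanned when building the likelihood profiles
+    estimator.norm.scan_min = 0.01
+    estimator.norm.scan_max = 100
+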
+.. accordion-footer::
+
.. accordion-header::
:id: HowToProgressBar
:title: Display a progress bar
diff --git a/docs/user-guide/makers/index.rst b/docs/user-guide/makers/index.rst
index e67091add6..1a8fe24b72 100644
--- a/docs/user-guide/makers/index.rst
+++ b/docs/user-guide/makers/index.rst
@@ -24,7 +24,7 @@ Background estimation
Safe data range definition
--------------------------
-The definition of a safe data range is done using the `SafeMaskMaker` or manually.
+The definition of a safe data range is done using the `~gammapy.makers.SafeMaskMaker` or manually.
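+
+A minimal sketch (the method and threshold below are just one possible choice):
+
+.. code-block:: python
+
+    from gammapy.makers import SafeMaskMaker
+
+    # keep only energies where the effective area exceeds 10% of its maximum value
+    safe_mask_maker = SafeMaskMaker(methods=["aeff-max"], aeff_percent=10)
+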
Using gammapy.makers
diff --git a/docs/user-guide/maps/index.rst b/docs/user-guide/maps/index.rst
index 9b783254e8..d272251eea 100644
--- a/docs/user-guide/maps/index.rst
+++ b/docs/user-guide/maps/index.rst
@@ -20,11 +20,11 @@ non-spatial dimensions and can represent images (2D), cubes (3D), or hypercubes
`gammapy.maps` is organized around two data structures: *geometry* classes
-inheriting from `~Geom` and *map* classes inheriting from `~Map`. A geometry
-defines the map boundaries, pixelization scheme, and provides methods for
-converting to/from map and pixel coordinates. A map owns a `~Geom` instance
+inheriting from `~gammapy.maps.Geom` and *map* classes inheriting from `~gammapy.maps.Map`. A geometry
+defines the map boundaries, pixelisation scheme, and provides methods for
+converting to/from map and pixel coordinates. A map owns a `~gammapy.maps.Geom` instance
as well as a data array containing map values. Where possible it is recommended
-to use the abstract `~Map` interface for accessing or updating the contents of a
+to use the abstract `~gammapy.maps.Map` interface for accessing or updating the contents of a
map as this allows algorithms to be used interchangeably with different map
representations. The following reviews methods of the abstract map interface.
@@ -33,11 +33,11 @@ Getting started with maps
-------------------------
All map objects have an abstract interface provided through the methods of the
-`~Map`. These methods can be used for accessing and manipulating the contents of
+`~gammapy.maps.Map`. These methods can be used for accessing and manipulating the contents of
a map without reference to the underlying data representation (e.g. whether a
-map uses WCS or HEALPix pixelization). For applications which do depend on the
+map uses WCS or HEALPix pixelisation). For applications which do depend on the
specific representation one can also work directly with the classes derived from
-`~Map`. In the following we review some of the basic methods for working with
+`~gammapy.maps.Map`. In the following we review some of the basic methods for working with
map objects, more details are given in the :doc:`/tutorials/api/maps`
tutorial.
@@ -47,20 +47,20 @@ Accessor methods
----------------
All map objects have a set of accessor methods provided through the abstract
-`~Map` class. These methods can be used to access or update the contents of the
+`~gammapy.maps.Map` class. These methods can be used to access or update the contents of the
map irrespective of its underlying representation. Four types of accessor
methods are provided:
* ``get`` : Return the value of the map at the pixel containing the
- given coordinate (`~Map.get_by_idx`, `~Map.get_by_pix`, `~Map.get_by_coord`).
+ given coordinate (`~gammapy.maps.Map.get_by_idx`, `~gammapy.maps.Map.get_by_pix`, `~gammapy.maps.Map.get_by_coord`).
* ``interp`` : Interpolate or extrapolate the value of the map at an arbitrary
coordinate.
* ``set`` : Set the value of the map at the pixel containing the
- given coordinate (`~Map.set_by_idx`, `~Map.set_by_pix`, `~Map.set_by_coord`).
+ given coordinate (`~gammapy.maps.Map.set_by_idx`, `~gammapy.maps.Map.set_by_pix`, `~gammapy.maps.Map.set_by_coord`).
* ``fill`` : Increment the value of the map at the pixel containing
the given coordinate with a unit weight or the value in the optional
- ``weights`` argument (`~Map.fill_by_idx`, `~Map.fill_by_pix`,
- `~Map.fill_by_coord`).
+ ``weights`` argument (`~gammapy.maps.Map.fill_by_idx`, `~gammapy.maps.Map.fill_by_pix`,
+ `~gammapy.maps.Map.fill_by_coord`).
Accessor methods accept as their first argument a coordinate tuple containing
scalars, lists, or numpy arrays with one tuple element for each dimension of the
@@ -84,7 +84,7 @@ coordinates can be expressed in one of three coordinate systems:
array for each non-spatial dimension.
The coordinate system accepted by a given accessor method can be inferred from
-the suffix of the method name (e.g. `~Map.get_by_idx`). The following
+the suffix of the method name (e.g. `~gammapy.maps.Map.get_by_idx`). The following
demonstrates how one can access the same pixels of a WCS map using each of the
three coordinate systems:
@@ -141,7 +141,7 @@ following demonstrates how one can set pixel values:
Interface with MapCoord and SkyCoord
------------------------------------
-The ``coord`` accessor methods accept `dict`, `~gammapy.maps.MapCoord`, and
+The ``coord`` accessor methods accept ``dict``, `~gammapy.maps.MapCoord`, and
`~astropy.coordinates.SkyCoord` arguments in addition to the standard `tuple` of
`~numpy.ndarray` argument. When using a `tuple` argument a
`~astropy.coordinates.SkyCoord` can be used instead of longitude and latitude
@@ -166,7 +166,7 @@ transformed to match the coordinate system of the map.
m.set_by_coord((skycoord, energy), [0.5, 1.5])
m.get_by_coord((skycoord, energy))
-A `~gammapy.maps.MapCoord` or `dict` argument can be used to interact with a map object
+A `~gammapy.maps.MapCoord` or ``dict`` argument can be used to interact with a map object
without reference to the axis ordering of the map geometry:
.. testcode::
@@ -188,9 +188,9 @@ MapCoord
`~gammapy.maps.MapCoord` is an N-dimensional coordinate object that stores both spatial and
non-spatial coordinates and is accepted by all ``coord`` methods. A `~gammapy.maps.MapCoord`
-can be created with or without explicitly named axes with `MapCoord.create`.
+can be created with or without explicitly named axes with `~gammapy.maps.MapCoord.create`.
Axes of a `~gammapy.maps.MapCoord` can be accessed by index, name, or attribute. A `~gammapy.maps.MapCoord`
-without explicit axis names can be created by calling `MapCoord.create` with a
+without explicit axis names can be created by calling `~gammapy.maps.MapCoord.create` with a
`tuple` argument:
.. testcode::
@@ -231,8 +231,8 @@ latitude. Non-spatial axes are assigned a default name ``axis{I}`` where
``{I}`` is the index of the non-spatial dimension. `~gammapy.maps.MapCoord` objects created
without named axes must have the same axis ordering as the map geometry.
-A `~gammapy.maps.MapCoord` with named axes can be created by calling `MapCoord.create`
-with a `dict`:
+A `~gammapy.maps.MapCoord` with named axes can be created by calling `~gammapy.maps.MapCoord.create`
+with a ``dict``:
.. testcode::
diff --git a/docs/user-guide/scripts/index.rst b/docs/user-guide/scripts/index.rst
index 3ce4c0421e..63d299370e 100644
--- a/docs/user-guide/scripts/index.rst
+++ b/docs/user-guide/scripts/index.rst
@@ -150,7 +150,7 @@ Write your own CLI
This section explains how to write your own command line interface (CLI).
We will focus on the command line aspect, and use a very simple example where we
-just call `gammapy.stats.CashCountsStatistics.sqrt_ts`.
+just call `gammapy.stats.CashCountsStatistic.sqrt_ts`.
From the interactive Python or IPython prompt or from a Jupyter notebook you
just import the functionality you need and call it, like this:
diff --git a/docs/user-guide/utils.rst b/docs/user-guide/utils.rst
index dcb01e7dcd..b692ffcf7d 100644
--- a/docs/user-guide/utils.rst
+++ b/docs/user-guide/utils.rst
@@ -5,12 +5,12 @@
Utility functions
=================
-``gammapy.utils`` is a collection of utility functions that are used in many
+`gammapy.utils` is a collection of utility functions that are used in many
places or don't fit in one of the other packages.
-Since the various sub-modules of ``gammapy.utils`` are mostly unrelated, they
+Since the various sub-modules of `gammapy.utils` are mostly unrelated, they
are not imported into the top-level namespace. Here are some examples of how to
-import functionality from the ``gammapy.utils`` sub-modules:
+import functionality from the `gammapy.utils` sub-modules:
.. testcode::
diff --git a/docs/user-guide/visualization/index.rst b/docs/user-guide/visualization/index.rst
index 6b0f3a3b90..7583c205c3 100644
--- a/docs/user-guide/visualization/index.rst
+++ b/docs/user-guide/visualization/index.rst
@@ -19,7 +19,7 @@ used in gamma-ray astronomy (`colormap_hess` and `colormap_milagro`).
Survey panel plots
------------------
-The `MapPanelPlotter` allows to split e.g. a galactic plane survey image with
+The `~gammapy.visualization.MapPanelPlotter` allows splitting e.g. a galactic plane survey image with
a large aspect ratio into multiple panels. Here is a short example:
.. plot::
diff --git a/examples/tutorials/analysis-2d/ring_background.py b/examples/tutorials/analysis-2d/ring_background.py
index 7947fc923b..aa8a0642aa 100644
--- a/examples/tutorials/analysis-2d/ring_background.py
+++ b/examples/tutorials/analysis-2d/ring_background.py
@@ -59,6 +59,7 @@
from gammapy.makers import RingBackgroundMaker
from gammapy.visualization import plot_distribution
from gammapy.utils.check import check_tutorials_setup
+
log = logging.getLogger(__name__)
@@ -104,11 +105,11 @@
config.datasets.geom.wcs.binsize = "0.02 deg"
# Cutout size (for the run-wise event selection)
-config.datasets.geom.selection.offset_max = 3.5 * u.deg
+config.datasets.geom.selection.offset_max = 2.5 * u.deg
# We now fix the energy axis for the counts map - (the reconstructed energy binning)
config.datasets.geom.axes.energy.min = "0.5 TeV"
-config.datasets.geom.axes.energy.max = "5 TeV"
+config.datasets.geom.axes.energy.max = "10 TeV"
config.datasets.geom.axes.energy.nbins = 10
# We need to extract the ring for each observation separately, hence, no stacking at this stage
@@ -169,7 +170,7 @@
geom_image = geom.to_image().to_cube([energy_axis.squash()])
# Make the exclusion mask
-regions = CircleSkyRegion(center=source_pos, radius=0.3 * u.deg)
+regions = CircleSkyRegion(center=source_pos, radius=0.4 * u.deg)
exclusion_mask = ~geom_image.region_mask([regions])
exclusion_mask.sum_over_axes().plot()
plt.show()
@@ -193,12 +194,12 @@
# together to create a single stacked map for further analysis
#
-# %%time
energy_axis_true = analysis.datasets[0].exposure.geom.axes["energy_true"]
stacked_on_off = MapDatasetOnOff.create(
geom=geom_image, energy_axis_true=energy_axis_true, name="stacked"
)
+# %%time
for dataset in analysis.datasets:
# Ring extracting makes sense only for 2D analysis
dataset_on_off = ring_maker.run(dataset.to_image())
@@ -223,12 +224,13 @@
# significance is computed according to the Li & Ma expression for ON and
# OFF Poisson measurements, see
# `here `__.
-# Since astropy convolution kernels only accept integers, we first convert
-# our required size in degrees to int depending on our pixel size.
#
+#
+# Also, since the off counts are obtained with a ring background estimation and are thus
+# already correlated, we specify ``correlate_off=False`` to avoid over-correlation.
# Using a convolution radius of 0.04 degrees
-estimator = ExcessMapEstimator(0.04 * u.deg, selection_optional=[])
+estimator = ExcessMapEstimator(0.04 * u.deg, selection_optional=[], correlate_off=False)
lima_maps = estimator.run(stacked_on_off)
significance_map = lima_maps["sqrt_ts"]
@@ -238,28 +240,29 @@
fig, (ax1, ax2) = plt.subplots(
figsize=(11, 4), subplot_kw={"projection": lima_maps.geom.wcs}, ncols=2
)
-
ax1.set_title("Significance map")
significance_map.plot(ax=ax1, add_cbar=True)
-
ax2.set_title("Excess map")
excess_map.plot(ax=ax2, add_cbar=True)
plt.show()
-
######################################################################
# It is often important to look at the significance distribution outside
# the exclusion region to check that the background estimation is not
# contaminated by gamma-ray events. This can be the case when exclusion
# regions are not large enough. Typically, we expect the off distribution
-# to be a standard normal distribution.
+# to be a standard normal distribution. To compute the significance distribution outside the exclusion region,
+# we can recompute the maps after adding a `mask_fit` to our dataset.
#
-# create a 2D mask for the images
-significance_map_off = significance_map * exclusion_mask
+# Mask the regions with gamma-ray emission
+stacked_on_off.mask_fit = exclusion_mask
+lima_maps2 = estimator.run(stacked_on_off)
+significance_map_off = lima_maps2["sqrt_ts"]
-kwargs_axes = {"xlabel": "Significance", "yscale": "log", "ylim": (1e-5, 1)}
+kwargs_axes = {"xlabel": "Significance", "yscale": "log", "ylim": (1e-3, 1)}
ax, _ = plot_distribution(
significance_map,
kwargs_hist={
@@ -267,7 +270,7 @@
"alpha": 0.5,
"color": "red",
"label": "all bins",
- "bins": 21,
+ "bins": 51,
},
kwargs_axes=kwargs_axes,
)
@@ -281,11 +284,12 @@
"alpha": 0.5,
"color": "blue",
"label": "off bins",
- "bins": 21,
+ "bins": 51,
},
kwargs_axes=kwargs_axes,
)
plt.show()
# sphinx_gallery_thumbnail_number = 2
diff --git a/examples/tutorials/analysis-time/light_curve_flare.py b/examples/tutorials/analysis-time/light_curve_flare.py
index 573d9f17f3..315f856df9 100644
--- a/examples/tutorials/analysis-time/light_curve_flare.py
+++ b/examples/tutorials/analysis-time/light_curve_flare.py
@@ -86,6 +86,7 @@
)
from gammapy.maps import MapAxis, RegionGeom
from gammapy.modeling.models import PowerLawSpectralModel, SkyModel
+from gammapy.modeling import Fit
######################################################################
# Check setup
@@ -126,9 +127,8 @@
# Define time intervals
# ---------------------
#
-# We create the list of time intervals. Each time interval is an
+# We create the list of time intervals, each of duration 10 minutes. Each time interval is an
# `astropy.time.Time` object, containing a start and stop time.
-#
t0 = Time("2006-07-29T20:30")
duration = 10 * u.min
@@ -236,26 +236,21 @@
######################################################################
-# Define the Model
-# ----------------
+# Define underlying model
+# -----------------------
#
-# The actual flux will depend on the spectral shape assumed. For
-# simplicity, we use the power law spectral model of index 3.4 used in the
+# Since we use forward folding to obtain the flux points in each bin, the exact values will depend on the underlying model. In this example, we use a power law as used in the
# `reference
# paper `__.
#
-# Here we use only a spectral model in the
-# `~gammapy.modeling.models.SkyModel` object.
+# As we are only using spectral datasets, we do not need any spatial models.
#
+# **Note**: All time bins must have the same spectral model. To see how to investigate spectral variability,
+# see the :doc:`time resolved spectroscopy notebook `.
-spectral_model = PowerLawSpectralModel(
- index=3.4, amplitude=2e-11 * u.Unit("1 / (cm2 s TeV)"), reference=1 * u.TeV
-)
-spectral_model.parameters["index"].frozen = False
-
+spectral_model = PowerLawSpectralModel(amplitude=1e-10 * u.Unit("1 / (cm2 s TeV)"))
sky_model = SkyModel(spatial_model=None, spectral_model=spectral_model, name="pks2155")
-
######################################################################
# Assign to model to all datasets
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -266,22 +261,30 @@
datasets.models = sky_model
+# %%time
+fit = Fit()
+result = fit.run(datasets)
+print(result.models.to_parameters_table())
+
######################################################################
# Extract the light curve
# -----------------------
#
# We first create the `~gammapy.estimators.LightCurveEstimator` for the
# list of datasets we just produced. We give the estimator the name of the
-# source component to be fitted.
+# source component to be fitted. We can directly compute the light curve in multiple energy
+# bins by supplying a list of `energy_edges`.
+#
# By default the likelihood scan is computed from 0.2 to 5.0.
# Here, we increase the max value to 10.0, because we are
# dealing with a large flare.
lc_maker_1d = LightCurveEstimator(
- energy_edges=[0.7, 20] * u.TeV,
+ energy_edges=[0.7, 1, 20] * u.TeV,
source="pks2155",
time_intervals=time_intervals,
selection_optional="all",
+ n_jobs=4,
)
lc_maker_1d.norm.scan_max = 10
@@ -301,21 +304,37 @@
# Finally we plot the result for the 1D lightcurve:
#
plt.figure(figsize=(8, 6))
-lc_1d.plot(marker="o")
+plt.tight_layout()
+plt.subplots_adjust(bottom=0.3)
+lc_1d.plot(marker="o", axis_name="time", sed_type="flux")
plt.show()
######################################################################
-# Light curves once obtained can be rebinned.
-# Here, we rebin 4 adjacent bins together, to get 30 min bins
+# Once obtained, light curves can be rebinned using the likelihood profiles.
+# Here, we combine 3 adjacent bins to get 30 minute bins.
+#
+# We will first slice `lc_1d` to obtain the light curve in the first energy bin.
#
-axis_new = get_rebinned_axis(lc_1d, method="fixed-bins", group_size=3, axis_name="time")
+slices = {"energy": slice(0, 1)}
+sliced_lc = lc_1d.slice_by_idx(slices)
+print(sliced_lc)
+
+axis_new = get_rebinned_axis(
+ sliced_lc, method="fixed-bins", group_size=3, axis_name="time"
+)
print(axis_new)
-lc_new = lc_1d.resample_axis(axis_new)
+lc_new = sliced_lc.resample_axis(axis_new)
plt.figure(figsize=(8, 6))
-ax = lc_1d.plot(label="original")
+plt.tight_layout()
+plt.subplots_adjust(bottom=0.3)
+ax = sliced_lc.plot(label="original")
lc_new.plot(ax=ax, label="rebinned")
plt.legend()
-plt.show()
\ No newline at end of file
+plt.show()
+
+#####################################################################
+# We can use the sliced lightcurve to understand the variability,
+# as shown in the :doc:`/tutorials/analysis-time/variability_estimation` tutorial.
diff --git a/examples/tutorials/api/estimators.py b/examples/tutorials/api/estimators.py
index 01cb717955..2255c165a5 100644
--- a/examples/tutorials/api/estimators.py
+++ b/examples/tutorials/api/estimators.py
@@ -116,10 +116,10 @@
#
fp_estimator.norm.scan_min = 0.1
-fp_estimator.norm.scan_max = 3
+fp_estimator.norm.scan_max = 10
######################################################################
-# Note: The default scan range of the norm parameter is between 0.1 to 10. In case the upper
+# Note: The default scan range of the norm parameter is from 0.2 to 5. In case the upper
# limit values lie outside this range, nan values will be returned. It may thus be useful to
# increase this range, specially for the computation of upper limits from weak sources.
#
diff --git a/examples/tutorials/api/makers.py b/examples/tutorials/api/makers.py
index 866517b99e..e13bfd0564 100644
--- a/examples/tutorials/api/makers.py
+++ b/examples/tutorials/api/makers.py
@@ -117,12 +117,13 @@
# `background_oversampling` parameter that defines the oversampling
# factor in energy used to compute the background (default is None).
#
+# .. _safe-data-range:
+#
# Safe data range handling
# ------------------------
#
-# To exclude the data range from a `~gammapy.makers.MapDataset`, that is associated with
-# high systematics on instrument response functions, a `~gammapy.makers.MapDataset.mask_safe`
-# can be defined. The `~gammapy.makers.MapDataset.mask_safe` is a `~gammapy.maps.Map` object
+# To exclude the part of the data range of a `~gammapy.datasets.MapDataset` that is associated with
+# high systematics on instrument response functions, a `~gammapy.datasets.MapDataset.mask_safe`
+# can be defined. The `~gammapy.datasets.MapDataset.mask_safe` is a `~gammapy.maps.Map` object
# with `bool` data type, which indicates for each pixel, whether it should be included in
# the analysis. The convention is that a value of `True` or `1`
# includes the pixel, while a value of `False` or `0` excludes a
@@ -141,8 +142,8 @@
# observation center are excluded
# - bkg-peak, the energy threshold is defined as the upper edge of the
# energy bin with the highest predicted background rate. This method
-# was introduced in the HESS DL3 validation paper:
-# https://arxiv.org/pdf/1910.08088.pdf
+# was introduced in the
+# `H.E.S.S. DL3 validation paper <https://arxiv.org/pdf/1910.08088.pdf>`__.
#
# Note that currently some methods computing a safe energy range
# ("aeff-default", "aeff-max" and "edisp-bias") determine a true energy range and
@@ -175,7 +176,7 @@
# Background estimation
# ---------------------
#
-# The background computed by the `MapDatasetMaker` gives the number of
+# The background computed by the `~gammapy.makers.MapDatasetMaker` gives the number of
# counts predicted by the background IRF of the observation. Because its
# actual normalization, or even its spectral shape, might be poorly
# constrained, it is necessary to correct it with the data themselves.
@@ -241,7 +242,7 @@
# -------------------
#
# The data reduction steps can be combined in a single loop to run a full
-# data reduction chain. For this the `MapDatasetMaker` is run first and
+# data reduction chain. For this the `~gammapy.makers.MapDatasetMaker` is run first and
# the output dataset is the passed on to the next maker step. Finally, the
# dataset per observation is stacked into a larger map.
#
@@ -273,7 +274,7 @@
######################################################################
# To maintain good performance it is always recommended to do a cutout of
-# the `MapDataset` as shown above. In case you want to increase the
+# the `~gammapy.datasets.MapDataset` as shown above. In case you want to increase the
# offset-cut later, you can also choose a larger width of the cutout than
# `2 * offset_max`.
#
@@ -308,8 +309,8 @@
# count spectra, background is implicitly modeled via the OFF counts
# spectrum.
#
-# The `~gammapy.datasets.SpectrumDatasetMaker` make spectrum dataset for a single
-# observation. In that case the irfs and background are computed at a
+# The `~gammapy.makers.SpectrumDatasetMaker` makes a spectrum dataset for a single
+# observation. In that case the IRFs and background are computed at a
# single fixed offset, which is recommended only for point-sources.
#
# Here is an example of data reduction loop to create
@@ -349,7 +350,7 @@
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# It can often be useful to know the total number of hours spent
# in the given field of view (without correcting for the acceptance
-# variation). This can be computed using `make_observation_time_map`
+# variation). This can be computed using `~gammapy.makers.utils.make_observation_time_map`
# as shown below
#
@@ -420,7 +421,7 @@
######################################################################
# To get the value of the observation time at a particular position,
-# use `get_by_coord`
+# use ``get_by_coord``
obs_time_src = total_obstime.get_by_coord(source_pos)
effective_times_src = effective_livetime.get_by_coord(
diff --git a/examples/tutorials/data/cta.py b/examples/tutorials/data/cta.py
index afc77daeb2..c170fc6231 100644
--- a/examples/tutorials/data/cta.py
+++ b/examples/tutorials/data/cta.py
@@ -280,6 +280,7 @@
# %%
# This is how for analysis you could slice out the PSF
# at a given field of view offset
+plt.figure(figsize=(8, 5))
irfs["psf"].plot_containment_radius_vs_energy(
offset=[1] * u.deg, fraction=[0.68, 0.8, 0.95]
)
diff --git a/examples/tutorials/data/fermi_lat.py b/examples/tutorials/data/fermi_lat.py
index e3ca852769..aeba07c7c4 100644
--- a/examples/tutorials/data/fermi_lat.py
+++ b/examples/tutorials/data/fermi_lat.py
@@ -309,7 +309,7 @@
######################################################################
# To get an idea of the size of the PSF we check how the containment radii
-# of the Fermi-LAT PSF vari with energy and different containment
+# of the Fermi-LAT PSF vary with energy and different containment
# fractions:
#
diff --git a/examples/tutorials/data/hawc.py b/examples/tutorials/data/hawc.py
index 6546ca2125..38453b7960 100644
--- a/examples/tutorials/data/hawc.py
+++ b/examples/tutorials/data/hawc.py
@@ -33,7 +33,7 @@
`paper `__
for a detailed description). These two event classes are not independent, meaning that
events are repeated between the NN and GP datasets. Therefore, these data should never
-be analyzed jointly, and one of the two estimators needs to be chosen before
+be analysed jointly, and one of the two estimators needs to be chosen before
proceeding.
Once the data has been reduced to a `~gammapy.datasets.MapDataset`, there are no differences
@@ -48,6 +48,7 @@
"""
+
import astropy.units as u
from astropy.coordinates import SkyCoord
import matplotlib.pyplot as plt
@@ -175,7 +176,7 @@
######################################################################
# Note: this axis is the one used to create the background model map. If
# different edges are used, the `~gammapy.makers.MapDatasetMaker` will interpolate between
-# them, which might lead to unexpected behavior.
+# them, which might lead to unexpected behaviour.
######################################################################
# Define the energy true axis:
@@ -208,7 +209,7 @@
# derived in reconstructed energy.
dataset_empty = MapDataset.create(
- geom, energy_axis_true=energy_axis_true, name="fHit " + str(fHit), reco_psf=True
+ geom, energy_axis_true=energy_axis_true, name=f"fHit {fHit}", reco_psf=True
)
dataset = maker.run(dataset_empty, obs)
@@ -278,7 +279,7 @@
# Exercises
# ---------
#
-# - Repeat the process for a different fHit bin
+# - Repeat the process for a different fHit bin.
# - Repeat the process for all the fHit bins provided in the data
# release and fit a model to the result.
#
@@ -289,6 +290,6 @@
# ----------
#
# Now you know how to access and work with HAWC data. All other
-# tutorials and documentation concerning 3D analysis and `~gammapy.datasets.MapDataset`s
-# can be used from this step.
+# tutorials and documentation concerning 3D analysis techniques and
+# the `~gammapy.datasets.MapDataset` object can be used from this step on.
#
diff --git a/examples/tutorials/data/hess.py b/examples/tutorials/data/hess.py
index 8337b12e3f..0c6131715b 100644
--- a/examples/tutorials/data/hess.py
+++ b/examples/tutorials/data/hess.py
@@ -1,6 +1,7 @@
"""
H.E.S.S. with Gammapy
=====================
+
Explore H.E.S.S. event lists and IRFs.
diff --git a/gammapy/data/metadata.py b/gammapy/data/metadata.py
index ae2d55f78e..c5033e4393 100644
--- a/gammapy/data/metadata.py
+++ b/gammapy/data/metadata.py
@@ -107,6 +107,8 @@ def from_header(cls, header, format="gadf"):
"PRESSURE",
"RELHUM",
"NSBLEVEL",
+ "CREATOR",
+ "HDUVERS",
]
optional = dict()
for key in optional_keywords:
diff --git a/gammapy/data/observations.py b/gammapy/data/observations.py
index 953c08ba61..fbd2b8fd80 100644
--- a/gammapy/data/observations.py
+++ b/gammapy/data/observations.py
@@ -16,6 +16,7 @@
from astropy.utils import lazyproperty
import matplotlib.pyplot as plt
from gammapy.utils.deprecation import GammapyDeprecationWarning
+from gammapy.irf import FoVAlignment
from gammapy.utils.fits import LazyFitsData, earth_location_to_dict
from gammapy.utils.metadata import CreatorMetaData, TargetMetaData, TimeInfoMetaData
from gammapy.utils.scripts import make_path
@@ -65,7 +66,7 @@ class Observation:
aeff = LazyFitsData(cache=False)
edisp = LazyFitsData(cache=False)
psf = LazyFitsData(cache=False)
- bkg = LazyFitsData(cache=False)
+ _bkg = LazyFitsData(cache=False)
_rad_max = LazyFitsData(cache=True)
_events = LazyFitsData(cache=False)
_gti = LazyFitsData(cache=True)
@@ -90,7 +91,7 @@ def __init__(
self.aeff = aeff
self.edisp = edisp
self.psf = psf
- self.bkg = bkg
+ self._bkg = bkg
self._rad_max = rad_max
self._gti = gti
self._events = events
@@ -105,6 +106,28 @@ def _repr_html_(self):
except AttributeError:
return f"{html.escape(str(self))}
"
+ @property
+ def bkg(self):
+ """Background of the observation."""
+ bkg = self._bkg
+ # used for backward compatibility of old HESS data
+ try:
+ if (
+ bkg
+ and self._meta
+ and self._meta.optional
+ and self._meta.optional["CREATOR"] == "SASH FITS::EventListWriter"
+ and self._meta.optional["HDUVERS"] == "0.2"
+ ):
+ bkg._fov_alignment = FoVAlignment.REVERSE_LON_RADEC
+ except KeyError:
+ pass
+ return bkg
+
+ @bkg.setter
+ def bkg(self, value):
+ self._bkg = value
+
@property
def meta(self):
"""Return metadata container."""
@@ -137,7 +160,7 @@ def rad_max(self):
def available_hdus(self):
"""Which HDUs are available."""
available_hdus = []
- keys = ["_events", "_gti", "aeff", "edisp", "psf", "bkg", "_rad_max"]
+ keys = ["_events", "_gti", "aeff", "edisp", "psf", "_bkg", "_rad_max"]
hdus = ["events", "gti", "aeff", "edisp", "psf", "bkg", "rad_max"]
for key, hdu in zip(keys, hdus):
available = self.__dict__.get(key, False)
diff --git a/gammapy/datasets/flux_points.py b/gammapy/datasets/flux_points.py
index 30e767527a..81915d0b91 100644
--- a/gammapy/datasets/flux_points.py
+++ b/gammapy/datasets/flux_points.py
@@ -40,7 +40,7 @@ def _get_reference_model(model, energy_bounds, margin_percent=70):
class FluxPointsDataset(Dataset):
- """Bundle a set of flux points with a parametric model, to compute fit statistic function using chi2 statistics.
+    """Bundle a set of flux points with a parametric model, to compute the fit statistic function using different statistics (see ``stat_type``).
For more information see :ref:`datasets`.
diff --git a/gammapy/irf/core.py b/gammapy/irf/core.py
index 3aa483af6b..91d7ddf52d 100644
--- a/gammapy/irf/core.py
+++ b/gammapy/irf/core.py
@@ -33,6 +33,8 @@ class FoVAlignment(str, Enum):
ALTAZ = "ALTAZ"
RADEC = "RADEC"
+ # used for backward compatibility of old HESS data
+ REVERSE_LON_RADEC = "REVERSE_LON_RADEC"
class IRF(metaclass=abc.ABCMeta):
diff --git a/gammapy/irf/psf/core.py b/gammapy/irf/psf/core.py
index 226d4ea594..e55cd7ffd4 100644
--- a/gammapy/irf/psf/core.py
+++ b/gammapy/irf/psf/core.py
@@ -168,7 +168,7 @@ def plot_containment_radius_vs_energy(
ax.set_ylabel(
f"Containment radius [{ax.yaxis.units.to_string(UNIT_STRING_FORMAT)}]"
)
- ax.yaxis.set_major_formatter(mtick.FormatStrFormatter("%.1e"))
+ ax.yaxis.set_major_formatter(mtick.FormatStrFormatter("%.2f"))
return ax
def plot_containment_radius(
diff --git a/gammapy/makers/safe.py b/gammapy/makers/safe.py
index 73d9b60b8c..54e1635500 100644
--- a/gammapy/makers/safe.py
+++ b/gammapy/makers/safe.py
@@ -17,9 +17,11 @@
class SafeMaskMaker(Maker):
"""Make safe data range mask for a given observation.
+ For more information see :ref:`safe-data-range`.
+
.. warning::
- Currently some methods computing a safe energy range ("aeff-default",
+ Currently, some methods computing a safe energy range ("aeff-default",
"aeff-max" and "edisp-bias") determine a true energy range and apply
it to reconstructed energy, effectively neglecting the energy dispersion.
@@ -280,7 +282,7 @@ def make_mask_energy_bkg_peak(self, dataset, observation=None):
The energy threshold is defined as the lower edge of the energy
bin with the highest predicted background rate. This is to ensure analysis in
a region where a Powerlaw approximation to the background spectrum is valid.
- The is motivated by its use in the HESS DL3
+        This is motivated by its use in the H.E.S.S. DL3
validation paper: https://arxiv.org/pdf/1910.08088.pdf
Parameters
diff --git a/gammapy/makers/tests/test_spectrum.py b/gammapy/makers/tests/test_spectrum.py
index ae46443331..4949fcb24a 100644
--- a/gammapy/makers/tests/test_spectrum.py
+++ b/gammapy/makers/tests/test_spectrum.py
@@ -7,6 +7,7 @@
from regions import CircleSkyRegion
from gammapy.data import DataStore
from gammapy.datasets import SpectrumDataset
+from gammapy.irf import FoVAlignment
from gammapy.makers import (
ReflectedRegionsBackgroundMaker,
SafeMaskMaker,
@@ -125,6 +126,11 @@ def test_spectrum_dataset_maker_hess_dl3(spectrum_dataset_crab, observations_hes
datasets = []
for obs in observations_hess_dl3:
+
+ assert obs.meta.optional["CREATOR"] == "SASH FITS::EventListWriter"
+ assert obs.meta.optional["HDUVERS"] == "0.2"
+ assert obs.bkg.fov_alignment == FoVAlignment.REVERSE_LON_RADEC
+
dataset = maker.run(spectrum_dataset_crab, obs)
datasets.append(dataset)
diff --git a/gammapy/makers/utils.py b/gammapy/makers/utils.py
index a7fd48e80f..da25161118 100644
--- a/gammapy/makers/utils.py
+++ b/gammapy/makers/utils.py
@@ -3,7 +3,7 @@
import warnings
import numpy as np
import astropy.units as u
-from astropy.coordinates import Angle, SkyOffsetFrame
+from astropy.coordinates import Angle
from astropy.table import Table
from gammapy.data import FixedPointingInfo
from gammapy.irf import BackgroundIRF, EDispMap, FoVAlignment, PSFMap
@@ -82,12 +82,15 @@ def _get_fov_coords(pointing, irf, geom, use_region_center=True, obstime=None):
fov_lon, fov_lat = sky_to_fov(
altaz_coord.az, altaz_coord.alt, pointing_altaz.az, pointing_altaz.alt
)
- elif irf.fov_alignment == FoVAlignment.RADEC:
- # Create OffsetFrame
- frame = SkyOffsetFrame(origin=pointing_icrs)
- pseudo_fov_coord = sky_coord.transform_to(frame)
- fov_lon = pseudo_fov_coord.lon
- fov_lat = pseudo_fov_coord.lat
+ elif irf.fov_alignment in [FoVAlignment.RADEC, FoVAlignment.REVERSE_LON_RADEC]:
+ fov_lon, fov_lat = sky_to_fov(
+ sky_coord.icrs.ra,
+ sky_coord.icrs.dec,
+ pointing_icrs.icrs.ra,
+ pointing_icrs.icrs.dec,
+ )
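+        # REVERSE_LON_RADEC is used for backward compatibility with old H.E.S.S. background
+        # models, which store the FoV longitude with the opposite sign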
+ if irf.fov_alignment == FoVAlignment.REVERSE_LON_RADEC:
+ fov_lon = -fov_lon
else:
raise ValueError(
f"Unsupported background coordinate system: {irf.fov_alignment!r}"
diff --git a/gammapy/maps/region/ndmap.py b/gammapy/maps/region/ndmap.py
index 5ef492a0ac..4c315a081f 100644
--- a/gammapy/maps/region/ndmap.py
+++ b/gammapy/maps/region/ndmap.py
@@ -159,9 +159,6 @@ def plot(self, ax=None, axis_name=None, **kwargs):
if "energy" in axis_name:
ax.set_yscale("log", nonpositive="clip")
- if len(self.geom.axes) > 1:
- plt.legend()
-
return ax
def plot_hist(self, ax=None, **kwargs):
diff --git a/gammapy/modeling/models/temporal.py b/gammapy/modeling/models/temporal.py
index 39988f9dab..80ddca86e8 100644
--- a/gammapy/modeling/models/temporal.py
+++ b/gammapy/modeling/models/temporal.py
@@ -1,5 +1,6 @@
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Time-dependent models."""
+
import logging
import numpy as np
import scipy.interpolate
@@ -7,8 +8,8 @@
from astropy.io import fits
from astropy.table import Table
from astropy.time import Time
-from astropy.utils import lazyproperty
from gammapy.maps import MapAxis, RegionNDMap, TimeMapAxis
+from astropy.utils import lazyproperty
from gammapy.modeling import Parameter
from gammapy.utils.compat import COPY_IF_NEEDED
from gammapy.utils.random import InverseCDFSampler, get_random_state
@@ -983,12 +984,26 @@ class TemplatePhaseCurveTemporalModel(TemporalModel):
f2 = Parameter("f2", _f2_default, frozen=True)
def __init__(self, table, filename=None, **kwargs):
- self.table = table
+ self.table = self._normalise_table(table)
if filename is not None:
filename = str(make_path(filename))
self.filename = filename
super().__init__(**kwargs)
+ @staticmethod
+ def _normalise_table(table):
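+        """Rescale the ``NORM`` column so that the phase curve integrates to one over a cycle."""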
+ x = table["PHASE"].data
+ y = table["NORM"].data
+
+ interpolator = scipy.interpolate.InterpolatedUnivariateSpline(
+ x, y, k=1, ext=2, bbox=[0.0, 1.0]
+ )
+
+ integral = interpolator.integral(0, 1)
+
+ table["NORM"] *= 1 / integral
+ return table
+
@classmethod
def read(
cls,
@@ -1010,6 +1025,7 @@ def read(
Filename with path.
"""
filename = str(make_path(path))
+
return cls(
Table.read(filename),
filename=filename,
@@ -1118,11 +1134,15 @@ def integral(self, t_min, t_max):
) - self._interpolator.antiderivative()(ph_min)
# Divide by Jacobian (here we neglect variations of frequency during the integration period)
- total = (phase_integral + start_integral + end_integral) / frequency
+ total = phase_integral + start_integral + end_integral
# Normalize by total integration time
- integral_norm = total / self.time_sum(t_min, t_max)
+ n_period = (self.time_sum(t_min, t_max) * frequency).to("")
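+        # less than one full phase cycle covered: normalise by a single cycle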
+ if int(n_period) == 0:
+ n_period = 1
+
+ integral_norm = total / n_period
- return integral_norm.to("")
+ return integral_norm
@classmethod
def from_dict(cls, data):
diff --git a/gammapy/modeling/models/tests/test_temporal.py b/gammapy/modeling/models/tests/test_temporal.py
index 7ce68ab444..c8e78676a3 100644
--- a/gammapy/modeling/models/tests/test_temporal.py
+++ b/gammapy/modeling/models/tests/test_temporal.py
@@ -354,7 +354,6 @@ def test_sine_temporal_model_integral():
@requires_data()
def test_to_dict(light_curve):
-
out = light_curve.to_dict()
assert out["temporal"]["type"] == "LightCurveTemplateTemporalModel"
assert "lightcrv_PKSB1222+216.fits" in out["temporal"]["filename"]
@@ -362,7 +361,6 @@ def test_to_dict(light_curve):
@requires_data()
def test_with_skymodel(light_curve):
-
sky_model = SkyModel(spectral_model=PowerLawSpectralModel())
out = sky_model.to_dict()
assert "temporal" not in out
@@ -399,7 +397,7 @@ def test_phase_curve_model(tmp_path):
)
result = phase_model(t_ref + [0, 0.025, 0.05] * u.s)
- assert_allclose(result, [0, 0.5, 0], atol=1e-5)
+ assert_allclose(result, [0.000000e00, 1.999989e00, 2.169609e-05], atol=1e-5)
phase_model.filename = str(make_path(tmp_path / "tmp.fits"))
phase_model.write()
@@ -415,22 +413,22 @@ def test_phase_curve_model(tmp_path):
)
assert_allclose(new_model.table["PHASE"].data, phase)
- assert_allclose(new_model.table["NORM"].data, norm)
+ assert_allclose(new_model.table["NORM"].data, norm * 4)
# exact number of phases
integral = phase_model.integral(t_ref, t_ref + 10 * u.s)
- assert_allclose(integral, 0.25, rtol=1e-5)
+ assert_allclose(integral, 1.0, rtol=1e-5)
# long duration. Should be equal to the phase average
integral = phase_model.integral(t_ref + 1 * u.h, t_ref + 3 * u.h)
- assert_allclose(integral, 0.25, rtol=1e-5)
+ assert_allclose(integral, 1.0, rtol=1e-5)
# 1.25 phase
integral = phase_model.integral(t_ref, t_ref + 62.5 * u.ms)
- assert_allclose(integral, 0.225, rtol=1e-5)
+ assert_allclose(integral, 0.9, rtol=1e-5)
def test_phase_curve_model_sample_time():
phase = np.linspace(0.0, 1, 51)
- norm = 1 * (phase < 0.5)
+ norm = 1.0 * (phase < 0.5)
table = Table(data={"PHASE": phase, "NORM": norm})
t_ref = Time("2020-06-01", scale="utc")
@@ -472,7 +470,7 @@ def test_phasecurve_DC1():
times = Time(t_ref, format="mjd") + [0.0, 0.5, 0.65, 1.0] * P0
norm = model(times)
- assert_allclose(norm, [0.05, 0.15, 1.0, 0.05])
+ assert_allclose(norm, [0.294118, 0.882353, 5.882353, 0.294118], atol=1e-5)
with mpl_plot_check():
model.plot_phasogram(n_points=200)
diff --git a/gammapy/utils/docs.py b/gammapy/utils/docs.py
index 257119c7c9..0c6372bd0e 100644
--- a/gammapy/utils/docs.py
+++ b/gammapy/utils/docs.py
@@ -12,10 +12,11 @@
- https://doughellmann.com/blog/2010/05/09/defining-custom-roles-in-sphinx/
- http://docutils.sourceforge.net/docs/howto/rst-directives.html
-- https://github.com/docutils-mirror/docutils/blob/master/docutils/parsers/rst/directives/images.py
+- https://github.com/docutils/docutils/blob/master/docutils/docutils/parsers/rst/directives/images.py
- https://github.com/sphinx-doc/sphinx/blob/master/sphinx/directives/other.py
-- https://github.com/bokeh/bokeh/tree/master/bokeh/sphinxext
+- https://github.com/bokeh/bokeh/tree/master/src/bokeh/sphinxext
"""
+
import os
from pathlib import Path
from docutils.parsers.rst import Directive
@@ -44,13 +45,15 @@ def run(self):
raw = f"""