Integrate halo model for intrinsic alignments in 3x2-point systematics. #400

Draft
wants to merge 15 commits into base: master
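For orientation, here is a condensed sketch of how the new systematic is used, extracted from the example file added in this PR (the amplitude values are the illustrative ones from that example, not defaults):

import firecrown.likelihood.gauss_family.statistic.source.weak_lensing as wl
from firecrown.parameters import ParamsMap

# Attach the new halo-model IA systematic to a weak-lensing source.
ia_systematic = wl.HMAlignmentSystematic()
source = wl.WeakLensing(sacc_tracer="src0", systematics=[ia_systematic])

# The two alignment amplitudes are supplied through a ParamsMap
# (passed later to likelihood.update() / tools.update()).
params = ParamsMap({"ia_a_1h": 1e-3, "ia_a_2h": 1.0})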
250 changes: 250 additions & 0 deletions examples/des_y1_3x2pt/des_y1_cosmic_shear_HMIA.py
@@ -0,0 +1,250 @@
"""Example of a Firecrown likelihood using the DES Y1 cosmic shear data and
the halo model for intrinsic alignments."""
import os
from typing import Tuple
import sacc
import pyccl as ccl

import firecrown.likelihood.gauss_family.statistic.source.weak_lensing as wl
from firecrown.likelihood.gauss_family.statistic.two_point import TwoPoint, TracerNames
from firecrown.likelihood.gauss_family.gaussian import ConstGaussian
from firecrown.parameters import ParamsMap
from firecrown.modeling_tools import ModelingTools
from firecrown.likelihood.likelihood import Likelihood

saccfile = os.path.expanduser(
os.path.expandvars(
"${FIRECROWN_DIR}/examples/des_y1_3x2pt/des_y1_3x2pt_sacc_data.fits"
)
)
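# Note: the FIRECROWN_DIR environment variable must be set for this path to
# resolve; os.path.expandvars leaves "${FIRECROWN_DIR}" in place if it is unset.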


def build_likelihood(_) -> Tuple[Likelihood, ModelingTools]:
"""Build the likelihood for the DES Y1 cosmic shear data TATT."""
# Load sacc file
sacc_data = sacc.Sacc.load_fits(saccfile)

# Define sources
n_source = 1
sources = {}

    # Define the intrinsic alignment systematic. This will be added to the
    # lensing sources later.
ia_systematic = wl.HMAlignmentSystematic()

for i in range(n_source):
# Define the photo-z shift systematic.
pzshift = wl.PhotoZShift(sacc_tracer=f"src{i}")

# Create the weak lensing source, specifying the name of the tracer in the
# sacc file and a list of systematics
sources[f"src{i}"] = wl.WeakLensing(
sacc_tracer=f"src{i}", systematics=[pzshift, ia_systematic]
)

    # Define the statistics we want to include in the likelihood
stats = {}
for stat, sacc_stat in [
("xip", "galaxy_shear_xi_plus"),
("xim", "galaxy_shear_xi_minus"),
]:
for i in range(n_source):
for j in range(i, n_source):
# Define two-point statistics, given two sources (from above) and
# the type of statistic.
stats[f"{stat}_src{i}_src{j}"] = TwoPoint(
source0=sources[f"src{i}"],
source1=sources[f"src{j}"],
sacc_data_type=sacc_stat,
)

    # Define the halo model components. This is one reasonable choice, but
    # maybe not the best one!
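    # hmd_200m: halo mass definition (M_200m); cM: concentration-mass relation;
    # nM: halo mass function; bM: halo bias model. These are handed to the
    # ModelingTools below.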
hmd_200m = ccl.halos.MassDef200m
cM = "Duffy08"
nM = "Tinker10"
bM = "Tinker10"

modeling_tools = ModelingTools(
hm_definition=hmd_200m, hm_function=nM, bias_function=bM, cM_relation=cM
)
    # Create the likelihood from the statistics
    likelihood = ConstGaussian(statistics=list(stats.values()))

# Read the two-point data from the sacc file
likelihood.read(sacc_data)

    # To allow this likelihood to be used in cobaya or cosmosis, an object
    # called "likelihood" must be defined.
print(
"Using parameters:", list(likelihood.required_parameters().get_params_names())
)

return likelihood, modeling_tools


# We can also run the likelihood directly
def run_likelihood() -> None:
"""Run the likelihood."""
import numpy as np # pylint: disable-msg=import-outside-toplevel

likelihood, tools = build_likelihood(None)

# Load sacc file
sacc_data = sacc.Sacc.load_fits(saccfile)

src0_tracer = sacc_data.get_tracer("src0")
z, nz = src0_tracer.z, src0_tracer.nz # pylint: disable-msg=invalid-name

# Define a ccl.Cosmology object using default parameters
ccl_cosmo = ccl.CosmologyVanillaLCDM()
ccl_cosmo.compute_nonlin_power()

# Bare CCL setup
a_1h = 1e-3 # 1-halo alignment amplitude.
a_2h = 1.0 # 2-halo alignment amplitude.

# Code that creates a Pk2D object:
    k_arr = np.geomspace(1e-3, 1e3, 128)  # Wavenumbers at which to evaluate
    a_arr = np.linspace(0.1, 1, 16)  # Scale factors at which to evaluate
cM = ccl.halos.ConcentrationDuffy08(mass_def="200m")
nM = ccl.halos.MassFuncTinker10(mass_def="200m")
bM = ccl.halos.HaloBiasTinker10(mass_def="200m")
hmc = ccl.halos.HMCalculator(mass_function=nM, halo_bias=bM, mass_def="200m")
sat_gamma_HOD = ccl.halos.SatelliteShearHOD(
mass_def="200m", concentration=cM, a1h=a_1h, b=-2
)
# NFW profile for matter (G)
NFW = ccl.halos.HaloProfileNFW(
mass_def="200m", concentration=cM, truncated=True, fourier_analytic=True
)
pk_GI_1h = ccl.halos.halomod_Pk2D(
ccl_cosmo,
hmc,
NFW,
prof2=sat_gamma_HOD,
get_2h=False,
lk_arr=np.log(k_arr),
a_arr=a_arr,
)
pk_II_1h = ccl.halos.halomod_Pk2D(
ccl_cosmo, hmc, sat_gamma_HOD, get_2h=False, lk_arr=np.log(k_arr), a_arr=a_arr
)
# NLA
C1rhocrit = 5e-14 * ccl.physical_constants.RHO_CRITICAL # standard IA normalisation
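    # The two Pk2D objects below implement the NLA power spectra (restating the
    # code that follows):
    #   P_GI(k, a) = -a_2h * C1rhocrit * Omega_m / D(a) * P_nl(k, a)
    #   P_II(k, a) = (a_2h * C1rhocrit * Omega_m / D(a))**2 * P_nl(k, a)
    # where D(a) is the linear growth factor and P_nl the non-linear matter power.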
pk_GI_NLA = ccl.Pk2D.from_function(
pkfunc=lambda k, a: -a_2h
* C1rhocrit
* ccl_cosmo["Omega_m"]
/ ccl_cosmo.growth_factor(a)
* ccl_cosmo.nonlin_matter_power(k, a),
is_logp=False,
)
pk_II_NLA = ccl.Pk2D.from_function(
pkfunc=lambda k, a: (
a_2h * C1rhocrit * ccl_cosmo["Omega_m"] / ccl_cosmo.growth_factor(a)
)
** 2
* ccl_cosmo.nonlin_matter_power(k, a),
is_logp=False,
)
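    # Total IA power spectra: the 1-halo term plus the 2-halo (NLA) term.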
pk_GI = pk_GI_1h + pk_GI_NLA
pk_II = pk_II_1h + pk_II_NLA

# Set the parameters for our systematics
systematics_params = ParamsMap(
{
"ia_a_1h": a_1h,
"ia_a_2h": a_2h,
"src0_delta_z": 0.000,
}
)

# Apply the systematics parameters
likelihood.update(systematics_params)

# Prepare the cosmology object
tools.update(systematics_params)
tools.prepare(ccl_cosmo)

# Compute the log-likelihood, using the ccl.Cosmology object as the input
log_like = likelihood.compute_loglike(tools)

print(f"Log-like = {log_like:.1f}")

# Plot the predicted and measured statistic
assert isinstance(likelihood, ConstGaussian)
two_point_0 = likelihood.statistics[0].statistic
assert isinstance(two_point_0, TwoPoint)

# x = two_point_0.ell_or_theta_
# y_data = two_point_0.measured_statistic_

assert isinstance(likelihood, ConstGaussian)
assert likelihood.cov is not None

# y_err = np.sqrt(np.diag(likelihood.cov))[: len(x)]
# y_theory = two_point_0.predicted_statistic_

# pylint: disable=no-member
print(list(two_point_0.cells.keys()))

make_plot(ccl_cosmo, nz, pk_GI, pk_II, two_point_0, z)


def make_plot(ccl_cosmo, nz, pk_GI, pk_II, two_point_0, z):
"""Create and show a diagnostic plot."""
import numpy as np # pylint: disable-msg=import-outside-toplevel
    import matplotlib.pyplot as plt  # pylint: disable-msg=import-outside-toplevel

ells = two_point_0.ells
cells_gg = two_point_0.cells[TracerNames("shear", "shear")]
cells_gi = two_point_0.cells[TracerNames("shear", "intrinsic_hm")]
cells_ig = two_point_0.cells[TracerNames("intrinsic_hm", "shear")]
cells_ii = two_point_0.cells[TracerNames("intrinsic_hm", "intrinsic_hm")]
cells_total = two_point_0.cells[TracerNames("", "")]
# pylint: enable=no-member

# Code that computes effect from IA using that Pk2D object
t_lens = ccl.WeakLensingTracer(ccl_cosmo, dndz=(z, nz), use_A_ia=False)
t_ia = ccl.WeakLensingTracer(
ccl_cosmo,
has_shear=False,
use_A_ia=False,
dndz=(z, nz),
ia_bias=(z, np.ones_like(z)),
)
# pylint: disable=invalid-name
cl_GI = ccl.angular_cl(ccl_cosmo, t_lens, t_ia, ells, p_of_k_a=pk_GI)
cl_IG = ccl.angular_cl(ccl_cosmo, t_ia, t_lens, ells, p_of_k_a=pk_GI)
cl_II = ccl.angular_cl(ccl_cosmo, t_ia, t_ia, ells, p_of_k_a=pk_II)
# The weak gravitational lensing power spectrum
cl_GG = ccl.angular_cl(ccl_cosmo, t_lens, t_lens, ells)
    # The observed angular power spectrum is the sum of all four contributions.
cl_theory = cl_GG + cl_GI + cl_IG + cl_II
# pylint: enable=invalid-name

# plt.plot(x, y_theory, label="Total")
plt.plot(ells, cells_gg, label="GG firecrown")
plt.plot(ells, cl_GG, ls="--", label="GG CCL")
plt.plot(ells, -cells_gi, label="-GI firecrown")
plt.plot(ells, -cl_GI, ls="--", label="-GI CCL")
plt.plot(ells, cells_ii, label="II firecrown")
plt.plot(ells, cl_II, ls="--", label="II CCL")
plt.plot(ells, cells_total, label="total firecrown")
plt.plot(ells, cl_theory, ls="--", label="total CCL")

# plt.errorbar(x, y_data, y_err, ls="none", marker="o")
plt.xscale("log")
plt.yscale("log")
plt.xlabel(r"$\ell$")
plt.ylabel(r"$C_\ell$")
plt.legend()
# plt.xlim(right=5e3)
# plt.ylim(bottom=1e-12)
plt.title("Halo model IA")
plt.savefig("halo_model.png", facecolor="white", dpi=300)
plt.show()


if __name__ == "__main__":
run_likelihood()
3 changes: 2 additions & 1 deletion examples/des_y1_3x2pt/des_y1_cosmic_shear_TATT.py
@@ -145,7 +145,8 @@ def run_likelihood() -> None:
}
)

# Apply the systematics parameters
# Apply the systematics parameter
tools.update(systematics_params)
likelihood.update(systematics_params)

# Prepare the cosmology object
6 changes: 3 additions & 3 deletions firecrown/likelihood/gauss_family/statistic/source/source.py
@@ -280,9 +280,9 @@ def __init__(

self.sacc_tracer = sacc_tracer
self.current_tracer_args: Optional[_SourceGalaxyArgsT] = None
self.systematics: UpdatableCollection[SourceGalaxySystematic] = (
UpdatableCollection(systematics)
)
self.systematics: UpdatableCollection[
SourceGalaxySystematic
] = UpdatableCollection(systematics)
self.tracer_args: _SourceGalaxyArgsT

def _read(self, sacc_data: sacc.Sacc):
58 changes: 58 additions & 0 deletions firecrown/likelihood/gauss_family/statistic/source/weak_lensing.py
@@ -43,6 +43,9 @@ class WeakLensingArgs(SourceGalaxyArgs):
ia_pt_c_d: Optional[tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]] = None
ia_pt_c_2: Optional[tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]] = None

ia_a_1h: Optional[np.float64] = None
ia_a_2h: Optional[np.float64] = None


class WeakLensingSystematic(SourceGalaxySystematic[WeakLensingArgs]):
"""Abstract base class for all weak lensing systematics."""
@@ -206,6 +209,39 @@ def apply(
)


class HMAlignmentSystematic(WeakLensingSystematic):
"""Halo model intrinsic alignment systematic.

This systematic adds a halo model based intrinsic alignment systematic
which, at the moment, is fixed within the redshift bin.

    The following parameters are special Updatable parameters, which means that
    they can be updated by the sampler; sacc_tracer is used as a prefix for the
    parameter names:

:ivar ia_a_1h: the 1-halo intrinsic alignment bias parameter (satellite galaxies).
:ivar ia_a_2h: the 2-halo intrinsic alignment bias parameter (central galaxies).
"""

def __init__(self, sacc_tracer: Optional[str] = None):
super().__init__()

self.ia_a_1h = parameters.register_new_updatable_parameter()
self.ia_a_2h = parameters.register_new_updatable_parameter()

self.sacc_tracer = sacc_tracer

def apply(
self, tools: ModelingTools, tracer_arg: WeakLensingArgs
) -> WeakLensingArgs:
"""Return a new halo-model alignment systematic, based on the given
tracer_arg, in the context of the given cosmology."""

return replace(
tracer_arg, has_hm=True, ia_a_1h=self.ia_a_1h, ia_a_2h=self.ia_a_2h
)


class WeakLensing(SourceGalaxy[WeakLensingArgs]):
"""Source class for weak lensing."""

@@ -291,6 +327,28 @@ def create_tracers(self, tools: ModelingTools):
)
tracers.append(ia_tracer)

if tracer_args.has_hm:
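            # Halo-model IA: build a satellite-shear HOD profile carrying the
            # 1-halo amplitude, plus a dummy lensing tracer (no shear, unit IA
            # bias) that carries the n(z); the 2-halo amplitude is attached to
            # the profile below.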
cM = tools.get_cM_relation()
halo_profile = pyccl.halos.SatelliteShearHOD(
mass_def=tools.hm_definition, concentration=cM, a1h=tracer_args.ia_a_1h
)
ccl_wl_dummy_tracer = pyccl.WeakLensingTracer(
ccl_cosmo,
has_shear=False,
use_A_ia=False,
dndz=(tracer_args.z, tracer_args.dndz),
ia_bias=(tracer_args.z, np.ones_like(tracer_args.z)),
)
ia_tracer = Tracer(
ccl_wl_dummy_tracer,
tracer_name="intrinsic_hm",
halo_profile=halo_profile,
)
            # Attach the 2-halo amplitude to the profile.
            halo_profile.ia_a_2h = tracer_args.ia_a_2h
tracers.append(ia_tracer)

self.current_tracer_args = tracer_args

return tracers, tracer_args