From 4068b533503d85ea6577d05a432ae2d8c490389c Mon Sep 17 00:00:00 2001 From: Tomomi Sunayama <24945535+ts485@users.noreply.github.com> Date: Fri, 19 Jul 2024 14:15:00 +0200 Subject: [PATCH 1/4] issue/627/testskypixelcoords: New notebook to test coordinate system effects with instructions to use example data (#629) * new notebook to test coordinate systems and shear measurements * editing README for the data availability * Added option to choose coordinate system for generated mock catalog * Added ellipticity coordinate system conversion tests to test_galaxycluster.py and test_mockdata.py * Added documentation to other test cases in Tomomi's notebook. Still need to incorporate Caio's added functionality for sky vs. pixel coordinates in CLMM * starting to work through and document example of lensing signal calculation of HSCY3 cluster without CLMM, but need to change colossus dependency because it is not easy to install on NERSC * added kwargs to GalaxyCluster instance definition in preparation for re-installing latest CLMM with Caio's added coordinate_system kwargs * test_coordinate.ipynb now runs from top to bottom using Caio's coordinate system implementation! 
* modified the Oguri example to also show incorrect coordinate assumption as noted by Caio * added a note for the CosmoDC2 clusters so the user is aware to not necessarily expect a strong signal in center regions * clarified why there might be less signal in the TXPipe outputted CosmoDC2 thing --------- Co-authored-by: Caio Lima de Oliveira Co-authored-by: Camille Avestruz Co-authored-by: Marina Ricci --- README.md | 2 +- docs/doc-config.ini | 1 + examples/DC2/DC2_gt_profiles.ipynb | 4 +- examples/DC2/data_and_model_demo_DC2.ipynb | 2 +- examples/demo_boost_factors.ipynb | 34 +- .../demo_compute_deltasigma_weights.ipynb | 45 +- .../demo_coordinate_system_datasets.ipynb | 1013 +++++++++++++++++ examples/demo_dataops_functionality.ipynb | 6 +- examples/demo_mass_conversion.ipynb | 4 +- examples/demo_mock_ensemble.ipynb | 37 +- examples/demo_mock_ensemble_realistic.ipynb | 6 +- examples/demo_theory_functionality.ipynb | 4 +- ...mple1_Fit_Halo_Mass_to_Shear_Catalog.ipynb | 4 +- ...mple2_Fit_Halo_Mass_to_Shear_Catalog.ipynb | 4 +- ...mple3_Fit_Halo_Mass_to_Shear_Catalog.ipynb | 6 +- .../Example4_Fit_Halo_mass_to_HSC_data.ipynb | 30 +- 16 files changed, 1044 insertions(+), 158 deletions(-) create mode 100644 examples/demo_coordinate_system_datasets.ipynb diff --git a/README.md b/README.md index 7ee5ff075..c9a97fd36 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,3 @@ - # CLMM [![Build and Check](https://github.com/LSSTDESC/CLMM/workflows/Build%20and%20Check/badge.svg)](https://github.com/LSSTDESC/CLMM/actions?query=workflow%3A%22Build+and+Check%22) [![Coverage Status](https://coveralls.io/repos/github/LSSTDESC/CLMM/badge.svg?branch=main)](https://coveralls.io/github/LSSTDESC/CLMM?branch=main) @@ -110,6 +109,7 @@ the `CCL` publication be cited. See details The `Cluster Toolkit` documentation can be found [here](https://cluster-toolkit.readthedocs.io/en/latest/#). 
+The data for the notebook test_coordinate.ipynb is available at https://www.dropbox.com/scl/fo/dwsccslr5iwb7lnkf8jvx/AJkjgFeemUEHpHaZaHHqpAg?rlkey=efbtsr15mdrs3y6xsm7l48o0r&st=xb58ap0g&dl=0 # Contributing to CLMM diff --git a/docs/doc-config.ini b/docs/doc-config.ini index f6475ae66..cb9d24c5c 100644 --- a/docs/doc-config.ini +++ b/docs/doc-config.ini @@ -33,5 +33,6 @@ OTHER ../examples/demo_compute_deltasigma_weights.ipynb ../examples/demo_mass_conversion.ipynb ../examples/demo_mock_ensemble_realistic.ipynb +../examples/demo_coordinate_system_datasets.ipynb #../examples/other_compare_NFW_critical_massdef.ipynb #../examples/other_flat_sky_limitations.ipynb diff --git a/examples/DC2/DC2_gt_profiles.ipynb b/examples/DC2/DC2_gt_profiles.ipynb index d971b50b7..153820205 100644 --- a/examples/DC2/DC2_gt_profiles.ipynb +++ b/examples/DC2/DC2_gt_profiles.ipynb @@ -600,7 +600,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -614,7 +614,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.9.0" } }, "nbformat": 4, diff --git a/examples/DC2/data_and_model_demo_DC2.ipynb b/examples/DC2/data_and_model_demo_DC2.ipynb index f69fe46e9..a91c0c12c 100644 --- a/examples/DC2/data_and_model_demo_DC2.ipynb +++ b/examples/DC2/data_and_model_demo_DC2.ipynb @@ -340,7 +340,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_boost_factors.ipynb b/examples/demo_boost_factors.ipynb index fda156a82..6f236bd22 100644 --- a/examples/demo_boost_factors.ipynb +++ b/examples/demo_boost_factors.ipynb @@ -2,7 +2,6 @@ "cells": [ { "cell_type": "markdown", - "id": "b803f8e6", "metadata": {}, "source": [ "# Boost factors" @@ -11,7 +10,6 @@ { "cell_type": "code", "execution_count": null, - "id": "7180b199", 
"metadata": {}, "outputs": [], "source": [ @@ -26,7 +24,6 @@ }, { "cell_type": "markdown", - "id": "11e76008", "metadata": {}, "source": [ "Make sure we know which version we're using" @@ -35,7 +32,6 @@ { "cell_type": "code", "execution_count": null, - "id": "389d5ab7", "metadata": {}, "outputs": [], "source": [ @@ -44,7 +40,6 @@ }, { "cell_type": "markdown", - "id": "d86fa53c", "metadata": {}, "source": [ "### Define cosmology object" @@ -53,7 +48,6 @@ { "cell_type": "code", "execution_count": null, - "id": "74bcb35c", "metadata": {}, "outputs": [], "source": [ @@ -62,7 +56,6 @@ }, { "cell_type": "markdown", - "id": "6b458081", "metadata": {}, "source": [ "First, we want to generate a $\\Delta\\Sigma$ (excess surface density) profile from mock data, to which we can apply boost factors. The mock data is generated in the following cells." @@ -70,7 +63,6 @@ }, { "cell_type": "markdown", - "id": "de5340ee", "metadata": {}, "source": [ "Generate cluster object from mock data" @@ -79,7 +71,6 @@ { "cell_type": "code", "execution_count": null, - "id": "6cec7b64", "metadata": {}, "outputs": [], "source": [ @@ -107,7 +98,6 @@ }, { "cell_type": "markdown", - "id": "50045a1f", "metadata": {}, "source": [ "Loading this into a CLMM cluster object centered on (0,0)" @@ -116,7 +106,6 @@ { "cell_type": "code", "execution_count": null, - "id": "a564b1c9", "metadata": {}, "outputs": [], "source": [ @@ -127,7 +116,6 @@ }, { "cell_type": "markdown", - "id": "eb9c2218", "metadata": {}, "source": [ "Compute cross and tangential excess surface density for each source galaxy" @@ -136,7 +124,6 @@ { "cell_type": "code", "execution_count": null, - "id": "bc5b051d", "metadata": {}, "outputs": [], "source": [ @@ -154,7 +141,6 @@ }, { "cell_type": "markdown", - "id": "26081451", "metadata": {}, "source": [ "Calculate the binned profile" @@ -163,7 +149,6 @@ { "cell_type": "code", "execution_count": null, - "id": "eaadac01", "metadata": {}, "outputs": [], "source": [ @@ -190,7 +175,6 @@ }, { 
"cell_type": "markdown", - "id": "7e88afc4", "metadata": {}, "source": [ "Plot the $\\Delta\\Sigma$ profile" @@ -199,7 +183,6 @@ { "cell_type": "code", "execution_count": null, - "id": "0aeb2f52", "metadata": {}, "outputs": [], "source": [ @@ -219,7 +202,6 @@ }, { "cell_type": "markdown", - "id": "e28ec4c5", "metadata": {}, "source": [ "## Boost Factors" @@ -227,7 +209,6 @@ }, { "cell_type": "markdown", - "id": "19912d42", "metadata": {}, "source": [ "CLMM offers two boost models, the NFW boost model, and a powerlaw boost model. \n", @@ -242,7 +223,6 @@ { "cell_type": "code", "execution_count": null, - "id": "e6b6ee3d", "metadata": {}, "outputs": [], "source": [ @@ -255,7 +235,6 @@ }, { "cell_type": "markdown", - "id": "58cbc299", "metadata": {}, "source": [ "Plot the two boost factors, $B(R)$" @@ -264,7 +243,6 @@ { "cell_type": "code", "execution_count": null, - "id": "1c1de291", "metadata": {}, "outputs": [], "source": [ @@ -278,7 +256,6 @@ }, { "cell_type": "markdown", - "id": "30e8e6a0", "metadata": {}, "source": [ "The $\\Delta\\Sigma$ profiles can be corrected with the boost factor using `correct_sigma_with_boost_values` or `correct_sigma_with_boost_model`. 
\n", @@ -289,7 +266,6 @@ }, { "cell_type": "markdown", - "id": "6c4daf84", "metadata": {}, "source": [ "\n", @@ -309,7 +285,6 @@ }, { "cell_type": "markdown", - "id": "48e0f2ef", "metadata": {}, "source": [ "First we will apply the boost factor with `correct_sigma_with_boost_values`" @@ -318,7 +293,6 @@ { "cell_type": "code", "execution_count": null, - "id": "d7b5f71d", "metadata": {}, "outputs": [], "source": [ @@ -332,7 +306,6 @@ }, { "cell_type": "markdown", - "id": "15252880", "metadata": {}, "source": [ "Plot the result" @@ -341,7 +314,6 @@ { "cell_type": "code", "execution_count": null, - "id": "e2d90e0c", "metadata": {}, "outputs": [], "source": [ @@ -380,7 +352,6 @@ }, { "cell_type": "markdown", - "id": "caf00c95", "metadata": {}, "source": [ "Now the same again but with `correct_sigma_with_boost_model`" @@ -389,7 +360,6 @@ { "cell_type": "code", "execution_count": null, - "id": "b4b3f953", "metadata": {}, "outputs": [], "source": [ @@ -440,7 +410,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -454,7 +424,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_compute_deltasigma_weights.ipynb b/examples/demo_compute_deltasigma_weights.ipynb index d261446b4..9e4cbe5c1 100644 --- a/examples/demo_compute_deltasigma_weights.ipynb +++ b/examples/demo_compute_deltasigma_weights.ipynb @@ -2,7 +2,6 @@ "cells": [ { "cell_type": "markdown", - "id": "68dc9675-9e74-434d-aba3-e90e5a0280c6", "metadata": {}, "source": [ "# Weak lensing weights\n", @@ -35,7 +34,6 @@ { "cell_type": "code", "execution_count": null, - "id": "88e8bdd1", "metadata": {}, "outputs": [], "source": [ @@ -63,7 +61,6 @@ { "cell_type": "code", "execution_count": null, - "id": "2b021533-9563-4b7f-9f77-6b9765938e71", "metadata": {}, "outputs": [], "source": [ @@ -73,7 +70,6 @@ { 
"cell_type": "code", "execution_count": null, - "id": "b8b70cdf", "metadata": {}, "outputs": [], "source": [ @@ -83,7 +79,6 @@ { "cell_type": "code", "execution_count": null, - "id": "4bcd70d3", "metadata": {}, "outputs": [], "source": [ @@ -97,7 +92,6 @@ { "cell_type": "code", "execution_count": null, - "id": "74790332", "metadata": {}, "outputs": [], "source": [ @@ -128,7 +122,6 @@ }, { "cell_type": "markdown", - "id": "d1e1ec03", "metadata": {}, "source": [ "## Compute the WL weights" @@ -136,7 +129,6 @@ }, { "cell_type": "markdown", - "id": "de35fdb0-5551-4a41-8200-e43abca422b8", "metadata": {}, "source": [ "### redshift point estimate + no shape error\n", @@ -148,7 +140,6 @@ }, { "cell_type": "markdown", - "id": "747e99bf", "metadata": {}, "source": [ "#### Using the functional interface\n", @@ -159,7 +150,6 @@ { "cell_type": "code", "execution_count": null, - "id": "3432e008", "metadata": {}, "outputs": [], "source": [ @@ -169,7 +159,6 @@ { "cell_type": "code", "execution_count": null, - "id": "05f947c3", "metadata": {}, "outputs": [], "source": [ @@ -182,7 +171,6 @@ }, { "cell_type": "markdown", - "id": "206a9562", "metadata": {}, "source": [ "#### As a method of the `GalaxyCluster` object\n", @@ -192,7 +180,6 @@ { "cell_type": "code", "execution_count": null, - "id": "0ce7f15d", "metadata": {}, "outputs": [], "source": [ @@ -203,7 +190,6 @@ }, { "cell_type": "markdown", - "id": "b5195cda-4092-470f-a2de-5d9816a8aa70", "metadata": {}, "source": [ "### photoz + no shape errors" @@ -211,7 +197,6 @@ }, { "cell_type": "markdown", - "id": "5f288e2c-32cc-4f35-8744-c66c073b3024", "metadata": {}, "source": [ "When considering the photo-z distribution, we can compute the weight based on an effective critical surface density:\n", @@ -229,7 +214,6 @@ }, { "cell_type": "markdown", - "id": "6355d76b", "metadata": {}, "source": [ "#### Using the functional interface\n", @@ -240,7 +224,6 @@ { "cell_type": "code", "execution_count": null, - "id": "0dbbfa25", "metadata": {}, 
"outputs": [], "source": [ @@ -255,7 +238,6 @@ { "cell_type": "code", "execution_count": null, - "id": "aee36658", "metadata": {}, "outputs": [], "source": [ @@ -268,7 +250,6 @@ }, { "cell_type": "markdown", - "id": "5285a1a7-e990-4d83-a418-15e04d80a948", "metadata": {}, "source": [ "#### As a method of the `GalaxyCluster` object\n", @@ -279,7 +260,6 @@ { "cell_type": "code", "execution_count": null, - "id": "b56b9478", "metadata": {}, "outputs": [], "source": [ @@ -290,7 +270,6 @@ }, { "cell_type": "markdown", - "id": "8aff3684-360f-4084-9a51-fb5fb407adc7", "metadata": {}, "source": [ "### redshift point estimate + shape error" @@ -298,7 +277,6 @@ }, { "cell_type": "markdown", - "id": "b1f4fd49-4a5a-480a-a67c-046da6dd4b80", "metadata": {}, "source": [ "$$\n", @@ -309,7 +287,6 @@ { "cell_type": "code", "execution_count": null, - "id": "ccfe5c83", "metadata": {}, "outputs": [], "source": [ @@ -330,7 +307,6 @@ }, { "cell_type": "markdown", - "id": "1eb3565f-49b1-40db-986f-0294dad06ad6", "metadata": {}, "source": [ "### photoz + shape error" @@ -338,7 +314,6 @@ }, { "cell_type": "markdown", - "id": "d8cdadd9-3da3-4371-9e12-684aca1d6083", "metadata": {}, "source": [ "$$\n", @@ -349,7 +324,6 @@ { "cell_type": "code", "execution_count": null, - "id": "5e581173", "metadata": {}, "outputs": [], "source": [ @@ -370,7 +344,6 @@ }, { "cell_type": "markdown", - "id": "84764558-67df-470f-93ff-32bd5956b40d", "metadata": {}, "source": [ "With `add=True`, the weights have been added as new columns in the `cl.galcat` Table. A new `sigma_c` column is also automatically added." 
@@ -379,7 +352,6 @@ { "cell_type": "code", "execution_count": null, - "id": "1d0cdfbc-8ffd-4c7f-b8a5-9ca3dbf1dfc3", "metadata": {}, "outputs": [], "source": [ @@ -388,7 +360,6 @@ }, { "cell_type": "markdown", - "id": "392114b6-08bd-4243-9383-59a0fd3b0014", "metadata": { "tags": [] }, @@ -407,7 +378,6 @@ { "cell_type": "code", "execution_count": null, - "id": "683d2ce5-914c-4abe-96c8-e97f2d07fb43", "metadata": {}, "outputs": [], "source": [ @@ -431,7 +401,6 @@ }, { "cell_type": "markdown", - "id": "f8096f22-4552-46e8-9bfe-4e26667994fe", "metadata": {}, "source": [ "### Vizualizing the results\n", @@ -442,7 +411,6 @@ { "cell_type": "code", "execution_count": null, - "id": "7d6749d0", "metadata": {}, "outputs": [], "source": [ @@ -495,7 +463,6 @@ }, { "cell_type": "markdown", - "id": "70d75c94-60f1-429e-ad6e-6aaacaf48ebb", "metadata": {}, "source": [ "- The galaxy weights increase with the true galaxy redshift (left panel, red dots), i.e. weights take account that galaxies far from the cluster are more sheared than closer ones.\n", @@ -510,7 +477,6 @@ }, { "cell_type": "markdown", - "id": "dff3d90e", "metadata": {}, "source": [ "### Background probability" @@ -518,7 +484,6 @@ }, { "cell_type": "markdown", - "id": "7231e5ec-062a-49ee-a41a-3e2c0d36e5cd", "metadata": {}, "source": [ "The probability for a galaxy with photometric redshift of being in the background of the cluster is given by\n", @@ -530,7 +495,6 @@ { "cell_type": "code", "execution_count": null, - "id": "6481f253", "metadata": {}, "outputs": [], "source": [ @@ -558,7 +522,6 @@ { "cell_type": "code", "execution_count": null, - "id": "8e4e4fcf-84d3-43d6-8bb3-498b5f8121c4", "metadata": {}, "outputs": [], "source": [ @@ -568,7 +531,6 @@ { "cell_type": "code", "execution_count": null, - "id": "3753c288-4a93-40a7-9eb4-efe0bd658f37", "metadata": {}, "outputs": [], "source": [ @@ -580,7 +542,6 @@ { "cell_type": "code", "execution_count": null, - "id": "ffe9ee72", "metadata": {}, "outputs": [], "source": [ @@ 
-592,7 +553,6 @@ { "cell_type": "code", "execution_count": null, - "id": "cc7b66f7", "metadata": {}, "outputs": [], "source": [ @@ -667,7 +627,6 @@ }, { "cell_type": "markdown", - "id": "5cb9e668-5f92-4614-a04a-b2c7d020b27b", "metadata": {}, "source": [ "The figure above shows the background probability $P(z > z_l)$ for each galaxy.\n", @@ -679,7 +638,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -693,7 +652,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_coordinate_system_datasets.ipynb b/examples/demo_coordinate_system_datasets.ipynb new file mode 100644 index 000000000..acbedfb65 --- /dev/null +++ b/examples/demo_coordinate_system_datasets.ipynb @@ -0,0 +1,1013 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Tests of coordinate system effects on shear profiles\n", + "\n", + "Author: Tomomi Sunayama, July 15, 2024\n", + "\n", + "Tested, modified, and documented by: Camille Avestruz, July 18, 2024\n", + "\n", + "Reviewed by: Caio Lima de Oliveira, July 19, 2024\n", + "\n", + "This notebook illustrates the impact of having an incorrect coordinate system when calculating shear profiles. The default coordinate system for `CLMM` is the euclidean/pixel coordinate system. This is consistent with DC2 catalogs. 
However, if we input a catalog that assumes a celestial (sky) coordinate system and use the default euclidean (pixel) coordinate system (or vice versa), the signal of shear around a cluster disappears because the signal essentially looks random.\n", + "\n", + "We test:\n", + "- CosmoDC2 source galaxies with shears extracted from `TXPipe` for a single galaxy cluster (euclidean/pixel coordinate system)\n", + "- Example source galaxies for galaxy clusters from a [Summer School](https://github.com/oguri/wlcluster_tutorial) taught by Masamune Oguri (euclidean/pixel coordinate system)\n", + "- HSC Y3 source galaxies with shears post processed by Tomomi Sunayama (celestial/sky coordinate system)\n", + "\n", + "We also \n", + "- Compare the explicit calculation of a shear profile on the HSC Y3 source galaxies against a profile produced from `CLMM`. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Instructions to download text data\n", + "\n", + "Before running this notebook, you will need to download some data. The data is available through a [dropbox link](https://www.dropbox.com/scl/fo/dwsccslr5iwb7lnkf8jvx/AJkjgFeemUEHpHaZaHHqpAg?rlkey=efbtsr15mdrs3y6xsm7l48o0r&st=xb58ap0g&dl=0)\n", + "\n", + "First, create a directory where you want to put the example data, e.g. for a given `data_coords_dir`:\n", + "```\n", + "mkdir -p /data_coords\n", + "cd /data_coords\n", + "```\n", + "Download all files from [dropbox link](https://www.dropbox.com/scl/fo/dwsccslr5iwb7lnkf8jvx/AJkjgFeemUEHpHaZaHHqpAg?rlkey=efbtsr15mdrs3y6xsm7l48o0r&st=xb58ap0g&dl=0). This will be a zip file, `CLMM_data.zip` of size 242 Mb. scp or move this to `data_coords_dir`.\n", + "\n", + "From the directory, you should be able to unzip:\n", + "```\n", + "unzip CLMM_data.zip -d .\n", + "```\n", + "You now have the necessary data files to run this notebook. 
\n", + "\n", + "**Make sure to change the `data_coords_dir` variable in the cell below to the appropriate location where you unzipped these files.**\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# CHANGE TO YOUR LOCATION\n", + "data_coords_dir = \"/data_coords/\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import numpy as np\n", + "import matplotlib.pyplot as plt\n", + "\n", + "# %matplotlib inline\n", + "from astropy.table import Table\n", + "import pickle as pkl\n", + "from pathlib import Path\n", + "import pandas\n", + "from astropy.io import fits" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "from clmm import Cosmology\n", + "\n", + "cosmo = Cosmology(H0=70.0, Omega_dm0=0.27 - 0.045, Omega_b0=0.045, Omega_k0=0.0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example galaxy cluster from CosmoDC2\n", + "\n", + "Here, we plot an example galaxy cluster shear profile using `clmm`. The cluster and source galaxy files are generated from the CosmoDC2 processed through TXPipe. We test the coordinate system." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster = pandas.read_pickle(data_coords_dir + \"/test_cluster.pkl\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster_z = cluster[\"redshift\"] # Cluster redshift\n", + "cluster_ra = cluster[\"ra\"] # Cluster Ra in deg\n", + "cluster_dec = cluster[\"dec\"]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "source = np.genfromtxt(data_coords_dir + \"/test_source.txt\", names=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "gal_ra = source[\"ra\"]\n", + "gal_dec = source[\"dec\"]\n", + "gal_e1 = source[\"e1\"]\n", + "gal_e2 = source[\"e2\"]\n", + "gal_z = source[\"zmean\"]\n", + "gal_id = np.arange(len(source[\"ra\"]))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import clmm\n", + "import clmm.dataops as da\n", + "from clmm.utils import convert_units\n", + "\n", + "# Create a GCData with the galaxies.\n", + "galaxies = clmm.GCData(\n", + " [gal_ra, gal_dec, gal_e1, gal_e2, gal_z, gal_id], names=[\"ra\", \"dec\", \"e1\", \"e2\", \"z\", \"id\"]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we create a `GalaxyCluster` object, specifying an *incorrect* coordinate system. For source galaxies from CosmoDC2, these are in the **euclidean** coordinate system. We use the implemented kwarg when defining the galaxy cluster object to specify the **celestial** coordinate system." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Create a GalaxyCluster.\n", + "cluster = clmm.GalaxyCluster(\n", + " \"Name of cluster\", cluster_ra, cluster_dec, cluster_z, galaxies, coordinate_system=\"celestial\"\n", + ")\n", + "\n", + "# Convert ellipticities into shears for the members.\n", + "cluster.compute_tangential_and_cross_components()\n", + "print(cluster.galcat.colnames)\n", + "\n", + "# Measure profile and add profile table to the cluster.\n", + "seps = convert_units(cluster.galcat[\"theta\"], \"radians\", \"Mpc\", cluster.z, cosmo)\n", + "\n", + "cluster.make_radial_profile(\n", + " bins=da.make_bins(0.1, 3.0, 15, method=\"evenlog10width\"),\n", + " bin_units=\"Mpc\",\n", + " cosmo=cosmo,\n", + " include_empty_bins=False,\n", + " gal_ids_in_bins=True,\n", + ")\n", + "print(cluster.profile.colnames)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here, we plot the resulting profile when `clmm` assumes a coordinate system inconsistent with the catalogs provided. You will note that the signal is virtually zero at almost all radii." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig = plt.figure(figsize=(6, 4))\n", + "ax = fig.add_axes((0, 0, 1, 1))\n", + "errorbar_kwargs = dict(linestyle=\"\", marker=\"o\", markersize=1, elinewidth=0.5, capthick=0.5)\n", + "ax.errorbar(\n", + " cluster.profile[\"radius\"],\n", + " cluster.profile[\"gt\"],\n", + " cluster.profile[\"gt_err\"],\n", + " c=\"k\",\n", + " **errorbar_kwargs\n", + ")\n", + "ax.set_xlabel(\"r [Mpc]\", fontsize=10)\n", + "ax.set_ylabel(r\"$g_t$\", fontsize=10)\n", + "ax.grid(lw=0.3)\n", + "ax.minorticks_on()\n", + "ax.grid(which=\"minor\", lw=0.1)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we create a `GalaxyCluster` object, specifying the *correct* coordinate system. 
For source galaxies from CosmoDC2, these are in the **euclidean** coordinate system. We use the implemented kwarg when defining the galaxy cluster object to also specify the **euclidean** coordinate system. However, with a single galaxy cluster, the signal is not significant enough to clearly see a difference. There is a possible excess with the correct coordinate system at larger radii. Note: First, the lensing signal in CosmoDC2 clusters at the inner radii is known to be weak due to a limitation in the resolution when the ray tracing was performed in generating the source galaxy shears. Second, this has been processed through `TXPipe`, which is something else we will need to understand." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster2 = clmm.GalaxyCluster(\n", + " \"Name of cluster\", cluster_ra, cluster_dec, cluster_z, galaxies, coordinate_system=\"euclidean\"\n", + ")\n", + "cluster2.compute_tangential_and_cross_components()\n", + "print(cluster.galcat.colnames)\n", + "\n", + "# Measure profile and add profile table to the cluster.\n", + "seps = convert_units(cluster2.galcat[\"theta\"], \"radians\", \"Mpc\", cluster2.z, cosmo)\n", + "\n", + "cluster2.make_radial_profile(\n", + " bins=da.make_bins(0.1, 3.0, 15, method=\"evenlog10width\"),\n", + " bin_units=\"Mpc\",\n", + " cosmo=cosmo,\n", + " include_empty_bins=False,\n", + " gal_ids_in_bins=True,\n", + ")\n", + "print(cluster.profile.colnames)\n", + "fig = plt.figure(figsize=(6, 4))\n", + "ax = fig.add_axes((0, 0, 1, 1))\n", + "errorbar_kwargs = dict(linestyle=\"\", marker=\"o\", markersize=1, elinewidth=0.5, capthick=0.5)\n", + "ax.errorbar(\n", + " cluster.profile[\"radius\"], cluster.profile[\"gt\"], cluster.profile[\"gt_err\"], label=\"celestial\"\n", + ")\n", + "ax.errorbar(\n", + " cluster2.profile[\"radius\"],\n", + " cluster2.profile[\"gt\"],\n", + " cluster2.profile[\"gt_err\"],\n", + " label=\"euclidean\",\n", + ")\n", + 
"ax.set_xlabel(\"r [Mpc]\", fontsize=20)\n", + "ax.set_ylabel(r\"$g_t$\", fontsize=20)\n", + "ax.grid(lw=0.3)\n", + "ax.minorticks_on()\n", + "ax.grid(which=\"minor\", lw=0.1)\n", + "plt.legend(fontsize=20)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example source galaxies from M. Oguri\n", + "\n", + "This dataset is a curated selection of cluster and source catalogs from Summer School lectures delivered by Masamune Oguri. There are eight galaxy clusters in this selection. \n", + "\n", + "More details on the corresponding tutorial can be found at this [GitHub link](https://github.com/oguri/wlcluster_tutorial). These are also in the **euclidean** coordinate system." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "clusters = [\n", + " \"a1703\",\n", + " \"gho1320\",\n", + " \"sdss0851\",\n", + " \"sdss1050\",\n", + " \"sdss1138\",\n", + " \"sdss1226\",\n", + " \"sdss1329\",\n", + " \"sdss1531\",\n", + "]\n", + "\n", + "zl_all = {\n", + " \"a1703\": 0.277,\n", + " \"gho1320\": 0.308,\n", + " \"sdss0851\": 0.370,\n", + " \"sdss1050\": 0.60,\n", + " \"sdss1138\": 0.451,\n", + " \"sdss1226\": 0.435,\n", + " \"sdss1329\": 0.443,\n", + " \"sdss1531\": 0.335,\n", + "}\n", + "\n", + "ra_cl_all = {\n", + " \"a1703\": 198.771833,\n", + " \"gho1320\": 200.703208,\n", + " \"sdss0851\": 132.911917,\n", + " \"sdss1050\": 162.666250,\n", + " \"sdss1138\": 174.537292,\n", + " \"sdss1226\": 186.712958,\n", + " \"sdss1329\": 202.393708,\n", + " \"sdss1531\": 232.794167,\n", + "}\n", + "\n", + "dec_cl_all = {\n", + " \"a1703\": 51.817389,\n", + " \"gho1320\": 31.654944,\n", + " \"sdss0851\": 33.518361,\n", + " \"sdss1050\": 0.285306,\n", + " \"sdss1138\": 27.908528,\n", + " \"sdss1226\": 21.831194,\n", + " \"sdss1329\": 22.721167,\n", + " \"sdss1531\": 34.240278,\n", + "}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": 
[], + "source": [ + "cname = \"a1703\"\n", + "\n", + "# cluster redshift\n", + "zl = zl_all.get(cname)\n", + "\n", + "# coordinates of the cluster center\n", + "ra_cl = ra_cl_all.get(cname)\n", + "dec_cl = dec_cl_all.get(cname)\n", + "\n", + "# fix source redshift to 1.0\n", + "zs = 1.0" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We inspect the first galaxy cluster, Abell 1703." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "rfile = data_coords_dir + \"/data/shear_\" + cname + \".dat\"\n", + "data = np.loadtxt(rfile, comments=\"#\")\n", + "\n", + "ra = data[:, 0]\n", + "dec = data[:, 1]\n", + "e1 = data[:, 2]\n", + "e2 = data[:, 3]\n", + "wei = data[:, 4]\n", + "ids = np.arange(np.shape(data)[0])\n", + "redshifts = np.ones(np.shape(data)[0])\n", + "galaxies = clmm.GCData(\n", + " [ra, dec, e1, e2, redshifts, ids], names=[\"ra\", \"dec\", \"e1\", \"e2\", \"z\", \"id\"]\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Here we create a GalaxyCluster object, specifying the correct coordinate system. For source galaxies from the Oguri curated dataset, these are in the euclidean coordinate system. We use the implemented kwarg when defining the galaxy cluster object to also **specify the euclidean coordinate system**." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster = clmm.GalaxyCluster(\"A1703euc\", ra_cl, dec_cl, zl, galaxies, coordinate_system=\"euclidean\")\n", + "\n", + "# Convert elipticities into shears for the members.\n", + "cluster.compute_tangential_and_cross_components()\n", + "print(cluster.galcat.colnames)\n", + "\n", + "# Measure profile and add profile table to the cluster.\n", + "seps = convert_units(cluster.galcat[\"theta\"], \"radians\", \"Mpc\", cluster.z, cosmo)\n", + "\n", + "cluster.make_radial_profile(\n", + " bins=da.make_bins(0.2, 3.0, 8, method=\"evenlog10width\"),\n", + " bin_units=\"Mpc\",\n", + " cosmo=cosmo,\n", + " include_empty_bins=False,\n", + " gal_ids_in_bins=True,\n", + ")\n", + "print(cluster.profile.colnames)\n", + "\n", + "\n", + "fig = plt.figure(figsize=(6, 4))\n", + "ax = fig.add_axes((0, 0, 1, 1))\n", + "errorbar_kwargs = dict(linestyle=\"\", marker=\"o\", markersize=1, elinewidth=0.5, capthick=0.5)\n", + "ax.errorbar(\n", + " cluster.profile[\"radius\"], cluster.profile[\"gt\"], cluster.profile[\"gt_err\"], label=\"euclidean\"\n", + ")\n", + "\n", + "# assume incorrect coordinates\n", + "cluster2 = clmm.GalaxyCluster(\n", + " \"A1703cel\", ra_cl, dec_cl, zl, galaxies, coordinate_system=\"celestial\"\n", + ")\n", + "\n", + "cluster2.compute_tangential_and_cross_components()\n", + "print(cluster2.galcat.colnames)\n", + "\n", + "# Measure profile and add profile table to the cluster.\n", + "seps = convert_units(cluster2.galcat[\"theta\"], \"radians\", \"Mpc\", cluster.z, cosmo)\n", + "\n", + "cluster2.make_radial_profile(\n", + " bins=da.make_bins(0.2, 3.0, 8, method=\"evenlog10width\"),\n", + " bin_units=\"Mpc\",\n", + " cosmo=cosmo,\n", + " include_empty_bins=False,\n", + " gal_ids_in_bins=True,\n", + ")\n", + "print(cluster2.profile.colnames)\n", + "\n", + "ax.errorbar(\n", + " cluster2.profile[\"radius\"],\n", + " cluster2.profile[\"gt\"],\n", + " 
cluster2.profile[\"gt_err\"],\n", + " label=\"celestial\",\n", + ")\n", + "\n", + "\n", + "ax.set_xlabel(\"r [Mpc]\", fontsize=20)\n", + "ax.set_ylabel(r\"$g_t$\", fontsize=20)\n", + "ax.grid(lw=0.3)\n", + "ax.minorticks_on()\n", + "ax.grid(which=\"minor\", lw=0.1)\n", + "plt.legend(fontsize=20)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example source galaxies from HSC Y3\n", + "\n", + "This dataset is a simplified version of HSC Y3 data (GAMA15H), post-processed by Tomomi Sunayama for testing purposes. The pre-processed data is already public. These catalogs assume a **celestial** coordinate system." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cluster_cat = np.genfromtxt(\n", + " data_coords_dir + \"/GAMA15H/redmapper_dr8_GAMA15H.txt\",\n", + " dtype=np.dtype(\n", + " [(\"ra\", np.float64), (\"dec\", np.float64), (\"z\", np.float64), (\"richness\", np.float64)]\n", + " ),\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "source_cat = fits.getdata(data_coords_dir + \"/GAMA15H/GAMA15H_tutorial.fits\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "cl = cluster_cat[0]" + ] + }, + { + "cell_type": "markdown", + "id": "8820c46a", + "metadata": {}, + "source": [ + "Here, we use a KDTree implementation in scipy to extract the background source galaxies for the first galaxy cluster in the dataset." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from scipy import spatial\n", + "\n", + "source1 = source_cat[source_cat[\"photoz\"] > (cl[\"z\"] + 0.3)]\n", + "tree = spatial.cKDTree(np.array((source1[\"ra\"], source1[\"dec\"])).T)\n", + "sel = tree.query_ball_point([cl[\"ra\"], cl[\"dec\"]], 3)\n", + "bg = source1[sel]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bf2df4dc", + "metadata": {}, + "outputs": [], + "source": [ + "# Inspect background source galaxy selection\n", + "bg" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a77e1bfe", + "metadata": {}, + "outputs": [], + "source": [ + "sources = clmm.GCData(\n", + " [bg[\"RA\"], bg[\"Dec\"], bg[\"e1\"], bg[\"e2\"], bg[\"photoz\"], bg[\"weight\"]],\n", + " names=[\"ra\", \"dec\", \"e1\", \"e2\", \"z\", \"w_ls\"],\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "45d76a13", + "metadata": {}, + "outputs": [], + "source": [ + "cluster = clmm.GalaxyCluster(\n", + " \"redmapper\", cl[\"ra\"], cl[\"dec\"], cl[\"z\"], sources, coordinate_system=\"celestial\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b1f88b26", + "metadata": {}, + "outputs": [], + "source": [ + "sigma_c = cosmo.eval_sigma_crit(cl[\"z\"], sources[\"z\"])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6de9c9de", + "metadata": {}, + "outputs": [], + "source": [ + "cluster.compute_tangential_and_cross_components(\n", + " shape_component1=\"e1\",\n", + " shape_component2=\"e2\",\n", + " tan_component=\"DS_t\",\n", + " cross_component=\"DS_x\",\n", + " cosmo=cosmo,\n", + " is_deltasigma=True,\n", + " use_pdz=False,\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5fa13fb2", + "metadata": {}, + "outputs": [], + "source": [ + "cluster" + ] + }, + { + "cell_type": "markdown", + "id": "a4dcd4a7", + "metadata": {}, + 
"source": [ + "Now we construct a radial profile of the tangential and cross terms for the galaxy cluster" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "seps = convert_units(cluster.galcat[\"theta\"], \"radians\", \"Mpc\", cluster.z, cosmo)\n", + "\n", + "cluster.make_radial_profile(\n", + " tan_component_in=\"DS_t\",\n", + " cross_component_in=\"DS_x\",\n", + " tan_component_out=\"DS_t\",\n", + " cross_component_out=\"DS_x\",\n", + " weights_in=\"w_ls\",\n", + " bins=da.make_bins(0.1, 20.0, 15, method=\"evenlog10width\"),\n", + " bin_units=\"Mpc\",\n", + " cosmo=cosmo,\n", + " include_empty_bins=False,\n", + " gal_ids_in_bins=False,\n", + ")\n", + "print(cluster.profile.colnames)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "286dc0f9", + "metadata": {}, + "outputs": [], + "source": [ + "fig = plt.figure(figsize=(6, 6))\n", + "ax = fig.add_axes((0, 0, 1, 1))\n", + "errorbar_kwargs = dict(linestyle=\"\", marker=\"o\", markersize=1, elinewidth=0.5, capthick=0.5)\n", + "ax.errorbar(\n", + " cluster.profile[\"radius\"],\n", + " cluster.profile[\"DS_t\"] / 1e13,\n", + " cluster.profile[\"DS_t_err\"] / 1e13,\n", + " label=\"celestial\",\n", + ")\n", + "plt.loglog()\n", + "\n", + "ax.set_xlabel(\"r [Mpc]\", fontsize=20)\n", + "ax.set_ylabel(r\"$\\Delta\\Sigma(r)$\", fontsize=20)\n", + "ax.grid(lw=0.3)\n", + "ax.minorticks_on()\n", + "ax.grid(which=\"minor\", lw=0.1)\n", + "plt.legend(fontsize=20)\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "id": "05e4ee26", + "metadata": {}, + "source": [ + "## Example explicit lensing profile measurement comparison with CLMM profile\n", + "\n", + "Here, we use the example HSC Y3 dataset to explicitly measure the lensing signal without using CLMM for comparison. Note, we need to still define a cosmology to calculate comoving distances." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "651e0048", + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "from astropy.io import fits\n", + "import matplotlib as mpl\n", + "import matplotlib.pyplot as plt\n", + "from scipy import spatial\n", + "from astropy.cosmology import WMAP5" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dbefe49d", + "metadata": {}, + "outputs": [], + "source": [ + "# Read in the data\n", + "cluster_cat = np.genfromtxt(\n", + " data_coords_dir + \"GAMA15H/redmapper_dr8_GAMA15H.txt\",\n", + " dtype=np.dtype(\n", + " [(\"RA\", np.float64), (\"Dec\", np.float64), (\"z\", np.float64), (\"richness\", np.float64)]\n", + " ),\n", + ")\n", + "source_cat = fits.getdata(data_coords_dir + \"GAMA15H/GAMA15H_tutorial.fits\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a826c6e0", + "metadata": {}, + "outputs": [], + "source": [ + "cosmo = WMAP5" + ] + }, + { + "cell_type": "markdown", + "id": "ca4bd885", + "metadata": {}, + "source": [ + "Below, we measure lensing signals with simplified assumptions. We do not account for responsivity, multiplicative, nor additive biases. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3fdca16e", + "metadata": {}, + "outputs": [], + "source": [ + "# Define functions for the explicit calculation\n", + "\n", + "\n", + "def calcDistanceAngle(a1, d1, a2, d2):\n", + " \"\"\"Compute the angle between lens and source galaxies from ra, dec in radians\n", + " a1 (float, ndarray) : lens RA in radians\n", + " d1 (float, ndarray) : lens DEC in radians\n", + " a2 (float, ndarray) : src RA in radians\n", + " d2 (float, ndarray) : src DEC in radians\n", + " \"\"\"\n", + " return np.arccos(np.cos(d1) * np.cos(d2) * np.cos(a1 - a2) + np.sin(d1) * np.sin(d2))\n", + "\n", + "\n", + "def cosPhi2(a1, a2, d1, d2, distanceAngle):\n", + " \"\"\"Compute $\\cos(2\\phi)$ for a given distance angle between lens and source galaxies from ra, dec in radians\n", + " a1 (float, ndarray) : lens RA in radians\n", + " a2 (float, ndarray) : src RA in radians\n", + " d1 (float, ndarray) : lens DEC in radians\n", + " d2 (float, ndarray) : src DEC in radians\n", + " distanceAngle (float, ndarray) : angular distance between lens and source in radians\n", + " \"\"\"\n", + " return np.sin(a2 - a1) * np.cos(d1) / np.sin(distanceAngle)\n", + "\n", + "\n", + "def sinPhi2(a1, a2, d1, d2, distanceAngle):\n", + " \"\"\"Compute $\\sin(2\\phi)$ for a given distance angle between lens and source galaxies from ra, dec in radians\n", + " a1 (float, ndarray) : lens RA in radians\n", + " a2 (float, ndarray) : src RA in radians\n", + " d1 (float, ndarray) : lens DEC in radians\n", + " d2 (float, ndarray) : src DEC in radians\n", + " distanceAngle (float, ndarray) : angular distance between lens and source in radians\n", + " \"\"\"\n", + " return (-np.cos(d2) * np.sin(d1) + np.sin(d2) * np.cos(d1) * np.cos(a1 - a2)) / np.sin(\n", + " distanceAngle\n", + " )\n", + "\n", + "\n", + "def compute_sin2phi_cos2phi(a1, a2, d1, d2, distanceAngle):\n", + " \"\"\"Compute necessary coefficients for the et and ex components, sin2phi and 
cos2phi\n", + " a1 (float, ndarray) : lens RA in radians\n", + " a2 (float, ndarray) : src RA in radians\n", + " d1 (float, ndarray) : lens DEC in radians\n", + " d2 (float, ndarray) : src DEC in radians\n", + " distanceAngle (float, ndarray) : angular distance between lens and source in radians\n", + " \"\"\"\n", + " cosp = cosPhi2(a1, a2, d1, d2, distanceAngle)\n", + " sinp = sinPhi2(a1, a2, d1, d2, distanceAngle)\n", + " cos2p = cosp**2 - sinp**2\n", + " sin2p = 2.0 * sinp * cosp\n", + " return cos2p, sin2p\n", + "\n", + "\n", + "def calc_et_ex(e1, e2, cos2p, sin2p):\n", + " \"\"\"Calculate the et and ex from the e1 e2 values of all sources and their sin2phi, cos2phi\"\"\"\n", + " et = -(e1 * cos2p + e2 * sin2p)\n", + " ex = -(-e1 * sin2p + e2 * cos2p)\n", + " return et, ex\n", + "\n", + "\n", + "def populate_profile_sums(ps, i_r, src_in_bin, Sigma_cr, sel, et, ex):\n", + " \"\"\"Populate the profile sums at a given radian bin from the calculated selection, sigma_crit, et, and ex\"\"\"\n", + " ps[\"n\"][i_r] += sel.sum()\n", + " ps[\"e_sq\"][i_r] += np.sum(et**2 + ex**2)\n", + "\n", + " wt = src_in_bin[\"weight\"] * Sigma_cr**-2 # multiply by the lens weights if it is not one\n", + " ps[\"w\"][i_r] += np.sum(wt)\n", + "\n", + " wetsigma = wt * Sigma_cr * et\n", + " ps[\"wetsigma\"][i_r] += np.sum(wetsigma)\n", + " ps[\"wetsigma_sq\"][i_r] += np.sum(wetsigma**2)\n", + "\n", + " wexsigma = wt * Sigma_cr * ex\n", + " ps[\"wexsigma\"][i_r] += np.sum(wexsigma)\n", + " ps[\"wexsigma_sq\"][i_r] += np.sum(wexsigma**2)\n", + "\n", + " wsigmainv = wt * 1.0 / Sigma_cr\n", + " ps[\"wsigmainv\"][i_r] += np.sum(wsigmainv)\n", + "\n", + " wzs = wt * src_in_bin[\"photoz\"]\n", + " ps[\"wzs\"][i_r] += np.sum(wzs)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f343ae7c", + "metadata": {}, + "outputs": [], + "source": [ + "# Relevant constants, radial binning, source photoz range and lens photoz range\n", + "\n", + "d2r = np.pi / 180.0\n", + "r2d = 180.0 
/ np.pi\n", + "Mpc = 3.08568025 * 10**19 # 1Mpc = 3.08568025*10**19 km\n", + "M_sun = 1.9884 * 10**33 # solar mass [g]\n", + "c_light = 2.99792458 * 10**5 # speed of light [km/s]\n", + "G = 6.67384 * 10 ** (-20) # gravitational constant [km^3s^-2kg^-1]\n", + "Sigma_cr_fact = c_light**2 / (4 * np.pi * G) * Mpc * 10**3 / M_sun\n", + "rbin_edges = np.logspace(-1, np.log10(20), 15) # Define your radial bins\n", + "\n", + "# Named numpy arrays for relevant profile values to explicitly compute and sum at each radii\n", + "profile_names = [\n", + " \"e_sq\",\n", + " \"w\",\n", + " \"wetsigma\",\n", + " \"wetsigma_sq\",\n", + " \"wexsigma\",\n", + " \"wexsigma_sq\",\n", + " \"wsigmainv\",\n", + " \"wzs\",\n", + " \"n\",\n", + "]\n", + "profile_sums = np.rec.fromarrays(\n", + " [np.zeros(len(rbin_edges) - 1) for i in profile_names], names=profile_names\n", + ")\n", + "\n", + "source_pz = {\"min\": 0.5, \"max\": 10}\n", + "lens_pz = {\"min\": 0.1, \"max\": 0.2}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8da9ec37", + "metadata": {}, + "outputs": [], + "source": [ + "# Select lens clusters and source galaxies from catalogs using kdtree\n", + "\n", + "source_pz_criteria = (source_cat[\"photoz\"] < source_pz[\"max\"]) & (\n", + " source_cat[\"photoz\"] > source_pz[\"min\"]\n", + ")\n", + "selected_sources = source_cat[source_pz_criteria]\n", + "\n", + "tree = spatial.cKDTree(np.array((selected_sources[\"RA\"], selected_sources[\"Dec\"])).T)\n", + "\n", + "# We only select one,selecting many will take much more time to compute.\n", + "lens_pz_criteria = (cluster_cat[\"z\"] > lens_pz[\"min\"]) & (cluster_cat[\"z\"] < lens_pz[\"max\"])\n", + "lens_clusters = cluster_cat[lens_pz_criteria][:1]\n", + "\n", + "# Set weights for the cluster lenses to one\n", + "lens_weights = np.ones(lens_clusters.size)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "972bdfcb", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# 
Calculate lensing profiles for each cluster lens\n", + "\n", + "for ilens in np.arange(lens_clusters.size):\n", + "\n", + " # Select source galaxies for this cluster lens\n", + " sel = tree.query_ball_point([lens_clusters[\"RA\"][ilens], lens_clusters[\"Dec\"][ilens]], 3)\n", + " sel_z = (\n", + " source_cat[sel][\"photoz\"] > lens_clusters[\"z\"][ilens]\n", + " ) # Try to change the source galaxy selection\n", + " source_bg = source_cat[sel][sel_z]\n", + "\n", + " # Compute an angle between the lens and the source\n", + " theta_ls = calcDistanceAngle(\n", + " lens_clusters[\"RA\"][ilens] * d2r,\n", + " lens_clusters[\"Dec\"][ilens] * d2r,\n", + " source_bg[\"RA\"] * d2r,\n", + " source_bg[\"Dec\"] * d2r,\n", + " )\n", + "\n", + " # Compute the comoving distance of the lens\n", + " l_chi = cosmo.comoving_distance((lens_clusters[\"z\"][ilens])).value\n", + " r = theta_ls * l_chi\n", + " assign_r = np.digitize(r, rbin_edges)\n", + "\n", + " for i_r in range(len(rbin_edges) - 1):\n", + " # Subselection mask of source galaxies in the radial bin\n", + " sel = assign_r == i_r + 1\n", + "\n", + " # Subselected source galaxies and their respective angle, theta, to lens\n", + " source_bg_inbin = source_bg[sel]\n", + " theta_sub = theta_ls[sel]\n", + "\n", + " # Compute the cos(2*phi) and sin(2*phi) for a given distance angle between lens and source galaxies\n", + " cos2p, sin2p = compute_sin2phi_cos2phi(\n", + " lens_clusters[\"RA\"][ilens] * d2r,\n", + " source_bg_inbin[\"RA\"] * d2r,\n", + " lens_clusters[\"Dec\"][ilens] * d2r,\n", + " source_bg_inbin[\"Dec\"] * d2r,\n", + " theta_sub,\n", + " )\n", + "\n", + " # Calculate tangential and cross terms from e1, e2 of all source galaxies in the rbin\n", + " et, ex = calc_et_ex(source_bg_inbin[\"e1\"], source_bg_inbin[\"e2\"], cos2p, sin2p)\n", + "\n", + " # Calculate critical surface mass density [M_sun/ comoving Mpc^2]. 
(1+zl)**-2 is for comoving coordinates.\n", + " comoving_distance = cosmo.comoving_distance((source_bg_inbin[\"photoz\"])).value\n", + " Sigma_cr = (\n", + " Sigma_cr_fact\n", + " / (1.0 - l_chi / comoving_distance)\n", + " / l_chi\n", + " / (1.0 + lens_clusters[\"z\"][ilens])\n", + " / 10**12\n", + " )\n", + "\n", + " # Populate the profile_sums at this radial bin for this cluster lens\n", + " populate_profile_sums(profile_sums, i_r, source_bg_inbin, Sigma_cr, sel, et, ex)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8078ef52", + "metadata": {}, + "outputs": [], + "source": [ + "# Compute the lensing signal and errors to plot\n", + "\n", + "radial_bin = (\n", + " 2.0\n", + " * (rbin_edges[1:] ** 3 - rbin_edges[:-1] ** 3)\n", + " / (3.0 * (rbin_edges[1:] ** 2 - rbin_edges[:-1] ** 2))\n", + ")\n", + "gt = 0.5 * profile_sums[\"wetsigma\"] / profile_sums[\"w\"]\n", + "gt_err = 0.5 * np.sqrt(profile_sums[\"wetsigma_sq\"]) / profile_sums[\"w\"]\n", + "gx = 0.5 * profile_sums[\"wexsigma\"] / profile_sums[\"w\"]\n", + "gx_err = 0.5 * np.sqrt(profile_sums[\"wexsigma_sq\"]) / profile_sums[\"w\"]\n", + "sigma_cr = 1.0 / (profile_sums[\"wsigmainv\"] / profile_sums[\"w\"])" + ] + }, + { + "cell_type": "markdown", + "id": "4f4756df", + "metadata": {}, + "source": [ + "Below, we compare the explicitly calculated lensing signal against the CLMM calculated signal. You will notice that the `CLMM` calculated profile is systematically lower than the one calculated using Tomomi's code. This is likely due to a combination of assumed weighting scheme and other factors that differ between HSC post-processing and what `CLMM` assumes or a \"little h\" problem, which we will need to understand and possibly address." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Figure for the lensing signal\n", + "plt.figure(figsize=(6, 6))\n", + "\n", + "# From explicit calculation\n", + "plt.errorbar(radial_bin, gt, yerr=gt_err, label=\"original\")\n", + "\n", + "# From CLMM\n", + "plt.errorbar(\n", + " cluster.profile[\"radius\"],\n", + " cluster.profile[\"DS_t\"] / 1e13,\n", + " cluster.profile[\"DS_t_err\"] / 1e13,\n", + " label=\"CLMM\",\n", + ")\n", + "plt.loglog()\n", + "plt.legend(fontsize=20)\n", + "plt.xlabel(r\"$R[h^{-1}{\\rm Mpc}]$\", fontsize=20)\n", + "plt.ylabel(r\"$\\Delta\\Sigma(R)$\", fontsize=20)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/demo_dataops_functionality.ipynb b/examples/demo_dataops_functionality.ipynb index d26413402..a985af223 100644 --- a/examples/demo_dataops_functionality.ipynb +++ b/examples/demo_dataops_functionality.ipynb @@ -505,7 +505,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "- e.g., generate 20 bins between 1 and 6 Mpc, each contaning the same number of galaxies" + " e.g., generate 20 bins between 1 and 6 Mpc, each contaning the same number of galaxies" ] }, { @@ -794,7 +794,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -808,7 +808,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_mass_conversion.ipynb 
b/examples/demo_mass_conversion.ipynb index 3cb6e9aa8..587d5243b 100644 --- a/examples/demo_mass_conversion.ipynb +++ b/examples/demo_mass_conversion.ipynb @@ -384,7 +384,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -398,7 +398,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.13" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_mock_ensemble.ipynb b/examples/demo_mock_ensemble.ipynb index 84354b111..fd3c734db 100644 --- a/examples/demo_mock_ensemble.ipynb +++ b/examples/demo_mock_ensemble.ipynb @@ -2,7 +2,6 @@ "cells": [ { "cell_type": "markdown", - "id": "ee163bf2", "metadata": {}, "source": [ "# Generate mock data for a cluster ensemble\n", @@ -12,7 +11,6 @@ { "cell_type": "code", "execution_count": null, - "id": "7d8e5d58", "metadata": {}, "outputs": [], "source": [ @@ -37,7 +35,6 @@ { "cell_type": "code", "execution_count": null, - "id": "47314c70-58d9-4e9d-8053-f424767c9926", "metadata": {}, "outputs": [], "source": [ @@ -47,7 +44,6 @@ { "cell_type": "code", "execution_count": null, - "id": "0a88a2e2", "metadata": {}, "outputs": [], "source": [ @@ -56,7 +52,6 @@ }, { "cell_type": "markdown", - "id": "df55d889", "metadata": {}, "source": [ "## Generating a cluster catalog and associated source catalogs\n", @@ -66,7 +61,6 @@ { "cell_type": "code", "execution_count": null, - "id": "910f87b4", "metadata": {}, "outputs": [], "source": [ @@ -96,7 +90,6 @@ }, { "cell_type": "markdown", - "id": "ed013971", "metadata": {}, "source": [ "### Background galaxy catalog generation\n", @@ -116,7 +109,6 @@ { "cell_type": "code", "execution_count": null, - "id": "9e662adc-6d92-4c8a-87a3-926c47567e73", "metadata": {}, "outputs": [], "source": [ @@ -130,7 +122,6 @@ { "cell_type": "code", "execution_count": null, - "id": "60d9e41b", "metadata": {}, "outputs": [], "source": [ @@ -196,7 +187,6 @@ { 
"cell_type": "code", "execution_count": null, - "id": "544e0b75-0a5e-4506-bc35-63314c6b508c", "metadata": {}, "outputs": [], "source": [ @@ -205,7 +195,6 @@ }, { "cell_type": "markdown", - "id": "a7f6a520", "metadata": {}, "source": [ "## Creating ClusterEnsemble object and estimation of individual excess surface density profiles\n", @@ -222,7 +211,6 @@ { "cell_type": "code", "execution_count": null, - "id": "cdb973ec", "metadata": {}, "outputs": [], "source": [ @@ -246,7 +234,6 @@ }, { "cell_type": "markdown", - "id": "774b45da", "metadata": {}, "source": [ "There is also the option to create an `ClusterEnsemble` object without the clusters list. Then, the user may compute the individual profile for each wanted cluster and compute the radial profile once all the indvidual profiles have been computed. This method may be reccomended if there a large number of clusters to avoid excess of memory allocation, since the user can generate each cluster separately, compute its individual profile and then delete the cluster object. 
" @@ -255,7 +242,6 @@ { "cell_type": "code", "execution_count": null, - "id": "dff912bc", "metadata": {}, "outputs": [], "source": [ @@ -278,7 +264,6 @@ }, { "cell_type": "markdown", - "id": "dce0a7da", "metadata": {}, "source": [ "A third option is where all clusters already have the profile computed, and we just add those to the `ClusterEnsemble` object:" @@ -287,7 +272,6 @@ { "cell_type": "code", "execution_count": null, - "id": "489277dd", "metadata": {}, "outputs": [], "source": [ @@ -310,7 +294,6 @@ { "cell_type": "code", "execution_count": null, - "id": "9de6b261-f1aa-4b43-941b-d415462fec95", "metadata": {}, "outputs": [], "source": [ @@ -320,7 +303,6 @@ { "cell_type": "code", "execution_count": null, - "id": "a92f1bed", "metadata": {}, "outputs": [], "source": [ @@ -338,7 +320,6 @@ }, { "cell_type": "markdown", - "id": "99e3fe18", "metadata": {}, "source": [ "## Stacked profile of the cluster ensemble\n", @@ -348,7 +329,6 @@ { "cell_type": "code", "execution_count": null, - "id": "5f028309", "metadata": {}, "outputs": [], "source": [ @@ -357,7 +337,6 @@ }, { "cell_type": "markdown", - "id": "907fd664", "metadata": {}, "source": [ "## Covariance (Bootstrap, sample, Jackknife) of the stack between radial bins\n", @@ -372,7 +351,6 @@ { "cell_type": "code", "execution_count": null, - "id": "22db4f4d", "metadata": {}, "outputs": [], "source": [ @@ -388,7 +366,6 @@ { "cell_type": "code", "execution_count": null, - "id": "892abedb", "metadata": {}, "outputs": [], "source": [ @@ -431,7 +408,6 @@ { "cell_type": "code", "execution_count": null, - "id": "f3e698d9", "metadata": {}, "outputs": [], "source": [ @@ -455,7 +431,6 @@ }, { "cell_type": "markdown", - "id": "bfeeb5cf-d8ce-4cd0-93aa-9c36e874e8c4", "metadata": {}, "source": [ "## Visualizing the stacked profiles and corresponding model" @@ -463,7 +438,6 @@ }, { "cell_type": "markdown", - "id": "a237d1ce-92b9-4bcc-8711-e9ab7eafc377", "metadata": {}, "source": [ "In the figure below, we plot:\n", @@ -475,7 +449,6 
@@ { "cell_type": "code", "execution_count": null, - "id": "e15ad6da-b66c-4bbd-875c-e0437b4390f8", "metadata": {}, "outputs": [], "source": [ @@ -489,7 +462,6 @@ { "cell_type": "code", "execution_count": null, - "id": "d8e95aaa", "metadata": {}, "outputs": [], "source": [ @@ -588,7 +560,6 @@ }, { "cell_type": "markdown", - "id": "aa1791f1", "metadata": {}, "source": [ "## Saving/Loading ClusterEnsemble\n", @@ -598,7 +569,6 @@ { "cell_type": "code", "execution_count": null, - "id": "cd489bbf", "metadata": {}, "outputs": [], "source": [ @@ -608,7 +578,6 @@ { "cell_type": "code", "execution_count": null, - "id": "c3a86d18", "metadata": {}, "outputs": [], "source": [ @@ -618,9 +587,9 @@ ], "metadata": { "kernelspec": { - "display_name": "wrk", + "display_name": "Python 3", "language": "python", - "name": "wrk" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -632,7 +601,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/demo_mock_ensemble_realistic.ipynb b/examples/demo_mock_ensemble_realistic.ipynb index bb0f9147f..42cae6d7d 100644 --- a/examples/demo_mock_ensemble_realistic.ipynb +++ b/examples/demo_mock_ensemble_realistic.ipynb @@ -1179,9 +1179,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3.11", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "python3.11" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -1193,7 +1193,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.4" + "version": "3.9.0" } }, "nbformat": 4, diff --git a/examples/demo_theory_functionality.ipynb b/examples/demo_theory_functionality.ipynb index ce22a18b8..393606340 100644 --- a/examples/demo_theory_functionality.ipynb +++ b/examples/demo_theory_functionality.ipynb @@ -541,7 +541,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 
(ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -555,7 +555,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/mass_fitting/Example1_Fit_Halo_Mass_to_Shear_Catalog.ipynb b/examples/mass_fitting/Example1_Fit_Halo_Mass_to_Shear_Catalog.ipynb index 60f8acb0e..aa5db1668 100644 --- a/examples/mass_fitting/Example1_Fit_Halo_Mass_to_Shear_Catalog.ipynb +++ b/examples/mass_fitting/Example1_Fit_Halo_Mass_to_Shear_Catalog.ipynb @@ -671,7 +671,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -685,7 +685,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/mass_fitting/Example2_Fit_Halo_Mass_to_Shear_Catalog.ipynb b/examples/mass_fitting/Example2_Fit_Halo_Mass_to_Shear_Catalog.ipynb index 21b72ef29..567d5f292 100644 --- a/examples/mass_fitting/Example2_Fit_Halo_Mass_to_Shear_Catalog.ipynb +++ b/examples/mass_fitting/Example2_Fit_Halo_Mass_to_Shear_Catalog.ipynb @@ -605,7 +605,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -619,7 +619,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/mass_fitting/Example3_Fit_Halo_Mass_to_Shear_Catalog.ipynb b/examples/mass_fitting/Example3_Fit_Halo_Mass_to_Shear_Catalog.ipynb index 6c9c07b41..993d839ff 100644 --- a/examples/mass_fitting/Example3_Fit_Halo_Mass_to_Shear_Catalog.ipynb +++ b/examples/mass_fitting/Example3_Fit_Halo_Mass_to_Shear_Catalog.ipynb @@ -959,9 +959,9 @@ ], "metadata": { "kernelspec": { - "display_name": "wrk", + 
"display_name": "Python 3", "language": "python", - "name": "wrk" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -973,7 +973,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.6.9" } }, "nbformat": 4, diff --git a/examples/mass_fitting/Example4_Fit_Halo_mass_to_HSC_data.ipynb b/examples/mass_fitting/Example4_Fit_Halo_mass_to_HSC_data.ipynb index 0c423d25b..74502f5c7 100644 --- a/examples/mass_fitting/Example4_Fit_Halo_mass_to_HSC_data.ipynb +++ b/examples/mass_fitting/Example4_Fit_Halo_mass_to_HSC_data.ipynb @@ -2,7 +2,6 @@ "cells": [ { "cell_type": "markdown", - "id": "e320d2bf-dce6-4a5b-ab38-04b0c948155f", "metadata": { "tags": [] }, @@ -43,7 +42,6 @@ }, { "cell_type": "markdown", - "id": "4ef8f003-be65-44a3-8b0a-04749d1f7f13", "metadata": {}, "source": [ "\n", @@ -55,7 +53,6 @@ { "cell_type": "code", "execution_count": null, - "id": "d3503a1a-103f-48aa-b600-ef2d72de82a0", "metadata": {}, "outputs": [], "source": [ @@ -70,7 +67,6 @@ }, { "cell_type": "markdown", - "id": "5e228cf4-f5f3-4b55-97b7-f1e022a5b29c", "metadata": {}, "source": [ "\n", @@ -93,7 +89,6 @@ }, { "cell_type": "markdown", - "id": "c684515e-e176-4b92-9509-0217ade681a0", "metadata": { "jp-MarkdownHeadingCollapsed": true, "tags": [] @@ -155,7 +150,6 @@ }, { "cell_type": "markdown", - "id": "9f4535f6-23b6-46e8-b9f8-7aeb899bfe7c", "metadata": {}, "source": [ "\n", @@ -169,7 +163,6 @@ { "cell_type": "code", "execution_count": null, - "id": "a04d966e-e00b-4e07-94d9-c3cc13b6183c", "metadata": {}, "outputs": [], "source": [ @@ -187,7 +180,6 @@ { "cell_type": "code", "execution_count": null, - "id": "88e10ef1-f035-462b-98c6-ceac1d32a7e1", "metadata": {}, "outputs": [], "source": [ @@ -197,7 +189,6 @@ { "cell_type": "code", "execution_count": null, - "id": "58147358-bd97-4d81-9111-4909afb67e47", "metadata": {}, "outputs": [], "source": [ @@ -210,7 +201,6 @@ { "cell_type": "code", "execution_count": null, - "id": 
"cb6c53e2-aa5e-40e1-b46f-a955f0eb055a", "metadata": {}, "outputs": [], "source": [ @@ -244,7 +234,6 @@ { "cell_type": "code", "execution_count": null, - "id": "f6a2ae58-d538-479c-ac26-27dbf9838780", "metadata": {}, "outputs": [], "source": [ @@ -275,7 +264,6 @@ { "cell_type": "code", "execution_count": null, - "id": "8a98993f-2261-452b-8515-a2c64d52c744", "metadata": {}, "outputs": [], "source": [ @@ -308,7 +296,6 @@ { "cell_type": "code", "execution_count": null, - "id": "6d599e88-8a45-4b44-a46a-f2c00d86bd7b", "metadata": {}, "outputs": [], "source": [ @@ -340,7 +327,6 @@ }, { "cell_type": "markdown", - "id": "e71d6d00-f18e-4e27-969b-f02f13032713", "metadata": {}, "source": [ "\n", @@ -351,7 +337,6 @@ { "cell_type": "code", "execution_count": null, - "id": "1c4b465d-9e1c-4b63-a43e-eeea6139b462", "metadata": {}, "outputs": [], "source": [ @@ -389,7 +374,6 @@ { "cell_type": "code", "execution_count": null, - "id": "42573e50-85cc-4977-baf4-723711792c6b", "metadata": {}, "outputs": [], "source": [ @@ -427,7 +411,6 @@ { "cell_type": "code", "execution_count": null, - "id": "db4ebc79-67e8-43da-ad89-ca96666f541d", "metadata": {}, "outputs": [], "source": [ @@ -452,7 +435,6 @@ { "cell_type": "code", "execution_count": null, - "id": "87e51a27-936a-48f5-abc7-120cff89b3ac", "metadata": {}, "outputs": [], "source": [ @@ -519,7 +501,6 @@ { "cell_type": "code", "execution_count": null, - "id": "c83799a1-5122-4e89-b67d-8f691ff9d541", "metadata": {}, "outputs": [], "source": [ @@ -551,7 +532,6 @@ { "cell_type": "code", "execution_count": null, - "id": "f1175703-d2ea-4f13-9f01-3cee81d90f84", "metadata": {}, "outputs": [], "source": [ @@ -573,7 +553,6 @@ { "cell_type": "code", "execution_count": null, - "id": "dc59846f-a380-449d-a30b-5261085a34f2", "metadata": {}, "outputs": [], "source": [ @@ -585,7 +564,6 @@ { "cell_type": "code", "execution_count": null, - "id": "2f5b8ac6-7294-4e85-97ac-28b62512be60", "metadata": {}, "outputs": [], "source": [ @@ -600,7 +578,6 @@ { "cell_type": 
"code", "execution_count": null, - "id": "84e71bf0-5aa8-4c48-8042-bd461430291a", "metadata": {}, "outputs": [], "source": [ @@ -632,7 +609,6 @@ { "cell_type": "code", "execution_count": null, - "id": "2d0e95d3-bc69-4cf7-8ac0-1788b74fe5d6", "metadata": {}, "outputs": [], "source": [ @@ -650,7 +626,6 @@ { "cell_type": "code", "execution_count": null, - "id": "3acffc41-3f07-4a33-8f31-8b27b4363d7e", "metadata": {}, "outputs": [], "source": [ @@ -765,7 +740,6 @@ }, { "cell_type": "markdown", - "id": "aaa8f5fb-82f0-490e-8fea-3a1f18b34311", "metadata": {}, "source": [ "### Reference\n", @@ -795,7 +769,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3", "language": "python", "name": "python3" }, @@ -809,7 +783,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.12" + "version": "3.6.9" } }, "nbformat": 4, From e95f0e0c3a6c86f9a44df56131c7e90654942055 Mon Sep 17 00:00:00 2001 From: Michel Aguena Date: Mon, 5 Aug 2024 10:54:13 +0200 Subject: [PATCH 2/4] Issue/628/rad min max in ensemble (#631) * add radius min/max in ensamble and validation for its consistency * pass rmin/max to ClusterEnsemble.stacked_data * Update version to 1.13.0 --------- Co-authored-by: Hsin Fan <57552401+hsinfan1996@users.noreply.github.com> --- clmm/__init__.py | 2 +- clmm/clusterensemble.py | 19 ++++++-- clmm/dataops/__init__.py | 10 +--- clmm/gcdata.py | 2 +- clmm/utils/__init__.py | 1 + clmm/utils/validation.py | 35 +++++++++++++- examples/demo_mock_ensemble.ipynb | 49 ++++++++++++++++++- examples/demo_mock_ensemble_realistic.ipynb | 52 ++++++++++++++++++-- tests/test_dataops.py | 7 +++ tests/test_mockdata.py | 8 +++- tests/test_theory.py | 53 ++++++++++++--------- tests/test_utils.py | 16 +++++++ 12 files changed, 210 insertions(+), 44 deletions(-) diff --git a/clmm/__init__.py b/clmm/__init__.py index 0111be973..d998efc18 100644 --- a/clmm/__init__.py +++ b/clmm/__init__.py @@ -26,4 +26,4 
@@ ) from . import support -__version__ = "1.12.5" +__version__ = "1.13.0" diff --git a/clmm/clusterensemble.py b/clmm/clusterensemble.py index 6a518279c..787a1de2e 100644 --- a/clmm/clusterensemble.py +++ b/clmm/clusterensemble.py @@ -7,6 +7,7 @@ from .gcdata import GCData from .dataops import make_stacked_radial_profile +from .utils import DiffArray class ClusterEnsemble: @@ -27,7 +28,7 @@ class ClusterEnsemble: * "tan_sc" : tangential component computed with sample covariance * "cross_sc" : cross component computed with sample covariance - * "tan_jk" : tangential component computed with bootstrap + * "tan_bs" : tangential component computed with bootstrap * "cross_bs" : cross component computed with bootstrap * "tan_jk" : tangential component computed with jackknife * "cross_jk" : cross component computed with jackknife @@ -46,7 +47,7 @@ def __init__(self, unique_id, gc_list=None, **kwargs): else: raise TypeError(f"unique_id incorrect type: {type(unique_id)}") self.unique_id = unique_id - self.data = GCData(meta={"bin_units": None}) + self.data = GCData(meta={"bin_units": None, "radius_min": None, "radius_max": None}) if gc_list is not None: self._add_values(gc_list, **kwargs) self.stacked_data = None @@ -198,6 +199,9 @@ def add_individual_radial_profile( """ cl_bin_units = profile_table.meta.get("bin_units", None) self.data.update_info_ext_valid("bin_units", self.data, cl_bin_units, overwrite=False) + for col in ("radius_min", "radius_max"): + value = DiffArray(profile_table[col]) + self.data.update_info_ext_valid(col, self.data, value, overwrite=False) cl_cosmo = profile_table.meta.get("cosmo", None) self.data.update_info_ext_valid("cosmo", self.data, cl_cosmo, overwrite=False) @@ -248,9 +252,14 @@ def make_stacked_radial_profile(self, tan_component="gt", cross_component="gx", [self.data[tan_component], self.data[cross_component]], ) self.stacked_data = GCData( - [radius, *components], - meta=self.data.meta, - names=("radius", tan_component, cross_component), 
+ [ + self.data.meta["radius_min"].value, + self.data.meta["radius_max"].value, + radius, + *components, + ], + meta={k: v for k, v in self.data.meta.items() if k not in ("radius_min", "radius_max")}, + names=("radius_min", "radius_max", "radius", tan_component, cross_component), ) def compute_sample_covariance(self, tan_component="gt", cross_component="gx"): diff --git a/clmm/dataops/__init__.py b/clmm/dataops/__init__.py index 262c31dfa..eab62c280 100644 --- a/clmm/dataops/__init__.py +++ b/clmm/dataops/__init__.py @@ -1,8 +1,6 @@ -"""Functions to compute polar/azimuthal averages in radial bins""" - +"""Data operation for polar/azimuthal averages in radial bins and weights""" import warnings import numpy as np -import scipy from astropy.coordinates import SkyCoord from astropy import units as u from ..gcdata import GCData @@ -17,11 +15,7 @@ _validate_is_deltasigma_sigma_c, _validate_coordinate_system, ) -from ..redshift import ( - _integ_pzfuncs, - compute_for_good_redshifts, -) -from ..theory import compute_critical_surface_density_eff +from ..redshift import _integ_pzfuncs def compute_tangential_and_cross_components( diff --git a/clmm/gcdata.py b/clmm/gcdata.py index eae2c102c..29f9eb5d5 100644 --- a/clmm/gcdata.py +++ b/clmm/gcdata.py @@ -150,7 +150,7 @@ def update_info_ext_valid(self, key, gcdata, ext_value, overwrite=False): key: str Name of key to compare and update. gcdata: GCData - Table to check if same cosmology. + Table to check if same cosmology and ensemble bins. ext_value: Value to be compared to. 
overwrite: bool diff --git a/clmm/utils/__init__.py b/clmm/utils/__init__.py index 3bbf9b81c..bcd1806ac 100644 --- a/clmm/utils/__init__.py +++ b/clmm/utils/__init__.py @@ -41,6 +41,7 @@ _validate_dec, _validate_is_deltasigma_sigma_c, _validate_coordinate_system, + DiffArray, ) from .units import ( diff --git a/clmm/utils/validation.py b/clmm/utils/validation.py index 767ef67f5..1002ed222 100644 --- a/clmm/utils/validation.py +++ b/clmm/utils/validation.py @@ -226,7 +226,6 @@ def _validate_is_deltasigma_sigma_c(is_deltasigma, sigma_c): if not is_deltasigma and sigma_c is not None: raise TypeError(f"sigma_c (={sigma_c}) must be None when is_deltasigma=False") - def _validate_coordinate_system(loc, coordinate_system, valid_type): r"""Validate the coordinate system. @@ -245,3 +244,37 @@ def _validate_coordinate_system(loc, coordinate_system, valid_type): validate_argument(loc, coordinate_system, valid_type) if loc[coordinate_system] not in ["celestial", "euclidean"]: raise ValueError(f"{coordinate_system} must be 'celestial' or 'euclidean'.") + +class DiffArray: + """Array where arr1==arr2 is actually all(arr1==arr)""" + + def __init__(self, array): + self.value = np.array(array) + + def __eq__(self, other): + # pylint: disable=unidiomatic-typecheck + if type(other) != type(self): + return False + if self.value.size != other.value.size: + return False + return (self.value == other.value).all() + + def __repr__(self): + out = str(self.value) + if self.value.size > 4: + out = self._get_lim_str(out) + " ... 
" + self._get_lim_str(out[::-1])[::-1] + return out + + def _get_lim_str(self, out): + # pylint: disable=undefined-loop-variable + # get count starting point + for init_index, char in enumerate(out): + if all(char != _char for _char in "[]() "): + break + # get str + sep = 0 + for i, char in enumerate(out[init_index + 1 :]): + sep += int(char == " " and out[i + init_index] != " ") + if sep == 2: + break + return out[: i + init_index + 1] diff --git a/examples/demo_mock_ensemble.ipynb b/examples/demo_mock_ensemble.ipynb index fd3c734db..ed0233561 100644 --- a/examples/demo_mock_ensemble.ipynb +++ b/examples/demo_mock_ensemble.ipynb @@ -320,6 +320,43 @@ }, { "cell_type": "markdown", + "id": "84bd786f", + "metadata": {}, + "source": [ + "The individual cluster data and profiles are stored at the `.data` table of the `ClusterEnsemble`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "864f3acb", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.data[:3]" + ] + }, + { + "cell_type": "markdown", + "id": "6ea533b0", + "metadata": {}, + "source": [ + "The edges of the radial bins, their units, and the cosmology are stored on the metadata of this table:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39723fb1", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.data.meta" + ] + }, + { + "cell_type": "markdown", + "id": "99e3fe18", "metadata": {}, "source": [ "## Stacked profile of the cluster ensemble\n", @@ -335,6 +372,16 @@ "clusterensemble.make_stacked_radial_profile(tan_component=\"DS_t\", cross_component=\"DS_x\")" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "98b9fa63", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.stacked_data" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -601,7 +648,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.6.9" + "version": "3.10.8" } }, "nbformat": 4, 
diff --git a/examples/demo_mock_ensemble_realistic.ipynb b/examples/demo_mock_ensemble_realistic.ipynb index 42cae6d7d..b251ab103 100644 --- a/examples/demo_mock_ensemble_realistic.ipynb +++ b/examples/demo_mock_ensemble_realistic.ipynb @@ -175,7 +175,7 @@ "source": [ "Ncm.cfg_init()\n", "# cosmo_nc = Nc.HICosmoDEXcdm()\n", - "cosmo_nc = Nc.HICosmo.new_from_name(Nc.HICosmo, \"NcHICosmoDECpl{'massnu-length':<1>}\")\n", + "cosmo_nc = Nc.HICosmoDECpl(massnu_length=1)\n", "cosmo_nc.omega_x2omega_k()\n", "cosmo_nc.param_set_by_name(\"w0\", -1.0)\n", "cosmo_nc.param_set_by_name(\"w1\", 0.0)\n", @@ -201,7 +201,7 @@ "\n", "dist = Nc.Distance.new(2.0)\n", "dist.prepare_if_needed(cosmo_nc)\n", - "tf = Nc.TransferFunc.new_from_name(\"NcTransferFuncEH\")\n", + "tf = Nc.TransferFuncEH()\n", "\n", "psml = Nc.PowspecMLTransfer.new(tf)\n", "psml.require_kmin(1.0e-6)\n", @@ -903,6 +903,42 @@ " )" ] }, + { + "cell_type": "markdown", + "id": "b5e17ee0", + "metadata": {}, + "source": [ + "The individual cluster data and profiles are stored at the `.data` table of the `ClusterEnsemble`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a970edb", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.data[:3]" + ] + }, + { + "cell_type": "markdown", + "id": "7320e899", + "metadata": {}, + "source": [ + "The edges of the radial bins, their units, and the cosmology are stored on the metadata of this table:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f9ea8436", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.data.meta" + ] + }, { "cell_type": "markdown", "id": "9091de7c", @@ -922,6 +958,16 @@ "clusterensemble.make_stacked_radial_profile(tan_component=\"DS_t\", cross_component=\"DS_x\")" ] }, + { + "cell_type": "code", + "execution_count": null, + "id": "e4125570", + "metadata": {}, + "outputs": [], + "source": [ + "clusterensemble.stacked_data" + ] + }, { "cell_type": "markdown", "id": "771c1382", @@ 
-1193,7 +1239,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.0" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/tests/test_dataops.py b/tests/test_dataops.py index 54942c9dd..679f56917 100644 --- a/tests/test_dataops.py +++ b/tests/test_dataops.py @@ -145,6 +145,13 @@ def test_compute_lensing_angles_flatsky(): ra_l, dec_l, ra_s, dec_s, coordinate_system="celestial" ) + assert_allclose( + da._compute_lensing_angles_flatsky(-180, dec_l, np.array([180.1, 179.7]), dec_s), + [[0.0012916551296819666, 0.003424250083245557], [-2.570568636904587, 0.31079754672944354]], + TOLERANCE["rtol"], + err_msg="Failure when ra_l and ra_s are the same but one is defined negative", + ) + assert_allclose( thetas_celestial, thetas_euclidean, diff --git a/tests/test_mockdata.py b/tests/test_mockdata.py index 9cd53e219..4b2943d6a 100644 --- a/tests/test_mockdata.py +++ b/tests/test_mockdata.py @@ -264,13 +264,17 @@ def test_shapenoise(): # Verify that the shape noise is Gaussian around 0 (for the very small shear here) sigma = 0.25 - data = mock.generate_galaxy_catalog(10**12.0, 0.3, 4, cosmo, 0.8, ngals=50000, shapenoise=sigma) + data = mock.generate_galaxy_catalog( + 10**12.0, 0.3, 4, cosmo, 0.8, ngals=50000, shapenoise=sigma + ) # Check that there are no galaxies with |e|>1 assert_equal(np.count_nonzero((data["e1"] > 1) | (data["e1"] < -1)), 0) assert_equal(np.count_nonzero((data["e2"] > 1) | (data["e2"] < -1)), 0) # Check that shape noise is Guassian with correct std dev bins = np.arange(-1, 1.1, 0.1) - gauss = 5000 * np.exp(-0.5 * (bins[:-1] + 0.05) ** 2 / sigma**2) / (sigma * np.sqrt(2 * np.pi)) + gauss = ( + 5000 * np.exp(-0.5 * (bins[:-1] + 0.05) ** 2 / sigma**2) / (sigma * np.sqrt(2 * np.pi)) + ) assert_allclose(np.histogram(data["e1"], bins=bins)[0], gauss, atol=50, rtol=0.05) assert_allclose(np.histogram(data["e2"], bins=bins)[0], gauss, atol=50, rtol=0.05) diff --git a/tests/test_theory.py b/tests/test_theory.py 
index a17ab661d..6a5a7f96d 100644 --- a/tests/test_theory.py +++ b/tests/test_theory.py @@ -7,7 +7,11 @@ from clmm.constants import Constants as clc from clmm.galaxycluster import GalaxyCluster from clmm import GCData -from clmm.utils import compute_beta_s_square_mean_from_distribution, compute_beta_s_mean_from_distribution, compute_beta_s_func +from clmm.utils import ( + compute_beta_s_square_mean_from_distribution, + compute_beta_s_mean_from_distribution, + compute_beta_s_func, +) from clmm.redshift.distributions import chang2013, desc_srd TOLERANCE = {"rtol": 1.0e-8} @@ -213,7 +217,7 @@ def test_compute_reduced_shear(modeling_data): assert_allclose( theo.compute_reduced_shear_from_convergence(np.array(shear), np.array(convergence)), np.array(truth), - **TOLERANCE + **TOLERANCE, ) @@ -254,7 +258,7 @@ def helper_profiles(func): assert_allclose( func(r3d, mdelta, cdelta, z_cl, cclcosmo, halo_profile_model="nfw"), defaulttruth, - **TOLERANCE + **TOLERANCE, ) assert_allclose( func(r3d, mdelta, cdelta, z_cl, cclcosmo, massdef="mean"), defaulttruth, **TOLERANCE @@ -263,7 +267,7 @@ def helper_profiles(func): assert_allclose( func(r3d, mdelta, cdelta, z_cl, cclcosmo, halo_profile_model="NFW"), defaulttruth, - **TOLERANCE + **TOLERANCE, ) assert_allclose( func(r3d, mdelta, cdelta, z_cl, cclcosmo, massdef="MEAN"), defaulttruth, **TOLERANCE @@ -375,22 +379,25 @@ def test_profiles(modeling_data, profile_init): # Test use_projected_quad if mod.backend == "ccl" and profile_init == "einasto": - if hasattr(mod.hdpm, 'projected_quad'): + if hasattr(mod.hdpm, "projected_quad"): mod.set_projected_quad(True) assert_allclose( mod.eval_surface_density( cfg["SIGMA_PARAMS"]["r_proj"], cfg["SIGMA_PARAMS"]["z_cl"], verbose=True ), cfg["numcosmo_profiles"]["Sigma"], - reltol*1e-1, + reltol * 1e-1, ) assert_allclose( theo.compute_surface_density( - cosmo=cosmo, **cfg["SIGMA_PARAMS"], alpha_ein=alpha_ein, verbose=True, + cosmo=cosmo, + **cfg["SIGMA_PARAMS"], + alpha_ein=alpha_ein, + 
verbose=True, use_projected_quad=True, ), cfg["numcosmo_profiles"]["Sigma"], - reltol*1e-1, + reltol * 1e-1, ) delattr(mod.hdpm, "projected_quad") @@ -547,11 +554,13 @@ def test_shear_convergence_unittests(modeling_data, profile_init): cfg_inf = load_validation_config() # compute some values - cfg_inf['GAMMA_PARAMS']['z_src'] = 1000. + cfg_inf["GAMMA_PARAMS"]["z_src"] = 1000.0 beta_s_mean = compute_beta_s_mean_from_distribution( - cfg_inf['GAMMA_PARAMS']['z_cluster'], cfg_inf['GAMMA_PARAMS']['z_src'], cosmo) + cfg_inf["GAMMA_PARAMS"]["z_cluster"], cfg_inf["GAMMA_PARAMS"]["z_src"], cosmo + ) beta_s_square_mean = compute_beta_s_square_mean_from_distribution( - cfg_inf['GAMMA_PARAMS']['z_cluster'], cfg_inf['GAMMA_PARAMS']['z_src'], cosmo) + cfg_inf["GAMMA_PARAMS"]["z_cluster"], cfg_inf["GAMMA_PARAMS"]["z_src"], cosmo + ) gammat_inf = theo.compute_tangential_shear(cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"]) kappa_inf = theo.compute_convergence(cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"]) @@ -581,14 +590,14 @@ def test_shear_convergence_unittests(modeling_data, profile_init): theo.compute_reduced_tangential_shear, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - approx="notvalid" + approx="notvalid", ) assert_raises( ValueError, theo.compute_magnification, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - approx="notvalid" + approx="notvalid", ) assert_raises( ValueError, @@ -596,7 +605,7 @@ def test_shear_convergence_unittests(modeling_data, profile_init): cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], alpha=alpha, - approx="notvalid" + approx="notvalid", ) # test KeyError from invalid key in integ_kwargs assert_raises( @@ -604,14 +613,14 @@ def test_shear_convergence_unittests(modeling_data, profile_init): theo.compute_reduced_tangential_shear, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - integ_kwargs={"notavalidkey": 0.0} + integ_kwargs={"notavalidkey": 0.0}, ) assert_raises( KeyError, theo.compute_magnification, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - integ_kwargs={"notavalidkey": 0.0} + 
integ_kwargs={"notavalidkey": 0.0}, ) assert_raises( KeyError, @@ -619,7 +628,7 @@ def test_shear_convergence_unittests(modeling_data, profile_init): cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], alpha=alpha, - integ_kwargs={"notavalidkey": 0.0} + integ_kwargs={"notavalidkey": 0.0}, ) # test ValueError from unsupported z_src_info cfg_inf["GAMMA_PARAMS"]["z_src_info"] = "notvalid" @@ -632,14 +641,14 @@ def test_shear_convergence_unittests(modeling_data, profile_init): theo.compute_reduced_tangential_shear, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - approx="order1" + approx="order1", ) assert_raises( ValueError, theo.compute_magnification, cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], - approx="order1" + approx="order1", ) assert_raises( ValueError, @@ -647,7 +656,7 @@ def test_shear_convergence_unittests(modeling_data, profile_init): cosmo=cosmo, **cfg_inf["GAMMA_PARAMS"], alpha=2, - approx="order1" + approx="order1", ) # test z_src_info = 'beta' @@ -1065,7 +1074,7 @@ def test_compute_magnification_bias(modeling_data): assert_allclose( theo.compute_magnification_bias_from_magnification(magnification[0], alpha[0]), truth[0][0], - **TOLERANCE + **TOLERANCE, ) assert_allclose( theo.compute_magnification_bias_from_magnification(magnification, alpha), truth, **TOLERANCE @@ -1075,7 +1084,7 @@ def test_compute_magnification_bias(modeling_data): np.array(magnification), np.array(alpha) ), np.array(truth), - **TOLERANCE + **TOLERANCE, ) diff --git a/tests/test_utils.py b/tests/test_utils.py index 0b2db3591..a70524472 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -11,6 +11,7 @@ convert_shapes_to_epsilon, arguments_consistency, validate_argument, + DiffArray, ) from clmm.redshift import distributions as zdist @@ -535,6 +536,21 @@ def test_validate_argument(): ) +def test_diff_array(): + """test validate argument""" + # Validate diffs + assert DiffArray([1, 2]) == DiffArray([1, 2]) + assert DiffArray([1, 2]) == DiffArray(np.array([1, 2])) + assert DiffArray([1, 2]) != 
DiffArray(np.array([2, 2])) + assert DiffArray([1, 2]) != DiffArray(np.array([1, 2, 3])) + assert DiffArray([1, 2]) != None + # Validate prints + arr = DiffArray([1, 2]) + assert str(arr.value) == arr.__repr__() + arr = DiffArray(range(10)) + assert str(arr.value) != arr.__repr__() + + def test_beta_functions(modeling_data): z_cl = 1.0 z_src = [2.4, 2.1] From e788037ab9ad25c201792cfdfa795cafa802dde4 Mon Sep 17 00:00:00 2001 From: lorenzonggl <109162518+lorenzonggl@users.noreply.github.com> Date: Fri, 9 Aug 2024 16:48:52 +0200 Subject: [PATCH 3/4] issue/630/add environment.yml (#632) * add environment.yml * adding healpy in the list of packages * adding description in INSTALL.md file * Update version to 1.13.1 --------- Co-authored-by: m-aguena --- INSTALL.md | 11 +++++++++++ clmm/__init__.py | 2 +- environment.yml | 20 ++++++++++++++++++++ 3 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 environment.yml diff --git a/INSTALL.md b/INSTALL.md index 68d1fbd7e..05ca78243 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -64,6 +64,17 @@ Now, you can install CLMM and its dependencies as python setup.py install # build from source ``` +### Local environment for CLMM + +Alternatively, you can create a new local environment by running + +```bash + conda env create -f environment.yml + conda activate clmm +``` + +You can now install CLMM in a local and stable environment with the usual procedure. + ## Access to the proper environment on cori.nersc.gov If you have access to NERSC, this will likely be the easiest to make sure you have the appropriate environment. After logging into cori.nersc.gov, you will need to execute the following. We recommend executing line-by-line to avoid errors: diff --git a/clmm/__init__.py b/clmm/__init__.py index d998efc18..885ed98bb 100644 --- a/clmm/__init__.py +++ b/clmm/__init__.py @@ -26,4 +26,4 @@ ) from . 
import support -__version__ = "1.13.0" +__version__ = "1.13.1" diff --git a/environment.yml b/environment.yml new file mode 100644 index 000000000..86a876f6b --- /dev/null +++ b/environment.yml @@ -0,0 +1,20 @@ +# CLMM Conda environment +name: clmm +channels: + - conda-forge + - defaults +dependencies: + - python==3.10.2 + - pip>=21.0 + - jupyter>=1.0 + - numpy==1.25.2 + - scipy==1.11.2 + - astropy==5.3.2 + - healpy == 1.16.5 + - matplotlib==3.7.2 + - gsl==2.7 + - cmake==3.30.0 + - pyccl==3.0 + - numcosmo==0.18.2 + - pytest=7.4.0 + - sphinx==7.2.4 From 7d90ee1080a31618cbe18b16e77bb2891424675c Mon Sep 17 00:00:00 2001 From: Michel Aguena Date: Mon, 23 Sep 2024 18:06:20 +0200 Subject: [PATCH 4/4] add R0917 to pylint disable in pyproject.toml (#642) * add R0917 to pylint disable in pyproject.toml * Update version to 1.13.2 --- clmm/__init__.py | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/clmm/__init__.py b/clmm/__init__.py index 885ed98bb..a4b97c140 100644 --- a/clmm/__init__.py +++ b/clmm/__init__.py @@ -26,4 +26,4 @@ ) from . import support -__version__ = "1.13.1" +__version__ = "1.13.2" diff --git a/pyproject.toml b/pyproject.toml index 5bd0e0a77..efc4a2346 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ line-length = 100 [tool.pylint] max-line-length = 100 -disable = ["R0913"] +disable = ["R0913", "R0917"] good-names = ["ra", "z", "z1", "z2", "a", "a1", "a2", "gt", "gx", "mu", "da", "i", "H0", "Omega_b0", "Omega_dm0", "Omega_k0",