[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Jan 29, 2025
1 parent 4ff05c4 commit cf45fd8
Showing 1 changed file with 103 additions and 33 deletions.
136 changes: 103 additions & 33 deletions docs/notebooks/sdba-advanced.ipynb
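Every change in this diff is an automated line-wrapping fix: statements in the notebook's code cells that exceed the formatter's line-length limit are split over several lines, with no change in behaviour. The wrap style matches black's default 88-character limit, though the repository's actual pre-commit hook configuration is not shown on this page. As a minimal sketch (assuming the black package is available; purely illustrative, not the repo's real hook), the same kind of rewrite can be reproduced locally:

    # Reformat one of the long lines from this diff with black's default 88-char limit.
    # Illustration only; the repository's actual pre-commit hooks may differ.
    import black

    src = 'QDM = QuantileDeltaMapping.train(ref, hist, nquantiles=15, kind="+", group="time.dayofyear")\n'
    print(black.format_str(src, mode=black.Mode(line_length=88)))
    # Prints the wrapped form:
    # QDM = QuantileDeltaMapping.train(
    #     ref, hist, nquantiles=15, kind="+", group="time.dayofyear"
    # )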
@@ -96,7 +96,9 @@
"# Plot nearest neighbors and weighing function\n",
"wax = ax.twinx()\n",
"wax.plot(tas.time, weights, color=\"indianred\")\n",
"ax.plot(tas.time, tas.where(tas * weights > 0), \"o\", color=\"lightcoral\", fillstyle=\"none\")\n",
"ax.plot(\n",
" tas.time, tas.where(tas * weights > 0), \"o\", color=\"lightcoral\", fillstyle=\"none\"\n",
")\n",
"\n",
"ax.plot(ti, ys[366], \"ko\")\n",
"wax.set_ylabel(\"Weights\")\n",
@@ -203,15 +205,21 @@
"t = xr.cftime_range(\"2000-01-01\", \"2030-12-31\", freq=\"D\", calendar=\"noleap\")\n",
"ref = xr.DataArray(\n",
" (\n",
" -20 * np.cos(2 * np.pi * t.dayofyear / 365) + 2 * np.random.random_sample((t.size,)) + 273.15 + 0.1 * (t - t[0]).days / 365\n",
" -20 * np.cos(2 * np.pi * t.dayofyear / 365)\n",
" + 2 * np.random.random_sample((t.size,))\n",
" + 273.15\n",
" + 0.1 * (t - t[0]).days / 365\n",
" ), # \"warming\" of 1K per decade,\n",
" dims=(\"time\",),\n",
" coords={\"time\": t},\n",
" attrs={\"units\": \"K\"},\n",
")\n",
"sim = xr.DataArray(\n",
" (\n",
" -18 * np.cos(2 * np.pi * t.dayofyear / 365) + 2 * np.random.random_sample((t.size,)) + 273.15 + 0.11 * (t - t[0]).days / 365\n",
" -18 * np.cos(2 * np.pi * t.dayofyear / 365)\n",
" + 2 * np.random.random_sample((t.size,))\n",
" + 273.15\n",
" + 0.11 * (t - t[0]).days / 365\n",
" ), # \"warming\" of 1.1K per decade\n",
" dims=(\"time\",),\n",
" coords={\"time\": t},\n",
@@ -230,7 +238,9 @@
"source": [
"from xclim.sdba.adjustment import QuantileDeltaMapping\n",
"\n",
"QDM = QuantileDeltaMapping.train(ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\")\n",
"QDM = QuantileDeltaMapping.train(\n",
" ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\"\n",
")\n",
"QDM"
]
},
@@ -317,7 +327,9 @@
"from xclim import set_options\n",
"\n",
"with set_options(sdba_extra_output=True):\n",
" QDM = QuantileDeltaMapping.train(ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\")\n",
" QDM = QuantileDeltaMapping.train(\n",
" ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\"\n",
" )\n",
" out = QDM.adjust(sim)\n",
"\n",
"out.sim_q"
@@ -357,7 +369,9 @@
"metadata": {},
"outputs": [],
"source": [
"QDM = QuantileDeltaMapping.train(ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\")\n",
"QDM = QuantileDeltaMapping.train(\n",
" ref, hist, nquantiles=15, kind=\"+\", group=\"time.dayofyear\"\n",
")\n",
"\n",
"scen_nowin = QDM.adjust(sim)"
]
@@ -418,7 +432,9 @@
"\n",
"group = sdba.Grouper(\"time.dayofyear\", window=31)\n",
"\n",
"dref = convert_calendar(open_dataset(\"sdba/ahccd_1950-2013.nc\"), \"noleap\").sel(time=slice(\"1981\", \"2010\"))\n",
"dref = convert_calendar(open_dataset(\"sdba/ahccd_1950-2013.nc\"), \"noleap\").sel(\n",
" time=slice(\"1981\", \"2010\")\n",
")\n",
"dsim = open_dataset(\"sdba/CanESM2_1950-2100.nc\")\n",
"\n",
"dref = dref.assign(\n",
@@ -486,7 +502,9 @@
"outputs": [],
"source": [
"ref_res, ref_norm = sdba.processing.normalize(ref, group=group, kind=\"+\")\n",
"hist_res, hist_norm = sdba.processing.normalize(sim.sel(time=slice(\"1981\", \"2010\")), group=group, kind=\"+\")\n",
"hist_res, hist_norm = sdba.processing.normalize(\n",
" sim.sel(time=slice(\"1981\", \"2010\")), group=group, kind=\"+\"\n",
")\n",
"scaling = sdba.utils.get_correction(hist_norm, ref_norm, kind=\"+\")"
]
},
@@ -496,7 +514,9 @@
"metadata": {},
"outputs": [],
"source": [
"sim_scaled = sdba.utils.apply_correction(sim, sdba.utils.broadcast(scaling, sim, group=group), kind=\"+\")\n",
"sim_scaled = sdba.utils.apply_correction(\n",
" sim, sdba.utils.broadcast(scaling, sim, group=group), kind=\"+\"\n",
")\n",
"\n",
"loess = sdba.detrending.LoessDetrend(group=group, f=0.2, d=0, kind=\"+\", niter=1)\n",
"simfit = loess.fit(sim_scaled)\n",
@@ -518,7 +538,9 @@
"metadata": {},
"outputs": [],
"source": [
"PCA = sdba.adjustment.PrincipalComponents.train(ref_res, hist_res, group=group, crd_dim=\"multivar\", best_orientation=\"simple\")\n",
"PCA = sdba.adjustment.PrincipalComponents.train(\n",
" ref_res, hist_res, group=group, crd_dim=\"multivar\", best_orientation=\"simple\"\n",
")\n",
"\n",
"scen1_res = PCA.adjust(sim_res)"
]
@@ -566,9 +588,15 @@
"metadata": {},
"outputs": [],
"source": [
"dref.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"obs\")\n",
"dsim.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"raw\")\n",
"dscen.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"scen\")\n",
"dref.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"obs\")\n",
"dsim.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"raw\")\n",
"dscen.tasmax.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"scen\")\n",
"plt.legend()"
]
},
@@ -578,9 +606,15 @@
"metadata": {},
"outputs": [],
"source": [
"dref.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"obs\")\n",
"dsim.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"raw\")\n",
"dscen.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\"time.dayofyear\").mean().plot(label=\"scen\")\n",
"dref.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"obs\")\n",
"dsim.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"raw\")\n",
"dscen.pr.sel(time=slice(\"1981\", \"2010\"), location=\"Vancouver\").groupby(\n",
" \"time.dayofyear\"\n",
").mean().plot(label=\"scen\")\n",
"plt.legend()"
]
},
@@ -611,11 +645,19 @@
"\n",
"vals = np.random.randint(0, 1000, size=(t.size,)) / 100\n",
"vals_ref = (4 ** np.where(vals < 9, vals / 100, vals)) / 3e6\n",
"vals_sim = (1 + 0.1 * np.random.random_sample((t.size,))) * (4 ** np.where(vals < 9.5, vals / 100, vals)) / 3e6\n",
"vals_sim = (\n",
" (1 + 0.1 * np.random.random_sample((t.size,)))\n",
" * (4 ** np.where(vals < 9.5, vals / 100, vals))\n",
" / 3e6\n",
")\n",
"\n",
"pr_ref = xr.DataArray(vals_ref, coords={\"time\": t}, dims=(\"time\",), attrs={\"units\": \"mm/day\"})\n",
"pr_ref = xr.DataArray(\n",
" vals_ref, coords={\"time\": t}, dims=(\"time\",), attrs={\"units\": \"mm/day\"}\n",
")\n",
"pr_ref = pr_ref.sel(time=slice(\"2000\", \"2015\"))\n",
"pr_sim = xr.DataArray(vals_sim, coords={\"time\": t}, dims=(\"time\",), attrs={\"units\": \"mm/day\"})\n",
"pr_sim = xr.DataArray(\n",
" vals_sim, coords={\"time\": t}, dims=(\"time\",), attrs={\"units\": \"mm/day\"}\n",
")\n",
"pr_hist = pr_sim.sel(time=slice(\"2000\", \"2015\"))"
]
},
@@ -638,8 +680,12 @@
"from xclim import sdba\n",
"\n",
"group = sdba.Grouper(\"time.dayofyear\", window=31)\n",
"hist_ad, pth, dP0 = sdba.processing.adapt_freq(pr_ref, pr_hist, thresh=\"0.05 mm d-1\", group=group)\n",
"QM_ad = sdba.EmpiricalQuantileMapping.train(pr_ref, hist_ad, nquantiles=15, kind=\"*\", group=group)\n",
"hist_ad, pth, dP0 = sdba.processing.adapt_freq(\n",
" pr_ref, pr_hist, thresh=\"0.05 mm d-1\", group=group\n",
")\n",
"QM_ad = sdba.EmpiricalQuantileMapping.train(\n",
" pr_ref, hist_ad, nquantiles=15, kind=\"*\", group=group\n",
")\n",
"scen_ad = QM_ad.adjust(pr_sim)\n",
"\n",
"pr_ref.sel(time=\"2010\").plot(alpha=0.9, label=\"Reference\")\n",
@@ -711,14 +757,20 @@
"# load test data\n",
"hist = open_dataset(\"sdba/CanESM2_1950-2100.nc\").sel(time=slice(\"1950\", \"1980\")).tasmax\n",
"ref = open_dataset(\"sdba/nrcan_1950-2013.nc\").sel(time=slice(\"1950\", \"1980\")).tasmax\n",
"sim = open_dataset(\"sdba/CanESM2_1950-2100.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax # biased\n",
"sim = (\n",
" open_dataset(\"sdba/CanESM2_1950-2100.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax\n",
") # biased\n",
"\n",
"# learn the bias in historical simulation compared to reference\n",
"QM = sdba.EmpiricalQuantileMapping.train(ref, hist, nquantiles=50, group=\"time\", kind=\"+\")\n",
"QM = sdba.EmpiricalQuantileMapping.train(\n",
" ref, hist, nquantiles=50, group=\"time\", kind=\"+\"\n",
")\n",
"\n",
"# correct the bias in the future\n",
"scen = QM.adjust(sim, extrapolation=\"constant\", interp=\"nearest\")\n",
"ref_future = open_dataset(\"sdba/nrcan_1950-2013.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax # truth\n",
"ref_future = (\n",
" open_dataset(\"sdba/nrcan_1950-2013.nc\").sel(time=slice(\"1980\", \"2010\")).tasmax\n",
") # truth\n",
"\n",
"plt.figure(figsize=(15, 5))\n",
"lw = 0.3\n",
@@ -737,20 +789,28 @@
"outputs": [],
"source": [
"# calculate the mean warm Spell Length Distribution\n",
"sim_prop = sdba.properties.spell_length_distribution(da=sim, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\")\n",
"sim_prop = sdba.properties.spell_length_distribution(\n",
" da=sim, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\"\n",
")\n",
"\n",
"\n",
"scen_prop = sdba.properties.spell_length_distribution(da=scen, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\")\n",
"scen_prop = sdba.properties.spell_length_distribution(\n",
" da=scen, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\"\n",
")\n",
"\n",
"ref_prop = sdba.properties.spell_length_distribution(da=ref_future, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\")\n",
"ref_prop = sdba.properties.spell_length_distribution(\n",
" da=ref_future, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time\"\n",
")\n",
"# measure the difference between the prediction and the reference with an absolute bias of the properties\n",
"measure_sim = sdba.measures.bias(sim_prop, ref_prop)\n",
"measure_scen = sdba.measures.bias(scen_prop, ref_prop)\n",
"\n",
"plt.figure(figsize=(5, 3))\n",
"plt.plot(measure_sim.location, measure_sim.values, \".\", label=\"biased model (sim)\")\n",
"plt.plot(measure_scen.location, measure_scen.values, \".\", label=\"adjusted model (scen)\")\n",
"plt.title(\"Bias of the mean of the warm spell \\n length distribution compared to observations\")\n",
"plt.title(\n",
" \"Bias of the mean of the warm spell \\n length distribution compared to observations\"\n",
")\n",
"plt.legend()\n",
"plt.ylim(-2.5, 2.5)"
]
@@ -770,11 +830,17 @@
"outputs": [],
"source": [
"# calculate the mean warm Spell Length Distribution\n",
"sim_prop = sdba.properties.spell_length_distribution(da=sim, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\")\n",
"sim_prop = sdba.properties.spell_length_distribution(\n",
" da=sim, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\"\n",
")\n",
"\n",
"scen_prop = sdba.properties.spell_length_distribution(da=scen, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\")\n",
"scen_prop = sdba.properties.spell_length_distribution(\n",
" da=scen, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\"\n",
")\n",
"\n",
"ref_prop = sdba.properties.spell_length_distribution(da=ref_future, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\")\n",
"ref_prop = sdba.properties.spell_length_distribution(\n",
" da=ref_future, thresh=\"28 degC\", op=\">\", stat=\"mean\", group=\"time.season\"\n",
")\n",
"# Properties are often associated with the same measures. This correspondence is implemented in xclim:\n",
"measure = sdba.properties.spell_length_distribution.get_measure()\n",
"measure_sim = measure(sim_prop, ref_prop)\n",
@@ -783,7 +849,9 @@
"fig, axs = plt.subplots(2, 2, figsize=(9, 6))\n",
"axs = axs.ravel()\n",
"for i in range(4):\n",
" axs[i].plot(measure_sim.location, measure_sim.values[:, i], \".\", label=\"biased model (sim)\")\n",
" axs[i].plot(\n",
" measure_sim.location, measure_sim.values[:, i], \".\", label=\"biased model (sim)\"\n",
" )\n",
" axs[i].plot(\n",
" measure_scen.location,\n",
" measure_scen.isel(season=i).values,\n",
@@ -793,7 +861,9 @@
" axs[i].set_title(measure_scen.season.values[i])\n",
" axs[i].legend(loc=\"lower right\")\n",
" axs[i].set_ylim(-2.5, 2.5)\n",
"fig.suptitle(\"Bias of the mean of the warm spell length distribution compared to observations\")\n",
"fig.suptitle(\n",
" \"Bias of the mean of the warm spell length distribution compared to observations\"\n",
")\n",
"plt.tight_layout()"
]
}