Skip to content

Commit

Permalink
Temporary hack to add "geometry.front_retreat.prescribed.file"
Browse files Browse the repository at this point in the history
Otherwise length of pism_config_axis will vary.
  • Loading branch information
aaschwanden committed Nov 22, 2024
1 parent 768b235 commit 07723fb
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 22 deletions.
38 changes: 19 additions & 19 deletions analysis/analyze_scalar.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def prepare_observations(
basin_url: Union[Path, str],
grace_url: Union[Path, str],
config: Dict,
reference_year: float,
reference_date: str,
engine: str = "h5netcdf",
) -> tuple:
"""
Expand All @@ -120,8 +120,8 @@ def prepare_observations(
The URL or path to the GRACE observation dataset.
config : Dict
A dictionary containing configuration settings for processing the datasets.
reference_year : float
The reference year for normalizing cumulative variables.
reference_date : str
The reference date for normalizing cumulative variables.
Returns
-------
Expand All @@ -134,7 +134,7 @@ def prepare_observations(
... "Cumulative Variables": {"cumulative_mass_balance": "mass_balance"},
... "Cumulative Uncertainty Variables": {"cumulative_mass_balance_uncertainty": "mass_balance_uncertainty"}
... }
>>> prepare_observations("basin.nc", "grace.nc", config, 2000.0)
>>> prepare_observations("basin.nc", "grace.nc", config, "2000-01-01")
(<xarray.Dataset>, <xarray.Dataset>)
"""
obs_basin = xr.open_dataset(basin_url, engine=engine, chunks=-1)
Expand All @@ -146,7 +146,7 @@ def prepare_observations(
obs_basin = prp.normalize_cumulative_variables(
obs_basin,
list(cumulative_vars.values()) + list(cumulative_uncertainty_vars.values()),
reference_year,
reference_date,
)

obs_grace = xr.open_dataset(grace_url, engine=engine, chunks=-1)
Expand All @@ -160,7 +160,7 @@ def prepare_observations(
obs_grace = prp.normalize_cumulative_variables(
obs_grace,
[cumulative_vars] + [cumulative_uncertainty_vars],
reference_year,
reference_date,
)

return obs_basin, obs_grace
Expand All @@ -170,7 +170,7 @@ def prepare_observations(
def prepare_simulations(
filenames: List[Path | str],
config: Dict,
reference_year: float,
reference_date: str,
parallel: bool = True,
engine: str = "netcdf4",
) -> xr.Dataset:
Expand Down Expand Up @@ -228,7 +228,7 @@ def prepare_simulations(
ds = prp.normalize_cumulative_variables(
ds,
list(config["Cumulative Variables"].values()),
reference_year=reference_year,
reference_date=reference_date,
)
return ds

Expand Down Expand Up @@ -394,7 +394,7 @@ def plot_obs_sims(
filtering_var: str,
filter_range: List[int] = [1990, 2019],
fig_dir: Union[str, Path] = "figures",
reference_year: float = 1986.0,
    reference_date: str = "1986-01-01",
sim_alpha: float = 0.4,
obs_alpha: float = 1.0,
sigma: float = 2,
Expand Down Expand Up @@ -546,7 +546,7 @@ def plot_obs_sims(

axs[0].xaxis.set_tick_params(labelbottom=False)

axs[0].set_ylabel(f"Cumulative mass\nloss since {reference_year} (Gt)")
axs[0].set_ylabel(f"Cumulative mass\nloss since {reference_date} (Gt)")
axs[0].set_xlabel("")
axs[0].set_title(f"{basin} filtered by {filtering_var}")
axs[1].set_title("")
Expand All @@ -567,7 +567,7 @@ def plot_obs_sims_3(
filtering_var: str,
filter_range: List[int] = [1990, 2019],
fig_dir: Union[str, Path] = "figures",
reference_year: float = 1986.0,
reference_date: str = "1986-01-01",
sim_alpha: float = 0.4,
obs_alpha: float = 1.0,
sigma: float = 2,
Expand Down Expand Up @@ -742,7 +742,7 @@ def plot_obs_sims_3(
axs[0].xaxis.set_tick_params(labelbottom=False)
axs[1].xaxis.set_tick_params(labelbottom=False)

axs[0].set_ylabel(f"Cumulative mass\nloss since {reference_year} (Gt)")
axs[0].set_ylabel(f"Cumulative mass\nloss since {reference_date} (Gt)")
axs[0].set_xlabel("")
axs[1].set_xlabel("")
axs[0].set_title(f"{basin} filtered by {filtering_var}")
Expand Down Expand Up @@ -895,10 +895,10 @@ def plot_obs_sims_3(
default="MS",
)
parser.add_argument(
"--reference_year",
help="""Reference year.""",
type=float,
default=2004,
"--reference_date",
help="""Reference date.""",
type=str,
        default="2004-01-01",
)
parser.add_argument(
"--n_jobs",
Expand Down Expand Up @@ -933,7 +933,7 @@ def plot_obs_sims_3(
fudge_factor = options.fudge_factor
notebook = options.notebook
parallel = options.parallel
reference_year = options.reference_year
reference_date = options.reference_date
resampling_frequency = options.resampling_frequency
outlier_variable = options.outlier_variable
outlier_range = options.outlier_range
Expand Down Expand Up @@ -975,14 +975,14 @@ def plot_obs_sims_3(
}

simulated_ds = prepare_simulations(
basin_files, ragis_config, reference_year, parallel=parallel, engine=engine
basin_files, ragis_config, reference_date, parallel=parallel, engine=engine
)

observed_mankoff_ds, observed_grace_ds = prepare_observations(
options.mankoff_url,
options.grace_url,
ragis_config,
reference_year,
reference_date,
engine=engine,
)

Expand Down
3 changes: 3 additions & 0 deletions analysis/compute_basins_stats.py
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,8 @@
for k, v in pism_config.attrs.items()
if not any(k.endswith(suffix) for suffix in suffixes_to_exclude)
}
if "geometry.front_retreat.prescribed.file" not in config.keys():
config["geometry.front_retreat.prescribed.file"] = "false"

stats = ds["run_stats"]
if cf:
Expand Down Expand Up @@ -225,6 +227,7 @@
basin_sums["basin"] = basin_sums["basin"].astype(f"S{n_basins}")
basin_sums["ensemble_id"] = basin_sums["ensemble_id"].astype(f"S{n_ensemble}")
basin_sums.attrs["Conventions"] = "CF-1.8"

basin_sums.to_netcdf(basins_file, engine=engine)

client.close()
Expand Down
5 changes: 2 additions & 3 deletions pism_ragis/processing.py
Original file line number Diff line number Diff line change
Expand Up @@ -610,7 +610,7 @@ def load_ensemble(
filenames,
parallel=parallel,
chunks={"exp_id": -1, "time": -1},
decode_cf=False,
decode_cf=True,
engine=engine,
).drop_vars(["spatial_ref", "mapping"], errors="ignore")
if "time" in ds["pism_config"].coords:
Expand All @@ -621,7 +621,7 @@ def load_ensemble(

@timeit
def normalize_cumulative_variables(
ds: xr.Dataset, variables, reference_year: float = 1992.0
ds: xr.Dataset, variables, reference_date: str = "1992-01-01"
) -> xr.Dataset:
"""
    Normalize cumulative variables in an xarray Dataset by subtracting their values at a reference date.
Expand Down Expand Up @@ -656,7 +656,6 @@ def normalize_cumulative_variables(
Data variables:
cumulative_var (time) int64 0 10 20 30 40 50
"""
reference_date = decimal_year_to_datetime(reference_year)
ds[variables] -= ds[variables].sel(time=reference_date, method="nearest")
return ds

Expand Down

0 comments on commit 07723fb

Please sign in to comment.