Skip to content

Commit

Permalink
merge dev to master
Browse files Browse the repository at this point in the history
  • Loading branch information
pvankatwyk committed Mar 20, 2024
2 parents 3eb8181 + f3abb42 commit fb23414
Show file tree
Hide file tree
Showing 73 changed files with 8,404 additions and 4,455 deletions.
2 changes: 1 addition & 1 deletion LICENSE.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2022 Peter Van Katwyk
Copyright (c) 2024 Peter Van Katwyk

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
Expand Down
2 changes: 2 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,6 @@ This repository contains source code for processing climate forcings from [ISMIP

Documentation can be found here: <https://brown-sciml.github.io/ise/>.

To access code for exact replication of "A Variational LSTM Emulator of Sea Level Contribution From the Antarctic Ice Sheet", see the release [https://github.com/Brown-SciML/ise/releases/tag/v1.0.0](https://github.com/Brown-SciML/ise/releases/tag/v1.0.0).

*This repository is a work in progress that is actively being updated and improved. Feel free to contact Peter Van Katwyk, Ph.D. student @ Brown University at peter_van_katwyk@brown.edu with further questions.*
257 changes: 0 additions & 257 deletions compute_ivaf_minus_control.py

This file was deleted.

13 changes: 0 additions & 13 deletions examples/gp.py

This file was deleted.

49 changes: 49 additions & 0 deletions examples/grids/process.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
from ise.data.process import DatasetMerger, DimensionalityReducer, ProjectionProcessor


def main(ice_sheet: str = "AIS") -> None:
    """Run the full ISMIP6 forcing/projection processing pipeline for one ice sheet.

    Steps:
      1. Train PCA models on the raw forcing and projection files.
      2. Convert forcings and projections into PCA space.
      3. Merge the converted forcings/projections into a single dataset.

    Args:
        ice_sheet: Which ice sheet to process, ``"AIS"`` (Antarctic) or
            ``"GrIS"`` (Greenland). Controls which projection directory,
            scale-factor file, and densities CSV are used.
    """
    print(f"ice sheet: {ice_sheet}")

    # All filepaths (cluster-specific; AIS vs. GrIS inputs selected below).
    forcing_directory = r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/GHub-ISMIP6-Forcing/"
    projections_directory = (
        r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/GHub-ISMIP6-Projection-AIS/"
        if ice_sheet == "AIS"
        else r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/GHub-ISMIP6-Projection-GrIS/"
    )
    # NOTE(review): scalefac_fp and densities_path are selected per ice sheet but
    # not passed to the steps below — presumably consumed by ProjectionProcessor
    # elsewhere; confirm whether they are still needed here.
    scalefac_fp = (
        r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/af2_el_ismip6_ant_01.nc"
        if ice_sheet == "AIS"
        else r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/GHub-ISMIP6-Projection-GrIS/af2_ISMIP6_GrIS_05000m.nc"
    )
    densities_path = (
        r"/users/pvankatw/research/current/supplemental/AIS_densities.csv"
        if ice_sheet == "AIS"
        else r"/users/pvankatw/research/current/supplemental/GIS_densities.csv"
    )

    scaler = 'robust'
    output_dir = f"/oscar/scratch/pvankatw/datasets/pca_full_{scaler}/{ice_sheet}"
    converted_forcing_dir = f"{output_dir}/forcings/"
    converted_projection_dir = f"{output_dir}/projections/"
    experiment_file = r"/users/pvankatw/research/current/ise/utils/ismip6_experiments_updated.csv"
    # df = get_model_densities(r"/gpfs/data/kbergen/pvankatw/pvankatw-bfoxkemp/v7_CMIP5_pub", r"/users/pvankatw/research/current/supplemental/")

    # Take both the forcing files and the projections, train PCA models, and
    # convert forcings and projections to PCA space.
    pca = DimensionalityReducer(
        forcing_dir=forcing_directory,
        projection_dir=projections_directory,
        output_dir=output_dir,
        ice_sheet=ice_sheet,
    )
    # Retain enough components to explain 99% (forcings) / 99.99% (projections)
    # of variance, per the string-percentage API of generate_pca_models.
    pca.generate_pca_models(num_forcing_pcs='99%', num_projection_pcs='99.99%', scaling_method=scaler)
    pca.convert_forcings(pca_model_directory=f"{output_dir}/pca_models/")
    pca.convert_projections(pca_model_directory=f"{output_dir}/pca_models/")

    # Merge the converted forcings and projections into a single dataset.
    merger = DatasetMerger(
        ice_sheet, converted_forcing_dir, converted_projection_dir, experiment_file, output_dir
    )
    merger.merge_dataset()

    print("Done!")


if __name__ == "__main__":
    main()
25 changes: 13 additions & 12 deletions examples/analyze_model.py → examples/sectors/analyze_model.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,26 @@
import sys
sys.path.append("..")
from ise.models.timeseries import TimeSeriesEmulator
from ise.pipelines.testing import analyze_model

import pandas as pd

sys.path.append("../..")
from ise.models.timeseries import TimeSeriesEmulator
from ise.pipelines.testing import analyze_model

DATA_DIRECTORY = r"/users/pvankatw/emulator/untracked_folder/ml_data_directory"
PRETRAINED_MODELS = r'/users/pvankatw/emulator/ise/models/pretrained/'
UNTRACKED = r'/users/pvankatw/emulator/untracked_folder'
PRETRAINED_MODELS = r"/users/pvankatw/emulator/ise/models/pretrained/"
UNTRACKED = r"/users/pvankatw/emulator/untracked_folder"


train_features = pd.read_csv(f"{DATA_DIRECTORY}/ts_train_features.csv")
architecture = {
'num_rnn_layers': 4,
'num_rnn_hidden': 256,
'input_layer_size': train_features.shape[1]
"num_rnn_layers": 4,
"num_rnn_hidden": 256,
"input_layer_size": train_features.shape[1],
}

model_path = f'{PRETRAINED_MODELS}/Emulator.pt'
model_path = f"{PRETRAINED_MODELS}/Emulator.pt"

print('\nAnalyzing')
print("\nAnalyzing")
analyze_model(
data_directory=DATA_DIRECTORY,
model_path=model_path,
Expand All @@ -30,5 +31,5 @@
dropout_prob=0.3,
mc_iterations=100,
verbose=False,
save_directory=f'{UNTRACKED}/analyze_model'
)
save_directory=f"{UNTRACKED}/analyze_model",
)
Loading

0 comments on commit fb23414

Please sign in to comment.