diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 4f2a7fbf..30ec26b6 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -16,7 +16,7 @@ Provide a concise Python code snippet that demonstrates the issue. To display th ```python import xeofs as xe -model = xe.models.EOF() +model = xe.single.EOF() ... ``` diff --git a/.github/ISSUE_TEMPLATE/how_to.md b/.github/ISSUE_TEMPLATE/how_to.md index f29b2c6f..10f41a3b 100644 --- a/.github/ISSUE_TEMPLATE/how_to.md +++ b/.github/ISSUE_TEMPLATE/how_to.md @@ -16,7 +16,7 @@ Provide a concise Python code snippet that demonstrates your approach. To displa ```python import xeofs as xe -model = xe.models.EOF() +model = xe.single.EOF() ... ``` diff --git a/.gitignore b/.gitignore index 91b8f5cf..2051d342 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,7 @@ # Personal .vscode/ # Test related to CCA -tests/models/test_cca_solution.py +tests/**/test_cca_solution.py # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/README.md b/README.md index dba878cd..5d76e11a 100644 --- a/README.md +++ b/README.md @@ -60,9 +60,9 @@ In order to get started with `xeofs`, follow these simple steps: Initiate and fit the EOF/PCA model to the data ```python ->>> eof = xe.models.EOF(n_modes=10) +>>> eof = xe.single.EOF(n_modes=10) >>> eof.fit(t2m, dim="time") # doctest: +ELLIPSIS - + ``` Now, you can access the model's EOF components and PC scores: @@ -77,9 +77,9 @@ Now, you can access the model's EOF components and PC scores: Initiate and fit an `EOFRotator` class to the model to obtain a varimax-rotated EOF analysis ```python ->>> rotator = xe.models.EOFRotator(n_modes=3) +>>> rotator = xe.single.EOFRotator(n_modes=3) >>> rotator.fit(eof) # doctest: +ELLIPSIS - + >>> rot_comps = rotator.components() # Rotated EOFs (spatial patterns) >>> rot_scores = rotator.scores() # Rotated PCs (temporal patterns) @@ -89,9 +89,9 @@ Initiate and fit an `EOFRotator` class to the model to obtain a varimax-rotated **Maximum Covariance Analysis (MCA)** ```python ->>> mca = xe.models.MCA(n_modes=10) +>>> mca = xe.cross.MCA(n_modes=10) >>> mca.fit(t2m_west, t2m_east, dim="time") # doctest: +ELLIPSIS - + >>> comps1, comps2 = mca.components() # Singular vectors (spatial patterns) >>> scores1, scores2 = mca.scores() # Expansion coefficients (temporal patterns) @@ -101,9 +101,9 @@ Initiate and fit an `EOFRotator` class to the model to obtain a varimax-rotated **Varimax-rotated MCA** ```python ->>> rotator = xe.models.MCARotator(n_modes=10) +>>> rotator = xe.cross.MCARotator(n_modes=10) >>> rotator.fit(mca) # doctest: +ELLIPSIS - + >>> rot_comps = rotator.components() # Rotated singular vectors (spatial patterns) >>> rot_scores = rotator.scores() # Rotated expansion coefficients (temporal patterns) diff --git a/xeofs/models/_np_classes/__init__.py b/docs/__init__.py similarity index 100% rename from xeofs/models/_np_classes/__init__.py rename to docs/__init__.py diff --git a/docs/_autosummary/xeofs.models.ComplexEOF.rst b/docs/_autosummary/xeofs.models.ComplexEOF.rst deleted file mode 100644 index 7eb1f432..00000000 --- a/docs/_autosummary/xeofs.models.ComplexEOF.rst +++ /dev/null @@ -1,45 +0,0 @@ -xeofs.models.ComplexEOF -======================= - -.. currentmodule:: xeofs.models - -.. autoclass:: ComplexEOF - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. 
autosummary:: - - ~ComplexEOF.__init__ - ~ComplexEOF.components - ~ComplexEOF.components_amplitude - ~ComplexEOF.components_phase - ~ComplexEOF.compute - ~ComplexEOF.deserialize - ~ComplexEOF.explained_variance - ~ComplexEOF.explained_variance_ratio - ~ComplexEOF.fit - ~ComplexEOF.fit_transform - ~ComplexEOF.get_params - ~ComplexEOF.get_serialization_attrs - ~ComplexEOF.inverse_transform - ~ComplexEOF.load - ~ComplexEOF.save - ~ComplexEOF.scores - ~ComplexEOF.scores_amplitude - ~ComplexEOF.scores_phase - ~ComplexEOF.serialize - ~ComplexEOF.singular_values - ~ComplexEOF.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.ComplexEOFRotator.rst b/docs/_autosummary/xeofs.models.ComplexEOFRotator.rst deleted file mode 100644 index 41edbcde..00000000 --- a/docs/_autosummary/xeofs.models.ComplexEOFRotator.rst +++ /dev/null @@ -1,45 +0,0 @@ -xeofs.models.ComplexEOFRotator -============================== - -.. currentmodule:: xeofs.models - -.. autoclass:: ComplexEOFRotator - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~ComplexEOFRotator.__init__ - ~ComplexEOFRotator.components - ~ComplexEOFRotator.components_amplitude - ~ComplexEOFRotator.components_phase - ~ComplexEOFRotator.compute - ~ComplexEOFRotator.deserialize - ~ComplexEOFRotator.explained_variance - ~ComplexEOFRotator.explained_variance_ratio - ~ComplexEOFRotator.fit - ~ComplexEOFRotator.fit_transform - ~ComplexEOFRotator.get_params - ~ComplexEOFRotator.get_serialization_attrs - ~ComplexEOFRotator.inverse_transform - ~ComplexEOFRotator.load - ~ComplexEOFRotator.save - ~ComplexEOFRotator.scores - ~ComplexEOFRotator.scores_amplitude - ~ComplexEOFRotator.scores_phase - ~ComplexEOFRotator.serialize - ~ComplexEOFRotator.singular_values - ~ComplexEOFRotator.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.ComplexMCA.rst b/docs/_autosummary/xeofs.models.ComplexMCA.rst deleted file mode 100644 index 9d8c7f53..00000000 --- a/docs/_autosummary/xeofs.models.ComplexMCA.rst +++ /dev/null @@ -1,48 +0,0 @@ -xeofs.models.ComplexMCA -======================= - -.. currentmodule:: xeofs.models - -.. autoclass:: ComplexMCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~ComplexMCA.__init__ - ~ComplexMCA.components - ~ComplexMCA.components_amplitude - ~ComplexMCA.components_phase - ~ComplexMCA.compute - ~ComplexMCA.covariance_fraction - ~ComplexMCA.deserialize - ~ComplexMCA.fit - ~ComplexMCA.get_params - ~ComplexMCA.get_serialization_attrs - ~ComplexMCA.heterogeneous_patterns - ~ComplexMCA.homogeneous_patterns - ~ComplexMCA.inverse_transform - ~ComplexMCA.load - ~ComplexMCA.save - ~ComplexMCA.scores - ~ComplexMCA.scores_amplitude - ~ComplexMCA.scores_phase - ~ComplexMCA.serialize - ~ComplexMCA.singular_values - ~ComplexMCA.squared_covariance - ~ComplexMCA.squared_covariance_fraction - ~ComplexMCA.total_covariance - ~ComplexMCA.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.ComplexMCARotator.rst b/docs/_autosummary/xeofs.models.ComplexMCARotator.rst deleted file mode 100644 index 4c647104..00000000 --- a/docs/_autosummary/xeofs.models.ComplexMCARotator.rst +++ /dev/null @@ -1,48 +0,0 @@ -xeofs.models.ComplexMCARotator -============================== - -.. currentmodule:: xeofs.models - -.. 
autoclass:: ComplexMCARotator - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~ComplexMCARotator.__init__ - ~ComplexMCARotator.components - ~ComplexMCARotator.components_amplitude - ~ComplexMCARotator.components_phase - ~ComplexMCARotator.compute - ~ComplexMCARotator.covariance_fraction - ~ComplexMCARotator.deserialize - ~ComplexMCARotator.fit - ~ComplexMCARotator.get_params - ~ComplexMCARotator.get_serialization_attrs - ~ComplexMCARotator.heterogeneous_patterns - ~ComplexMCARotator.homogeneous_patterns - ~ComplexMCARotator.inverse_transform - ~ComplexMCARotator.load - ~ComplexMCARotator.save - ~ComplexMCARotator.scores - ~ComplexMCARotator.scores_amplitude - ~ComplexMCARotator.scores_phase - ~ComplexMCARotator.serialize - ~ComplexMCARotator.singular_values - ~ComplexMCARotator.squared_covariance - ~ComplexMCARotator.squared_covariance_fraction - ~ComplexMCARotator.total_covariance - ~ComplexMCARotator.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.EOF.rst b/docs/_autosummary/xeofs.models.EOF.rst deleted file mode 100644 index 90da1cde..00000000 --- a/docs/_autosummary/xeofs.models.EOF.rst +++ /dev/null @@ -1,41 +0,0 @@ -xeofs.models.EOF -================ - -.. currentmodule:: xeofs.models - -.. autoclass:: EOF - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~EOF.__init__ - ~EOF.components - ~EOF.compute - ~EOF.deserialize - ~EOF.explained_variance - ~EOF.explained_variance_ratio - ~EOF.fit - ~EOF.fit_transform - ~EOF.get_params - ~EOF.get_serialization_attrs - ~EOF.inverse_transform - ~EOF.load - ~EOF.save - ~EOF.scores - ~EOF.serialize - ~EOF.singular_values - ~EOF.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.EOFRotator.rst b/docs/_autosummary/xeofs.models.EOFRotator.rst deleted file mode 100644 index c1f06783..00000000 --- a/docs/_autosummary/xeofs.models.EOFRotator.rst +++ /dev/null @@ -1,41 +0,0 @@ -xeofs.models.EOFRotator -======================= - -.. currentmodule:: xeofs.models - -.. autoclass:: EOFRotator - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~EOFRotator.__init__ - ~EOFRotator.components - ~EOFRotator.compute - ~EOFRotator.deserialize - ~EOFRotator.explained_variance - ~EOFRotator.explained_variance_ratio - ~EOFRotator.fit - ~EOFRotator.fit_transform - ~EOFRotator.get_params - ~EOFRotator.get_serialization_attrs - ~EOFRotator.inverse_transform - ~EOFRotator.load - ~EOFRotator.save - ~EOFRotator.scores - ~EOFRotator.serialize - ~EOFRotator.singular_values - ~EOFRotator.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.ExtendedEOF.rst b/docs/_autosummary/xeofs.models.ExtendedEOF.rst deleted file mode 100644 index b1740349..00000000 --- a/docs/_autosummary/xeofs.models.ExtendedEOF.rst +++ /dev/null @@ -1,41 +0,0 @@ -xeofs.models.ExtendedEOF -======================== - -.. currentmodule:: xeofs.models - -.. autoclass:: ExtendedEOF - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. 
autosummary:: - - ~ExtendedEOF.__init__ - ~ExtendedEOF.components - ~ExtendedEOF.compute - ~ExtendedEOF.deserialize - ~ExtendedEOF.explained_variance - ~ExtendedEOF.explained_variance_ratio - ~ExtendedEOF.fit - ~ExtendedEOF.fit_transform - ~ExtendedEOF.get_params - ~ExtendedEOF.get_serialization_attrs - ~ExtendedEOF.inverse_transform - ~ExtendedEOF.load - ~ExtendedEOF.save - ~ExtendedEOF.scores - ~ExtendedEOF.serialize - ~ExtendedEOF.singular_values - ~ExtendedEOF.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.GWPCA.rst b/docs/_autosummary/xeofs.models.GWPCA.rst deleted file mode 100644 index 2bc03eb9..00000000 --- a/docs/_autosummary/xeofs.models.GWPCA.rst +++ /dev/null @@ -1,41 +0,0 @@ -xeofs.models.GWPCA -================== - -.. currentmodule:: xeofs.models - -.. autoclass:: GWPCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~GWPCA.__init__ - ~GWPCA.components - ~GWPCA.compute - ~GWPCA.deserialize - ~GWPCA.explained_variance - ~GWPCA.explained_variance_ratio - ~GWPCA.fit - ~GWPCA.fit_transform - ~GWPCA.get_params - ~GWPCA.get_serialization_attrs - ~GWPCA.inverse_transform - ~GWPCA.largest_locally_weighted_components - ~GWPCA.load - ~GWPCA.save - ~GWPCA.scores - ~GWPCA.serialize - ~GWPCA.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.MCA.rst b/docs/_autosummary/xeofs.models.MCA.rst deleted file mode 100644 index 16b58d96..00000000 --- a/docs/_autosummary/xeofs.models.MCA.rst +++ /dev/null @@ -1,44 +0,0 @@ -xeofs.models.MCA -================ - -.. currentmodule:: xeofs.models - -.. autoclass:: MCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~MCA.__init__ - ~MCA.components - ~MCA.compute - ~MCA.covariance_fraction - ~MCA.deserialize - ~MCA.fit - ~MCA.get_params - ~MCA.get_serialization_attrs - ~MCA.heterogeneous_patterns - ~MCA.homogeneous_patterns - ~MCA.inverse_transform - ~MCA.load - ~MCA.save - ~MCA.scores - ~MCA.serialize - ~MCA.singular_values - ~MCA.squared_covariance - ~MCA.squared_covariance_fraction - ~MCA.total_covariance - ~MCA.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.MCARotator.rst b/docs/_autosummary/xeofs.models.MCARotator.rst deleted file mode 100644 index 785836da..00000000 --- a/docs/_autosummary/xeofs.models.MCARotator.rst +++ /dev/null @@ -1,44 +0,0 @@ -xeofs.models.MCARotator -======================= - -.. currentmodule:: xeofs.models - -.. autoclass:: MCARotator - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. 
autosummary:: - - ~MCARotator.__init__ - ~MCARotator.components - ~MCARotator.compute - ~MCARotator.covariance_fraction - ~MCARotator.deserialize - ~MCARotator.fit - ~MCARotator.get_params - ~MCARotator.get_serialization_attrs - ~MCARotator.heterogeneous_patterns - ~MCARotator.homogeneous_patterns - ~MCARotator.inverse_transform - ~MCARotator.load - ~MCARotator.save - ~MCARotator.scores - ~MCARotator.serialize - ~MCARotator.singular_values - ~MCARotator.squared_covariance - ~MCARotator.squared_covariance_fraction - ~MCARotator.total_covariance - ~MCARotator.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.OPA.rst b/docs/_autosummary/xeofs.models.OPA.rst deleted file mode 100644 index 53000a77..00000000 --- a/docs/_autosummary/xeofs.models.OPA.rst +++ /dev/null @@ -1,40 +0,0 @@ -xeofs.models.OPA -================ - -.. currentmodule:: xeofs.models - -.. autoclass:: OPA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~OPA.__init__ - ~OPA.components - ~OPA.compute - ~OPA.decorrelation_time - ~OPA.deserialize - ~OPA.filter_patterns - ~OPA.fit - ~OPA.fit_transform - ~OPA.get_params - ~OPA.get_serialization_attrs - ~OPA.inverse_transform - ~OPA.load - ~OPA.save - ~OPA.scores - ~OPA.serialize - ~OPA.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.validation.EOFBootstrapper.rst b/docs/_autosummary/xeofs.validation.EOFBootstrapper.rst deleted file mode 100644 index 748247b8..00000000 --- a/docs/_autosummary/xeofs.validation.EOFBootstrapper.rst +++ /dev/null @@ -1,41 +0,0 @@ -xeofs.validation.EOFBootstrapper -================================ - -.. currentmodule:: xeofs.validation - -.. autoclass:: EOFBootstrapper - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~EOFBootstrapper.__init__ - ~EOFBootstrapper.components - ~EOFBootstrapper.compute - ~EOFBootstrapper.deserialize - ~EOFBootstrapper.explained_variance - ~EOFBootstrapper.explained_variance_ratio - ~EOFBootstrapper.fit - ~EOFBootstrapper.fit_transform - ~EOFBootstrapper.get_params - ~EOFBootstrapper.get_serialization_attrs - ~EOFBootstrapper.inverse_transform - ~EOFBootstrapper.load - ~EOFBootstrapper.save - ~EOFBootstrapper.scores - ~EOFBootstrapper.serialize - ~EOFBootstrapper.singular_values - ~EOFBootstrapper.transform - - - - - - \ No newline at end of file diff --git a/docs/_templates/custom-class-template.rst b/docs/_templates/custom-class-template.rst index 4197f9f0..a7db2b37 100644 --- a/docs/_templates/custom-class-template.rst +++ b/docs/_templates/custom-class-template.rst @@ -4,7 +4,6 @@ .. autoclass:: {{ objname }} :members: - :show-inheritance: :inherited-members: {% block methods %} diff --git a/docs/_autosummary/xeofs.models.RotatorFactory.rst b/docs/api_reference/_autosummary/xeofs.RotatorFactory.rst similarity index 67% rename from docs/_autosummary/xeofs.models.RotatorFactory.rst rename to docs/api_reference/_autosummary/xeofs.RotatorFactory.rst index b23f04da..0b54f4b2 100644 --- a/docs/_autosummary/xeofs.models.RotatorFactory.rst +++ b/docs/api_reference/_autosummary/xeofs.RotatorFactory.rst @@ -1,11 +1,10 @@ -xeofs.models.RotatorFactory -=========================== +RotatorFactory +============== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs .. 
autoclass:: RotatorFactory :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.cross.CPCCA.rst b/docs/api_reference/_autosummary/xeofs.cross.CPCCA.rst new file mode 100644 index 00000000..0cf9e049 --- /dev/null +++ b/docs/api_reference/_autosummary/xeofs.cross.CPCCA.rst @@ -0,0 +1,46 @@ +CPCCA +===== + +.. currentmodule:: xeofs.cross + +.. autoclass:: CPCCA + :members: + :inherited-members: + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~CPCCA.__init__ + ~CPCCA.components + ~CPCCA.compute + ~CPCCA.correlation_coefficients_X + ~CPCCA.correlation_coefficients_Y + ~CPCCA.cross_correlation_coefficients + ~CPCCA.deserialize + ~CPCCA.fit + ~CPCCA.fraction_variance_X_explained_by_X + ~CPCCA.fraction_variance_Y_explained_by_X + ~CPCCA.fraction_variance_Y_explained_by_Y + ~CPCCA.get_params + ~CPCCA.get_serialization_attrs + ~CPCCA.heterogeneous_patterns + ~CPCCA.homogeneous_patterns + ~CPCCA.inverse_transform + ~CPCCA.load + ~CPCCA.predict + ~CPCCA.save + ~CPCCA.scores + ~CPCCA.serialize + ~CPCCA.squared_covariance_fraction + ~CPCCA.transform + + + + + + \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.models.CPCCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.CPCCARotator.rst similarity index 90% rename from docs/api_reference/_autosummary/xeofs.models.CPCCARotator.rst rename to docs/api_reference/_autosummary/xeofs.cross.CPCCARotator.rst index d40d1e54..534d5abe 100644 --- a/docs/api_reference/_autosummary/xeofs.models.CPCCARotator.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.CPCCARotator.rst @@ -1,11 +1,10 @@ -xeofs.models.CPCCARotator -========================= +CPCCARotator +============ -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: CPCCARotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexCPCCA.rst b/docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCA.rst similarity index 91% rename from docs/api_reference/_autosummary/xeofs.models.ComplexCPCCA.rst rename to docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCA.rst index 59473ed2..617ae608 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexCPCCA.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCA.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexCPCCA -========================= +ComplexCPCCA +============ -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: ComplexCPCCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexCPCCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCARotator.rst similarity index 91% rename from docs/api_reference/_autosummary/xeofs.models.ComplexCPCCARotator.rst rename to docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCARotator.rst index bc04ef61..aac762a6 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexCPCCARotator.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.ComplexCPCCARotator.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexCPCCARotator -================================ +ComplexCPCCARotator +=================== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. 
autoclass:: ComplexCPCCARotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexMCA.rst b/docs/api_reference/_autosummary/xeofs.cross.ComplexMCA.rst similarity index 91% rename from docs/api_reference/_autosummary/xeofs.models.ComplexMCA.rst rename to docs/api_reference/_autosummary/xeofs.cross.ComplexMCA.rst index fd12cb0e..7cf80815 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexMCA.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.ComplexMCA.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexMCA -======================= +ComplexMCA +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: ComplexMCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexMCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.ComplexMCARotator.rst similarity index 91% rename from docs/api_reference/_autosummary/xeofs.models.ComplexMCARotator.rst rename to docs/api_reference/_autosummary/xeofs.cross.ComplexMCARotator.rst index 81ad13d9..b05a460a 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexMCARotator.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.ComplexMCARotator.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexMCARotator -============================== +ComplexMCARotator +================= -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: ComplexMCARotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCA.rst b/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCA.rst new file mode 100644 index 00000000..1d6410ea --- /dev/null +++ b/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCA.rst @@ -0,0 +1,50 @@ +HilbertCPCCA +============ + +.. currentmodule:: xeofs.cross + +.. autoclass:: HilbertCPCCA + :members: + :inherited-members: + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. autosummary:: + + ~HilbertCPCCA.__init__ + ~HilbertCPCCA.components + ~HilbertCPCCA.components_amplitude + ~HilbertCPCCA.components_phase + ~HilbertCPCCA.compute + ~HilbertCPCCA.correlation_coefficients_X + ~HilbertCPCCA.correlation_coefficients_Y + ~HilbertCPCCA.cross_correlation_coefficients + ~HilbertCPCCA.deserialize + ~HilbertCPCCA.fit + ~HilbertCPCCA.fraction_variance_X_explained_by_X + ~HilbertCPCCA.fraction_variance_Y_explained_by_X + ~HilbertCPCCA.fraction_variance_Y_explained_by_Y + ~HilbertCPCCA.get_params + ~HilbertCPCCA.get_serialization_attrs + ~HilbertCPCCA.heterogeneous_patterns + ~HilbertCPCCA.homogeneous_patterns + ~HilbertCPCCA.inverse_transform + ~HilbertCPCCA.load + ~HilbertCPCCA.predict + ~HilbertCPCCA.save + ~HilbertCPCCA.scores + ~HilbertCPCCA.scores_amplitude + ~HilbertCPCCA.scores_phase + ~HilbertCPCCA.serialize + ~HilbertCPCCA.squared_covariance_fraction + ~HilbertCPCCA.transform + + + + + + \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCARotator.rst new file mode 100644 index 00000000..d097cd3e --- /dev/null +++ b/docs/api_reference/_autosummary/xeofs.cross.HilbertCPCCARotator.rst @@ -0,0 +1,50 @@ +HilbertCPCCARotator +=================== + +.. currentmodule:: xeofs.cross + +.. autoclass:: HilbertCPCCARotator + :members: + :inherited-members: + + + .. automethod:: __init__ + + + .. rubric:: Methods + + .. 
autosummary:: + + ~HilbertCPCCARotator.__init__ + ~HilbertCPCCARotator.components + ~HilbertCPCCARotator.components_amplitude + ~HilbertCPCCARotator.components_phase + ~HilbertCPCCARotator.compute + ~HilbertCPCCARotator.correlation_coefficients_X + ~HilbertCPCCARotator.correlation_coefficients_Y + ~HilbertCPCCARotator.cross_correlation_coefficients + ~HilbertCPCCARotator.deserialize + ~HilbertCPCCARotator.fit + ~HilbertCPCCARotator.fraction_variance_X_explained_by_X + ~HilbertCPCCARotator.fraction_variance_Y_explained_by_X + ~HilbertCPCCARotator.fraction_variance_Y_explained_by_Y + ~HilbertCPCCARotator.get_params + ~HilbertCPCCARotator.get_serialization_attrs + ~HilbertCPCCARotator.heterogeneous_patterns + ~HilbertCPCCARotator.homogeneous_patterns + ~HilbertCPCCARotator.inverse_transform + ~HilbertCPCCARotator.load + ~HilbertCPCCARotator.predict + ~HilbertCPCCARotator.save + ~HilbertCPCCARotator.scores + ~HilbertCPCCARotator.scores_amplitude + ~HilbertCPCCARotator.scores_phase + ~HilbertCPCCARotator.serialize + ~HilbertCPCCARotator.squared_covariance_fraction + ~HilbertCPCCARotator.transform + + + + + + \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.models.HilbertMCA.rst b/docs/api_reference/_autosummary/xeofs.cross.HilbertMCA.rst similarity index 65% rename from docs/api_reference/_autosummary/xeofs.models.HilbertMCA.rst rename to docs/api_reference/_autosummary/xeofs.cross.HilbertMCA.rst index 83d28914..8b6bc60e 100644 --- a/docs/api_reference/_autosummary/xeofs.models.HilbertMCA.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.HilbertMCA.rst @@ -1,11 +1,10 @@ -xeofs.models.HilbertMCA -======================= +HilbertMCA +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: HilbertMCA :members: - :show-inheritance: :inherited-members: @@ -21,24 +20,28 @@ ~HilbertMCA.components_amplitude ~HilbertMCA.components_phase ~HilbertMCA.compute - ~HilbertMCA.covariance_fraction + ~HilbertMCA.correlation_coefficients_X + ~HilbertMCA.correlation_coefficients_Y + ~HilbertMCA.covariance_fraction_CD95 + ~HilbertMCA.cross_correlation_coefficients ~HilbertMCA.deserialize ~HilbertMCA.fit + ~HilbertMCA.fraction_variance_X_explained_by_X + ~HilbertMCA.fraction_variance_Y_explained_by_X + ~HilbertMCA.fraction_variance_Y_explained_by_Y ~HilbertMCA.get_params ~HilbertMCA.get_serialization_attrs ~HilbertMCA.heterogeneous_patterns ~HilbertMCA.homogeneous_patterns ~HilbertMCA.inverse_transform ~HilbertMCA.load + ~HilbertMCA.predict ~HilbertMCA.save ~HilbertMCA.scores ~HilbertMCA.scores_amplitude ~HilbertMCA.scores_phase ~HilbertMCA.serialize - ~HilbertMCA.singular_values - ~HilbertMCA.squared_covariance ~HilbertMCA.squared_covariance_fraction - ~HilbertMCA.total_covariance ~HilbertMCA.transform diff --git a/docs/api_reference/_autosummary/xeofs.models.HilbertMCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.HilbertMCARotator.rst similarity index 65% rename from docs/api_reference/_autosummary/xeofs.models.HilbertMCARotator.rst rename to docs/api_reference/_autosummary/xeofs.cross.HilbertMCARotator.rst index 652ec73c..3d3d2bcd 100644 --- a/docs/api_reference/_autosummary/xeofs.models.HilbertMCARotator.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.HilbertMCARotator.rst @@ -1,11 +1,10 @@ -xeofs.models.HilbertMCARotator -============================== +HilbertMCARotator +================= -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. 
autoclass:: HilbertMCARotator :members: - :show-inheritance: :inherited-members: @@ -21,24 +20,28 @@ ~HilbertMCARotator.components_amplitude ~HilbertMCARotator.components_phase ~HilbertMCARotator.compute - ~HilbertMCARotator.covariance_fraction + ~HilbertMCARotator.correlation_coefficients_X + ~HilbertMCARotator.correlation_coefficients_Y + ~HilbertMCARotator.covariance_fraction_CD95 + ~HilbertMCARotator.cross_correlation_coefficients ~HilbertMCARotator.deserialize ~HilbertMCARotator.fit + ~HilbertMCARotator.fraction_variance_X_explained_by_X + ~HilbertMCARotator.fraction_variance_Y_explained_by_X + ~HilbertMCARotator.fraction_variance_Y_explained_by_Y ~HilbertMCARotator.get_params ~HilbertMCARotator.get_serialization_attrs ~HilbertMCARotator.heterogeneous_patterns ~HilbertMCARotator.homogeneous_patterns ~HilbertMCARotator.inverse_transform ~HilbertMCARotator.load + ~HilbertMCARotator.predict ~HilbertMCARotator.save ~HilbertMCARotator.scores ~HilbertMCARotator.scores_amplitude ~HilbertMCARotator.scores_phase ~HilbertMCARotator.serialize - ~HilbertMCARotator.singular_values - ~HilbertMCARotator.squared_covariance ~HilbertMCARotator.squared_covariance_fraction - ~HilbertMCARotator.total_covariance ~HilbertMCARotator.transform diff --git a/docs/api_reference/_autosummary/xeofs.models.MCA.rst b/docs/api_reference/_autosummary/xeofs.cross.MCA.rst similarity index 90% rename from docs/api_reference/_autosummary/xeofs.models.MCA.rst rename to docs/api_reference/_autosummary/xeofs.cross.MCA.rst index 6fc243d3..751ef8fd 100644 --- a/docs/api_reference/_autosummary/xeofs.models.MCA.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.MCA.rst @@ -1,11 +1,10 @@ -xeofs.models.MCA -================ +MCA +=== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: MCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.MCARotator.rst b/docs/api_reference/_autosummary/xeofs.cross.MCARotator.rst similarity index 90% rename from docs/api_reference/_autosummary/xeofs.models.MCARotator.rst rename to docs/api_reference/_autosummary/xeofs.cross.MCARotator.rst index ed88ae22..d8cbbe56 100644 --- a/docs/api_reference/_autosummary/xeofs.models.MCARotator.rst +++ b/docs/api_reference/_autosummary/xeofs.cross.MCARotator.rst @@ -1,11 +1,10 @@ -xeofs.models.MCARotator -======================= +MCARotator +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.cross .. autoclass:: MCARotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.CCA.rst b/docs/api_reference/_autosummary/xeofs.models.CCA.rst deleted file mode 100644 index 3238c999..00000000 --- a/docs/api_reference/_autosummary/xeofs.models.CCA.rst +++ /dev/null @@ -1,39 +0,0 @@ -xeofs.models.CCA -================ - -.. currentmodule:: xeofs.models - -.. autoclass:: CCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. 
autosummary:: - - ~CCA.__init__ - ~CCA.components - ~CCA.explained_covariance - ~CCA.explained_covariance_ratio - ~CCA.explained_variance - ~CCA.explained_variance_ratio - ~CCA.fit - ~CCA.get_metadata_routing - ~CCA.get_params - ~CCA.scores - ~CCA.set_fit_request - ~CCA.set_params - ~CCA.set_transform_request - ~CCA.transform - ~CCA.weights - - - - - - \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.models.ContinuumPowerCCA.rst b/docs/api_reference/_autosummary/xeofs.models.ContinuumPowerCCA.rst deleted file mode 100644 index 5939d3d8..00000000 --- a/docs/api_reference/_autosummary/xeofs.models.ContinuumPowerCCA.rst +++ /dev/null @@ -1,47 +0,0 @@ -xeofs.models.ContinuumPowerCCA -============================== - -.. currentmodule:: xeofs.models - -.. autoclass:: ContinuumPowerCCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~ContinuumPowerCCA.__init__ - ~ContinuumPowerCCA.components - ~ContinuumPowerCCA.compute - ~ContinuumPowerCCA.correlation_coefficients_X - ~ContinuumPowerCCA.correlation_coefficients_Y - ~ContinuumPowerCCA.cross_correlation_coefficients - ~ContinuumPowerCCA.deserialize - ~ContinuumPowerCCA.fit - ~ContinuumPowerCCA.fraction_variance_X_explained_by_X - ~ContinuumPowerCCA.fraction_variance_Y_explained_by_X - ~ContinuumPowerCCA.fraction_variance_Y_explained_by_Y - ~ContinuumPowerCCA.get_params - ~ContinuumPowerCCA.get_serialization_attrs - ~ContinuumPowerCCA.heterogeneous_patterns - ~ContinuumPowerCCA.homogeneous_patterns - ~ContinuumPowerCCA.inverse_transform - ~ContinuumPowerCCA.load - ~ContinuumPowerCCA.predict - ~ContinuumPowerCCA.save - ~ContinuumPowerCCA.scores - ~ContinuumPowerCCA.serialize - ~ContinuumPowerCCA.squared_covariance_fraction - ~ContinuumPowerCCA.transform - - - - - - \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.models.RotatorFactory.rst b/docs/api_reference/_autosummary/xeofs.models.RotatorFactory.rst deleted file mode 100644 index b23f04da..00000000 --- a/docs/api_reference/_autosummary/xeofs.models.RotatorFactory.rst +++ /dev/null @@ -1,26 +0,0 @@ -xeofs.models.RotatorFactory -=========================== - -.. currentmodule:: xeofs.models - -.. autoclass:: RotatorFactory - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. autosummary:: - - ~RotatorFactory.__init__ - ~RotatorFactory.create_rotator - - - - - - \ No newline at end of file diff --git a/docs/api_reference/_autosummary/xeofs.models.SparsePCA.rst b/docs/api_reference/_autosummary/xeofs.models.SparsePCA.rst deleted file mode 100644 index 068ccd89..00000000 --- a/docs/api_reference/_autosummary/xeofs.models.SparsePCA.rst +++ /dev/null @@ -1,40 +0,0 @@ -xeofs.models.SparsePCA -====================== - -.. currentmodule:: xeofs.models - -.. autoclass:: SparsePCA - :members: - :show-inheritance: - :inherited-members: - - - .. automethod:: __init__ - - - .. rubric:: Methods - - .. 
autosummary:: - - ~SparsePCA.__init__ - ~SparsePCA.components - ~SparsePCA.compute - ~SparsePCA.deserialize - ~SparsePCA.explained_variance - ~SparsePCA.explained_variance_ratio - ~SparsePCA.fit - ~SparsePCA.fit_transform - ~SparsePCA.get_params - ~SparsePCA.get_serialization_attrs - ~SparsePCA.inverse_transform - ~SparsePCA.load - ~SparsePCA.save - ~SparsePCA.scores - ~SparsePCA.serialize - ~SparsePCA.transform - - - - - - \ No newline at end of file diff --git a/docs/_autosummary/xeofs.models.CCA.rst b/docs/api_reference/_autosummary/xeofs.multi.CCA.rst similarity index 85% rename from docs/_autosummary/xeofs.models.CCA.rst rename to docs/api_reference/_autosummary/xeofs.multi.CCA.rst index 3238c999..93abea08 100644 --- a/docs/_autosummary/xeofs.models.CCA.rst +++ b/docs/api_reference/_autosummary/xeofs.multi.CCA.rst @@ -1,11 +1,10 @@ -xeofs.models.CCA -================ +CCA +=== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.multi .. autoclass:: CCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexEOF.rst b/docs/api_reference/_autosummary/xeofs.single.ComplexEOF.rst similarity index 88% rename from docs/api_reference/_autosummary/xeofs.models.ComplexEOF.rst rename to docs/api_reference/_autosummary/xeofs.single.ComplexEOF.rst index 7eb1f432..5bac3289 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexEOF.rst +++ b/docs/api_reference/_autosummary/xeofs.single.ComplexEOF.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexEOF -======================= +ComplexEOF +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: ComplexEOF :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ComplexEOFRotator.rst b/docs/api_reference/_autosummary/xeofs.single.ComplexEOFRotator.rst similarity index 89% rename from docs/api_reference/_autosummary/xeofs.models.ComplexEOFRotator.rst rename to docs/api_reference/_autosummary/xeofs.single.ComplexEOFRotator.rst index 41edbcde..ebedc5cd 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ComplexEOFRotator.rst +++ b/docs/api_reference/_autosummary/xeofs.single.ComplexEOFRotator.rst @@ -1,11 +1,10 @@ -xeofs.models.ComplexEOFRotator -============================== +ComplexEOFRotator +================= -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: ComplexEOFRotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.EOF.rst b/docs/api_reference/_autosummary/xeofs.single.EOF.rst similarity index 85% rename from docs/api_reference/_autosummary/xeofs.models.EOF.rst rename to docs/api_reference/_autosummary/xeofs.single.EOF.rst index 90da1cde..38f3c7dd 100644 --- a/docs/api_reference/_autosummary/xeofs.models.EOF.rst +++ b/docs/api_reference/_autosummary/xeofs.single.EOF.rst @@ -1,11 +1,10 @@ -xeofs.models.EOF -================ +EOF +=== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. 
autoclass:: EOF :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.EOFRotator.rst b/docs/api_reference/_autosummary/xeofs.single.EOFRotator.rst similarity index 86% rename from docs/api_reference/_autosummary/xeofs.models.EOFRotator.rst rename to docs/api_reference/_autosummary/xeofs.single.EOFRotator.rst index c1f06783..0c54557a 100644 --- a/docs/api_reference/_autosummary/xeofs.models.EOFRotator.rst +++ b/docs/api_reference/_autosummary/xeofs.single.EOFRotator.rst @@ -1,11 +1,10 @@ -xeofs.models.EOFRotator -======================= +EOFRotator +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: EOFRotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.ExtendedEOF.rst b/docs/api_reference/_autosummary/xeofs.single.ExtendedEOF.rst similarity index 86% rename from docs/api_reference/_autosummary/xeofs.models.ExtendedEOF.rst rename to docs/api_reference/_autosummary/xeofs.single.ExtendedEOF.rst index b1740349..c9d06932 100644 --- a/docs/api_reference/_autosummary/xeofs.models.ExtendedEOF.rst +++ b/docs/api_reference/_autosummary/xeofs.single.ExtendedEOF.rst @@ -1,11 +1,10 @@ -xeofs.models.ExtendedEOF -======================== +ExtendedEOF +=========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: ExtendedEOF :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.GWPCA.rst b/docs/api_reference/_autosummary/xeofs.single.GWPCA.rst similarity index 86% rename from docs/api_reference/_autosummary/xeofs.models.GWPCA.rst rename to docs/api_reference/_autosummary/xeofs.single.GWPCA.rst index 2bc03eb9..486bc2e6 100644 --- a/docs/api_reference/_autosummary/xeofs.models.GWPCA.rst +++ b/docs/api_reference/_autosummary/xeofs.single.GWPCA.rst @@ -1,11 +1,10 @@ -xeofs.models.GWPCA -================== +GWPCA +===== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: GWPCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.HilbertEOF.rst b/docs/api_reference/_autosummary/xeofs.single.HilbertEOF.rst similarity index 88% rename from docs/api_reference/_autosummary/xeofs.models.HilbertEOF.rst rename to docs/api_reference/_autosummary/xeofs.single.HilbertEOF.rst index f90a25df..286f6500 100644 --- a/docs/api_reference/_autosummary/xeofs.models.HilbertEOF.rst +++ b/docs/api_reference/_autosummary/xeofs.single.HilbertEOF.rst @@ -1,11 +1,10 @@ -xeofs.models.HilbertEOF -======================= +HilbertEOF +========== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: HilbertEOF :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.HilbertEOFRotator.rst b/docs/api_reference/_autosummary/xeofs.single.HilbertEOFRotator.rst similarity index 89% rename from docs/api_reference/_autosummary/xeofs.models.HilbertEOFRotator.rst rename to docs/api_reference/_autosummary/xeofs.single.HilbertEOFRotator.rst index f8a11bf2..fe1e4f67 100644 --- a/docs/api_reference/_autosummary/xeofs.models.HilbertEOFRotator.rst +++ b/docs/api_reference/_autosummary/xeofs.single.HilbertEOFRotator.rst @@ -1,11 +1,10 @@ -xeofs.models.HilbertEOFRotator -============================== +HilbertEOFRotator +================= -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. 
autoclass:: HilbertEOFRotator :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.models.OPA.rst b/docs/api_reference/_autosummary/xeofs.single.OPA.rst similarity index 85% rename from docs/api_reference/_autosummary/xeofs.models.OPA.rst rename to docs/api_reference/_autosummary/xeofs.single.OPA.rst index 53000a77..dc2094f4 100644 --- a/docs/api_reference/_autosummary/xeofs.models.OPA.rst +++ b/docs/api_reference/_autosummary/xeofs.single.OPA.rst @@ -1,11 +1,10 @@ -xeofs.models.OPA -================ +OPA +=== -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: OPA :members: - :show-inheritance: :inherited-members: diff --git a/docs/_autosummary/xeofs.models.SparsePCA.rst b/docs/api_reference/_autosummary/xeofs.single.SparsePCA.rst similarity index 85% rename from docs/_autosummary/xeofs.models.SparsePCA.rst rename to docs/api_reference/_autosummary/xeofs.single.SparsePCA.rst index 068ccd89..11754996 100644 --- a/docs/_autosummary/xeofs.models.SparsePCA.rst +++ b/docs/api_reference/_autosummary/xeofs.single.SparsePCA.rst @@ -1,11 +1,10 @@ -xeofs.models.SparsePCA -====================== +SparsePCA +========= -.. currentmodule:: xeofs.models +.. currentmodule:: xeofs.single .. autoclass:: SparsePCA :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/_autosummary/xeofs.validation.EOFBootstrapper.rst b/docs/api_reference/_autosummary/xeofs.validation.EOFBootstrapper.rst index 748247b8..63bba4be 100644 --- a/docs/api_reference/_autosummary/xeofs.validation.EOFBootstrapper.rst +++ b/docs/api_reference/_autosummary/xeofs.validation.EOFBootstrapper.rst @@ -1,11 +1,10 @@ -xeofs.validation.EOFBootstrapper -================================ +EOFBootstrapper +=============== .. currentmodule:: xeofs.validation .. autoclass:: EOFBootstrapper :members: - :show-inheritance: :inherited-members: diff --git a/docs/api_reference/cross_set_analysis.rst b/docs/api_reference/cross_set_analysis.rst new file mode 100644 index 00000000..970b6a8f --- /dev/null +++ b/docs/api_reference/cross_set_analysis.rst @@ -0,0 +1,32 @@ +=============== +``xeofs.cross`` +=============== +Methods that investigate relationships or patterns between variables **across two** distinct datasets. + +.. autosummary:: + :toctree: _autosummary + :template: custom-class-template.rst + :recursive: + + ~xeofs.cross.MCA + ~xeofs.cross.CPCCA + ~xeofs.cross.ComplexMCA + ~xeofs.cross.ComplexCPCCA + ~xeofs.cross.HilbertMCA + ~xeofs.cross.HilbertCPCCA + +------------------------------ +Sparse Solutions via Rotation +------------------------------ + +.. autosummary:: + :toctree: _autosummary + :template: custom-class-template.rst + :recursive: + + ~xeofs.cross.MCARotator + ~xeofs.cross.CPCCARotator + ~xeofs.cross.ComplexMCARotator + ~xeofs.cross.ComplexCPCCARotator + ~xeofs.cross.HilbertMCARotator + ~xeofs.cross.HilbertCPCCARotator diff --git a/docs/api_reference/index.rst b/docs/api_reference/index.rst index 011fff02..c74ab81d 100644 --- a/docs/api_reference/index.rst +++ b/docs/api_reference/index.rst @@ -6,22 +6,24 @@ API Reference The package is under development, and its API may change. -The xeofs package focuses on eigenmethods for dimensionality reduction in climate science. These methods are categorized into two groups: +The xeofs package focuses on eigenmethods for dimensionality reduction in climate science. It is organized into methods that examine relationships between variables -1. 
:doc:`Single-Set Analysis <single_set_analysis>`: Methods that examine relationships or patterns within a single dataset. -2. :doc:`Multi-Set Analysis <multi_set_analysis>`: Methods that investigate relationships or patterns between variables across two or more distinct datasets. +1. within a **single dataset** (``xeofs.single``), +2. across **two datasets** (``xeofs.cross``) and +3. across **more than two datasets** (``xeofs.multi``). -------------------- Single-Set Analysis -------------------- -A classic example of single-set analysis is Principal Component Analysis (PCA/EOF analysis), used to extract the dominant patterns of variability within a single dataset. While PCA can be applied to multiple (standardized) datasets simultaneously, it treats all datasets as one large dataset, maximizing overall variability without considering inter-dataset relationships. Consequently, the most important variables may come from only one dataset, ignoring others. +A classic example of :doc:`single-set analysis <single_set_analysis>` is Principal Component Analysis (PCA/EOF analysis), used to extract the dominant patterns of variability within a single dataset. While PCA can be applied to multiple (standardized) datasets simultaneously, it treats all datasets as one large dataset, maximizing overall variability without considering inter-dataset relationships. Consequently, the most important variables may come from only one dataset, ignoring others. --------------------- -Multi-Set Analysis --------------------- +---------------------------- +Cross and Multi-Set Analysis +---------------------------- + +Classic examples of :doc:`cross <cross_set_analysis>` or :doc:`multi`-set analysis methods include Canonical Correlation Analysis (CCA), Maximum Covariance Analysis (MCA) and Redundancy Analysis (RDA). These techniques identify shared patterns of variability between two distinct datasets, focusing on common patterns rather than those unique to each dataset. -Examples of multi-set analysis methods include Canonical Correlation Analysis (CCA) and Maximum Covariance Analysis (MCA). These techniques identify shared patterns of variability between two or more datasets, focusing on common patterns rather than those unique to each dataset. For instance, if you have two datasets (e.g., monthly temperatures from tropical and polar regions over 70 years), CCA or MCA would likely highlight the global warming signal as the dominant pattern common to both datasets, while the seasonal cycle would not be dominant as it is only prominent in the polar region. Additionally, xeofs offers tools for :doc:`model evaluation <model_evaluation>`, though these are still in early development stages. @@ -30,9 +32,17 @@ Additionally, xeofs offers tools for :doc:`model evaluation <model_evaluation>`, .. toctree:: :maxdepth: 3 :hidden: + :caption: Methods single_set_analysis + cross_set_analysis multi_set_analysis - model_evaluation utilities +.. toctree:: + :maxdepth: 3 + :hidden: + :caption: Significance Testing + + model_evaluation + diff --git a/docs/api_reference/model_evaluation.rst b/docs/api_reference/model_evaluation.rst index d6d07718..7dc7dfd8 100644 --- a/docs/api_reference/model_evaluation.rst +++ b/docs/api_reference/model_evaluation.rst @@ -1,11 +1,11 @@ -================ -Model Evaluation -================ -Tools to assess the quality of your model. +==================== +``xeofs.validation`` +==================== +Tools to assess the significance of your model. ..
autosummary:: :toctree: _autosummary :template: custom-class-template.rst :recursive: - xeofs.validation.EOFBootstrapper + ~xeofs.validation.EOFBootstrapper diff --git a/docs/api_reference/multi_set_analysis.rst b/docs/api_reference/multi_set_analysis.rst index 8086537c..ce4b1ea4 100644 --- a/docs/api_reference/multi_set_analysis.rst +++ b/docs/api_reference/multi_set_analysis.rst @@ -1,20 +1,14 @@ -================== -Multi-Set Analysis -================== -Methods that investigate relationships or patterns between variables across two or more distinct datasets. +================ +``xeofs.multi`` +================ +Methods that investigate relationships or patterns between variables across **more than two** distinct datasets. .. autosummary:: :toctree: _autosummary :template: custom-class-template.rst :recursive: - xeofs.models.MCA - xeofs.models.CCA - xeofs.models.CPCCA - xeofs.models.ComplexMCA - xeofs.models.ComplexCPCCA - xeofs.models.HilbertMCA - xeofs.models.HilbertCPCCA + ~xeofs.multi.CCA ------------------------------ Sparse Solutions via Rotation @@ -25,9 +19,3 @@ Sparse Solutions via Rotation :template: custom-class-template.rst :recursive: - xeofs.models.MCARotator - xeofs.models.CPCCARotator - xeofs.models.ComplexMCARotator - xeofs.models.ComplexCPCCARotator - xeofs.models.HilbertMCARotator - xeofs.models.HilbertCPCCARotator diff --git a/docs/api_reference/single_set_analysis.rst b/docs/api_reference/single_set_analysis.rst index dd6d5df9..6a975b8a 100644 --- a/docs/api_reference/single_set_analysis.rst +++ b/docs/api_reference/single_set_analysis.rst @@ -1,21 +1,21 @@ -==================== -Single-Set Analysis -==================== +================ +``xeofs.single`` +================ -Methods that examine relationships among variables within a single dataset, or when multiple datasets are combined and analyzed as one. +Methods that investigate relationships or patterns between variables within a single dataset. .. autosummary:: :toctree: _autosummary :template: custom-class-template.rst :recursive: - xeofs.models.EOF - xeofs.models.ComplexEOF - xeofs.models.HilbertEOF - xeofs.models.ExtendedEOF - xeofs.models.OPA - xeofs.models.GWPCA - xeofs.models.SparsePCA + ~xeofs.single.EOF + ~xeofs.single.ComplexEOF + ~xeofs.single.HilbertEOF + ~xeofs.single.ExtendedEOF + ~xeofs.single.OPA + ~xeofs.single.GWPCA + ~xeofs.single.SparsePCA ------------------------------ @@ -27,6 +27,6 @@ Sparse Solutions via Rotation :template: custom-class-template.rst :recursive: - xeofs.models.EOFRotator - xeofs.models.ComplexEOFRotator - xeofs.models.HilbertEOFRotator \ No newline at end of file + ~xeofs.single.EOFRotator + ~xeofs.single.ComplexEOFRotator + ~xeofs.single.HilbertEOFRotator \ No newline at end of file diff --git a/docs/api_reference/utilities.rst b/docs/api_reference/utilities.rst index cf1e6e0b..9f4b5682 100644 --- a/docs/api_reference/utilities.rst +++ b/docs/api_reference/utilities.rst @@ -1,14 +1,14 @@ ========= Utilities ========= -Support functions. +Tools that may be helpful .. 
autosummary:: :toctree: _autosummary :template: custom-class-template.rst :recursive: - xeofs.models.RotatorFactory + ~xeofs.RotatorFactory diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_001.png index 5535ac43..5738db32 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_002.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_002.png index 3a57887f..8080e895 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_002.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_002.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_003.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_003.png index fa6e1144..a43b108f 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_003.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_003.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_001.png index 9e35afcb..50e9f8d8 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_002.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_002.png index 18441375..7fb77f55 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_002.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_002.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_003.png b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_003.png index 165aa6df..3ca36fdd 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_003.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eeof_trend_003.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eof-smode_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_eof-smode_001.png index 49615fde..fe0814bf 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eof-smode_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eof-smode_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_eof-tmode_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_eof-tmode_001.png index 48a24ea6..d7dfb2c1 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_eof-tmode_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_eof-tmode_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_001.png index 080b2db8..1512e549 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_002.png b/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_002.png index a78ea394..101ecca7 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_002.png and b/docs/auto_examples/1single/images/sphx_glr_plot_gwpca_002.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_001.png 
b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_001.png index 37639b29..c3dc6518 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_002.png b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_002.png index 8f1456fe..b3fece2e 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_002.png and b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_002.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_004.png b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_004.png index 26e1d783..49f8a91a 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_004.png and b/docs/auto_examples/1single/images/sphx_glr_plot_hilbert_eof_004.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_mreof_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_mreof_001.png index 445d06f5..96c346c3 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_mreof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_mreof_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_multivariate-eof_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_multivariate-eof_001.png index 3ed35002..7d75a14f 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_multivariate-eof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_multivariate-eof_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_rotated_eof_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_rotated_eof_001.png index ce7389fd..8489241a 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_rotated_eof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_rotated_eof_001.png differ diff --git a/docs/auto_examples/1single/images/sphx_glr_plot_weighted-eof_001.png b/docs/auto_examples/1single/images/sphx_glr_plot_weighted-eof_001.png index e99e6a9e..c8d0078e 100644 Binary files a/docs/auto_examples/1single/images/sphx_glr_plot_weighted-eof_001.png and b/docs/auto_examples/1single/images/sphx_glr_plot_weighted-eof_001.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_thumb.png index db423829..fedce0de 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_trend_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_trend_thumb.png index c7af6964..4552a60f 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_trend_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eeof_trend_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-smode_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-smode_thumb.png index 081c54cf..0677f04a 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-smode_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-smode_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-tmode_thumb.png 
b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-tmode_thumb.png index e1a73f04..4557bf9b 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-tmode_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_eof-tmode_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_gwpca_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_gwpca_thumb.png index 32f370d6..b7a9a0fd 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_gwpca_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_gwpca_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_hilbert_eof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_hilbert_eof_thumb.png index 133244f0..582b7c73 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_hilbert_eof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_hilbert_eof_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_mreof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_mreof_thumb.png index b1a2f9ca..8a1c9666 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_mreof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_mreof_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_multivariate-eof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_multivariate-eof_thumb.png index 012f0e98..8b65e4ad 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_multivariate-eof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_multivariate-eof_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_rotated_eof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_rotated_eof_thumb.png index a75cd05d..0416fc09 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_rotated_eof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_rotated_eof_thumb.png differ diff --git a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_weighted-eof_thumb.png b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_weighted-eof_thumb.png index 0d76d4f7..14a3e92f 100644 Binary files a/docs/auto_examples/1single/images/thumb/sphx_glr_plot_weighted-eof_thumb.png and b/docs/auto_examples/1single/images/thumb/sphx_glr_plot_weighted-eof_thumb.png differ diff --git a/docs/auto_examples/1single/plot_complex_eof.ipynb b/docs/auto_examples/1single/plot_complex_eof.ipynb index eb723a0a..91109d28 100644 --- a/docs/auto_examples/1single/plot_complex_eof.ipynb +++ b/docs/auto_examples/1single/plot_complex_eof.ipynb @@ -86,7 +86,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = xe.models.ComplexEOF(n_modes=1, use_coslat=True, random_state=7)\n", + "model = xe.single.ComplexEOF(n_modes=1, use_coslat=True, random_state=7)\n", "model.fit(Z, dim=\"month\")" ] }, diff --git a/docs/auto_examples/1single/plot_complex_eof.py b/docs/auto_examples/1single/plot_complex_eof.py index 531dbc0a..1c07cb13 100644 --- a/docs/auto_examples/1single/plot_complex_eof.py +++ b/docs/auto_examples/1single/plot_complex_eof.py @@ -40,7 +40,7 @@ # each grid cell by the square root of the cosine of the latitude # (``use_coslat=True``). 
-model = xe.models.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) +model = xe.single.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) model.fit(Z, dim="month") # %% diff --git a/docs/auto_examples/1single/plot_complex_eof.py.md5 b/docs/auto_examples/1single/plot_complex_eof.py.md5 index 04ed7fb0..d2b482e4 100644 --- a/docs/auto_examples/1single/plot_complex_eof.py.md5 +++ b/docs/auto_examples/1single/plot_complex_eof.py.md5 @@ -1 +1 @@ -0b6e38b413a8b101e33314e6275a3817 \ No newline at end of file +00a4013c7fa42189d7980ec2a54d0eae \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_complex_eof.rst b/docs/auto_examples/1single/plot_complex_eof.rst index df06e8d9..5f973424 100644 --- a/docs/auto_examples/1single/plot_complex_eof.rst +++ b/docs/auto_examples/1single/plot_complex_eof.rst @@ -46,7 +46,7 @@ Let's start by importing the necessary packages and loading the data: .. code-block:: none - + @@ -444,18 +444,18 @@ For this example, we'll use the ERA-Interim tutorial dataset ``eraint_uvz``: z (month, level, latitude, longitude) float64 ... u (month, level, latitude, longitude) float64 ... v (month, level, latitude, longitude) float64 ... - Attributes: (2)
  • @@ -497,7 +497,7 @@ each grid cell by the square root of the cosine of the latitude .. code-block:: default - model = xe.models.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) + model = xe.single.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) model.fit(Z, dim="month") @@ -508,10 +508,10 @@ each grid cell by the square root of the cosine of the latitude .. code-block:: none - /home/nrieger/miniconda3/envs/xeofs/lib/python3.11/site-packages/scipy/sparse/linalg/_eigen/_svds.py:299: UserWarning: The problem size 2 minus the constraints size 0 is too small relative to the block size 1. Using a dense eigensolver instead of LOBPCG. + /home/nrieger/miniconda3/envs/xeofs/lib/python3.11/site-packages/scipy/sparse/linalg/_eigen/_svds.py:483: UserWarning: The problem size 2 minus the constraints size 0 is too small relative to the block size 1. Using a dense eigensolver instead of LOBPCG iterations.No output of the history of the iterations. _, eigvec = lobpcg(XH_X, X, tol=tol ** 2, maxiter=maxiter, - + @@ -577,7 +577,7 @@ where the amplitude is above a certain threshold (e.g., 0.004): .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 2.009 seconds) + **Total running time of the script:** (0 minutes 1.162 seconds) .. _sphx_glr_download_auto_examples_1single_plot_complex_eof.py: diff --git a/docs/auto_examples/1single/plot_complex_eof_codeobj.pickle b/docs/auto_examples/1single/plot_complex_eof_codeobj.pickle index 8fdbec33..e927b2a4 100644 Binary files a/docs/auto_examples/1single/plot_complex_eof_codeobj.pickle and b/docs/auto_examples/1single/plot_complex_eof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_eeof.ipynb b/docs/auto_examples/1single/plot_eeof.ipynb index 93ba4890..f2c98695 100644 --- a/docs/auto_examples/1single/plot_eeof.ipynb +++ b/docs/auto_examples/1single/plot_eeof.ipynb @@ -24,9 +24,10 @@ "metadata": {}, "outputs": [], "source": [ + "import matplotlib.pyplot as plt\n", "import xarray as xr\n", + "\n", "import xeofs as xe\n", - "import matplotlib.pyplot as plt\n", "\n", "xr.set_options(display_expand_data=False)" ] @@ -98,7 +99,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = xe.models.ExtendedEOF(\n", + "model = xe.single.ExtendedEOF(\n", " n_modes=10, tau=4, embedding=40, n_pca_modes=50, use_coslat=True\n", ")\n", "model.fit(t2m, dim=\"time\")\n", @@ -183,7 +184,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_eeof.py b/docs/auto_examples/1single/plot_eeof.py index ab804f5b..4605b9fd 100644 --- a/docs/auto_examples/1single/plot_eeof.py +++ b/docs/auto_examples/1single/plot_eeof.py @@ -13,9 +13,10 @@ Let's begin by setting up the required packages and fetching the data: """ +import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe -import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -51,7 +52,7 @@ # With these parameters set, we proceed to instantiate the ``ExtendedEOF`` # model and fit our data. 
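Since every hunk in these examples applies the same rename from `xe.models` to `xe.single`, a minimal end-to-end sketch of the Extended EOF workflow under the new namespace may help orient readers. It mirrors the parameters used in the example and assumes the `air_temperature` xarray tutorial dataset can be downloaded:

```python
import xarray as xr

import xeofs as xe

# Tutorial data used throughout the EEOF example.
t2m = xr.tutorial.load_dataset("air_temperature").air

# 40 lags spaced 4 time steps apart, pre-compressed to 50 PCA modes
# so the lagged covariance matrix stays manageable.
model = xe.single.ExtendedEOF(
    n_modes=10, tau=4, embedding=40, n_pca_modes=50, use_coslat=True
)
model.fit(t2m, dim="time")

components = model.components()  # carries an extra "embedding" dimension
scores = model.scores()
```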
-model = xe.models.ExtendedEOF( +model = xe.single.ExtendedEOF( n_modes=10, tau=4, embedding=40, n_pca_modes=50, use_coslat=True ) model.fit(t2m, dim="time") diff --git a/docs/auto_examples/1single/plot_eeof.py.md5 b/docs/auto_examples/1single/plot_eeof.py.md5 index c812c58c..dd2a59f8 100644 --- a/docs/auto_examples/1single/plot_eeof.py.md5 +++ b/docs/auto_examples/1single/plot_eeof.py.md5 @@ -1 +1 @@ -df94cdd1a2a195a28f08b955bafe0677 \ No newline at end of file +dad4ad8d30e4903c94110f68dfed3398 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_eeof.rst b/docs/auto_examples/1single/plot_eeof.rst index e5e68399..4d381907 100644 --- a/docs/auto_examples/1single/plot_eeof.rst +++ b/docs/auto_examples/1single/plot_eeof.rst @@ -31,14 +31,15 @@ decomposed to yield its eigenvectors (components) and eigenvalues (explained var Let's begin by setting up the required packages and fetching the data: -.. GENERATED FROM PYTHON SOURCE LINES 15-22 +.. GENERATED FROM PYTHON SOURCE LINES 15-23 -.. code-block:: Python +.. code-block:: default + import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe - import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -51,17 +52,17 @@ Let's begin by setting up the required packages and fetching the data: .. code-block:: none - + -.. GENERATED FROM PYTHON SOURCE LINES 23-24 +.. GENERATED FROM PYTHON SOURCE LINES 24-25 Load the tutorial data. -.. GENERATED FROM PYTHON SOURCE LINES 24-27 +.. GENERATED FROM PYTHON SOURCE LINES 25-28 -.. code-block:: Python +.. code-block:: default t2m = xr.tutorial.load_dataset("air_temperature").air @@ -73,7 +74,7 @@ Load the tutorial data. -.. GENERATED FROM PYTHON SOURCE LINES 28-41 +.. GENERATED FROM PYTHON SOURCE LINES 29-42 Prior to conducting the EEOF analysis, it's essential to determine the structure of the lagged covariance matrix. This entails defining the time @@ -89,9 +90,9 @@ It's obvious that this way of constructing the lagged covariance matrix and subsequently decomposing it can be computationally expensive. For example, given our dataset's dimensions, -.. GENERATED FROM PYTHON SOURCE LINES 41-44 +.. GENERATED FROM PYTHON SOURCE LINES 42-45 -.. code-block:: Python +.. code-block:: default t2m.shape @@ -109,7 +110,7 @@ given our dataset's dimensions, -.. GENERATED FROM PYTHON SOURCE LINES 45-53 +.. GENERATED FROM PYTHON SOURCE LINES 46-54 the extended dataset would have 40 x 25 x 53 = 53000 features which is much larger than the original dataset's 1325 features. @@ -120,12 +121,12 @@ up with 40 x 50 = 200 (latent) features. With these parameters set, we proceed to instantiate the ``ExtendedEOF`` model and fit our data. -.. GENERATED FROM PYTHON SOURCE LINES 53-62 +.. GENERATED FROM PYTHON SOURCE LINES 54-63 -.. code-block:: Python +.. code-block:: default - model = xe.models.ExtendedEOF( + model = xe.single.ExtendedEOF( n_modes=10, tau=4, embedding=40, n_pca_modes=50, use_coslat=True ) model.fit(t2m, dim="time") @@ -504,102 +505,102 @@ model and fit our data. stroke: currentColor; fill: currentColor; } -
    <xarray.DataArray 'components' (mode: 10, embedding: 40, lat: 25, lon: 53)> Size: 4MB
    -    0.0003854 0.0003646 0.000357 0.0003562 ... -0.001459 -0.00105 -0.0006424
    +    
    <xarray.DataArray 'components' (mode: 10, embedding: 40, lat: 25, lon: 53)>
    +    0.0003855 0.0003648 0.0003573 0.0003565 ... -0.001427 -0.001012 -0.0006065
         Coordinates:
    -      * lat        (lat) float32 100B 15.0 17.5 20.0 22.5 ... 67.5 70.0 72.5 75.0
    -      * lon        (lon) float32 212B 200.0 202.5 205.0 207.5 ... 325.0 327.5 330.0
    -      * embedding  (embedding) int64 320B 0 4 8 12 16 20 ... 136 140 144 148 152 156
    -      * mode       (mode) int64 80B 1 2 3 4 5 6 7 8 9 10
    -    Attributes: (12/16)
    +      * lat        (lat) float32 15.0 17.5 20.0 22.5 25.0 ... 67.5 70.0 72.5 75.0
    +      * lon        (lon) float32 200.0 202.5 205.0 207.5 ... 322.5 325.0 327.5 330.0
    +      * embedding  (embedding) int64 0 4 8 12 16 20 24 ... 136 140 144 148 152 156
    +      * mode       (mode) int64 1 2 3 4 5 6 7 8 9 10
    +    Attributes: (12/15)
             model:          Extended EOF Analysis
             software:       xeofs
    -        version:        2.3.2
    -        date:           2024-03-31 20:34:10
    +        version:        1.2.0
    +        date:           2024-09-02 02:16:24
             n_modes:        10
             center:         True
             ...             ...
    +        sample_name:    sample
             feature_name:   feature
             random_state:   None
    -        verbose:        False
             compute:        True
             solver:         auto
    -        solver_kwargs:  {}
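Continuing the sketch above (a hypothetical continuation, not part of the diff itself), the extra `embedding` dimension can be inspected with the same xarray selections used later in this example:

```python
import matplotlib.pyplot as plt

# Explained variance ratio per mode.
model.explained_variance_ratio().plot()
plt.show()

# Scores of mode 4.
scores.sel(mode=4).plot()
plt.show()

# The corresponding component at 60N, resolved along the "embedding" dimension.
components.sel(mode=4, lat=60).plot()
plt.show()
```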
  • -.. GENERATED FROM PYTHON SOURCE LINES 63-67 +.. GENERATED FROM PYTHON SOURCE LINES 64-68 A notable distinction from standard EOF analysis is the incorporation of an extra ``embedding`` dimension in the components. Nonetheless, the overarching methodology mirrors traditional EOF practices. The results, for instance, can be assessed by examining the explained variance ratio. -.. GENERATED FROM PYTHON SOURCE LINES 67-71 +.. GENERATED FROM PYTHON SOURCE LINES 68-72 -.. code-block:: Python +.. code-block:: default model.explained_variance_ratio().plot() @@ -617,13 +618,13 @@ for instance, can be assessed by examining the explained variance ratio. -.. GENERATED FROM PYTHON SOURCE LINES 72-73 +.. GENERATED FROM PYTHON SOURCE LINES 73-74 Additionally, we can look into the scores; let's spotlight mode 4. -.. GENERATED FROM PYTHON SOURCE LINES 73-77 +.. GENERATED FROM PYTHON SOURCE LINES 74-78 -.. code-block:: Python +.. code-block:: default scores.sel(mode=4).plot() @@ -641,15 +642,15 @@ Additionally, we can look into the scores; let's spotlight mode 4. -.. GENERATED FROM PYTHON SOURCE LINES 78-81 +.. GENERATED FROM PYTHON SOURCE LINES 79-82 In wrapping up, we visualize the corresponding EEOF component of mode 4. For visualization purposes, we'll focus on the component at a specific latitude, in this instance, 60 degrees north. -.. GENERATED FROM PYTHON SOURCE LINES 81-84 +.. GENERATED FROM PYTHON SOURCE LINES 82-85 -.. code-block:: Python +.. code-block:: default components.sel(mode=4, lat=60).plot() @@ -669,7 +670,7 @@ latitude, in this instance, 60 degrees north. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 0.777 seconds) + **Total running time of the script:** (0 minutes 5.099 seconds) .. _sphx_glr_download_auto_examples_1single_plot_eeof.py: @@ -678,14 +679,17 @@ latitude, in this instance, 60 degrees north. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_eeof.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_eeof.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_eeof.ipynb ` + .. 
only:: html diff --git a/docs/auto_examples/1single/plot_eeof_codeobj.pickle b/docs/auto_examples/1single/plot_eeof_codeobj.pickle index 02887345..e272407f 100644 Binary files a/docs/auto_examples/1single/plot_eeof_codeobj.pickle and b/docs/auto_examples/1single/plot_eeof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_eeof_trend.ipynb b/docs/auto_examples/1single/plot_eeof_trend.ipynb index 1bb3f455..f7850e99 100644 --- a/docs/auto_examples/1single/plot_eeof_trend.ipynb +++ b/docs/auto_examples/1single/plot_eeof_trend.ipynb @@ -1,187 +1,214 @@ { - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "\n# Removing nonlinear trends with EEOF analysis\n\nThis tutorial illustrates the application of Extended EOF (EEOF) analysis\nto isolate and remove nonlinear trends within a dataset.\n\nLet's begin by setting up the required packages and fetching the data.\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "import xarray as xr\nimport xeofs as xe\nimport matplotlib.pyplot as plt\n\nxr.set_options(display_expand_data=False)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We load the sea surface temperature (SST) data from the xarray tutorial.\nThe dataset consists of monthly averages from 1970 to 2021. To ensure the seasonal\ncycle doesn't overshadow the analysis, we remove the monthly climatologies.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "sst = xr.tutorial.open_dataset(\"ersstv5\").sst\nsst = sst.groupby(\"time.month\") - sst.groupby(\"time.month\").mean(\"time\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We start by performing a standard EOF analysis on the dataset.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "eof = xe.models.EOF(n_modes=10)\neof.fit(sst, dim=\"time\")\nscores = eof.scores()\ncomponents = eof.components()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We immediately see that the first mode represents the global warming trend.\nYet, the signal is somewhat muddled by short-term and year-to-year variations.\nNote the pronounced spikes around 1998 and 2016, hinting at the leakage of\nENSO signatures into this mode.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\nscores.sel(mode=1).plot(ax=ax[0])\ncomponents.sel(mode=1).plot(ax=ax[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Now, let's try to identify this trend more cleanly. To this end, we perform an\nEEOF analysis on the same data with a suitably large embedding dimension.\nWe choose an embedding dimensioncorresponding to 120 months which is large enough\nto capture long-term trends. 
To speed up computation, we apply the EEOF analysis\nto the extended (lag) covariance matrix derived from the first 50 PCs.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "eeof = xe.models.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50)\neeof.fit(sst, dim=\"time\")\ncomponents_ext = eeof.components()\nscores_ext = eeof.scores()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The first mode now represents the global warming trend much more clearly.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\nscores_ext.sel(mode=1).plot(ax=ax[0])\ncomponents_ext.sel(mode=1, embedding=0).plot(ax=ax[1])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can use this to the first mode to remove this nonlinear trend from our original dataset.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "sst_trends = eeof.inverse_transform(scores_ext.sel(mode=1))\nsst_detrended = sst - sst_trends.drop_vars(\"mode\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "Reapplying the standard EOF analysis on our now detrended dataset:\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "eof_model_detrended = xe.models.EOF(n_modes=5)\neof_model_detrended.fit(sst_detrended, dim=\"time\")\nscores_detrended = eof_model_detrended.scores()\ncomponents_detrended = eof_model_detrended.components()" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The first mode now represents ENSO without any trend component.\n\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": false - }, - "outputs": [], - "source": [ - "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\nscores_detrended.sel(mode=1).plot(ax=ax[0])\ncomponents_detrended.sel(mode=1).plot(ax=ax[1])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.4" - } - }, - "nbformat": 4, - "nbformat_minor": 0 -} \ No newline at end of file + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Removing nonlinear trends with EEOF analysis\n", + "\n", + "This tutorial illustrates the application of Extended EOF (EEOF) analysis\n", + "to isolate and remove nonlinear trends within a dataset.\n", + "\n", + "Let's begin by setting up the required packages and fetching the data.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import matplotlib.pyplot as plt\n", + "import xarray as xr\n", + "\n", + "import xeofs as xe\n", + "\n", + "xr.set_options(display_expand_data=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We load the sea surface temperature (SST) data from the xarray tutorial.\n", + "The dataset consists of monthly averages from 1970 to 2021. 
To ensure the seasonal\n", + "cycle doesn't overshadow the analysis, we remove the monthly climatologies.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sst = xr.tutorial.open_dataset(\"ersstv5\").sst\n", + "sst = sst.groupby(\"time.month\") - sst.groupby(\"time.month\").mean(\"time\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We start by performing a standard EOF analysis on the dataset.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "eof = xe.single.EOF(n_modes=10)\n", + "eof.fit(sst, dim=\"time\")\n", + "scores = eof.scores()\n", + "components = eof.components()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We immediately see that the first mode represents the global warming trend.\n", + "Yet, the signal is somewhat muddled by short-term and year-to-year variations.\n", + "Note the pronounced spikes around 1998 and 2016, hinting at the leakage of\n", + "ENSO signatures into this mode.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\n", + "scores.sel(mode=1).plot(ax=ax[0])\n", + "components.sel(mode=1).plot(ax=ax[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, let's try to identify this trend more cleanly. To this end, we perform an\n", + "EEOF analysis on the same data with a suitably large embedding dimension.\n", + "We choose an embedding dimensioncorresponding to 120 months which is large enough\n", + "to capture long-term trends. To speed up computation, we apply the EEOF analysis\n", + "to the extended (lag) covariance matrix derived from the first 50 PCs.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "eeof = xe.single.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50)\n", + "eeof.fit(sst, dim=\"time\")\n", + "components_ext = eeof.components()\n", + "scores_ext = eeof.scores()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The first mode now represents the global warming trend much more clearly.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\n", + "scores_ext.sel(mode=1).plot(ax=ax[0])\n", + "components_ext.sel(mode=1, embedding=0).plot(ax=ax[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can use this to the first mode to remove this nonlinear trend from our original dataset.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sst_trends = eeof.inverse_transform(scores_ext.sel(mode=1))\n", + "sst_detrended = sst - sst_trends" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Reapplying the standard EOF analysis on our now detrended dataset:\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "eof_model_detrended = xe.single.EOF(n_modes=5)\n", + "eof_model_detrended.fit(sst_detrended, dim=\"time\")\n", + "scores_detrended = eof_model_detrended.scores()\n", + "components_detrended = eof_model_detrended.components()" + ] + }, + { + "cell_type": "markdown", + 
"metadata": {}, + "source": [ + "The first mode now represents ENSO without any trend component.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(1, 2, figsize=(10, 5))\n", + "scores_detrended.sel(mode=1).plot(ax=ax[0])\n", + "components_detrended.sel(mode=1).plot(ax=ax[1])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/auto_examples/1single/plot_eeof_trend.py b/docs/auto_examples/1single/plot_eeof_trend.py index d309c138..86fb6526 100644 --- a/docs/auto_examples/1single/plot_eeof_trend.py +++ b/docs/auto_examples/1single/plot_eeof_trend.py @@ -8,9 +8,10 @@ Let's begin by setting up the required packages and fetching the data. """ +import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe -import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -26,7 +27,7 @@ # %% # We start by performing a standard EOF analysis on the dataset. -eof = xe.models.EOF(n_modes=10) +eof = xe.single.EOF(n_modes=10) eof.fit(sst, dim="time") scores = eof.scores() components = eof.components() @@ -49,7 +50,7 @@ # to capture long-term trends. To speed up computation, we apply the EEOF analysis # to the extended (lag) covariance matrix derived from the first 50 PCs. -eeof = xe.models.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) +eeof = xe.single.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) eeof.fit(sst, dim="time") components_ext = eeof.components() scores_ext = eeof.scores() @@ -65,13 +66,13 @@ # We can use this to the first mode to remove this nonlinear trend from our original dataset. sst_trends = eeof.inverse_transform(scores_ext.sel(mode=1)) -sst_detrended = sst - sst_trends.drop_vars("mode") +sst_detrended = sst - sst_trends # %% # Reapplying the standard EOF analysis on our now detrended dataset: -eof_model_detrended = xe.models.EOF(n_modes=5) +eof_model_detrended = xe.single.EOF(n_modes=5) eof_model_detrended.fit(sst_detrended, dim="time") scores_detrended = eof_model_detrended.scores() components_detrended = eof_model_detrended.components() diff --git a/docs/auto_examples/1single/plot_eeof_trend.py.md5 b/docs/auto_examples/1single/plot_eeof_trend.py.md5 index 1bea8bf4..ce05ba56 100644 --- a/docs/auto_examples/1single/plot_eeof_trend.py.md5 +++ b/docs/auto_examples/1single/plot_eeof_trend.py.md5 @@ -1 +1 @@ -4ca793199afeed8a98375c67b7f63197 \ No newline at end of file +e84d0364f470ed5e3eae38a735ab52e0 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_eeof_trend.rst b/docs/auto_examples/1single/plot_eeof_trend.rst index 72ca435b..b273263c 100644 --- a/docs/auto_examples/1single/plot_eeof_trend.rst +++ b/docs/auto_examples/1single/plot_eeof_trend.rst @@ -26,14 +26,15 @@ to isolate and remove nonlinear trends within a dataset. Let's begin by setting up the required packages and fetching the data. -.. GENERATED FROM PYTHON SOURCE LINES 10-17 +.. GENERATED FROM PYTHON SOURCE LINES 10-18 .. 
code-block:: default + import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe - import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -46,17 +47,17 @@ Let's begin by setting up the required packages and fetching the data. .. code-block:: none - + -.. GENERATED FROM PYTHON SOURCE LINES 18-21 +.. GENERATED FROM PYTHON SOURCE LINES 19-22 We load the sea surface temperature (SST) data from the xarray tutorial. The dataset consists of monthly averages from 1970 to 2021. To ensure the seasonal cycle doesn't overshadow the analysis, we remove the monthly climatologies. -.. GENERATED FROM PYTHON SOURCE LINES 21-26 +.. GENERATED FROM PYTHON SOURCE LINES 22-27 .. code-block:: default @@ -72,16 +73,16 @@ cycle doesn't overshadow the analysis, we remove the monthly climatologies. -.. GENERATED FROM PYTHON SOURCE LINES 27-28 +.. GENERATED FROM PYTHON SOURCE LINES 28-29 We start by performing a standard EOF analysis on the dataset. -.. GENERATED FROM PYTHON SOURCE LINES 28-34 +.. GENERATED FROM PYTHON SOURCE LINES 29-35 .. code-block:: default - eof = xe.models.EOF(n_modes=10) + eof = xe.single.EOF(n_modes=10) eof.fit(sst, dim="time") scores = eof.scores() components = eof.components() @@ -93,14 +94,14 @@ We start by performing a standard EOF analysis on the dataset. -.. GENERATED FROM PYTHON SOURCE LINES 35-39 +.. GENERATED FROM PYTHON SOURCE LINES 36-40 We immediately see that the first mode represents the global warming trend. Yet, the signal is somewhat muddled by short-term and year-to-year variations. Note the pronounced spikes around 1998 and 2016, hinting at the leakage of ENSO signatures into this mode. -.. GENERATED FROM PYTHON SOURCE LINES 39-45 +.. GENERATED FROM PYTHON SOURCE LINES 40-46 .. code-block:: default @@ -124,11 +125,11 @@ ENSO signatures into this mode. .. code-block:: none - + -.. GENERATED FROM PYTHON SOURCE LINES 46-51 +.. GENERATED FROM PYTHON SOURCE LINES 47-52 Now, let's try to identify this trend more cleanly. To this end, we perform an EEOF analysis on the same data with a suitably large embedding dimension. @@ -136,12 +137,12 @@ We choose an embedding dimensioncorresponding to 120 months which is large enoug to capture long-term trends. To speed up computation, we apply the EEOF analysis to the extended (lag) covariance matrix derived from the first 50 PCs. -.. GENERATED FROM PYTHON SOURCE LINES 51-57 +.. GENERATED FROM PYTHON SOURCE LINES 52-58 .. code-block:: default - eeof = xe.models.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) + eeof = xe.single.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) eeof.fit(sst, dim="time") components_ext = eeof.components() scores_ext = eeof.scores() @@ -153,11 +154,11 @@ to the extended (lag) covariance matrix derived from the first 50 PCs. -.. GENERATED FROM PYTHON SOURCE LINES 58-59 +.. GENERATED FROM PYTHON SOURCE LINES 59-60 The first mode now represents the global warming trend much more clearly. -.. GENERATED FROM PYTHON SOURCE LINES 59-64 +.. GENERATED FROM PYTHON SOURCE LINES 60-65 .. code-block:: default @@ -180,21 +181,21 @@ The first mode now represents the global warming trend much more clearly. .. code-block:: none - + -.. GENERATED FROM PYTHON SOURCE LINES 65-66 +.. GENERATED FROM PYTHON SOURCE LINES 66-67 We can use this to the first mode to remove this nonlinear trend from our original dataset. -.. GENERATED FROM PYTHON SOURCE LINES 66-71 +.. GENERATED FROM PYTHON SOURCE LINES 67-72 .. 
code-block:: default sst_trends = eeof.inverse_transform(scores_ext.sel(mode=1)) - sst_detrended = sst - sst_trends.drop_vars("mode") + sst_detrended = sst - sst_trends @@ -204,16 +205,16 @@ We can use this to the first mode to remove this nonlinear trend from our origin -.. GENERATED FROM PYTHON SOURCE LINES 72-73 +.. GENERATED FROM PYTHON SOURCE LINES 73-74 Reapplying the standard EOF analysis on our now detrended dataset: -.. GENERATED FROM PYTHON SOURCE LINES 73-80 +.. GENERATED FROM PYTHON SOURCE LINES 74-81 .. code-block:: default - eof_model_detrended = xe.models.EOF(n_modes=5) + eof_model_detrended = xe.single.EOF(n_modes=5) eof_model_detrended.fit(sst_detrended, dim="time") scores_detrended = eof_model_detrended.scores() components_detrended = eof_model_detrended.components() @@ -226,11 +227,11 @@ Reapplying the standard EOF analysis on our now detrended dataset: -.. GENERATED FROM PYTHON SOURCE LINES 81-82 +.. GENERATED FROM PYTHON SOURCE LINES 82-83 The first mode now represents ENSO without any trend component. -.. GENERATED FROM PYTHON SOURCE LINES 82-88 +.. GENERATED FROM PYTHON SOURCE LINES 83-89 .. code-block:: default @@ -254,14 +255,14 @@ The first mode now represents ENSO without any trend component. .. code-block:: none - + .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 28.072 seconds) + **Total running time of the script:** (0 minutes 7.526 seconds) .. _sphx_glr_download_auto_examples_1single_plot_eeof_trend.py: diff --git a/docs/auto_examples/1single/plot_eeof_trend_codeobj.pickle b/docs/auto_examples/1single/plot_eeof_trend_codeobj.pickle index dfc530a8..f2098a18 100644 Binary files a/docs/auto_examples/1single/plot_eeof_trend_codeobj.pickle and b/docs/auto_examples/1single/plot_eeof_trend_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_eof-smode.ipynb b/docs/auto_examples/1single/plot_eof-smode.ipynb index 97441b1c..2e186a73 100644 --- a/docs/auto_examples/1single/plot_eof-smode.ipynb +++ b/docs/auto_examples/1single/plot_eof-smode.ipynb @@ -27,7 +27,7 @@ "from cartopy.crs import EqualEarth, PlateCarree\n", "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import SparsePCA" + "import xeofs as xe" ] }, { @@ -62,7 +62,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = SparsePCA(n_modes=4, alpha=1e-5)\n", + "model = xe.single.SparsePCA(n_modes=4, alpha=1e-5)\n", "model.fit(sst, dim=\"time\")\n", "expvar = model.explained_variance()\n", "expvar_ratio = model.explained_variance_ratio()\n", diff --git a/docs/auto_examples/1single/plot_eof-smode.py b/docs/auto_examples/1single/plot_eof-smode.py index a81d00c0..479367d7 100644 --- a/docs/auto_examples/1single/plot_eof-smode.py +++ b/docs/auto_examples/1single/plot_eof-smode.py @@ -18,7 +18,7 @@ from cartopy.crs import EqualEarth, PlateCarree from matplotlib.gridspec import GridSpec -from xeofs.models import SparsePCA +import xeofs as xe # %% # We use sea surface temperature data from 1990 to 2017, consistent with the original paper. @@ -29,7 +29,7 @@ # %% # We perform sparse PCA using the `alpha` and `beta` parameters, which define the sparsity imposed by the elastic net (refer to Table 1 in the paper). In our analysis, we set `alpha` to 1e-5, as specified by the authors. Although the authors do not specify a value for `beta`, it appears that the results are not highly sensitive to this parameter. Therefore, we use the default `beta` value of 1e-4. 
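To make the `alpha`/`beta` discussion above concrete, here is a small sketch of the sparse PCA call under the renamed namespace. The data-loading line is an assumption (the example itself uses an SST subset from 1990 to 2017) and requires the `ersstv5` xarray tutorial dataset:

```python
import xarray as xr

import xeofs as xe

# Assumed data source; the example restricts this to 1990-2017.
sst = xr.tutorial.open_dataset("ersstv5")["sst"]

# alpha sets the elastic-net sparsity (1e-5 as in the paper);
# beta is left at its default value of 1e-4.
model = xe.single.SparsePCA(n_modes=4, alpha=1e-5)
model.fit(sst, dim="time")

expvar_ratio = model.explained_variance_ratio()
components = model.components()
scores = model.scores()
```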
-model = SparsePCA(n_modes=4, alpha=1e-5) +model = xe.single.SparsePCA(n_modes=4, alpha=1e-5) model.fit(sst, dim="time") expvar = model.explained_variance() expvar_ratio = model.explained_variance_ratio() diff --git a/docs/auto_examples/1single/plot_eof-smode.py.md5 b/docs/auto_examples/1single/plot_eof-smode.py.md5 index 59576489..981c2288 100644 --- a/docs/auto_examples/1single/plot_eof-smode.py.md5 +++ b/docs/auto_examples/1single/plot_eof-smode.py.md5 @@ -1 +1 @@ -bb0b6c390528787d00735dd586afb457 \ No newline at end of file +6d0f12994146614921c2bbc9e655d938 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_eof-smode.rst b/docs/auto_examples/1single/plot_eof-smode.rst index 1f930ac2..c8012335 100644 --- a/docs/auto_examples/1single/plot_eof-smode.rst +++ b/docs/auto_examples/1single/plot_eof-smode.rst @@ -40,7 +40,7 @@ References from cartopy.crs import EqualEarth, PlateCarree from matplotlib.gridspec import GridSpec - from xeofs.models import SparsePCA + import xeofs as xe @@ -77,7 +77,7 @@ We perform sparse PCA using the `alpha` and `beta` parameters, which define the .. code-block:: default - model = SparsePCA(n_modes=4, alpha=1e-5) + model = xe.single.SparsePCA(n_modes=4, alpha=1e-5) model.fit(sst, dim="time") expvar = model.explained_variance() expvar_ratio = model.explained_variance_ratio() @@ -159,7 +159,7 @@ Examining the first four modes, we clearly identify ENSO as the fourth mode. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 7.992 seconds) + **Total running time of the script:** (0 minutes 7.551 seconds) .. _sphx_glr_download_auto_examples_1single_plot_eof-smode.py: diff --git a/docs/auto_examples/1single/plot_eof-smode_codeobj.pickle b/docs/auto_examples/1single/plot_eof-smode_codeobj.pickle index 78dfade0..f93d16e6 100644 Binary files a/docs/auto_examples/1single/plot_eof-smode_codeobj.pickle and b/docs/auto_examples/1single/plot_eof-smode_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_eof-tmode.ipynb b/docs/auto_examples/1single/plot_eof-tmode.ipynb index ac26dc99..e9254f10 100644 --- a/docs/auto_examples/1single/plot_eof-tmode.ipynb +++ b/docs/auto_examples/1single/plot_eof-tmode.ipynb @@ -18,12 +18,12 @@ "metadata": {}, "outputs": [], "source": [ - "import xarray as xr\n", "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", + "import xarray as xr\n", "from cartopy.crs import EqualEarth, PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import EOF\n", + "import xeofs as xe\n", "\n", "sst = xr.tutorial.open_dataset(\"ersstv5\")[\"sst\"]" ] @@ -42,7 +42,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = EOF(n_modes=5)\n", + "model = xe.single.EOF(n_modes=5)\n", "model.fit(sst, dim=(\"lat\", \"lon\"))\n", "expvar = model.explained_variance_ratio()\n", "components = model.components()\n", @@ -99,7 +99,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_eof-tmode.py b/docs/auto_examples/1single/plot_eof-tmode.py index 6a618e3e..f85b97e9 100644 --- a/docs/auto_examples/1single/plot_eof-tmode.py +++ b/docs/auto_examples/1single/plot_eof-tmode.py @@ -7,19 +7,19 @@ Load packages and data: """ -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import EqualEarth, PlateCarree +from matplotlib.gridspec 
import GridSpec -from xeofs.models import EOF +import xeofs as xe sst = xr.tutorial.open_dataset("ersstv5")["sst"] # %% # Perform the actual analysis -model = EOF(n_modes=5) +model = xe.single.EOF(n_modes=5) model.fit(sst, dim=("lat", "lon")) expvar = model.explained_variance_ratio() components = model.components() diff --git a/docs/auto_examples/1single/plot_eof-tmode.py.md5 b/docs/auto_examples/1single/plot_eof-tmode.py.md5 index 0e6e4115..2a678441 100644 --- a/docs/auto_examples/1single/plot_eof-tmode.py.md5 +++ b/docs/auto_examples/1single/plot_eof-tmode.py.md5 @@ -1 +1 @@ -cb4eb5e93d5365bebbbecc2581f70358 \ No newline at end of file +af1f1248f081012ef3cb40381d6bbe73 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_eof-tmode.rst b/docs/auto_examples/1single/plot_eof-tmode.rst index 04053bb4..b0857bf6 100644 --- a/docs/auto_examples/1single/plot_eof-tmode.rst +++ b/docs/auto_examples/1single/plot_eof-tmode.rst @@ -27,15 +27,15 @@ Load packages and data: .. GENERATED FROM PYTHON SOURCE LINES 9-19 -.. code-block:: Python +.. code-block:: default - import xarray as xr import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec + import xarray as xr from cartopy.crs import EqualEarth, PlateCarree + from matplotlib.gridspec import GridSpec - from xeofs.models import EOF + import xeofs as xe sst = xr.tutorial.open_dataset("ersstv5")["sst"] @@ -52,10 +52,10 @@ Perform the actual analysis .. GENERATED FROM PYTHON SOURCE LINES 21-28 -.. code-block:: Python +.. code-block:: default - model = EOF(n_modes=5) + model = xe.single.EOF(n_modes=5) model.fit(sst, dim=("lat", "lon")) expvar = model.explained_variance_ratio() components = model.components() @@ -74,7 +74,7 @@ Create figure showing the first two modes .. GENERATED FROM PYTHON SOURCE LINES 30-48 -.. code-block:: Python +.. code-block:: default proj = EqualEarth(central_longitude=180) @@ -109,7 +109,7 @@ Create figure showing the first two modes .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 2.323 seconds) + **Total running time of the script:** (0 minutes 3.854 seconds) .. _sphx_glr_download_auto_examples_1single_plot_eof-tmode.py: @@ -118,14 +118,17 @@ Create figure showing the first two modes .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_eof-tmode.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_eof-tmode.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_eof-tmode.ipynb ` + .. 
only:: html diff --git a/docs/auto_examples/1single/plot_eof-tmode_codeobj.pickle b/docs/auto_examples/1single/plot_eof-tmode_codeobj.pickle index de37415c..34517ab3 100644 Binary files a/docs/auto_examples/1single/plot_eof-tmode_codeobj.pickle and b/docs/auto_examples/1single/plot_eof-tmode_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_gwpca.ipynb b/docs/auto_examples/1single/plot_gwpca.ipynb index 2d12398f..6cc31b2c 100644 --- a/docs/auto_examples/1single/plot_gwpca.ipynb +++ b/docs/auto_examples/1single/plot_gwpca.ipynb @@ -43,18 +43,18 @@ "outputs": [], "source": [ "# For the analysis\n", - "import numpy as np\n", - "import xarray as xr\n", - "import xeofs as xe\n", - "\n", "# For visualization\n", "import matplotlib.pyplot as plt\n", - "import seaborn as sns\n", + "import numpy as np\n", "\n", "# For accessing R packages\n", "import rpy2.robjects as ro\n", + "import seaborn as sns\n", + "import xarray as xr\n", + "from rpy2.robjects import pandas2ri\n", "from rpy2.robjects.packages import importr\n", - "from rpy2.robjects import pandas2ri" + "\n", + "import xeofs as xe" ] }, { @@ -155,7 +155,7 @@ "metadata": {}, "outputs": [], "source": [ - "gwpca = xe.models.GWPCA(\n", + "gwpca = xe.single.GWPCA(\n", " n_modes=5,\n", " standardize=True,\n", " metric=\"euclidean\",\n", @@ -285,7 +285,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_gwpca.py b/docs/auto_examples/1single/plot_gwpca.py index f1542228..2a9462cf 100644 --- a/docs/auto_examples/1single/plot_gwpca.py +++ b/docs/auto_examples/1single/plot_gwpca.py @@ -32,18 +32,18 @@ """ # For the analysis -import numpy as np -import xarray as xr -import xeofs as xe - # For visualization import matplotlib.pyplot as plt -import seaborn as sns +import numpy as np # For accessing R packages import rpy2.robjects as ro -from rpy2.robjects.packages import importr +import seaborn as sns +import xarray as xr from rpy2.robjects import pandas2ri +from rpy2.robjects.packages import importr + +import xeofs as xe # %% # Next, we'll install the R package `mvoutlier `_ @@ -96,7 +96,7 @@ # kilometers. Lastly, we'll standardize the input to ensure consistent scales # for the chemical elements. -gwpca = xe.models.GWPCA( +gwpca = xe.single.GWPCA( n_modes=5, standardize=True, metric="euclidean", diff --git a/docs/auto_examples/1single/plot_gwpca.py.md5 b/docs/auto_examples/1single/plot_gwpca.py.md5 index 638f2378..74b515fb 100644 --- a/docs/auto_examples/1single/plot_gwpca.py.md5 +++ b/docs/auto_examples/1single/plot_gwpca.py.md5 @@ -1 +1 @@ -77b9f112f4bc958b9490200a1e64d954 \ No newline at end of file +ae1e8af4b123c765c1cfd4d63d9386c2 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_gwpca.rst b/docs/auto_examples/1single/plot_gwpca.rst index 11fe5aa3..35913c3a 100644 --- a/docs/auto_examples/1single/plot_gwpca.rst +++ b/docs/auto_examples/1single/plot_gwpca.rst @@ -51,22 +51,22 @@ Let's import the necessary packages. .. GENERATED FROM PYTHON SOURCE LINES 33-48 -.. code-block:: Python +.. 
code-block:: default # For the analysis - import numpy as np - import xarray as xr - import xeofs as xe - # For visualization import matplotlib.pyplot as plt - import seaborn as sns + import numpy as np # For accessing R packages import rpy2.robjects as ro - from rpy2.robjects.packages import importr + import seaborn as sns + import xarray as xr from rpy2.robjects import pandas2ri + from rpy2.robjects.packages import importr + + import xeofs as xe @@ -82,7 +82,7 @@ using the `rpy2 `_ package. .. GENERATED FROM PYTHON SOURCE LINES 51-57 -.. code-block:: Python +.. code-block:: default xr.set_options(display_expand_data=False) @@ -98,189 +98,6 @@ using the `rpy2 `_ package. .. code-block:: none - R[write to console]: also installing the dependencies ‘DEoptimR’, ‘sgeostat’, ‘robustbase’ - - - R[write to console]: trying URL 'https://cloud.r-project.org/src/contrib/DEoptimR_1.1-3.tar.gz' - - R[write to console]: Content type 'application/x-gzip' - R[write to console]: length 19222 bytes (18 KB) - - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: - - R[write to console]: downloaded 18 KB - - - R[write to console]: trying URL 'https://cloud.r-project.org/src/contrib/sgeostat_1.0-27.tar.gz' - - R[write to console]: Content type 'application/x-gzip' - R[write to console]: length 35685 bytes (34 KB) - - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to 
console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: - - R[write to console]: downloaded 34 KB - - - R[write to console]: trying URL 'https://cloud.r-project.org/src/contrib/robustbase_0.99-2.tar.gz' - - R[write to console]: Content type 'application/x-gzip' - R[write to console]: length 2292086 bytes (2.2 MB) - - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: = - R[write to console]: - - R[write to console]: downloaded 2.2 MB - - R[write to console]: trying URL 'https://cloud.r-project.org/src/contrib/mvoutlier_2.1.1.tar.gz' R[write to console]: Content type 'application/x-gzip' @@ -345,7 +162,7 @@ using the `rpy2 `_ package. R[write to console]: R[write to console]: The downloaded source packages are in - ‘/tmp/Rtmpr8iXHn/downloaded_packages’ + ‘/tmp/RtmpRG5HUi/downloaded_packages’ R[write to console]: R[write to console]: @@ -355,7 +172,7 @@ using the `rpy2 `_ package. R[write to console]: done - [0] + [0] @@ -367,7 +184,7 @@ in the Baltic Sea region. This will help us visually represent the GWPCA results .. GENERATED FROM PYTHON SOURCE LINES 61-75 -.. code-block:: Python +.. code-block:: default ro.r( @@ -533,7 +350,7 @@ Since ``xeofs`` uses ``xarray``, we convert the data into an ``xarray.DataArray` .. GENERATED FROM PYTHON SOURCE LINES 77-86 -.. code-block:: Python +.. code-block:: default data_df = data_df.rename(columns={"ID": "station"}).set_index("station") @@ -915,22 +732,22 @@ Since ``xeofs`` uses ``xarray``, we convert the data into an ``xarray.DataArray` stroke: currentColor; fill: currentColor; } -
    <xarray.DataArray (element: 10, station: 768)> Size: 61kB
    +    
    <xarray.DataArray (element: 10, station: 768)>
         43.61 58.73 58.14 43.98 60.9 54.0 82.72 ... 0.196 0.202 0.207 0.109 0.141 0.185
         Coordinates:
    -      * station  (station) object 6kB MultiIndex
    -      * x        (station) float64 6kB -6.197e+05 2.147e+05 ... -2.82e+05 -1.273e+05
    -      * y        (station) float64 6kB 6.805e+06 7.746e+06 ... 5.796e+06 6.523e+06
    -      * element  (element) object 80B 'SiO2_T' 'TiO2_T' ... 'K2O_T' 'P2O5_T'


@@ -974,10 +791,10 @@ for the chemical elements. .. GENERATED FROM PYTHON SOURCE LINES 98-110 -.. code-block:: Python +.. code-block:: default - gwpca = xe.models.GWPCA( + gwpca = xe.single.GWPCA( n_modes=5, standardize=True, metric="euclidean", @@ -1359,28 +1176,15 @@ for the chemical elements. stroke: currentColor; fill: currentColor; } -
    <xarray.DataArray 'components' (mode: 5, element: 10, station: 768)> Size: 307kB
    +    
    <xarray.DataArray 'components' (mode: 5, element: 10, station: 768)>
         0.1813 -0.3584 0.1243 0.2 -0.3812 ... 0.1229 -0.2865 0.4732 0.4197 -0.4249
         Coordinates:
    -      * mode     (mode) int64 40B 1 2 3 4 5
    -      * element  (element) object 80B 'SiO2_T' 'TiO2_T' ... 'K2O_T' 'P2O5_T'
    -      * station  (station) object 6kB MultiIndex
    -      * x        (station) float64 6kB -6.197e+05 2.147e+05 ... -2.82e+05 -1.273e+05
    -      * y        (station) float64 6kB 6.805e+06 7.746e+06 ... 5.796e+06 6.523e+06
    -    Attributes: (12/16)
    -        model:          GWPCA
    -        software:       xeofs
    -        version:        2.3.2
    -        date:           2024-03-31 21:13:03
    -        n_modes:        5
    -        center:         True
    -        ...             ...
    -        feature_name:   feature
    -        random_state:   None
    -        verbose:        False
    -        compute:        True
    -        solver:         auto
    -        solver_kwargs:  {}
@@ -1461,7 +1265,7 @@ dominate the local PCAs. .. GENERATED FROM PYTHON SOURCE LINES 116-120 -.. code-block:: Python +.. code-block:: default llwc = gwpca.largest_locally_weighted_components() @@ -1838,20 +1642,20 @@ dominate the local PCAs. stroke: currentColor; fill: currentColor; } -
    <xarray.DataArray 'largest_locally_weighted_components' (mode: 5, station: 768)> Size: 31kB
    +    
    <xarray.DataArray 'largest_locally_weighted_components' (mode: 5, station: 768)>
         'MgO_T' 'Al2O3_T' 'MgO_T' 'TiO2_T' ... 'K2O_T' 'Fe2O3_T' 'Fe2O3_T' 'CaO_T'
         Coordinates:
    -      * mode     (mode) int64 40B 1 2 3 4 5
    -      * station  (station) object 6kB MultiIndex
    -      * x        (station) float64 6kB -6.197e+05 2.147e+05 ... -2.82e+05 -1.273e+05
    -      * y        (station) float64 6kB 6.805e+06 7.746e+06 ... 5.796e+06 6.523e+06


  • @@ -1887,7 +1691,7 @@ For demonstation, we'll concentrate on the first mode: .. GENERATED FROM PYTHON SOURCE LINES 126-157 -.. code-block:: Python +.. code-block:: default llwc1_df = llwc.sel(mode=1).to_dataframe() @@ -1942,7 +1746,7 @@ roughly 40% to 70%. .. GENERATED FROM PYTHON SOURCE LINES 163-176 -.. code-block:: Python +.. code-block:: default @@ -1972,7 +1776,7 @@ roughly 40% to 70%. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 34.593 seconds) + **Total running time of the script:** (0 minutes 39.918 seconds) .. _sphx_glr_download_auto_examples_1single_plot_gwpca.py: @@ -1981,14 +1785,17 @@ roughly 40% to 70%. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_gwpca.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_gwpca.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_gwpca.ipynb ` + .. only:: html diff --git a/docs/auto_examples/1single/plot_gwpca_codeobj.pickle b/docs/auto_examples/1single/plot_gwpca_codeobj.pickle index 508520c6..bd8fd2fe 100644 Binary files a/docs/auto_examples/1single/plot_gwpca_codeobj.pickle and b/docs/auto_examples/1single/plot_gwpca_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_hilbert_eof.ipynb b/docs/auto_examples/1single/plot_hilbert_eof.ipynb index f61538cd..e704627c 100644 --- a/docs/auto_examples/1single/plot_hilbert_eof.ipynb +++ b/docs/auto_examples/1single/plot_hilbert_eof.ipynb @@ -67,7 +67,7 @@ "outputs": [], "source": [ "kwargs = dict(n_modes=4, use_coslat=True, random_state=7)\n", - "model = xe.models.HilbertEOF(padding=\"none\", **kwargs)" + "model = xe.single.HilbertEOF(padding=\"none\", **kwargs)" ] }, { @@ -175,7 +175,7 @@ "metadata": {}, "outputs": [], "source": [ - "model_ext = xe.models.HilbertEOF(padding=\"exp\", decay_factor=0.01, **kwargs)\n", + "model_ext = xe.single.HilbertEOF(padding=\"exp\", decay_factor=0.01, **kwargs)\n", "model_ext.fit(sst, dim=\"time\")\n", "scores_ext = model_ext.scores().sel(mode=slice(1, 4))\n", "\n", diff --git a/docs/auto_examples/1single/plot_hilbert_eof.py b/docs/auto_examples/1single/plot_hilbert_eof.py index 2d7e4438..a2fd42ba 100644 --- a/docs/auto_examples/1single/plot_hilbert_eof.py +++ b/docs/auto_examples/1single/plot_hilbert_eof.py @@ -45,7 +45,7 @@ # options to mitigate potential edge effects, we'll begin with no padding. kwargs = dict(n_modes=4, use_coslat=True, random_state=7) -model = xe.models.HilbertEOF(padding="none", **kwargs) +model = xe.single.HilbertEOF(padding="none", **kwargs) # %% # Now, we fit the model to the data and extract the explained variance. @@ -95,7 +95,7 @@ # controls the decay rate of the exponential function measured in multiples of # the time series length. 
Let's see how the decay parameter impacts the results: -model_ext = xe.models.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) +model_ext = xe.single.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) model_ext.fit(sst, dim="time") scores_ext = model_ext.scores().sel(mode=slice(1, 4)) diff --git a/docs/auto_examples/1single/plot_hilbert_eof.py.md5 b/docs/auto_examples/1single/plot_hilbert_eof.py.md5 index d95badad..c20a2e8f 100644 --- a/docs/auto_examples/1single/plot_hilbert_eof.py.md5 +++ b/docs/auto_examples/1single/plot_hilbert_eof.py.md5 @@ -1 +1 @@ -08b8dfc4f967adfe6099dc728512e122 \ No newline at end of file +28f29c6a357ea6a325769ce0c7554634 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_hilbert_eof.rst b/docs/auto_examples/1single/plot_hilbert_eof.rst index 77b50f91..ddcf5a25 100644 --- a/docs/auto_examples/1single/plot_hilbert_eof.rst +++ b/docs/auto_examples/1single/plot_hilbert_eof.rst @@ -435,14 +435,14 @@ Let's start by importing the necessary packages and loading the data: * lat (lat) float32 88.0 86.0 84.0 82.0 80.0 ... -82.0 -84.0 -86.0 -88.0 * lon (lon) float32 0.0 2.0 4.0 6.0 8.0 ... 350.0 352.0 354.0 356.0 358.0 * time (time) datetime64[ns] 1970-01-01 1970-02-01 ... 2021-12-01 - Attributes: (9)
@@ -498,7 +498,7 @@ options to mitigate potential edge effects, we'll begin with no padding. kwargs = dict(n_modes=4, use_coslat=True, random_state=7) - model = xe.models.HilbertEOF(padding="none", **kwargs) + model = xe.single.HilbertEOF(padding="none", **kwargs) @@ -911,10 +911,10 @@ Let's have a look at the explained variance of the first five modes: fill: currentColor; }
    <xarray.DataArray 'explained_variance' (mode: 4)>
    -    array([50686.,  1705.,  1105.,   519.])
    +    5.069e+04 1.705e+03 1.105e+03 519.0
         Coordinates:
           * mode     (mode) int64 1 2 3 4
    -    Attributes: (16)
    + Attributes: (15)

    @@ -1304,10 +1304,10 @@ variance, we see that the first mode explains about 88.8 %. fill: currentColor; }
    <xarray.DataArray 'explained_variance_ratio' (mode: 4)>
    -    array([88.8,  3. ,  1.9,  0.9])
    +    88.8 3.0 1.9 0.9
         Coordinates:
           * mode     (mode) int64 1 2 3 4
    -    Attributes: (16)
    +    Attributes: (15)
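
The hunk below switches the exponential-padding variant of the same example to the new namespace. Continuing the sketch above (same `sst` and `kwargs`), the comparison it documents looks roughly like this; `decay_factor` is expressed in multiples of the time series length:

```python
# Exponential padding damps the series towards its edges before the Hilbert
# transform; decay_factor=0.01 is the value used in the example below
model_ext = xe.single.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs)
model_ext.fit(sst, dim="time")

# Leading four PC time series of the padded model, for comparison with the
# unpadded scores
scores_ext = model_ext.scores().sel(mode=slice(1, 4))
```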

    @@ -1364,7 +1364,7 @@ the time series length. Let's see how the decay parameter impacts the results: .. code-block:: default - model_ext = xe.models.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) + model_ext = xe.single.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) model_ext.fit(sst, dim="time") scores_ext = model_ext.scores().sel(mode=slice(1, 4)) @@ -1441,7 +1441,7 @@ This is in agreement with mode 3 of the standard EOF analysis. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 6.103 seconds) + **Total running time of the script:** (0 minutes 8.098 seconds) .. _sphx_glr_download_auto_examples_1single_plot_hilbert_eof.py: diff --git a/docs/auto_examples/1single/plot_hilbert_eof_codeobj.pickle b/docs/auto_examples/1single/plot_hilbert_eof_codeobj.pickle index 3eee9fe5..cc488e64 100644 Binary files a/docs/auto_examples/1single/plot_hilbert_eof_codeobj.pickle and b/docs/auto_examples/1single/plot_hilbert_eof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_mreof.ipynb b/docs/auto_examples/1single/plot_mreof.ipynb index 9eceb7e9..3b209a07 100644 --- a/docs/auto_examples/1single/plot_mreof.ipynb +++ b/docs/auto_examples/1single/plot_mreof.ipynb @@ -17,12 +17,12 @@ "outputs": [], "source": [ "# Load packages and data:\n", - "import xarray as xr\n", "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", + "import xarray as xr\n", "from cartopy.crs import PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import EOF, EOFRotator" + "import xeofs as xe" ] }, { @@ -61,9 +61,9 @@ "outputs": [], "source": [ "multivariate_data = [subset1, subset2, subset3, subset4]\n", - "mpca = EOF(n_modes=100, standardize=False, use_coslat=True)\n", + "mpca = xe.single.EOF(n_modes=100, standardize=False, use_coslat=True)\n", "mpca.fit(multivariate_data, dim=\"time\")\n", - "rotator = EOFRotator(n_modes=20)\n", + "rotator = xe.single.EOFRotator(n_modes=20)\n", "rotator.fit(mpca)\n", "rcomponents = rotator.components()\n", "rscores = rotator.scores()" @@ -135,7 +135,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_mreof.py b/docs/auto_examples/1single/plot_mreof.py index 5bbc6a0f..e8c14b50 100644 --- a/docs/auto_examples/1single/plot_mreof.py +++ b/docs/auto_examples/1single/plot_mreof.py @@ -6,12 +6,12 @@ """ # Load packages and data: -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF, EOFRotator +import xeofs as xe # %% # Create four different dataarrayss @@ -25,9 +25,9 @@ # Perform the actual analysis multivariate_data = [subset1, subset2, subset3, subset4] -mpca = EOF(n_modes=100, standardize=False, use_coslat=True) +mpca = xe.single.EOF(n_modes=100, standardize=False, use_coslat=True) mpca.fit(multivariate_data, dim="time") -rotator = EOFRotator(n_modes=20) +rotator = xe.single.EOFRotator(n_modes=20) rotator.fit(mpca) rcomponents = rotator.components() rscores = rotator.scores() diff --git a/docs/auto_examples/1single/plot_mreof.py.md5 b/docs/auto_examples/1single/plot_mreof.py.md5 index 57db41e3..67b0fb08 100644 --- a/docs/auto_examples/1single/plot_mreof.py.md5 +++ b/docs/auto_examples/1single/plot_mreof.py.md5 @@ -1 +1 @@ -26962c4661f0e87127e3ebcadae33bb7 \ No 
newline at end of file +d7e99506275146b34aa2607581ecbd1e \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_mreof.rst b/docs/auto_examples/1single/plot_mreof.rst index 105ef3cc..f8dcd602 100644 --- a/docs/auto_examples/1single/plot_mreof.rst +++ b/docs/auto_examples/1single/plot_mreof.rst @@ -25,16 +25,16 @@ Multivariate EOF analysis with additional Varimax rotation. .. GENERATED FROM PYTHON SOURCE LINES 7-16 -.. code-block:: Python +.. code-block:: default # Load packages and data: - import xarray as xr import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec + import xarray as xr from cartopy.crs import PlateCarree + from matplotlib.gridspec import GridSpec - from xeofs.models import EOF, EOFRotator + import xeofs as xe @@ -49,7 +49,7 @@ Create four different dataarrayss .. GENERATED FROM PYTHON SOURCE LINES 18-24 -.. code-block:: Python +.. code-block:: default sst = xr.tutorial.open_dataset("ersstv5")["sst"] subset1 = sst.isel(lon=slice(0, 45)) @@ -70,13 +70,13 @@ Perform the actual analysis .. GENERATED FROM PYTHON SOURCE LINES 26-35 -.. code-block:: Python +.. code-block:: default multivariate_data = [subset1, subset2, subset3, subset4] - mpca = EOF(n_modes=100, standardize=False, use_coslat=True) + mpca = xe.single.EOF(n_modes=100, standardize=False, use_coslat=True) mpca.fit(multivariate_data, dim="time") - rotator = EOFRotator(n_modes=20) + rotator = xe.single.EOFRotator(n_modes=20) rotator.fit(mpca) rcomponents = rotator.components() rscores = rotator.scores() @@ -94,7 +94,7 @@ Plot mode 1 .. GENERATED FROM PYTHON SOURCE LINES 37-71 -.. code-block:: Python +.. code-block:: default mode = 5 @@ -145,7 +145,7 @@ Plot mode 1 .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 1.727 seconds) + **Total running time of the script:** (0 minutes 3.255 seconds) .. _sphx_glr_download_auto_examples_1single_plot_mreof.py: @@ -154,14 +154,17 @@ Plot mode 1 .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_mreof.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_mreof.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_mreof.ipynb ` + .. 
only:: html diff --git a/docs/auto_examples/1single/plot_mreof_codeobj.pickle b/docs/auto_examples/1single/plot_mreof_codeobj.pickle index bd2093e1..778fef67 100644 Binary files a/docs/auto_examples/1single/plot_mreof_codeobj.pickle and b/docs/auto_examples/1single/plot_mreof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_multivariate-eof.ipynb b/docs/auto_examples/1single/plot_multivariate-eof.ipynb index 6983769d..853147e6 100644 --- a/docs/auto_examples/1single/plot_multivariate-eof.ipynb +++ b/docs/auto_examples/1single/plot_multivariate-eof.ipynb @@ -17,12 +17,12 @@ "outputs": [], "source": [ "# Load packages and data:\n", - "import xarray as xr\n", "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", + "import xarray as xr\n", "from cartopy.crs import PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import EOF\n", + "import xeofs as xe\n", "\n", "# Create four different dataarrayss\n", "sst = xr.tutorial.open_dataset(\"ersstv5\")[\"sst\"]\n", @@ -47,7 +47,7 @@ "metadata": {}, "outputs": [], "source": [ - "pca = EOF(n_modes=10, standardize=False, use_coslat=True)\n", + "pca = xe.single.EOF(n_modes=10, standardize=False, use_coslat=True)\n", "pca.fit(multivariate_data, dim=\"time\")\n", "components = pca.components()\n", "scores = pca.scores()" @@ -119,7 +119,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_multivariate-eof.py b/docs/auto_examples/1single/plot_multivariate-eof.py index 31c6f623..84d0a36b 100644 --- a/docs/auto_examples/1single/plot_multivariate-eof.py +++ b/docs/auto_examples/1single/plot_multivariate-eof.py @@ -6,12 +6,12 @@ """ # Load packages and data: -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF +import xeofs as xe # Create four different dataarrayss sst = xr.tutorial.open_dataset("ersstv5")["sst"] @@ -24,7 +24,7 @@ # %% # Perform the actual analysis -pca = EOF(n_modes=10, standardize=False, use_coslat=True) +pca = xe.single.EOF(n_modes=10, standardize=False, use_coslat=True) pca.fit(multivariate_data, dim="time") components = pca.components() scores = pca.scores() diff --git a/docs/auto_examples/1single/plot_multivariate-eof.py.md5 b/docs/auto_examples/1single/plot_multivariate-eof.py.md5 index d14cd21e..10cd5ff8 100644 --- a/docs/auto_examples/1single/plot_multivariate-eof.py.md5 +++ b/docs/auto_examples/1single/plot_multivariate-eof.py.md5 @@ -1 +1 @@ -d76ccd30400590a4e6d49d598bbdb423 \ No newline at end of file +58257a2d0b4d2b974cf43e80e195be40 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_multivariate-eof.rst b/docs/auto_examples/1single/plot_multivariate-eof.rst index f0ef5c22..0af5e513 100644 --- a/docs/auto_examples/1single/plot_multivariate-eof.rst +++ b/docs/auto_examples/1single/plot_multivariate-eof.rst @@ -25,16 +25,16 @@ Multivariate EOF analysis. .. GENERATED FROM PYTHON SOURCE LINES 7-24 -.. code-block:: Python +.. 
code-block:: default # Load packages and data: - import xarray as xr import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec + import xarray as xr from cartopy.crs import PlateCarree + from matplotlib.gridspec import GridSpec - from xeofs.models import EOF + import xeofs as xe # Create four different dataarrayss sst = xr.tutorial.open_dataset("ersstv5")["sst"] @@ -57,10 +57,10 @@ Perform the actual analysis .. GENERATED FROM PYTHON SOURCE LINES 26-32 -.. code-block:: Python +.. code-block:: default - pca = EOF(n_modes=10, standardize=False, use_coslat=True) + pca = xe.single.EOF(n_modes=10, standardize=False, use_coslat=True) pca.fit(multivariate_data, dim="time") components = pca.components() scores = pca.scores() @@ -78,7 +78,7 @@ Plot mode 1 .. GENERATED FROM PYTHON SOURCE LINES 34-68 -.. code-block:: Python +.. code-block:: default mode = 5 @@ -129,7 +129,7 @@ Plot mode 1 .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 1.211 seconds) + **Total running time of the script:** (0 minutes 2.083 seconds) .. _sphx_glr_download_auto_examples_1single_plot_multivariate-eof.py: @@ -138,14 +138,17 @@ Plot mode 1 .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_multivariate-eof.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_multivariate-eof.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_multivariate-eof.ipynb ` + .. only:: html diff --git a/docs/auto_examples/1single/plot_multivariate-eof_codeobj.pickle b/docs/auto_examples/1single/plot_multivariate-eof_codeobj.pickle index 96521a56..e3afdbd9 100644 Binary files a/docs/auto_examples/1single/plot_multivariate-eof_codeobj.pickle and b/docs/auto_examples/1single/plot_multivariate-eof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_rotated_eof.ipynb b/docs/auto_examples/1single/plot_rotated_eof.ipynb index b92799d3..caf07ab1 100644 --- a/docs/auto_examples/1single/plot_rotated_eof.ipynb +++ b/docs/auto_examples/1single/plot_rotated_eof.ipynb @@ -33,14 +33,13 @@ "metadata": {}, "outputs": [], "source": [ - "import xarray as xr\n", "import matplotlib.pyplot as plt\n", "import seaborn as sns\n", + "import xarray as xr\n", + "from cartopy.crs import PlateCarree, Robinson\n", "from matplotlib.gridspec import GridSpec\n", - "from cartopy.crs import Robinson, PlateCarree\n", - "\n", - "from xeofs.models import EOF, EOFRotator\n", "\n", + "import xeofs as xe\n", "\n", "sns.set_context(\"paper\")\n", "\n", @@ -64,17 +63,17 @@ "components = []\n", "scores = []\n", "# (1) Standard EOF without regularization\n", - "model = EOF(n_modes=100, standardize=True, use_coslat=True)\n", + "model = xe.single.EOF(n_modes=100, standardize=True, use_coslat=True)\n", "model.fit(sst, dim=\"time\")\n", "components.append(model.components())\n", "scores.append(model.scores())\n", "# (2) Varimax-rotated EOF analysis\n", - "rot_var = EOFRotator(n_modes=50, power=1)\n", + "rot_var = xe.single.EOFRotator(n_modes=50, power=1)\n", "rot_var.fit(model)\n", "components.append(rot_var.components())\n", "scores.append(rot_var.scores())\n", "# (3) Promax-rotated EOF analysis\n", - "rot_pro = EOFRotator(n_modes=50, power=4)\n", + "rot_pro = xe.single.EOFRotator(n_modes=50, power=4)\n", "rot_pro.fit(model)\n", "components.append(rot_pro.components())\n", 
"scores.append(rot_pro.scores())" @@ -148,7 +147,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_rotated_eof.py b/docs/auto_examples/1single/plot_rotated_eof.py index 0f8b296b..33f6ab72 100644 --- a/docs/auto_examples/1single/plot_rotated_eof.py +++ b/docs/auto_examples/1single/plot_rotated_eof.py @@ -22,14 +22,13 @@ We'll start by loading the necessary packages and data: """ -import xarray as xr import matplotlib.pyplot as plt import seaborn as sns +import xarray as xr +from cartopy.crs import PlateCarree, Robinson from matplotlib.gridspec import GridSpec -from cartopy.crs import Robinson, PlateCarree - -from xeofs.models import EOF, EOFRotator +import xeofs as xe sns.set_context("paper") @@ -42,17 +41,17 @@ components = [] scores = [] # (1) Standard EOF without regularization -model = EOF(n_modes=100, standardize=True, use_coslat=True) +model = xe.single.EOF(n_modes=100, standardize=True, use_coslat=True) model.fit(sst, dim="time") components.append(model.components()) scores.append(model.scores()) # (2) Varimax-rotated EOF analysis -rot_var = EOFRotator(n_modes=50, power=1) +rot_var = xe.single.EOFRotator(n_modes=50, power=1) rot_var.fit(model) components.append(rot_var.components()) scores.append(rot_var.scores()) # (3) Promax-rotated EOF analysis -rot_pro = EOFRotator(n_modes=50, power=4) +rot_pro = xe.single.EOFRotator(n_modes=50, power=4) rot_pro.fit(model) components.append(rot_pro.components()) scores.append(rot_pro.scores()) diff --git a/docs/auto_examples/1single/plot_rotated_eof.py.md5 b/docs/auto_examples/1single/plot_rotated_eof.py.md5 index 46e91846..b79832fd 100644 --- a/docs/auto_examples/1single/plot_rotated_eof.py.md5 +++ b/docs/auto_examples/1single/plot_rotated_eof.py.md5 @@ -1 +1 @@ -88116a4a1f79c81f89706b7345a6ba70 \ No newline at end of file +669ef8691b2403f69184b4309b0f154b \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_rotated_eof.rst b/docs/auto_examples/1single/plot_rotated_eof.rst index 410b573f..89024f93 100644 --- a/docs/auto_examples/1single/plot_rotated_eof.rst +++ b/docs/auto_examples/1single/plot_rotated_eof.rst @@ -40,19 +40,18 @@ without regularization, (2) with Varimax rotation, and (3) with Promax rotation. We'll start by loading the necessary packages and data: -.. GENERATED FROM PYTHON SOURCE LINES 24-39 +.. GENERATED FROM PYTHON SOURCE LINES 24-38 -.. code-block:: Python +.. code-block:: default - import xarray as xr import matplotlib.pyplot as plt import seaborn as sns + import xarray as xr + from cartopy.crs import PlateCarree, Robinson from matplotlib.gridspec import GridSpec - from cartopy.crs import Robinson, PlateCarree - - from xeofs.models import EOF, EOFRotator + import xeofs as xe sns.set_context("paper") @@ -66,29 +65,29 @@ We'll start by loading the necessary packages and data: -.. GENERATED FROM PYTHON SOURCE LINES 40-41 +.. GENERATED FROM PYTHON SOURCE LINES 39-40 Perform the actual analysis -.. GENERATED FROM PYTHON SOURCE LINES 41-61 +.. GENERATED FROM PYTHON SOURCE LINES 40-60 -.. code-block:: Python +.. 
code-block:: default components = [] scores = [] # (1) Standard EOF without regularization - model = EOF(n_modes=100, standardize=True, use_coslat=True) + model = xe.single.EOF(n_modes=100, standardize=True, use_coslat=True) model.fit(sst, dim="time") components.append(model.components()) scores.append(model.scores()) # (2) Varimax-rotated EOF analysis - rot_var = EOFRotator(n_modes=50, power=1) + rot_var = xe.single.EOFRotator(n_modes=50, power=1) rot_var.fit(model) components.append(rot_var.components()) scores.append(rot_var.scores()) # (3) Promax-rotated EOF analysis - rot_pro = EOFRotator(n_modes=50, power=4) + rot_pro = xe.single.EOFRotator(n_modes=50, power=4) rot_pro.fit(model) components.append(rot_pro.components()) scores.append(rot_pro.scores()) @@ -98,17 +97,10 @@ Perform the actual analysis -.. rst-class:: sphx-glr-script-out - - .. code-block:: none - /home/slevang/miniconda3/envs/xeofs-docs/lib/python3.11/site-packages/numpy/lib/nanfunctions.py:1879: RuntimeWarning: Degrees of freedom <= 0 for slice. - var = nanvar(a, axis=axis, dtype=dtype, out=out, ddof=ddof, - - -.. GENERATED FROM PYTHON SOURCE LINES 62-67 +.. GENERATED FROM PYTHON SOURCE LINES 61-66 Create figure showing the first 6 modes for all 3 cases. While the first mode is very similar in all three cases the subsequent modes of the standard @@ -116,9 +108,9 @@ solution exhibit dipole and tripole-like patterns. Under Varimax and Promax rotation, these structures completely disappear suggesting that these patterns were mere artifacts due to the orthogonality. -.. GENERATED FROM PYTHON SOURCE LINES 67-99 +.. GENERATED FROM PYTHON SOURCE LINES 66-98 -.. code-block:: Python +.. code-block:: default proj = Robinson(central_longitude=180) @@ -167,7 +159,7 @@ were mere artifacts due to the orthogonality. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 6.472 seconds) + **Total running time of the script:** (0 minutes 11.973 seconds) .. _sphx_glr_download_auto_examples_1single_plot_rotated_eof.py: @@ -176,14 +168,17 @@ were mere artifacts due to the orthogonality. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_rotated_eof.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_rotated_eof.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rotated_eof.ipynb ` + .. 
only:: html diff --git a/docs/auto_examples/1single/plot_rotated_eof_codeobj.pickle b/docs/auto_examples/1single/plot_rotated_eof_codeobj.pickle index 63217752..4df019f7 100644 Binary files a/docs/auto_examples/1single/plot_rotated_eof_codeobj.pickle and b/docs/auto_examples/1single/plot_rotated_eof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/plot_weighted-eof.ipynb b/docs/auto_examples/1single/plot_weighted-eof.ipynb index d9aef5e2..85f6f443 100644 --- a/docs/auto_examples/1single/plot_weighted-eof.ipynb +++ b/docs/auto_examples/1single/plot_weighted-eof.ipynb @@ -22,13 +22,13 @@ "metadata": {}, "outputs": [], "source": [ - "import xarray as xr\n", "import matplotlib.pyplot as plt\n", "import seaborn as sns\n", - "from matplotlib.gridspec import GridSpec\n", + "import xarray as xr\n", "from cartopy.crs import Orthographic, PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import EOF\n", + "import xeofs as xe\n", "\n", "sns.set_context(\"paper\")\n", "\n", @@ -52,22 +52,22 @@ "components = []\n", "scores = []\n", "# (1) Based on covariance matrix\n", - "model_cov = EOF(n_modes=5, standardize=False, use_coslat=False)\n", + "model_cov = xe.single.EOF(n_modes=5, standardize=False, use_coslat=False)\n", "model_cov.fit(t2m, \"time\")\n", "components.append(model_cov.components())\n", "scores.append(model_cov.scores())\n", "# (2) Based on coslat weighted covariance matrix\n", - "model_lat = EOF(n_modes=5, standardize=False, use_coslat=True)\n", + "model_lat = xe.single.EOF(n_modes=5, standardize=False, use_coslat=True)\n", "model_lat.fit(t2m, \"time\")\n", "components.append(model_lat.components())\n", "scores.append(model_lat.scores())\n", "# (3) Based on correlation matrix\n", - "model_cor = EOF(n_modes=5, standardize=True, use_coslat=False)\n", + "model_cor = xe.single.EOF(n_modes=5, standardize=True, use_coslat=False)\n", "model_cor.fit(t2m, \"time\")\n", "components.append(model_cor.components())\n", "scores.append(model_cor.scores())\n", "# (4) Based on coslat weighted correlation matrix\n", - "model_cor_lat = EOF(n_modes=5, standardize=True, use_coslat=True)\n", + "model_cor_lat = xe.single.EOF(n_modes=5, standardize=True, use_coslat=True)\n", "model_cor_lat.fit(t2m, \"time\")\n", "components.append(model_cor_lat.components())\n", "scores.append(model_cor_lat.scores())" @@ -139,7 +139,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/1single/plot_weighted-eof.py b/docs/auto_examples/1single/plot_weighted-eof.py index 26a88473..6d67964b 100644 --- a/docs/auto_examples/1single/plot_weighted-eof.py +++ b/docs/auto_examples/1single/plot_weighted-eof.py @@ -11,13 +11,13 @@ Load packages and data: """ -import xarray as xr import matplotlib.pyplot as plt import seaborn as sns -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import Orthographic, PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF +import xeofs as xe sns.set_context("paper") @@ -29,22 +29,22 @@ components = [] scores = [] # (1) Based on covariance matrix -model_cov = EOF(n_modes=5, standardize=False, use_coslat=False) +model_cov = xe.single.EOF(n_modes=5, standardize=False, use_coslat=False) model_cov.fit(t2m, "time") components.append(model_cov.components()) scores.append(model_cov.scores()) # (2) Based on coslat weighted covariance matrix -model_lat = EOF(n_modes=5, 
standardize=False, use_coslat=True) +model_lat = xe.single.EOF(n_modes=5, standardize=False, use_coslat=True) model_lat.fit(t2m, "time") components.append(model_lat.components()) scores.append(model_lat.scores()) # (3) Based on correlation matrix -model_cor = EOF(n_modes=5, standardize=True, use_coslat=False) +model_cor = xe.single.EOF(n_modes=5, standardize=True, use_coslat=False) model_cor.fit(t2m, "time") components.append(model_cor.components()) scores.append(model_cor.scores()) # (4) Based on coslat weighted correlation matrix -model_cor_lat = EOF(n_modes=5, standardize=True, use_coslat=True) +model_cor_lat = xe.single.EOF(n_modes=5, standardize=True, use_coslat=True) model_cor_lat.fit(t2m, "time") components.append(model_cor_lat.components()) scores.append(model_cor_lat.scores()) diff --git a/docs/auto_examples/1single/plot_weighted-eof.py.md5 b/docs/auto_examples/1single/plot_weighted-eof.py.md5 index b015a5cf..329812d3 100644 --- a/docs/auto_examples/1single/plot_weighted-eof.py.md5 +++ b/docs/auto_examples/1single/plot_weighted-eof.py.md5 @@ -1 +1 @@ -8ca089ebc9dfca92e49dbd81d4047695 \ No newline at end of file +7047ce79c7b692b5a3beb8b39381ff16 \ No newline at end of file diff --git a/docs/auto_examples/1single/plot_weighted-eof.rst b/docs/auto_examples/1single/plot_weighted-eof.rst index 90769f45..0cb3b794 100644 --- a/docs/auto_examples/1single/plot_weighted-eof.rst +++ b/docs/auto_examples/1single/plot_weighted-eof.rst @@ -31,16 +31,16 @@ Load packages and data: .. GENERATED FROM PYTHON SOURCE LINES 13-26 -.. code-block:: Python +.. code-block:: default - import xarray as xr import matplotlib.pyplot as plt import seaborn as sns - from matplotlib.gridspec import GridSpec + import xarray as xr from cartopy.crs import Orthographic, PlateCarree + from matplotlib.gridspec import GridSpec - from xeofs.models import EOF + import xeofs as xe sns.set_context("paper") @@ -59,28 +59,28 @@ Perform the actual analysis .. GENERATED FROM PYTHON SOURCE LINES 28-53 -.. code-block:: Python +.. code-block:: default components = [] scores = [] # (1) Based on covariance matrix - model_cov = EOF(n_modes=5, standardize=False, use_coslat=False) + model_cov = xe.single.EOF(n_modes=5, standardize=False, use_coslat=False) model_cov.fit(t2m, "time") components.append(model_cov.components()) scores.append(model_cov.scores()) # (2) Based on coslat weighted covariance matrix - model_lat = EOF(n_modes=5, standardize=False, use_coslat=True) + model_lat = xe.single.EOF(n_modes=5, standardize=False, use_coslat=True) model_lat.fit(t2m, "time") components.append(model_lat.components()) scores.append(model_lat.scores()) # (3) Based on correlation matrix - model_cor = EOF(n_modes=5, standardize=True, use_coslat=False) + model_cor = xe.single.EOF(n_modes=5, standardize=True, use_coslat=False) model_cor.fit(t2m, "time") components.append(model_cor.components()) scores.append(model_cor.scores()) # (4) Based on coslat weighted correlation matrix - model_cor_lat = EOF(n_modes=5, standardize=True, use_coslat=True) + model_cor_lat = xe.single.EOF(n_modes=5, standardize=True, use_coslat=True) model_cor_lat.fit(t2m, "time") components.append(model_cor_lat.components()) scores.append(model_cor_lat.scores()) @@ -99,7 +99,7 @@ Create figure showing the first mode for all 4 cases .. GENERATED FROM PYTHON SOURCE LINES 55-89 -.. code-block:: Python +.. code-block:: default proj = Orthographic(central_latitude=30, central_longitude=-80) @@ -150,7 +150,7 @@ Create figure showing the first mode for all 4 cases .. 
rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 1.928 seconds) + **Total running time of the script:** (0 minutes 2.990 seconds) .. _sphx_glr_download_auto_examples_1single_plot_weighted-eof.py: @@ -159,14 +159,17 @@ Create figure showing the first mode for all 4 cases .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_weighted-eof.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_weighted-eof.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_weighted-eof.ipynb ` + .. only:: html diff --git a/docs/auto_examples/1single/plot_weighted-eof_codeobj.pickle b/docs/auto_examples/1single/plot_weighted-eof_codeobj.pickle index be19075a..63a2501d 100644 Binary files a/docs/auto_examples/1single/plot_weighted-eof_codeobj.pickle and b/docs/auto_examples/1single/plot_weighted-eof_codeobj.pickle differ diff --git a/docs/auto_examples/1single/sg_execution_times.rst b/docs/auto_examples/1single/sg_execution_times.rst index 090f80b7..996f063c 100644 --- a/docs/auto_examples/1single/sg_execution_times.rst +++ b/docs/auto_examples/1single/sg_execution_times.rst @@ -6,14 +6,14 @@ Computation times ================= -**00:02.009** total execution time for **auto_examples_1single** files: +**00:07.526** total execution time for **auto_examples_1single** files: +-----------------------------------------------------------------------------------------------+-----------+--------+ -| :ref:`sphx_glr_auto_examples_1single_plot_complex_eof.py` (``plot_complex_eof.py``) | 00:02.009 | 0.0 MB | +| :ref:`sphx_glr_auto_examples_1single_plot_eeof_trend.py` (``plot_eeof_trend.py``) | 00:07.526 | 0.0 MB | +-----------------------------------------------------------------------------------------------+-----------+--------+ -| :ref:`sphx_glr_auto_examples_1single_plot_eeof.py` (``plot_eeof.py``) | 00:00.000 | 0.0 MB | +| :ref:`sphx_glr_auto_examples_1single_plot_complex_eof.py` (``plot_complex_eof.py``) | 00:00.000 | 0.0 MB | +-----------------------------------------------------------------------------------------------+-----------+--------+ -| :ref:`sphx_glr_auto_examples_1single_plot_eeof_trend.py` (``plot_eeof_trend.py``) | 00:00.000 | 0.0 MB | +| :ref:`sphx_glr_auto_examples_1single_plot_eeof.py` (``plot_eeof.py``) | 00:00.000 | 0.0 MB | +-----------------------------------------------------------------------------------------------+-----------+--------+ | :ref:`sphx_glr_auto_examples_1single_plot_eof-smode.py` (``plot_eof-smode.py``) | 00:00.000 | 0.0 MB | +-----------------------------------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/2cross/images/sphx_glr_plot_mca_001.png b/docs/auto_examples/2cross/images/sphx_glr_plot_mca_001.png new file mode 100644 index 00000000..2c5ee20e Binary files /dev/null and b/docs/auto_examples/2cross/images/sphx_glr_plot_mca_001.png differ diff --git a/docs/auto_examples/2cross/images/sphx_glr_plot_rotated_mca_001.png b/docs/auto_examples/2cross/images/sphx_glr_plot_rotated_mca_001.png new file mode 100644 index 00000000..e7ea9017 Binary files /dev/null and b/docs/auto_examples/2cross/images/sphx_glr_plot_rotated_mca_001.png differ diff --git a/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_mca_thumb.png 
b/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_mca_thumb.png new file mode 100644 index 00000000..c6a52d88 Binary files /dev/null and b/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_mca_thumb.png differ diff --git a/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_rotated_mca_thumb.png b/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_rotated_mca_thumb.png new file mode 100644 index 00000000..8d6dc8db Binary files /dev/null and b/docs/auto_examples/2cross/images/thumb/sphx_glr_plot_rotated_mca_thumb.png differ diff --git a/docs/auto_examples/2cross/index.rst b/docs/auto_examples/2cross/index.rst new file mode 100644 index 00000000..7400c8e6 --- /dev/null +++ b/docs/auto_examples/2cross/index.rst @@ -0,0 +1,59 @@ + + +.. _sphx_glr_auto_examples_2cross: + +2 | Cross-Set Analysis +======================== + + + +.. raw:: html + +
    + + +.. raw:: html + +
    + +.. only:: html + + .. image:: /auto_examples/2cross/images/thumb/sphx_glr_plot_mca_thumb.png + :alt: + + :ref:`sphx_glr_auto_examples_2cross_plot_mca.py` + +.. raw:: html + +
    Maximum Covariance Analysis
    +
    + + +.. raw:: html + +
    + +.. only:: html + + .. image:: /auto_examples/2cross/images/thumb/sphx_glr_plot_rotated_mca_thumb.png + :alt: + + :ref:`sphx_glr_auto_examples_2cross_plot_rotated_mca.py` + +.. raw:: html + +
    Rotated Maximum Covariance Analysis
    +
    + + +.. raw:: html + +
    + + +.. toctree:: + :hidden: + + /auto_examples/2cross/plot_mca + /auto_examples/2cross/plot_rotated_mca + diff --git a/docs/auto_examples/2cross/plot_mca.ipynb b/docs/auto_examples/2cross/plot_mca.ipynb new file mode 100644 index 00000000..16d73cb7 --- /dev/null +++ b/docs/auto_examples/2cross/plot_mca.ipynb @@ -0,0 +1,219 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Maximum Covariance Analysis\n", + "\n", + "Maximum Covariance Analysis (MCA) between two data sets.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import xarray as xr\n", + "from cartopy.crs import Orthographic, PlateCarree\n", + "from cartopy.feature import LAND\n", + "from matplotlib.gridspec import GridSpec\n", + "\n", + "import xeofs as xe" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create 2 different DataArrays\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "t2m = xr.tutorial.load_dataset(\"air_temperature\")[\"air\"]\n", + "da1 = t2m.isel(lon=slice(0, 26))\n", + "da2 = t2m.isel(lon=slice(27, None))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform MCA\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)\n", + "mca.fit(da1, da2, dim=\"time\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Get singular vectors, projections (PCs), homogeneous and heterogeneous\n", + "patterns:\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "singular_vectors = mca.components()\n", + "scores = mca.scores()\n", + "hom_pats, pvals_hom = mca.homogeneous_patterns()\n", + "het_pats, pvals_het = mca.heterogeneous_patterns()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When two fields are expected, the output of the above methods is a list of\n", + "length 2, with the first and second entry containing the relevant object for\n", + "``X`` and ``Y``. 
For example, the p-values obtained from the two-sided t-test\n", + "for the homogeneous patterns of ``X`` are:\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pvals_hom[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a mask to identifiy where p-values are below 0.05\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hom_mask = [values < 0.05 for values in pvals_hom]\n", + "het_mask = [values < 0.05 for values in pvals_het]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot some relevant quantities of mode 2.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lonlats = [\n", + " np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values),\n", + " np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values),\n", + "]\n", + "proj = [\n", + " Orthographic(central_latitude=30, central_longitude=-120),\n", + " Orthographic(central_latitude=30, central_longitude=-60),\n", + "]\n", + "kwargs1 = {\"cmap\": \"BrBG\", \"vmin\": -0.05, \"vmax\": 0.05, \"transform\": PlateCarree()}\n", + "kwargs2 = {\"cmap\": \"RdBu\", \"vmin\": -1, \"vmax\": 1, \"transform\": PlateCarree()}\n", + "\n", + "mode = 2\n", + "\n", + "fig = plt.figure(figsize=(7, 14))\n", + "gs = GridSpec(5, 2)\n", + "ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)]\n", + "ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)]\n", + "ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)]\n", + "ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)]\n", + "\n", + "for i, a in enumerate(ax1):\n", + " singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1)\n", + "\n", + "for i, a in enumerate(ax2):\n", + " hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2)\n", + " a.scatter(\n", + " lonlats[i][0],\n", + " lonlats[i][1],\n", + " hom_mask[i].sel(mode=mode).values * 0.5,\n", + " color=\"k\",\n", + " alpha=0.5,\n", + " transform=PlateCarree(),\n", + " )\n", + "for i, a in enumerate(ax3):\n", + " het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2)\n", + " a.scatter(\n", + " lonlats[i][0],\n", + " lonlats[i][1],\n", + " het_mask[i].sel(mode=mode).values * 0.5,\n", + " color=\"k\",\n", + " alpha=0.5,\n", + " transform=PlateCarree(),\n", + " )\n", + "\n", + "for i, a in enumerate(ax4):\n", + " scores[i].sel(mode=mode).plot(ax=a)\n", + " a.set_xlabel(\"\")\n", + "\n", + "\n", + "for a in np.ravel([ax1, ax2, ax3]):\n", + " a.coastlines(color=\".5\")\n", + " a.add_feature(LAND)\n", + "\n", + "plt.tight_layout()\n", + "plt.savefig(\"mca.jpg\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/2multi/plot_mca.py b/docs/auto_examples/2cross/plot_mca.py similarity index 97% rename from examples/2multi/plot_mca.py rename to docs/auto_examples/2cross/plot_mca.py index d3b48de7..a93d5546 100644 --- a/examples/2multi/plot_mca.py +++ b/docs/auto_examples/2cross/plot_mca.py @@ -6,14 +6,14 @@ """ # Load packages and data: +import 
matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec -from xeofs.models import MCA +import xeofs as xe # %% # Create 2 different DataArrays @@ -25,7 +25,7 @@ # %% # Perform MCA -mca = MCA(n_modes=20, standardize=False, use_coslat=True) +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") # %% diff --git a/docs/auto_examples/2cross/plot_mca.py.md5 b/docs/auto_examples/2cross/plot_mca.py.md5 new file mode 100644 index 00000000..d980cde7 --- /dev/null +++ b/docs/auto_examples/2cross/plot_mca.py.md5 @@ -0,0 +1 @@ +e696fb777ef84ec447201ca9c01d1dfe \ No newline at end of file diff --git a/docs/auto_examples/2cross/plot_mca.rst b/docs/auto_examples/2cross/plot_mca.rst new file mode 100644 index 00000000..8f072e19 --- /dev/null +++ b/docs/auto_examples/2cross/plot_mca.rst @@ -0,0 +1,740 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "auto_examples/2cross/plot_mca.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_auto_examples_2cross_plot_mca.py: + + +Maximum Covariance Analysis +=========================== + +Maximum Covariance Analysis (MCA) between two data sets. + +.. GENERATED FROM PYTHON SOURCE LINES 7-18 + +.. code-block:: default + + + # Load packages and data: + import matplotlib.pyplot as plt + import numpy as np + import xarray as xr + from cartopy.crs import Orthographic, PlateCarree + from cartopy.feature import LAND + from matplotlib.gridspec import GridSpec + + import xeofs as xe + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 19-20 + +Create 2 different DataArrays + +.. GENERATED FROM PYTHON SOURCE LINES 20-25 + +.. code-block:: default + + + t2m = xr.tutorial.load_dataset("air_temperature")["air"] + da1 = t2m.isel(lon=slice(0, 26)) + da2 = t2m.isel(lon=slice(27, None)) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 26-27 + +Perform MCA + +.. GENERATED FROM PYTHON SOURCE LINES 27-31 + +.. code-block:: default + + + mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) + mca.fit(da1, da2, dim="time") + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 32-34 + +Get singular vectors, projections (PCs), homogeneous and heterogeneous +patterns: + +.. GENERATED FROM PYTHON SOURCE LINES 34-40 + +.. code-block:: default + + + singular_vectors = mca.components() + scores = mca.scores() + hom_pats, pvals_hom = mca.homogeneous_patterns() + het_pats, pvals_het = mca.heterogeneous_patterns() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 41-45 + +When two fields are expected, the output of the above methods is a list of +length 2, with the first and second entry containing the relevant object for +``X`` and ``Y``. For example, the p-values obtained from the two-sided t-test +for the homogeneous patterns of ``X`` are: + +.. GENERATED FROM PYTHON SOURCE LINES 45-48 + +.. code-block:: default + + + pvals_hom[0] + + + + + + +.. raw:: html + +
    +
    + + + + + + + + + + + + + + +
    <xarray.DataArray 'pvalues_of_left_homogeneous_patterns' (mode: 20, lat: 25,
    +                                                              lon: 26)>
    +    array([[[0.00000000e+000, 3.32602740e-290, 2.86712051e-286, ...,
    +             1.32392878e-119, 9.37036267e-186, 0.00000000e+000],
    +            [0.00000000e+000, 0.00000000e+000, 2.87964095e-272, ...,
    +             8.56508898e-140, 4.38532320e-131, 6.54040862e-156],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             6.15522890e-039, 9.85389342e-095, 4.88663208e-307],
    +            ...,
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000]],
    +
    +           [[1.47816130e-058, 9.59629629e-069, 2.49426225e-077, ...,
    +             4.04710709e-067, 1.68530273e-001, 1.57942472e-054],
    +            [5.21001151e-051, 1.96182008e-055, 4.53391465e-063, ...,
    +             8.23812362e-066, 5.60289947e-142, 3.17052995e-126],
    +            [4.26755773e-027, 2.16865183e-023, 5.55666292e-044, ...,
    +             2.57077253e-302, 7.17664416e-212, 1.99383130e-056],
    +    ...
    +            [3.76773409e-015, 3.63595418e-016, 8.16407667e-016, ...,
    +             7.94237942e-001, 5.61772548e-001, 4.05805498e-001],
    +            [1.34077306e-010, 2.27353460e-010, 1.17695930e-009, ...,
    +             2.36922957e-001, 1.51587935e-001, 1.10342500e-001],
    +            [1.25888311e-009, 3.73287601e-009, 1.96403692e-008, ...,
    +             1.74507572e-001, 1.55251763e-001, 1.57874323e-001]],
    +
    +           [[3.23043237e-001, 1.34846412e-002, 1.49786437e-002, ...,
    +             8.59230128e-001, 9.39956204e-005, 7.73160479e-010],
    +            [3.30997229e-001, 1.89803851e-001, 1.63645931e-001, ...,
    +             1.15188423e-003, 1.70856527e-004, 3.80119477e-006],
    +            [4.80410883e-001, 3.58267371e-001, 1.05572199e-001, ...,
    +             1.33882929e-007, 8.22926072e-005, 3.56634908e-004],
    +            ...,
    +            [9.48231125e-019, 3.95379639e-018, 4.27550070e-017, ...,
    +             6.62164515e-001, 6.17150180e-001, 5.85772391e-001],
    +            [1.19130038e-006, 4.18324319e-006, 1.51748129e-005, ...,
    +             8.09046665e-001, 9.15065924e-001, 9.75206621e-001],
    +            [4.03876714e-002, 7.52746031e-002, 1.39540472e-001, ...,
    +             8.48692484e-002, 1.65380473e-001, 2.84566826e-001]]])
    +    Coordinates:
    +      * lat      (lat) float32 15.0 17.5 20.0 22.5 25.0 ... 65.0 67.5 70.0 72.5 75.0
    +      * lon      (lon) float32 200.0 202.5 205.0 207.5 ... 255.0 257.5 260.0 262.5
    +      * mode     (mode) int64 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
    +    Attributes: (12/18)
    +        model:                    Maximum Covariance Analysis
    +        software:                 xeofs
    +        version:                  1.2.0
    +        date:                     2024-09-02 02:52:38
    +        n_modes:                  20
    +        center:                   ['True', 'True']
    +        ...                       ...
    +        alpha:                    [1.0, 1.0]
    +        sample_name:              sample
    +        feature_name:             ['feature1', 'feature2']
    +        random_state:             None
    +        compute:                  True
    +        solver:                   auto
    +
    +
    +
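
The newly added MCA page explains that, with two input fields, methods such as `homogeneous_patterns()` return a pair of results, one for `X` and one for `Y`, together with p-values from a two-sided t-test. A compact, self-contained sketch of how those p-values are typically masked, using the same tutorial data and the 0.05 threshold taken from the example itself:

```python
import xarray as xr

import xeofs as xe

# Two halves of the air_temperature tutorial data, as in the added example
t2m = xr.tutorial.load_dataset("air_temperature")["air"]
da1 = t2m.isel(lon=slice(0, 26))
da2 = t2m.isel(lon=slice(27, None))

mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)
mca.fit(da1, da2, dim="time")

# Each call returns a list of length 2: index 0 belongs to X (da1), index 1 to Y (da2)
hom_pats, pvals_hom = mca.homogeneous_patterns()
het_pats, pvals_het = mca.heterogeneous_patterns()

# Boolean masks marking grid points that are significant at the 5% level
hom_mask = [p < 0.05 for p in pvals_hom]
het_mask = [p < 0.05 for p in pvals_het]
```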
    + +.. GENERATED FROM PYTHON SOURCE LINES 49-50 + +Create a mask to identifiy where p-values are below 0.05 + +.. GENERATED FROM PYTHON SOURCE LINES 50-55 + +.. code-block:: default + + + hom_mask = [values < 0.05 for values in pvals_hom] + het_mask = [values < 0.05 for values in pvals_het] + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 56-57 + +Plot some relevant quantities of mode 2. + +.. GENERATED FROM PYTHON SOURCE LINES 57-113 + +.. code-block:: default + + + lonlats = [ + np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values), + np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values), + ] + proj = [ + Orthographic(central_latitude=30, central_longitude=-120), + Orthographic(central_latitude=30, central_longitude=-60), + ] + kwargs1 = {"cmap": "BrBG", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} + kwargs2 = {"cmap": "RdBu", "vmin": -1, "vmax": 1, "transform": PlateCarree()} + + mode = 2 + + fig = plt.figure(figsize=(7, 14)) + gs = GridSpec(5, 2) + ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)] + ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)] + ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)] + ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)] + + for i, a in enumerate(ax1): + singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1) + + for i, a in enumerate(ax2): + hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + hom_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + for i, a in enumerate(ax3): + het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + het_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + + for i, a in enumerate(ax4): + scores[i].sel(mode=mode).plot(ax=a) + a.set_xlabel("") + + + for a in np.ravel([ax1, ax2, ax3]): + a.coastlines(color=".5") + a.add_feature(LAND) + + plt.tight_layout() + plt.savefig("mca.jpg") + + + +.. image-sg:: /auto_examples/2cross/images/sphx_glr_plot_mca_001.png + :alt: mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2 + :srcset: /auto_examples/2cross/images/sphx_glr_plot_mca_001.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 10.270 seconds) + + +.. _sphx_glr_download_auto_examples_2cross_plot_mca.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_mca.py ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_mca.ipynb ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/auto_examples/2cross/plot_mca_codeobj.pickle b/docs/auto_examples/2cross/plot_mca_codeobj.pickle new file mode 100644 index 00000000..8013a6f4 Binary files /dev/null and b/docs/auto_examples/2cross/plot_mca_codeobj.pickle differ diff --git a/docs/auto_examples/2cross/plot_rotated_mca.ipynb b/docs/auto_examples/2cross/plot_rotated_mca.ipynb new file mode 100644 index 00000000..e69caebc --- /dev/null +++ b/docs/auto_examples/2cross/plot_rotated_mca.ipynb @@ -0,0 +1,237 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Rotated Maximum Covariance Analysis\n", + "\n", + "Rotated Maximum Covariance Analysis (MCA) between two data sets.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import xarray as xr\n", + "from cartopy.crs import Orthographic, PlateCarree\n", + "from cartopy.feature import LAND\n", + "from matplotlib.gridspec import GridSpec\n", + "\n", + "import xeofs as xe" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create 2 different DataArrays\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "t2m = xr.tutorial.load_dataset(\"air_temperature\")[\"air\"]\n", + "da1 = t2m.isel(lon=slice(0, 26))\n", + "da2 = t2m.isel(lon=slice(27, None))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform MCA\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)\n", + "mca.fit(da1, da2, dim=\"time\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Apply Varimax-rotation to MCA solution\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "rot = xe.cross.MCARotator(n_modes=10)\n", + "rot.fit(mca)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Get rotated singular vectors, projections (PCs), homogeneous and heterogeneous\n", + "patterns:\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "singular_vectors = rot.components()\n", + "scores = rot.scores()\n", + "hom_pats, pvals_hom = rot.homogeneous_patterns()\n", + "het_pats, pvals_het = rot.heterogeneous_patterns()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When two fields are expected, the output of the above methods is a list of\n", + "length 2, with the first and second entry containing the relevant object for\n", + "``X`` and ``Y``. 
For example, the p-values obtained from the two-sided t-test\n", + "for the homogeneous patterns of ``X`` are:\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "pvals_hom[0]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a mask to identifiy where p-values are below 0.05\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "hom_mask = [values < 0.05 for values in pvals_hom]\n", + "het_mask = [values < 0.05 for values in pvals_het]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Plot some relevant quantities of mode 2.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lonlats = [\n", + " np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values),\n", + " np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values),\n", + "]\n", + "proj = [\n", + " Orthographic(central_latitude=30, central_longitude=-120),\n", + " Orthographic(central_latitude=30, central_longitude=-60),\n", + "]\n", + "kwargs1 = {\"cmap\": \"BrBG\", \"vmin\": -0.05, \"vmax\": 0.05, \"transform\": PlateCarree()}\n", + "kwargs2 = {\"cmap\": \"RdBu\", \"vmin\": -1, \"vmax\": 1, \"transform\": PlateCarree()}\n", + "\n", + "mode = 2\n", + "\n", + "fig = plt.figure(figsize=(7, 14))\n", + "gs = GridSpec(5, 2)\n", + "ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)]\n", + "ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)]\n", + "ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)]\n", + "ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)]\n", + "\n", + "for i, a in enumerate(ax1):\n", + " singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1)\n", + "\n", + "for i, a in enumerate(ax2):\n", + " hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2)\n", + " a.scatter(\n", + " lonlats[i][0],\n", + " lonlats[i][1],\n", + " hom_mask[i].sel(mode=mode).values * 0.5,\n", + " color=\"k\",\n", + " alpha=0.5,\n", + " transform=PlateCarree(),\n", + " )\n", + "for i, a in enumerate(ax3):\n", + " het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2)\n", + " a.scatter(\n", + " lonlats[i][0],\n", + " lonlats[i][1],\n", + " het_mask[i].sel(mode=mode).values * 0.5,\n", + " color=\"k\",\n", + " alpha=0.5,\n", + " transform=PlateCarree(),\n", + " )\n", + "\n", + "for i, a in enumerate(ax4):\n", + " scores[i].sel(mode=mode).plot(ax=a)\n", + " a.set_xlabel(\"\")\n", + "\n", + "\n", + "for a in np.ravel([ax1, ax2, ax3]):\n", + " a.coastlines(color=\".5\")\n", + " a.add_feature(LAND)\n", + "\n", + "plt.tight_layout()\n", + "plt.savefig(\"rotated_mca.jpg\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/2multi/plot_rotated_mca.py b/docs/auto_examples/2cross/plot_rotated_mca.py similarity index 95% rename from examples/2multi/plot_rotated_mca.py rename to docs/auto_examples/2cross/plot_rotated_mca.py index a7ec89fe..2ae0c501 100644 --- a/examples/2multi/plot_rotated_mca.py +++ b/docs/auto_examples/2cross/plot_rotated_mca.py 
@@ -6,14 +6,14 @@ """ # Load packages and data: +import matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec -from xeofs.models import MCA, MCARotator +import xeofs as xe # %% # Create 2 different DataArrays @@ -25,13 +25,13 @@ # %% # Perform MCA -mca = MCA(n_modes=20, standardize=False, use_coslat=True) +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") # %% # Apply Varimax-rotation to MCA solution -rot = MCARotator(n_modes=10) +rot = xe.cross.MCARotator(n_modes=10) rot.fit(mca) # %% diff --git a/docs/auto_examples/2cross/plot_rotated_mca.py.md5 b/docs/auto_examples/2cross/plot_rotated_mca.py.md5 new file mode 100644 index 00000000..deea9105 --- /dev/null +++ b/docs/auto_examples/2cross/plot_rotated_mca.py.md5 @@ -0,0 +1 @@ +00a21b73b61a542faad2eabd27cfcdf8 \ No newline at end of file diff --git a/docs/auto_examples/2cross/plot_rotated_mca.rst b/docs/auto_examples/2cross/plot_rotated_mca.rst new file mode 100644 index 00000000..a58d6b97 --- /dev/null +++ b/docs/auto_examples/2cross/plot_rotated_mca.rst @@ -0,0 +1,760 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "auto_examples/2cross/plot_rotated_mca.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_auto_examples_2cross_plot_rotated_mca.py: + + +Rotated Maximum Covariance Analysis +=================================== + +Rotated Maximum Covariance Analysis (MCA) between two data sets. + +.. GENERATED FROM PYTHON SOURCE LINES 7-18 + +.. code-block:: default + + + # Load packages and data: + import matplotlib.pyplot as plt + import numpy as np + import xarray as xr + from cartopy.crs import Orthographic, PlateCarree + from cartopy.feature import LAND + from matplotlib.gridspec import GridSpec + + import xeofs as xe + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 19-20 + +Create 2 different DataArrays + +.. GENERATED FROM PYTHON SOURCE LINES 20-25 + +.. code-block:: default + + + t2m = xr.tutorial.load_dataset("air_temperature")["air"] + da1 = t2m.isel(lon=slice(0, 26)) + da2 = t2m.isel(lon=slice(27, None)) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 26-27 + +Perform MCA + +.. GENERATED FROM PYTHON SOURCE LINES 27-31 + +.. code-block:: default + + + mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) + mca.fit(da1, da2, dim="time") + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 32-33 + +Apply Varimax-rotation to MCA solution + +.. GENERATED FROM PYTHON SOURCE LINES 33-37 + +.. code-block:: default + + + rot = xe.cross.MCARotator(n_modes=10) + rot.fit(mca) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 38-40 + +Get rotated singular vectors, projections (PCs), homogeneous and heterogeneous +patterns: + +.. GENERATED FROM PYTHON SOURCE LINES 40-46 + +.. 
code-block:: default + + + singular_vectors = rot.components() + scores = rot.scores() + hom_pats, pvals_hom = rot.homogeneous_patterns() + het_pats, pvals_het = rot.heterogeneous_patterns() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 47-51 + +When two fields are expected, the output of the above methods is a list of +length 2, with the first and second entry containing the relevant object for +``X`` and ``Y``. For example, the p-values obtained from the two-sided t-test +for the homogeneous patterns of ``X`` are: + +.. GENERATED FROM PYTHON SOURCE LINES 51-54 + +.. code-block:: default + + + pvals_hom[0] + + + + + + +.. raw:: html + +
    <xarray.DataArray 'pvalues_of_left_homogeneous_patterns' (mode: 10, lat: 25,
    +                                                              lon: 26)>
    +    array([[[1.55655258e-089, 1.55392044e-068, 7.80119302e-062, ...,
    +             2.23581515e-050, 8.33308320e-120, 6.00836750e-251],
    +            [2.11872977e-114, 9.49780753e-082, 3.11167453e-056, ...,
    +             6.65183702e-085, 1.64000665e-088, 1.04300755e-106],
    +            [7.79953584e-175, 1.21595072e-154, 4.65953803e-091, ...,
    +             1.38541944e-037, 4.80775593e-072, 1.33473247e-185],
    +            ...,
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             0.00000000e+000, 0.00000000e+000, 0.00000000e+000]],
    +
    +           [[0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             4.94162415e-127, 8.44446124e-042, 1.03146716e-010],
    +            [0.00000000e+000, 0.00000000e+000, 0.00000000e+000, ...,
    +             1.73955755e-006, 4.63745686e-001, 7.64186484e-001],
    +            [2.33739357e-315, 5.08908579e-295, 0.00000000e+000, ...,
    +             3.29248281e-031, 3.24235210e-009, 2.47163748e-013],
    +    ...
    +            [1.08624311e-031, 4.60559037e-034, 3.62929634e-035, ...,
    +             7.70733680e-008, 1.69130069e-007, 8.67958275e-007],
    +            [1.51071779e-017, 3.36574156e-018, 3.12910697e-018, ...,
    +             1.32355292e-002, 2.10073640e-002, 3.66735430e-002],
    +            [1.51780815e-012, 1.69354791e-013, 4.50343023e-014, ...,
    +             7.46501624e-001, 7.70929744e-001, 8.17168088e-001]],
    +
    +           [[1.24025980e-001, 3.39107580e-002, 1.79221922e-003, ...,
    +             6.87092234e-001, 7.72235373e-001, 5.05983136e-001],
    +            [2.50157622e-002, 1.60428304e-002, 9.88481378e-004, ...,
    +             8.65538505e-001, 2.93513835e-001, 5.20600556e-002],
    +            [9.83552316e-003, 8.72086401e-002, 1.50031876e-001, ...,
    +             2.52774242e-001, 9.58005017e-003, 8.41498523e-005],
    +            ...,
    +            [3.04190682e-009, 5.42503823e-010, 1.40613929e-010, ...,
    +             2.01063167e-004, 2.26427972e-003, 2.14215655e-002],
    +            [2.50500607e-002, 2.25942528e-002, 2.38626492e-002, ...,
    +             3.52630729e-004, 2.33815544e-003, 1.47774024e-002],
    +            [9.70026611e-001, 9.87391731e-001, 9.61937720e-001, ...,
    +             5.70511628e-003, 1.52611723e-002, 3.62942940e-002]]])
    +    Coordinates:
    +      * lat      (lat) float32 15.0 17.5 20.0 22.5 25.0 ... 65.0 67.5 70.0 72.5 75.0
    +      * lon      (lon) float32 200.0 202.5 205.0 207.5 ... 255.0 257.5 260.0 262.5
    +      * mode     (mode) int64 1 2 3 4 5 6 7 8 9 10
    +    Attributes:
    +        model:     Rotated MCA
    +        software:  xeofs
    +        version:   1.2.0
    +        date:      2024-09-02 02:52:51
    +        n_modes:   10
    +        power:     1
    +        max_iter:  1000
    +        rtol:      1e-08
    +        compute:   True
    +
    +
    +
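The list-of-two convention described above is easy to trip over when only one field is of interest: every pattern- or score-returning method gives back one object per input field, in the order they were passed to ``fit``. Below is a minimal sketch (not part of the generated example) of unpacking the ``X``/``Y`` entries and masking insignificant grid cells, assuming the same ``air_temperature`` tutorial data used in the example:

```python
import xarray as xr

import xeofs as xe

# Same setup as the gallery example: split the tutorial data into two fields
t2m = xr.tutorial.load_dataset("air_temperature")["air"]
da1 = t2m.isel(lon=slice(0, 26))     # "X" field
da2 = t2m.isel(lon=slice(27, None))  # "Y" field

mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)
mca.fit(da1, da2, dim="time")

rot = xe.cross.MCARotator(n_modes=10)
rot.fit(mca)

# One entry per input field, in fit order: index 0 -> da1 (X), index 1 -> da2 (Y)
hom_pats, pvals_hom = rot.homogeneous_patterns()
hom_x, hom_y = hom_pats
pvals_x, pvals_y = pvals_hom

# Keep only grid cells that are significant at the 5 % level for the X field
hom_x_sig = hom_x.where(pvals_x < 0.05)
```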
    + +.. GENERATED FROM PYTHON SOURCE LINES 55-56 + +Create a mask to identifiy where p-values are below 0.05 + +.. GENERATED FROM PYTHON SOURCE LINES 56-61 + +.. code-block:: default + + + hom_mask = [values < 0.05 for values in pvals_hom] + het_mask = [values < 0.05 for values in pvals_het] + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 62-63 + +Plot some relevant quantities of mode 2. + +.. GENERATED FROM PYTHON SOURCE LINES 63-119 + +.. code-block:: default + + + lonlats = [ + np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values), + np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values), + ] + proj = [ + Orthographic(central_latitude=30, central_longitude=-120), + Orthographic(central_latitude=30, central_longitude=-60), + ] + kwargs1 = {"cmap": "BrBG", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} + kwargs2 = {"cmap": "RdBu", "vmin": -1, "vmax": 1, "transform": PlateCarree()} + + mode = 2 + + fig = plt.figure(figsize=(7, 14)) + gs = GridSpec(5, 2) + ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)] + ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)] + ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)] + ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)] + + for i, a in enumerate(ax1): + singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1) + + for i, a in enumerate(ax2): + hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + hom_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + for i, a in enumerate(ax3): + het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + het_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + + for i, a in enumerate(ax4): + scores[i].sel(mode=mode).plot(ax=a) + a.set_xlabel("") + + + for a in np.ravel([ax1, ax2, ax3]): + a.coastlines(color=".5") + a.add_feature(LAND) + + plt.tight_layout() + plt.savefig("rotated_mca.jpg") + + + +.. image-sg:: /auto_examples/2cross/images/sphx_glr_plot_rotated_mca_001.png + :alt: mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2, mode = 2 + :srcset: /auto_examples/2cross/images/sphx_glr_plot_rotated_mca_001.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 6.533 seconds) + + +.. _sphx_glr_download_auto_examples_2cross_plot_rotated_mca.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_rotated_mca.py ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rotated_mca.ipynb ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/auto_examples/2cross/plot_rotated_mca_codeobj.pickle b/docs/auto_examples/2cross/plot_rotated_mca_codeobj.pickle new file mode 100644 index 00000000..0442f632 Binary files /dev/null and b/docs/auto_examples/2cross/plot_rotated_mca_codeobj.pickle differ diff --git a/docs/auto_examples/2cross/sg_execution_times.rst b/docs/auto_examples/2cross/sg_execution_times.rst new file mode 100644 index 00000000..f92421a8 --- /dev/null +++ b/docs/auto_examples/2cross/sg_execution_times.rst @@ -0,0 +1,15 @@ + +:orphan: + +.. 
_sphx_glr_auto_examples_2cross_sg_execution_times: + + +Computation times +================= +**00:16.803** total execution time for **auto_examples_2cross** files: + ++------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_2cross_plot_mca.py` (``plot_mca.py``) | 00:10.270 | 0.0 MB | ++------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_2cross_plot_rotated_mca.py` (``plot_rotated_mca.py``) | 00:06.533 | 0.0 MB | ++------------------------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/2multi/images/sphx_glr_plot_cca_001.png b/docs/auto_examples/2multi/images/sphx_glr_plot_cca_001.png index 5a4c2a6e..10c26abb 100644 Binary files a/docs/auto_examples/2multi/images/sphx_glr_plot_cca_001.png and b/docs/auto_examples/2multi/images/sphx_glr_plot_cca_001.png differ diff --git a/docs/auto_examples/2multi/images/sphx_glr_plot_cca_002.png b/docs/auto_examples/2multi/images/sphx_glr_plot_cca_002.png index a02a021f..89687459 100644 Binary files a/docs/auto_examples/2multi/images/sphx_glr_plot_cca_002.png and b/docs/auto_examples/2multi/images/sphx_glr_plot_cca_002.png differ diff --git a/docs/auto_examples/2multi/images/sphx_glr_plot_mca_001.png b/docs/auto_examples/2multi/images/sphx_glr_plot_mca_001.png index 1755ad7c..3883e3f3 100644 Binary files a/docs/auto_examples/2multi/images/sphx_glr_plot_mca_001.png and b/docs/auto_examples/2multi/images/sphx_glr_plot_mca_001.png differ diff --git a/docs/auto_examples/2multi/images/sphx_glr_plot_rotated_mca_001.png b/docs/auto_examples/2multi/images/sphx_glr_plot_rotated_mca_001.png index de211f10..ef2ea0ac 100644 Binary files a/docs/auto_examples/2multi/images/sphx_glr_plot_rotated_mca_001.png and b/docs/auto_examples/2multi/images/sphx_glr_plot_rotated_mca_001.png differ diff --git a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_cca_thumb.png b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_cca_thumb.png index 9c16f7d5..2e99a4d0 100644 Binary files a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_cca_thumb.png and b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_cca_thumb.png differ diff --git a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_mca_thumb.png b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_mca_thumb.png index b4771316..7949e785 100644 Binary files a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_mca_thumb.png and b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_mca_thumb.png differ diff --git a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_rotated_mca_thumb.png b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_rotated_mca_thumb.png index 292b6200..a3ed60a4 100644 Binary files a/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_rotated_mca_thumb.png and b/docs/auto_examples/2multi/images/thumb/sphx_glr_plot_rotated_mca_thumb.png differ diff --git a/docs/auto_examples/2multi/plot_cca.ipynb b/docs/auto_examples/2multi/plot_cca.ipynb index be266233..f8e3b7f4 100644 --- a/docs/auto_examples/2multi/plot_cca.ipynb +++ b/docs/auto_examples/2multi/plot_cca.ipynb @@ -22,12 +22,12 @@ "metadata": {}, "outputs": [], "source": [ - "import xarray as xr\n", - "import xeofs as xe\n", - "\n", + "import cartopy.crs as ccrs\n", "import matplotlib.pyplot as plt\n", + "import xarray as xr\n", "from matplotlib.gridspec import GridSpec\n", - "import cartopy.crs as ccrs" 
+ "\n", + "import xeofs as xe" ] }, { @@ -102,7 +102,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = xe.models.CCA(\n", + "model = xe.multi.CCA(\n", " n_modes=2,\n", " use_coslat=True,\n", " pca=True,\n", @@ -189,7 +189,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/2multi/plot_cca.py b/docs/auto_examples/2multi/plot_cca.py index e57af825..70f6a68e 100644 --- a/docs/auto_examples/2multi/plot_cca.py +++ b/docs/auto_examples/2multi/plot_cca.py @@ -11,12 +11,12 @@ First, we'll import the necessary modules. """ -import xarray as xr -import xeofs as xe - +import cartopy.crs as ccrs import matplotlib.pyplot as plt +import xarray as xr from matplotlib.gridspec import GridSpec -import cartopy.crs as ccrs + +import xeofs as xe # %% # Next, we load the data and compute the SST anomalies. This removes the @@ -56,7 +56,7 @@ # Note that if our initial PCA modes don't hit the 90% variance target, ``xeofs`` # will give a warning. -model = xe.models.CCA( +model = xe.multi.CCA( n_modes=2, use_coslat=True, pca=True, diff --git a/docs/auto_examples/2multi/plot_cca.py.md5 b/docs/auto_examples/2multi/plot_cca.py.md5 index d0fdf998..40b748fb 100644 --- a/docs/auto_examples/2multi/plot_cca.py.md5 +++ b/docs/auto_examples/2multi/plot_cca.py.md5 @@ -1 +1 @@ -89f56b4ed3606f9c5a94189c1c497250 \ No newline at end of file +ccd6cf17e9ee3ea88ba470a3180fe224 \ No newline at end of file diff --git a/docs/auto_examples/2multi/plot_cca.rst b/docs/auto_examples/2multi/plot_cca.rst index 51cbe8dc..466135c9 100644 --- a/docs/auto_examples/2multi/plot_cca.rst +++ b/docs/auto_examples/2multi/plot_cca.rst @@ -31,15 +31,15 @@ First, we'll import the necessary modules. .. GENERATED FROM PYTHON SOURCE LINES 13-21 -.. code-block:: Python +.. code-block:: default - import xarray as xr - import xeofs as xe - + import cartopy.crs as ccrs import matplotlib.pyplot as plt + import xarray as xr from matplotlib.gridspec import GridSpec - import cartopy.crs as ccrs + + import xeofs as xe @@ -55,7 +55,7 @@ monthly climatologies, so the seasonal cycle doesn't impact our CCA. .. GENERATED FROM PYTHON SOURCE LINES 24-29 -.. code-block:: Python +.. code-block:: default sst = xr.tutorial.load_dataset("ersstv5").sst @@ -75,7 +75,7 @@ Now, we define the three regions of interest and store them in a list. .. GENERATED FROM PYTHON SOURCE LINES 31-38 -.. code-block:: Python +.. code-block:: default indian = sst.sel(lon=slice(35, 115), lat=slice(30, -30)) @@ -115,10 +115,10 @@ will give a warning. .. GENERATED FROM PYTHON SOURCE LINES 58-70 -.. code-block:: Python +.. code-block:: default - model = xe.models.CCA( + model = xe.multi.CCA( n_modes=2, use_coslat=True, pca=True, @@ -142,7 +142,7 @@ Let's look at the canonical loadings (components) of the first mode. .. GENERATED FROM PYTHON SOURCE LINES 72-95 -.. code-block:: Python +.. code-block:: default mode = 1 @@ -185,7 +185,7 @@ And lastly, we'll check out the canonical variates (scores) of the first mode. .. GENERATED FROM PYTHON SOURCE LINES 97-103 -.. code-block:: Python +.. code-block:: default fig, ax = plt.subplots(figsize=(12, 4)) @@ -207,14 +207,14 @@ And lastly, we'll check out the canonical variates (scores) of the first mode. .. code-block:: none - + .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 1.632 seconds) + **Total running time of the script:** (0 minutes 2.798 seconds) .. 
_sphx_glr_download_auto_examples_2multi_plot_cca.py: @@ -223,14 +223,17 @@ And lastly, we'll check out the canonical variates (scores) of the first mode. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_cca.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_cca.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_cca.ipynb ` + .. only:: html diff --git a/docs/auto_examples/2multi/plot_cca_codeobj.pickle b/docs/auto_examples/2multi/plot_cca_codeobj.pickle index e9a23cf3..05060618 100644 Binary files a/docs/auto_examples/2multi/plot_cca_codeobj.pickle and b/docs/auto_examples/2multi/plot_cca_codeobj.pickle differ diff --git a/docs/auto_examples/2multi/plot_mca.ipynb b/docs/auto_examples/2multi/plot_mca.ipynb index 0cdfe016..16d73cb7 100644 --- a/docs/auto_examples/2multi/plot_mca.ipynb +++ b/docs/auto_examples/2multi/plot_mca.ipynb @@ -17,14 +17,14 @@ "outputs": [], "source": [ "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", "from cartopy.crs import Orthographic, PlateCarree\n", "from cartopy.feature import LAND\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import MCA" + "import xeofs as xe" ] }, { @@ -60,7 +60,7 @@ "metadata": {}, "outputs": [], "source": [ - "mca = MCA(n_modes=20, standardize=False, use_coslat=True)\n", + "mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)\n", "mca.fit(da1, da2, dim=\"time\")" ] }, @@ -211,7 +211,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/2multi/plot_mca.py b/docs/auto_examples/2multi/plot_mca.py index d3b48de7..a93d5546 100644 --- a/docs/auto_examples/2multi/plot_mca.py +++ b/docs/auto_examples/2multi/plot_mca.py @@ -6,14 +6,14 @@ """ # Load packages and data: +import matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec -from xeofs.models import MCA +import xeofs as xe # %% # Create 2 different DataArrays @@ -25,7 +25,7 @@ # %% # Perform MCA -mca = MCA(n_modes=20, standardize=False, use_coslat=True) +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") # %% diff --git a/docs/auto_examples/2multi/plot_mca.py.md5 b/docs/auto_examples/2multi/plot_mca.py.md5 index 99600e23..d980cde7 100644 --- a/docs/auto_examples/2multi/plot_mca.py.md5 +++ b/docs/auto_examples/2multi/plot_mca.py.md5 @@ -1 +1 @@ -76e68784ad5c6cb6f7eb0924c3019da9 \ No newline at end of file +e696fb777ef84ec447201ca9c01d1dfe \ No newline at end of file diff --git a/docs/auto_examples/2multi/plot_mca.rst b/docs/auto_examples/2multi/plot_mca.rst index 06fa7336..ec378a1f 100644 --- a/docs/auto_examples/2multi/plot_mca.rst +++ b/docs/auto_examples/2multi/plot_mca.rst @@ -25,18 +25,18 @@ Maximum Covariance Analysis (MCA) between two data sets. .. GENERATED FROM PYTHON SOURCE LINES 7-18 -.. code-block:: Python +.. 
code-block:: default # Load packages and data: + import matplotlib.pyplot as plt import numpy as np import xarray as xr - import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree from cartopy.feature import LAND + from matplotlib.gridspec import GridSpec - from xeofs.models import MCA + import xeofs as xe @@ -51,7 +51,7 @@ Create 2 different DataArrays .. GENERATED FROM PYTHON SOURCE LINES 20-25 -.. code-block:: Python +.. code-block:: default t2m = xr.tutorial.load_dataset("air_temperature")["air"] @@ -71,10 +71,10 @@ Perform MCA .. GENERATED FROM PYTHON SOURCE LINES 27-31 -.. code-block:: Python +.. code-block:: default - mca = MCA(n_modes=20, standardize=False, use_coslat=True) + mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") @@ -86,7 +86,7 @@ Perform MCA .. code-block:: none - + @@ -97,7 +97,7 @@ patterns: .. GENERATED FROM PYTHON SOURCE LINES 34-40 -.. code-block:: Python +.. code-block:: default singular_vectors = mca.components() @@ -121,7 +121,7 @@ for the homogeneous patterns of ``X`` are: .. GENERATED FROM PYTHON SOURCE LINES 45-48 -.. code-block:: Python +.. code-block:: default pvals_hom[0] @@ -498,31 +498,31 @@ for the homogeneous patterns of ``X`` are: fill: currentColor; }
    <xarray.DataArray 'pvalues_of_left_homogeneous_patterns' (mode: 20, lat: 25,
    -                                                              lon: 26)> Size: 104kB
    -    7.302e-298 4.926e-268 8.327e-265 3.64e-251 ... 0.04256 0.08683 0.1689 0.2897
    +                                                              lon: 26)>
    +    0.0 3.288e-290 2.871e-286 1.598e-271 ... 0.04189 0.08527 0.1659 0.2852
         Coordinates:
    -      * lat      (lat) float32 100B 15.0 17.5 20.0 22.5 25.0 ... 67.5 70.0 72.5 75.0
    -      * lon      (lon) float32 104B 200.0 202.5 205.0 207.5 ... 257.5 260.0 262.5
    -      * mode     (mode) int64 160B 1 2 3 4 5 6 7 8 9 ... 12 13 14 15 16 17 18 19 20
    -    Attributes: (12/16)
    -        model:          MCA
    -        software:       xeofs
    -        version:        2.3.2
    -        date:           2024-03-31 21:13:25
    -        n_modes:        20
    -        center:         True
    -        ...             ...
    -        compute:        True
    -        sample_name:    sample
    -        feature_name:   feature
    -        solver:         auto
    -        random_state:   None
    -        solver_kwargs:  {}
  • model : Maximum Covariance Analysis · software : xeofs · version : 1.2.0 · date : 2024-09-02 02:30:55
  • n_modes : 20 · center : ['True', 'True'] · standardize : ['False', 'False'] · use_coslat : ['True', 'True'] · check_nans : ['True', 'True']
  • use_pca : ['True', 'True'] · n_pca_modes : [0.999, 0.999] · pca_init_rank_reduction : [0.3, 0.3] · alpha : [1.0, 1.0]
  • sample_name : sample · feature_name : ['feature1', 'feature2'] · random_state : None · compute : True · solver : auto


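The attribute block above simply echoes the estimator's configuration into the output metadata. If those settings are needed programmatically rather than read off the repr, earlier xeofs releases expose them through a scikit-learn style ``get_params()``, and the MCA classes report the share of squared covariance per mode via ``squared_covariance_fraction()``. A small sketch, assuming both methods keep their names under the new ``xe.cross`` namespace:

```python
# Sketch only: assumes get_params() and squared_covariance_fraction() carry
# over unchanged from the pre-refactor xeofs.models API.
params = mca.get_params()               # dict of the hyperparameters shown above
print(params["n_modes"], params["use_coslat"])

scf = mca.squared_covariance_fraction() # DataArray with one value per mode
print(scf.sel(mode=1).item())
```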
  • @@ -579,7 +579,7 @@ Create a mask to identifiy where p-values are below 0.05 .. GENERATED FROM PYTHON SOURCE LINES 50-55 -.. code-block:: Python +.. code-block:: default hom_mask = [values < 0.05 for values in pvals_hom] @@ -599,7 +599,7 @@ Plot some relevant quantities of mode 2. .. GENERATED FROM PYTHON SOURCE LINES 57-113 -.. code-block:: Python +.. code-block:: default lonlats = [ @@ -666,20 +666,13 @@ Plot some relevant quantities of mode 2. :class: sphx-glr-single-img -.. rst-class:: sphx-glr-script-out - - .. code-block:: none - - /home/slevang/miniconda3/envs/xeofs-docs/lib/python3.11/site-packages/cartopy/io/__init__.py:241: DownloadWarning: Downloading: https://naturalearth.s3.amazonaws.com/110m_physical/ne_110m_land.zip - warnings.warn(f'Downloading: {url}', DownloadWarning) - .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 3.519 seconds) + **Total running time of the script:** (0 minutes 8.251 seconds) .. _sphx_glr_download_auto_examples_2multi_plot_mca.py: @@ -688,14 +681,17 @@ Plot some relevant quantities of mode 2. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_mca.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_mca.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_mca.ipynb ` + .. only:: html diff --git a/docs/auto_examples/2multi/plot_mca_codeobj.pickle b/docs/auto_examples/2multi/plot_mca_codeobj.pickle index 8279095a..ec889c42 100644 Binary files a/docs/auto_examples/2multi/plot_mca_codeobj.pickle and b/docs/auto_examples/2multi/plot_mca_codeobj.pickle differ diff --git a/docs/auto_examples/2multi/plot_rotated_mca.ipynb b/docs/auto_examples/2multi/plot_rotated_mca.ipynb index 8bc915f4..e69caebc 100644 --- a/docs/auto_examples/2multi/plot_rotated_mca.ipynb +++ b/docs/auto_examples/2multi/plot_rotated_mca.ipynb @@ -17,14 +17,14 @@ "outputs": [], "source": [ "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", "from cartopy.crs import Orthographic, PlateCarree\n", "from cartopy.feature import LAND\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import MCA, MCARotator" + "import xeofs as xe" ] }, { @@ -60,7 +60,7 @@ "metadata": {}, "outputs": [], "source": [ - "mca = MCA(n_modes=20, standardize=False, use_coslat=True)\n", + "mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True)\n", "mca.fit(da1, da2, dim=\"time\")" ] }, @@ -78,7 +78,7 @@ "metadata": {}, "outputs": [], "source": [ - "rot = MCARotator(n_modes=10)\n", + "rot = xe.cross.MCARotator(n_modes=10)\n", "rot.fit(mca)" ] }, @@ -229,7 +229,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/2multi/plot_rotated_mca.py b/docs/auto_examples/2multi/plot_rotated_mca.py index a7ec89fe..2ae0c501 100644 --- a/docs/auto_examples/2multi/plot_rotated_mca.py +++ b/docs/auto_examples/2multi/plot_rotated_mca.py @@ -6,14 +6,14 @@ """ # Load packages and data: +import matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, 
PlateCarree from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec -from xeofs.models import MCA, MCARotator +import xeofs as xe # %% # Create 2 different DataArrays @@ -25,13 +25,13 @@ # %% # Perform MCA -mca = MCA(n_modes=20, standardize=False, use_coslat=True) +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") # %% # Apply Varimax-rotation to MCA solution -rot = MCARotator(n_modes=10) +rot = xe.cross.MCARotator(n_modes=10) rot.fit(mca) # %% diff --git a/docs/auto_examples/2multi/plot_rotated_mca.py.md5 b/docs/auto_examples/2multi/plot_rotated_mca.py.md5 index c25ed037..deea9105 100644 --- a/docs/auto_examples/2multi/plot_rotated_mca.py.md5 +++ b/docs/auto_examples/2multi/plot_rotated_mca.py.md5 @@ -1 +1 @@ -2a8fc6d56ead0b05f3f5c909c0e07bbb \ No newline at end of file +00a21b73b61a542faad2eabd27cfcdf8 \ No newline at end of file diff --git a/docs/auto_examples/2multi/plot_rotated_mca.rst b/docs/auto_examples/2multi/plot_rotated_mca.rst index 902df371..8036b097 100644 --- a/docs/auto_examples/2multi/plot_rotated_mca.rst +++ b/docs/auto_examples/2multi/plot_rotated_mca.rst @@ -25,18 +25,18 @@ Rotated Maximum Covariance Analysis (MCA) between two data sets. .. GENERATED FROM PYTHON SOURCE LINES 7-18 -.. code-block:: Python +.. code-block:: default # Load packages and data: + import matplotlib.pyplot as plt import numpy as np import xarray as xr - import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree from cartopy.feature import LAND + from matplotlib.gridspec import GridSpec - from xeofs.models import MCA, MCARotator + import xeofs as xe @@ -51,7 +51,7 @@ Create 2 different DataArrays .. GENERATED FROM PYTHON SOURCE LINES 20-25 -.. code-block:: Python +.. code-block:: default t2m = xr.tutorial.load_dataset("air_temperature")["air"] @@ -71,10 +71,10 @@ Perform MCA .. GENERATED FROM PYTHON SOURCE LINES 27-31 -.. code-block:: Python +.. code-block:: default - mca = MCA(n_modes=20, standardize=False, use_coslat=True) + mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) mca.fit(da1, da2, dim="time") @@ -86,7 +86,7 @@ Perform MCA .. code-block:: none - + @@ -96,10 +96,10 @@ Apply Varimax-rotation to MCA solution .. GENERATED FROM PYTHON SOURCE LINES 33-37 -.. code-block:: Python +.. code-block:: default - rot = MCARotator(n_modes=10) + rot = xe.cross.MCARotator(n_modes=10) rot.fit(mca) @@ -111,7 +111,7 @@ Apply Varimax-rotation to MCA solution .. code-block:: none - + @@ -122,7 +122,7 @@ patterns: .. GENERATED FROM PYTHON SOURCE LINES 40-46 -.. code-block:: Python +.. code-block:: default singular_vectors = rot.components() @@ -146,7 +146,7 @@ for the homogeneous patterns of ``X`` are: .. GENERATED FROM PYTHON SOURCE LINES 51-54 -.. code-block:: Python +.. code-block:: default pvals_hom[0] @@ -523,28 +523,27 @@ for the homogeneous patterns of ``X`` are: fill: currentColor; }
    <xarray.DataArray 'pvalues_of_left_homogeneous_patterns' (mode: 10, lat: 25,
    -                                                              lon: 26)> Size: 52kB
    -    5.921e-81 4.092e-64 6.235e-58 5.975e-49 ... 0.001974 0.005479 0.01494 0.03604
    +                                                              lon: 26)>
    +    1.551e-89 1.547e-68 7.776e-62 2.972e-52 ... 0.002105 0.005704 0.01526 0.03629
         Coordinates:
    -      * lat      (lat) float32 100B 15.0 17.5 20.0 22.5 25.0 ... 67.5 70.0 72.5 75.0
    -      * lon      (lon) float32 104B 200.0 202.5 205.0 207.5 ... 257.5 260.0 262.5
    -      * mode     (mode) int64 80B 1 2 3 4 5 6 7 8 9 10
    +      * lat      (lat) float32 15.0 17.5 20.0 22.5 25.0 ... 65.0 67.5 70.0 72.5 75.0
    +      * lon      (lon) float32 200.0 202.5 205.0 207.5 ... 255.0 257.5 260.0 262.5
    +      * mode     (mode) int64 1 2 3 4 5 6 7 8 9 10
         Attributes:
    -        model:             Rotated MCA
    -        n_modes:           10
    -        power:             1
    -        max_iter:          1000
    -        rtol:              1e-08
    -        squared_loadings:  False
    -        compute:           True
    -        software:          xeofs
    -        version:           2.3.2
    -        date:              2024-03-31 21:13:29
  • model : Rotated MCA · software : xeofs · version : 1.2.0 · date : 2024-09-02 02:31:13
  • n_modes : 10 · power : 1 · max_iter : 1000 · rtol : 1e-08 · compute : True


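The ``power`` attribute records the rotation criterion: ``power : 1`` is the Varimax rotation used in this example, while an integer larger than 1 switches to a Promax (oblique) rotation; ``max_iter`` and ``rtol`` control the convergence of the rotation algorithm. A minimal sketch of a Promax variant, reusing the fitted ``mca`` from the example:

```python
# Promax rotation sketch: power > 1 selects Promax instead of Varimax.
rot_promax = xe.cross.MCARotator(n_modes=10, power=4, max_iter=1000, rtol=1e-8)
rot_promax.fit(mca)
promax_comps = rot_promax.components()  # one DataArray per input field
```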
  • @@ -600,7 +599,7 @@ Create a mask to identifiy where p-values are below 0.05 .. GENERATED FROM PYTHON SOURCE LINES 56-61 -.. code-block:: Python +.. code-block:: default hom_mask = [values < 0.05 for values in pvals_hom] @@ -620,7 +619,7 @@ Plot some relevant quantities of mode 2. .. GENERATED FROM PYTHON SOURCE LINES 63-119 -.. code-block:: Python +.. code-block:: default lonlats = [ @@ -693,7 +692,7 @@ Plot some relevant quantities of mode 2. .. rst-class:: sphx-glr-timing - **Total running time of the script:** (0 minutes 2.207 seconds) + **Total running time of the script:** (0 minutes 13.021 seconds) .. _sphx_glr_download_auto_examples_2multi_plot_rotated_mca.py: @@ -702,14 +701,17 @@ Plot some relevant quantities of mode 2. .. container:: sphx-glr-footer sphx-glr-footer-example - .. container:: sphx-glr-download sphx-glr-download-jupyter - :download:`Download Jupyter notebook: plot_rotated_mca.ipynb ` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_rotated_mca.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rotated_mca.ipynb ` + .. only:: html diff --git a/docs/auto_examples/2multi/plot_rotated_mca_codeobj.pickle b/docs/auto_examples/2multi/plot_rotated_mca_codeobj.pickle index ea3553d3..1d2dc827 100644 Binary files a/docs/auto_examples/2multi/plot_rotated_mca_codeobj.pickle and b/docs/auto_examples/2multi/plot_rotated_mca_codeobj.pickle differ diff --git a/docs/auto_examples/2multi/sg_execution_times.rst b/docs/auto_examples/2multi/sg_execution_times.rst index 815af82c..99753050 100644 --- a/docs/auto_examples/2multi/sg_execution_times.rst +++ b/docs/auto_examples/2multi/sg_execution_times.rst @@ -6,38 +6,12 @@ Computation times ================= -**00:07.357** total execution time for 3 files **from auto_examples/2multi**: - -.. container:: - - .. raw:: html - - - - - - - - .. 
list-table:: - :header-rows: 1 - :class: table table-striped sg-datatable - - * - Example - - Time - - Mem (MB) - * - :ref:`sphx_glr_auto_examples_2multi_plot_mca.py` (``plot_mca.py``) - - 00:03.519 - - 0.0 - * - :ref:`sphx_glr_auto_examples_2multi_plot_rotated_mca.py` (``plot_rotated_mca.py``) - - 00:02.207 - - 0.0 - * - :ref:`sphx_glr_auto_examples_2multi_plot_cca.py` (``plot_cca.py``) - - 00:01.632 - - 0.0 +**00:02.798** total execution time for **auto_examples_2multi** files: + ++------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_2multi_plot_cca.py` (``plot_cca.py``) | 00:02.798 | 0.0 MB | ++------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_2multi_plot_mca.py` (``plot_mca.py``) | 00:00.000 | 0.0 MB | ++------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_2multi_plot_rotated_mca.py` (``plot_rotated_mca.py``) | 00:00.000 | 0.0 MB | ++------------------------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/3multi/images/sphx_glr_plot_cca_001.png b/docs/auto_examples/3multi/images/sphx_glr_plot_cca_001.png new file mode 100644 index 00000000..10c26abb Binary files /dev/null and b/docs/auto_examples/3multi/images/sphx_glr_plot_cca_001.png differ diff --git a/docs/auto_examples/3multi/images/sphx_glr_plot_cca_002.png b/docs/auto_examples/3multi/images/sphx_glr_plot_cca_002.png new file mode 100644 index 00000000..89687459 Binary files /dev/null and b/docs/auto_examples/3multi/images/sphx_glr_plot_cca_002.png differ diff --git a/docs/auto_examples/3multi/images/thumb/sphx_glr_plot_cca_thumb.png b/docs/auto_examples/3multi/images/thumb/sphx_glr_plot_cca_thumb.png new file mode 100644 index 00000000..2e99a4d0 Binary files /dev/null and b/docs/auto_examples/3multi/images/thumb/sphx_glr_plot_cca_thumb.png differ diff --git a/docs/auto_examples/3multi/index.rst b/docs/auto_examples/3multi/index.rst new file mode 100644 index 00000000..30f52aef --- /dev/null +++ b/docs/auto_examples/3multi/index.rst @@ -0,0 +1,41 @@ + + +.. _sphx_glr_auto_examples_3multi: + +3 | Multi-Set Analysis +======================== + + + +.. raw:: html + +
    + + +.. raw:: html + +
    + +.. only:: html + + .. image:: /auto_examples/3multi/images/thumb/sphx_glr_plot_cca_thumb.png + :alt: + + :ref:`sphx_glr_auto_examples_3multi_plot_cca.py` + +.. raw:: html + +
    Canonical Correlation Analysis
    +
    + + +.. raw:: html + +
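The CCA entry added to this gallery leans on PCA preprocessing, and in the example below ``init_pca_modes`` is passed as a float, which is interpreted as a fraction of the data matrix's rank. A small worked sketch of that arithmetic, with purely hypothetical array sizes:

```python
# Hypothetical sizes for illustration only (not taken from the example):
# 636 monthly time steps and 4000 spatial grid points per dataset.
n_samples, n_features = 636, 4000
rank = min(n_samples, n_features)

init_pca_modes = 0.30                      # float -> fraction of the rank
n_init_modes = int(init_pca_modes * rank)  # 190 modes computed initially
print(n_init_modes)

# Those initial modes are then truncated to however many are needed to reach
# variance_fraction (90 % in the example) before the CCA itself is computed.
```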
    + + +.. toctree:: + :hidden: + + /auto_examples/3multi/plot_cca + diff --git a/docs/auto_examples/3multi/plot_cca.ipynb b/docs/auto_examples/3multi/plot_cca.ipynb new file mode 100644 index 00000000..f8e3b7f4 --- /dev/null +++ b/docs/auto_examples/3multi/plot_cca.ipynb @@ -0,0 +1,197 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Canonical Correlation Analysis\n", + "\n", + "In this example, we're going to perform a Canonical Correlation Analysis (CCA)\n", + "on three datasets using the ERSSTv5 monthly sea surface temperature (SST) data\n", + "from 1970 to 2022. We divide this data into three areas: the Indian Ocean,\n", + "the Pacific Ocean, and the Atlantic Ocean. Our goal is to perform CCA on these\n", + "regions.\n", + "\n", + "First, we'll import the necessary modules.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import cartopy.crs as ccrs\n", + "import matplotlib.pyplot as plt\n", + "import xarray as xr\n", + "from matplotlib.gridspec import GridSpec\n", + "\n", + "import xeofs as xe" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Next, we load the data and compute the SST anomalies. This removes the\n", + "monthly climatologies, so the seasonal cycle doesn't impact our CCA.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "sst = xr.tutorial.load_dataset(\"ersstv5\").sst\n", + "sst = sst.groupby(\"time.month\") - sst.groupby(\"time.month\").mean(\"time\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, we define the three regions of interest and store them in a list.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "indian = sst.sel(lon=slice(35, 115), lat=slice(30, -30))\n", + "pacific = sst.sel(lon=slice(130, 290), lat=slice(30, -30))\n", + "atlantic = sst.sel(lon=slice(320, 360), lat=slice(70, 10))\n", + "\n", + "data_list = [indian, pacific, atlantic]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We now perform CCA. Since we are dealing with a high-dimensional feature space, we first\n", + "perform PCA to reduce the dimensionality (this is kind of a regularized CCA) by setting\n", + "``pca=True``. By setting the ``variance_fraction`` keyword argument, we specify that we\n", + "want to keep the number of PCA modes that explain 90% of the variance in each of the\n", + "three data sets.\n", + "\n", + "An important parameter is ``init_pca_modes``. It specifies the number\n", + "of PCA modes that are initially compute before truncating them to account for 90 %. If this\n", + "number is small enough, randomized PCAs will be performed instead of the full SVD decomposition\n", + "which is much faster. 
We can also specify ``init_pca_modes`` as a float (0 < x <= 1),\n", + "in which case the number of PCA modes is given by the fraction of the data matrix's rank\n", + "The default is set to 0.75 which will ensure that randomized PCAs are performed.\n", + "\n", + "Given the nature of SST data, we might lower it to something like 0.3, since\n", + "we expect that most of the variance in the data will be explained by a small\n", + "number of PC modes.\n", + "\n", + "Note that if our initial PCA modes don't hit the 90% variance target, ``xeofs``\n", + "will give a warning.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model = xe.multi.CCA(\n", + " n_modes=2,\n", + " use_coslat=True,\n", + " pca=True,\n", + " variance_fraction=0.9,\n", + " init_pca_modes=0.30,\n", + ")\n", + "model.fit(data_list, dim=\"time\")\n", + "components = model.components()\n", + "scores = model.scores()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's look at the canonical loadings (components) of the first mode.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "mode = 1\n", + "\n", + "central_longitudes = [\n", + " indian.lon.median().item(),\n", + " pacific.lon.median().item(),\n", + " pacific.lon.median().item(),\n", + "]\n", + "projections = [ccrs.PlateCarree(central_longitude=lon) for lon in central_longitudes]\n", + "\n", + "fig = plt.figure(figsize=(12, 2.5))\n", + "gs = GridSpec(1, 4, figure=fig, width_ratios=[2, 4, 1, 0.2])\n", + "axes = [fig.add_subplot(gs[0, i], projection=projections[i]) for i in range(3)]\n", + "cax = fig.add_subplot(1, 4, 4)\n", + "kwargs = dict(transform=ccrs.PlateCarree(), vmin=-1, vmax=1, cmap=\"RdBu_r\", cbar_ax=cax)\n", + "components[0].sel(mode=mode).plot(ax=axes[0], **kwargs)\n", + "components[1].sel(mode=mode).plot(ax=axes[1], **kwargs)\n", + "im = components[2].sel(mode=mode).plot(ax=axes[2], **kwargs)\n", + "fig.colorbar(im, cax=cax, orientation=\"vertical\")\n", + "for ax in axes:\n", + " ax.coastlines()\n", + " ax.set_title(\"\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And lastly, we'll check out the canonical variates (scores) of the first mode.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax = plt.subplots(figsize=(12, 4))\n", + "scores[0].sel(mode=mode).plot(ax=ax, label=\"Indian Ocean\")\n", + "scores[1].sel(mode=mode).plot(ax=ax, label=\"Central Pacific\")\n", + "scores[2].sel(mode=mode).plot(ax=ax, label=\"North Atlantic\")\n", + "ax.legend()" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/2multi/plot_cca.py b/docs/auto_examples/3multi/plot_cca.py similarity index 99% rename from examples/2multi/plot_cca.py rename to docs/auto_examples/3multi/plot_cca.py index e57af825..70f6a68e 100644 --- a/examples/2multi/plot_cca.py +++ b/docs/auto_examples/3multi/plot_cca.py @@ -11,12 +11,12 @@ First, we'll import the necessary modules. 
""" -import xarray as xr -import xeofs as xe - +import cartopy.crs as ccrs import matplotlib.pyplot as plt +import xarray as xr from matplotlib.gridspec import GridSpec -import cartopy.crs as ccrs + +import xeofs as xe # %% # Next, we load the data and compute the SST anomalies. This removes the @@ -56,7 +56,7 @@ # Note that if our initial PCA modes don't hit the 90% variance target, ``xeofs`` # will give a warning. -model = xe.models.CCA( +model = xe.multi.CCA( n_modes=2, use_coslat=True, pca=True, diff --git a/docs/auto_examples/3multi/plot_cca.py.md5 b/docs/auto_examples/3multi/plot_cca.py.md5 new file mode 100644 index 00000000..40b748fb --- /dev/null +++ b/docs/auto_examples/3multi/plot_cca.py.md5 @@ -0,0 +1 @@ +ccd6cf17e9ee3ea88ba470a3180fe224 \ No newline at end of file diff --git a/docs/auto_examples/3multi/plot_cca.rst b/docs/auto_examples/3multi/plot_cca.rst new file mode 100644 index 00000000..7c7b6fce --- /dev/null +++ b/docs/auto_examples/3multi/plot_cca.rst @@ -0,0 +1,242 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "auto_examples/3multi/plot_cca.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_auto_examples_3multi_plot_cca.py: + + +Canonical Correlation Analysis +============================== + +In this example, we're going to perform a Canonical Correlation Analysis (CCA) +on three datasets using the ERSSTv5 monthly sea surface temperature (SST) data +from 1970 to 2022. We divide this data into three areas: the Indian Ocean, +the Pacific Ocean, and the Atlantic Ocean. Our goal is to perform CCA on these +regions. + +First, we'll import the necessary modules. + +.. GENERATED FROM PYTHON SOURCE LINES 13-21 + +.. code-block:: default + + + import cartopy.crs as ccrs + import matplotlib.pyplot as plt + import xarray as xr + from matplotlib.gridspec import GridSpec + + import xeofs as xe + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 22-24 + +Next, we load the data and compute the SST anomalies. This removes the +monthly climatologies, so the seasonal cycle doesn't impact our CCA. + +.. GENERATED FROM PYTHON SOURCE LINES 24-29 + +.. code-block:: default + + + sst = xr.tutorial.load_dataset("ersstv5").sst + sst = sst.groupby("time.month") - sst.groupby("time.month").mean("time") + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 30-31 + +Now, we define the three regions of interest and store them in a list. + +.. GENERATED FROM PYTHON SOURCE LINES 31-38 + +.. code-block:: default + + + indian = sst.sel(lon=slice(35, 115), lat=slice(30, -30)) + pacific = sst.sel(lon=slice(130, 290), lat=slice(30, -30)) + atlantic = sst.sel(lon=slice(320, 360), lat=slice(70, 10)) + + data_list = [indian, pacific, atlantic] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 39-58 + +We now perform CCA. Since we are dealing with a high-dimensional feature space, we first +perform PCA to reduce the dimensionality (this is kind of a regularized CCA) by setting +``pca=True``. By setting the ``variance_fraction`` keyword argument, we specify that we +want to keep the number of PCA modes that explain 90% of the variance in each of the +three data sets. + +An important parameter is ``init_pca_modes``. It specifies the number +of PCA modes that are initially compute before truncating them to account for 90 %. 
If this +number is small enough, randomized PCAs will be performed instead of the full SVD decomposition +which is much faster. We can also specify ``init_pca_modes`` as a float (0 < x <= 1), +in which case the number of PCA modes is given by the fraction of the data matrix's rank +The default is set to 0.75 which will ensure that randomized PCAs are performed. + +Given the nature of SST data, we might lower it to something like 0.3, since +we expect that most of the variance in the data will be explained by a small +number of PC modes. + +Note that if our initial PCA modes don't hit the 90% variance target, ``xeofs`` +will give a warning. + +.. GENERATED FROM PYTHON SOURCE LINES 58-70 + +.. code-block:: default + + + model = xe.multi.CCA( + n_modes=2, + use_coslat=True, + pca=True, + variance_fraction=0.9, + init_pca_modes=0.30, + ) + model.fit(data_list, dim="time") + components = model.components() + scores = model.scores() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 71-72 + +Let's look at the canonical loadings (components) of the first mode. + +.. GENERATED FROM PYTHON SOURCE LINES 72-95 + +.. code-block:: default + + + mode = 1 + + central_longitudes = [ + indian.lon.median().item(), + pacific.lon.median().item(), + pacific.lon.median().item(), + ] + projections = [ccrs.PlateCarree(central_longitude=lon) for lon in central_longitudes] + + fig = plt.figure(figsize=(12, 2.5)) + gs = GridSpec(1, 4, figure=fig, width_ratios=[2, 4, 1, 0.2]) + axes = [fig.add_subplot(gs[0, i], projection=projections[i]) for i in range(3)] + cax = fig.add_subplot(1, 4, 4) + kwargs = dict(transform=ccrs.PlateCarree(), vmin=-1, vmax=1, cmap="RdBu_r", cbar_ax=cax) + components[0].sel(mode=mode).plot(ax=axes[0], **kwargs) + components[1].sel(mode=mode).plot(ax=axes[1], **kwargs) + im = components[2].sel(mode=mode).plot(ax=axes[2], **kwargs) + fig.colorbar(im, cax=cax, orientation="vertical") + for ax in axes: + ax.coastlines() + ax.set_title("") + + + + +.. image-sg:: /auto_examples/3multi/images/sphx_glr_plot_cca_001.png + :alt: plot cca + :srcset: /auto_examples/3multi/images/sphx_glr_plot_cca_001.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 96-97 + +And lastly, we'll check out the canonical variates (scores) of the first mode. + +.. GENERATED FROM PYTHON SOURCE LINES 97-103 + +.. code-block:: default + + + fig, ax = plt.subplots(figsize=(12, 4)) + scores[0].sel(mode=mode).plot(ax=ax, label="Indian Ocean") + scores[1].sel(mode=mode).plot(ax=ax, label="Central Pacific") + scores[2].sel(mode=mode).plot(ax=ax, label="North Atlantic") + ax.legend() + + + +.. image-sg:: /auto_examples/3multi/images/sphx_glr_plot_cca_002.png + :alt: mode = 1 + :srcset: /auto_examples/3multi/images/sphx_glr_plot_cca_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 6.563 seconds) + + +.. _sphx_glr_download_auto_examples_3multi_plot_cca.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_cca.py ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_cca.ipynb ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/auto_examples/3multi/plot_cca_codeobj.pickle b/docs/auto_examples/3multi/plot_cca_codeobj.pickle new file mode 100644 index 00000000..006138e1 Binary files /dev/null and b/docs/auto_examples/3multi/plot_cca_codeobj.pickle differ diff --git a/docs/auto_examples/3multi/sg_execution_times.rst b/docs/auto_examples/3multi/sg_execution_times.rst new file mode 100644 index 00000000..6befdc8e --- /dev/null +++ b/docs/auto_examples/3multi/sg_execution_times.rst @@ -0,0 +1,13 @@ + +:orphan: + +.. _sphx_glr_auto_examples_3multi_sg_execution_times: + + +Computation times +================= +**00:06.563** total execution time for **auto_examples_3multi** files: + ++--------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_3multi_plot_cca.py` (``plot_cca.py``) | 00:06.563 | 0.0 MB | ++--------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/3validation/images/sphx_glr_plot_bootstrap_001.png b/docs/auto_examples/3validation/images/sphx_glr_plot_bootstrap_001.png index 6baca248..4c38317d 100644 Binary files a/docs/auto_examples/3validation/images/sphx_glr_plot_bootstrap_001.png and b/docs/auto_examples/3validation/images/sphx_glr_plot_bootstrap_001.png differ diff --git a/docs/auto_examples/3validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png b/docs/auto_examples/3validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png index 6081e99f..2ca76422 100644 Binary files a/docs/auto_examples/3validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png and b/docs/auto_examples/3validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png differ diff --git a/docs/auto_examples/3validation/plot_bootstrap.ipynb b/docs/auto_examples/3validation/plot_bootstrap.ipynb index a9b7b574..8312c300 100644 --- a/docs/auto_examples/3validation/plot_bootstrap.ipynb +++ b/docs/auto_examples/3validation/plot_bootstrap.ipynb @@ -18,14 +18,13 @@ "outputs": [], "source": [ "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", "import numpy as np\n", "import xarray as xr\n", - "import matplotlib.pyplot as plt\n", - "from matplotlib.gridspec import GridSpec\n", "from cartopy.crs import Orthographic, PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", "\n", - "from xeofs.models import EOF\n", - "from xeofs.validation import EOFBootstrapper" + "import xeofs as xe" ] }, { @@ -51,7 +50,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = EOF(n_modes=5, standardize=False)\n", + "model = xe.single.EOF(n_modes=5, standardize=False)\n", "model.fit(t2m, dim=\"time\")\n", "expvar = model.explained_variance_ratio()\n", "components = model.components()\n", @@ -77,7 +76,7 @@ "source": [ "n_boot = 50\n", "\n", - "bs = EOFBootstrapper(n_bootstraps=n_boot)\n", + "bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot)\n", "bs.fit(model)\n", "bs_expvar = bs.explained_variance()\n", "ci_expvar = bs_expvar.quantile([0.025, 0.975], \"n\") # 95% confidence intervals\n", @@ -174,7 +173,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.8" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/docs/auto_examples/3validation/plot_bootstrap.py b/docs/auto_examples/3validation/plot_bootstrap.py index 02cec5dc..1cce033c 100644 --- a/docs/auto_examples/3validation/plot_bootstrap.py +++ b/docs/auto_examples/3validation/plot_bootstrap.py @@ -7,14 +7,13 @@ 
""" # Load packages and data: +import matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF -from xeofs.validation import EOFBootstrapper +import xeofs as xe # %% @@ -23,7 +22,7 @@ # %% # Perform EOF analysis -model = EOF(n_modes=5, standardize=False) +model = xe.single.EOF(n_modes=5, standardize=False) model.fit(t2m, dim="time") expvar = model.explained_variance_ratio() components = model.components() @@ -38,7 +37,7 @@ n_boot = 50 -bs = EOFBootstrapper(n_bootstraps=n_boot) +bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot) bs.fit(model) bs_expvar = bs.explained_variance() ci_expvar = bs_expvar.quantile([0.025, 0.975], "n") # 95% confidence intervals diff --git a/docs/auto_examples/3validation/plot_bootstrap.py.md5 b/docs/auto_examples/3validation/plot_bootstrap.py.md5 index 8f63d45c..c683ebe5 100644 --- a/docs/auto_examples/3validation/plot_bootstrap.py.md5 +++ b/docs/auto_examples/3validation/plot_bootstrap.py.md5 @@ -1 +1 @@ -e9826d4566fc7f2d94555e72a94c50c8 \ No newline at end of file +dcdf2653c5cb47f1d8828948c6fdda17 \ No newline at end of file diff --git a/docs/auto_examples/3validation/plot_bootstrap.rst b/docs/auto_examples/3validation/plot_bootstrap.rst index e77e8f63..03348627 100644 --- a/docs/auto_examples/3validation/plot_bootstrap.rst +++ b/docs/auto_examples/3validation/plot_bootstrap.rst @@ -24,20 +24,19 @@ Significance testing of EOF analysis via bootstrap Test the significance of individual modes and obtain confidence intervals for both EOFs and PCs. -.. GENERATED FROM PYTHON SOURCE LINES 8-19 +.. GENERATED FROM PYTHON SOURCE LINES 8-18 -.. code-block:: Python +.. code-block:: default # Load packages and data: + import matplotlib.pyplot as plt import numpy as np import xarray as xr - import matplotlib.pyplot as plt - from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree + from matplotlib.gridspec import GridSpec - from xeofs.models import EOF - from xeofs.validation import EOFBootstrapper + import xeofs as xe @@ -46,9 +45,9 @@ for both EOFs and PCs. -.. GENERATED FROM PYTHON SOURCE LINES 20-23 +.. GENERATED FROM PYTHON SOURCE LINES 19-22 -.. code-block:: Python +.. code-block:: default t2m = xr.tutorial.load_dataset("air_temperature")["air"] @@ -60,16 +59,16 @@ for both EOFs and PCs. -.. GENERATED FROM PYTHON SOURCE LINES 24-25 +.. GENERATED FROM PYTHON SOURCE LINES 23-24 Perform EOF analysis -.. GENERATED FROM PYTHON SOURCE LINES 25-33 +.. GENERATED FROM PYTHON SOURCE LINES 24-32 -.. code-block:: Python +.. code-block:: default - model = EOF(n_modes=5, standardize=False) + model = xe.single.EOF(n_modes=5, standardize=False) model.fit(t2m, dim="time") expvar = model.explained_variance_ratio() components = model.components() @@ -83,21 +82,21 @@ Perform EOF analysis -.. GENERATED FROM PYTHON SOURCE LINES 34-38 +.. GENERATED FROM PYTHON SOURCE LINES 33-37 Perform bootstrapping of the model to identy the number of significant modes. We perform 50 bootstraps. Note - if computationallly feasible - you typically want to choose higher numbers of bootstraps e.g. 1000. -.. GENERATED FROM PYTHON SOURCE LINES 38-55 +.. GENERATED FROM PYTHON SOURCE LINES 37-54 -.. code-block:: Python +.. 
code-block:: default n_boot = 50 - bs = EOFBootstrapper(n_bootstraps=n_boot) + bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot) bs.fit(model) bs_expvar = bs.explained_variance() ci_expvar = bs_expvar.quantile([0.025, 0.975], "n") # 95% confidence intervals @@ -119,21 +118,21 @@ numbers of bootstraps e.g. 1000. .. code-block:: none - 0%| | 0/50 [00:00` + .. container:: sphx-glr-download sphx-glr-download-python :download:`Download Python source code: plot_bootstrap.py ` + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_bootstrap.ipynb ` + .. only:: html diff --git a/docs/auto_examples/3validation/plot_bootstrap_codeobj.pickle b/docs/auto_examples/3validation/plot_bootstrap_codeobj.pickle index 1285dc7e..106ffc34 100644 Binary files a/docs/auto_examples/3validation/plot_bootstrap_codeobj.pickle and b/docs/auto_examples/3validation/plot_bootstrap_codeobj.pickle differ diff --git a/docs/auto_examples/3validation/sg_execution_times.rst b/docs/auto_examples/3validation/sg_execution_times.rst index fafa61f1..428a3ef4 100644 --- a/docs/auto_examples/3validation/sg_execution_times.rst +++ b/docs/auto_examples/3validation/sg_execution_times.rst @@ -6,32 +6,8 @@ Computation times ================= -**00:16.004** total execution time for 1 file **from auto_examples/3validation**: +**00:46.918** total execution time for **auto_examples_3validation** files: -.. container:: - - .. raw:: html - - - - - - - - .. list-table:: - :header-rows: 1 - :class: table table-striped sg-datatable - - * - Example - - Time - - Mem (MB) - * - :ref:`sphx_glr_auto_examples_3validation_plot_bootstrap.py` (``plot_bootstrap.py``) - - 00:16.004 - - 0.0 ++-------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_3validation_plot_bootstrap.py` (``plot_bootstrap.py``) | 00:46.918 | 0.0 MB | ++-------------------------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/4validation/images/sphx_glr_plot_bootstrap_001.png b/docs/auto_examples/4validation/images/sphx_glr_plot_bootstrap_001.png new file mode 100644 index 00000000..598a4f1a Binary files /dev/null and b/docs/auto_examples/4validation/images/sphx_glr_plot_bootstrap_001.png differ diff --git a/docs/auto_examples/4validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png b/docs/auto_examples/4validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png new file mode 100644 index 00000000..35eae974 Binary files /dev/null and b/docs/auto_examples/4validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png differ diff --git a/docs/auto_examples/4validation/index.rst b/docs/auto_examples/4validation/index.rst new file mode 100644 index 00000000..39e779b7 --- /dev/null +++ b/docs/auto_examples/4validation/index.rst @@ -0,0 +1,40 @@ + + +.. _sphx_glr_auto_examples_4validation: + +4 | Validation +=============== + + +.. raw:: html + +
    + + +.. raw:: html + +
    + +.. only:: html + + .. image:: /auto_examples/4validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png + :alt: + + :ref:`sphx_glr_auto_examples_4validation_plot_bootstrap.py` + +.. raw:: html + +
    Significance testing of EOF analysis via bootstrap
    +
    + + +.. raw:: html + +
    + + +.. toctree:: + :hidden: + + /auto_examples/4validation/plot_bootstrap + diff --git a/docs/auto_examples/4validation/plot_bootstrap.ipynb b/docs/auto_examples/4validation/plot_bootstrap.ipynb new file mode 100644 index 00000000..8312c300 --- /dev/null +++ b/docs/auto_examples/4validation/plot_bootstrap.ipynb @@ -0,0 +1,181 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "\n", + "# Significance testing of EOF analysis via bootstrap\n", + "\n", + "Test the significance of individual modes and obtain confidence intervals\n", + "for both EOFs and PCs.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Load packages and data:\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import xarray as xr\n", + "from cartopy.crs import Orthographic, PlateCarree\n", + "from matplotlib.gridspec import GridSpec\n", + "\n", + "import xeofs as xe" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "t2m = xr.tutorial.load_dataset(\"air_temperature\")[\"air\"]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform EOF analysis\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "model = xe.single.EOF(n_modes=5, standardize=False)\n", + "model.fit(t2m, dim=\"time\")\n", + "expvar = model.explained_variance_ratio()\n", + "components = model.components()\n", + "scores = model.scores()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Perform bootstrapping of the model to identy the number of significant modes.\n", + "We perform 50 bootstraps.\n", + "Note - if computationallly feasible - you typically want to choose higher\n", + "numbers of bootstraps e.g. 1000.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "n_boot = 50\n", + "\n", + "bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot)\n", + "bs.fit(model)\n", + "bs_expvar = bs.explained_variance()\n", + "ci_expvar = bs_expvar.quantile([0.025, 0.975], \"n\") # 95% confidence intervals\n", + "\n", + "q025 = ci_expvar.sel(quantile=0.025)\n", + "q975 = ci_expvar.sel(quantile=0.975)\n", + "\n", + "is_significant = q025 - q975.shift({\"mode\": -1}) > 0\n", + "n_significant_modes = (\n", + " is_significant.where(is_significant is True).cumsum(skipna=False).max().fillna(0)\n", + ")\n", + "print(\"{:} modes are significant at alpha=0.05\".format(n_significant_modes.values))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The bootstrapping procedure identifies 3 significant modes. 
We can also\n", + "compute the 95 % confidence intervals of the EOFs/PCs and mask out\n", + "insignificant elements of the obtained EOFs.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "ci_components = bs.components().quantile([0.025, 0.975], \"n\")\n", + "ci_scores = bs.scores().quantile([0.025, 0.975], \"n\")\n", + "\n", + "is_sig_comps = np.sign(ci_components).prod(\"quantile\") > 0" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Summarize the results in a figure.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "lons, lats = np.meshgrid(is_sig_comps.lon.values, is_sig_comps.lat.values)\n", + "proj = Orthographic(central_latitude=30, central_longitude=-80)\n", + "kwargs = {\"cmap\": \"RdBu\", \"vmin\": -0.05, \"vmax\": 0.05, \"transform\": PlateCarree()}\n", + "\n", + "fig = plt.figure(figsize=(10, 16))\n", + "gs = GridSpec(5, 2)\n", + "ax1 = [fig.add_subplot(gs[i, 0], projection=proj) for i in range(5)]\n", + "ax2 = [fig.add_subplot(gs[i, 1]) for i in range(5)]\n", + "\n", + "for i, (a1, a2) in enumerate(zip(ax1, ax2)):\n", + " a1.coastlines(color=\".5\")\n", + " components.isel(mode=i).plot(ax=a1, **kwargs)\n", + " a1.scatter(\n", + " lons,\n", + " lats,\n", + " is_sig_comps.isel(mode=i).values * 0.5,\n", + " color=\"k\",\n", + " alpha=0.5,\n", + " transform=PlateCarree(),\n", + " )\n", + " ci_scores.isel(mode=i, quantile=0).plot(ax=a2, color=\".3\", lw=\".5\", label=\"2.5%\")\n", + " ci_scores.isel(mode=i, quantile=1).plot(ax=a2, color=\".3\", lw=\".5\", label=\"97.5%\")\n", + " scores.isel(mode=i).plot(ax=a2, lw=\".5\", alpha=0.5, label=\"PC\")\n", + " a2.legend(loc=2)\n", + "\n", + "plt.tight_layout()\n", + "plt.savefig(\"bootstrap.jpg\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/3validation/plot_bootstrap.py b/docs/auto_examples/4validation/plot_bootstrap.py similarity index 94% rename from examples/3validation/plot_bootstrap.py rename to docs/auto_examples/4validation/plot_bootstrap.py index 02cec5dc..1cce033c 100644 --- a/examples/3validation/plot_bootstrap.py +++ b/docs/auto_examples/4validation/plot_bootstrap.py @@ -7,14 +7,13 @@ """ # Load packages and data: +import matplotlib.pyplot as plt import numpy as np import xarray as xr -import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec from cartopy.crs import Orthographic, PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF -from xeofs.validation import EOFBootstrapper +import xeofs as xe # %% @@ -23,7 +22,7 @@ # %% # Perform EOF analysis -model = EOF(n_modes=5, standardize=False) +model = xe.single.EOF(n_modes=5, standardize=False) model.fit(t2m, dim="time") expvar = model.explained_variance_ratio() components = model.components() @@ -38,7 +37,7 @@ n_boot = 50 -bs = EOFBootstrapper(n_bootstraps=n_boot) +bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot) bs.fit(model) bs_expvar = bs.explained_variance() ci_expvar = bs_expvar.quantile([0.025, 0.975], "n") # 95% confidence intervals diff --git 
a/docs/auto_examples/4validation/plot_bootstrap.py.md5 b/docs/auto_examples/4validation/plot_bootstrap.py.md5 new file mode 100644 index 00000000..c683ebe5 --- /dev/null +++ b/docs/auto_examples/4validation/plot_bootstrap.py.md5 @@ -0,0 +1 @@ +dcdf2653c5cb47f1d8828948c6fdda17 \ No newline at end of file diff --git a/docs/auto_examples/4validation/plot_bootstrap.rst b/docs/auto_examples/4validation/plot_bootstrap.rst new file mode 100644 index 00000000..6c7208f1 --- /dev/null +++ b/docs/auto_examples/4validation/plot_bootstrap.rst @@ -0,0 +1,228 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "auto_examples/4validation/plot_bootstrap.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_auto_examples_4validation_plot_bootstrap.py: + + +Significance testing of EOF analysis via bootstrap +=================================================== + +Test the significance of individual modes and obtain confidence intervals +for both EOFs and PCs. + +.. GENERATED FROM PYTHON SOURCE LINES 8-18 + +.. code-block:: default + + + # Load packages and data: + import matplotlib.pyplot as plt + import numpy as np + import xarray as xr + from cartopy.crs import Orthographic, PlateCarree + from matplotlib.gridspec import GridSpec + + import xeofs as xe + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 19-22 + +.. code-block:: default + + + t2m = xr.tutorial.load_dataset("air_temperature")["air"] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 23-24 + +Perform EOF analysis + +.. GENERATED FROM PYTHON SOURCE LINES 24-32 + +.. code-block:: default + + + model = xe.single.EOF(n_modes=5, standardize=False) + model.fit(t2m, dim="time") + expvar = model.explained_variance_ratio() + components = model.components() + scores = model.scores() + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 33-37 + +Perform bootstrapping of the model to identy the number of significant modes. +We perform 50 bootstraps. +Note - if computationallly feasible - you typically want to choose higher +numbers of bootstraps e.g. 1000. + +.. GENERATED FROM PYTHON SOURCE LINES 37-54 + +.. code-block:: default + + + n_boot = 50 + + bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot) + bs.fit(model) + bs_expvar = bs.explained_variance() + ci_expvar = bs_expvar.quantile([0.025, 0.975], "n") # 95% confidence intervals + + q025 = ci_expvar.sel(quantile=0.025) + q975 = ci_expvar.sel(quantile=0.975) + + is_significant = q025 - q975.shift({"mode": -1}) > 0 + n_significant_modes = ( + is_significant.where(is_significant is True).cumsum(skipna=False).max().fillna(0) + ) + print("{:} modes are significant at alpha=0.05".format(n_significant_modes.values)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 0%| | 0/50 [00:00 0 + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 66-67 + +Summarize the results in a figure. + +.. GENERATED FROM PYTHON SOURCE LINES 67-96 + +.. 
code-block:: default + + + + lons, lats = np.meshgrid(is_sig_comps.lon.values, is_sig_comps.lat.values) + proj = Orthographic(central_latitude=30, central_longitude=-80) + kwargs = {"cmap": "RdBu", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} + + fig = plt.figure(figsize=(10, 16)) + gs = GridSpec(5, 2) + ax1 = [fig.add_subplot(gs[i, 0], projection=proj) for i in range(5)] + ax2 = [fig.add_subplot(gs[i, 1]) for i in range(5)] + + for i, (a1, a2) in enumerate(zip(ax1, ax2)): + a1.coastlines(color=".5") + components.isel(mode=i).plot(ax=a1, **kwargs) + a1.scatter( + lons, + lats, + is_sig_comps.isel(mode=i).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + ci_scores.isel(mode=i, quantile=0).plot(ax=a2, color=".3", lw=".5", label="2.5%") + ci_scores.isel(mode=i, quantile=1).plot(ax=a2, color=".3", lw=".5", label="97.5%") + scores.isel(mode=i).plot(ax=a2, lw=".5", alpha=0.5, label="PC") + a2.legend(loc=2) + + plt.tight_layout() + plt.savefig("bootstrap.jpg") + + + +.. image-sg:: /auto_examples/4validation/images/sphx_glr_plot_bootstrap_001.png + :alt: mode = 1, mode = 2, mode = 3, mode = 4, mode = 5, mode = 1, mode = 2, mode = 3, mode = 4, mode = 5 + :srcset: /auto_examples/4validation/images/sphx_glr_plot_bootstrap_001.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 32.065 seconds) + + +.. _sphx_glr_download_auto_examples_4validation_plot_bootstrap.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + + + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_bootstrap.py ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_bootstrap.ipynb ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/auto_examples/4validation/plot_bootstrap_codeobj.pickle b/docs/auto_examples/4validation/plot_bootstrap_codeobj.pickle new file mode 100644 index 00000000..a89e7116 Binary files /dev/null and b/docs/auto_examples/4validation/plot_bootstrap_codeobj.pickle differ diff --git a/docs/auto_examples/4validation/sg_execution_times.rst b/docs/auto_examples/4validation/sg_execution_times.rst new file mode 100644 index 00000000..ef197d4f --- /dev/null +++ b/docs/auto_examples/4validation/sg_execution_times.rst @@ -0,0 +1,13 @@ + +:orphan: + +.. 
_sphx_glr_auto_examples_4validation_sg_execution_times: + + +Computation times +================= +**00:32.065** total execution time for **auto_examples_4validation** files: + ++-------------------------------------------------------------------------------------+-----------+--------+ +| :ref:`sphx_glr_auto_examples_4validation_plot_bootstrap.py` (``plot_bootstrap.py``) | 00:32.065 | 0.0 MB | ++-------------------------------------------------------------------------------------+-----------+--------+ diff --git a/docs/auto_examples/auto_examples_jupyter.zip b/docs/auto_examples/auto_examples_jupyter.zip index 27267bd3..6ee59c6a 100644 Binary files a/docs/auto_examples/auto_examples_jupyter.zip and b/docs/auto_examples/auto_examples_jupyter.zip differ diff --git a/docs/auto_examples/auto_examples_python.zip b/docs/auto_examples/auto_examples_python.zip index 75c5ad7f..a95db945 100644 Binary files a/docs/auto_examples/auto_examples_python.zip and b/docs/auto_examples/auto_examples_python.zip differ diff --git a/docs/auto_examples/index.rst b/docs/auto_examples/index.rst index 1995abe5..1b3b606f 100644 --- a/docs/auto_examples/index.rst +++ b/docs/auto_examples/index.rst @@ -217,7 +217,7 @@ Here you can find some examples of how to use the library. -2 | Multi-Set Analysis +2 | Cross-Set Analysis ======================== @@ -229,52 +229,66 @@ Here you can find some examples of how to use the library. .. raw:: html -
    +
    .. only:: html - .. image:: /auto_examples/2multi/images/thumb/sphx_glr_plot_cca_thumb.png + .. image:: /auto_examples/2cross/images/thumb/sphx_glr_plot_mca_thumb.png :alt: - :ref:`sphx_glr_auto_examples_2multi_plot_cca.py` + :ref:`sphx_glr_auto_examples_2cross_plot_mca.py` .. raw:: html -
    Canonical Correlation Analysis
    +
    Maximum Covariance Analysis
    .. raw:: html -
    +
    .. only:: html - .. image:: /auto_examples/2multi/images/thumb/sphx_glr_plot_mca_thumb.png + .. image:: /auto_examples/2cross/images/thumb/sphx_glr_plot_rotated_mca_thumb.png :alt: - :ref:`sphx_glr_auto_examples_2multi_plot_mca.py` + :ref:`sphx_glr_auto_examples_2cross_plot_rotated_mca.py` .. raw:: html -
    Maximum Covariance Analysis
    +
    Rotated Maximum Covariance Analysis
    .. raw:: html -
    +
    + +3 | Multi-Set Analysis +======================== + + + +.. raw:: html + +
    + + +.. raw:: html + +
    .. only:: html - .. image:: /auto_examples/2multi/images/thumb/sphx_glr_plot_rotated_mca_thumb.png + .. image:: /auto_examples/3multi/images/thumb/sphx_glr_plot_cca_thumb.png :alt: - :ref:`sphx_glr_auto_examples_2multi_plot_rotated_mca.py` + :ref:`sphx_glr_auto_examples_3multi_plot_cca.py` .. raw:: html -
    Rotated Maximum Covariance Analysis
    +
    Canonical Correlation Analysis
    @@ -282,7 +296,7 @@ Here you can find some examples of how to use the library.
    -3 | Validation +4 | Validation =============== @@ -297,10 +311,10 @@ Here you can find some examples of how to use the library. .. only:: html - .. image:: /auto_examples/3validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png + .. image:: /auto_examples/4validation/images/thumb/sphx_glr_plot_bootstrap_thumb.png :alt: - :ref:`sphx_glr_auto_examples_3validation_plot_bootstrap.py` + :ref:`sphx_glr_auto_examples_4validation_plot_bootstrap.py` .. raw:: html @@ -319,8 +333,9 @@ Here you can find some examples of how to use the library. /auto_examples/1single/index.rst - /auto_examples/2multi/index.rst - /auto_examples/3validation/index.rst + /auto_examples/2cross/index.rst + /auto_examples/3multi/index.rst + /auto_examples/4validation/index.rst .. only:: html diff --git a/docs/conf.py b/docs/conf.py index 69447cbe..24f9ae73 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # ones. extensions = [ "sphinx.ext.napoleon", - "sphinx.ext.autodoc", + # "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx_gallery.gen_gallery", "sphinx_design", @@ -60,6 +60,11 @@ ] autosummary_generate = True # Turn on sphinx.ext.autosummary + +add_module_names = False +toc_object_entries_show_parents = "hide" +show_title_parents = False + # Sphinx-gallery stuff sphinx_gallery_conf = { "examples_dirs": "../examples", # path to your example scripts @@ -225,3 +230,40 @@ # A list of files that should not be packed into the epub file. epub_exclude_files = ["search.html"] + + +# Remove parents from titles in all .rst files +def shorten_titles(app): + # Recursively crawl through source directory and shorten titles in .rst files + def crawl_source_shorten_titles(path): + # List files in directory + for file_name in os.listdir(path): + # Build path to file + file_path = os.path.join(path, file_name) + + # Recursively crawl to next directory level + if os.path.isdir(file_path): + crawl_source_shorten_titles(file_path) + + # Modify .rst source file title + else: + _, extension = os.path.splitext(file_path) + if extension == ".rst": + # Read file, modify title, write back to file + with open(file_path, "r") as file: + lines = file.readlines() + lines[0] = lines[0].split(".")[-1] + lines[1] = ("=" * (len(lines[0]) - 1)) + "\n" + print(f"Shortened title in {file_path}") + with open(file_path, "w") as file: + file.writelines(lines) + + path = os.path.join(os.path.abspath(".."), "docs/api_reference/_autosummary") + crawl_source_shorten_titles(path) + + +# Connect to sphinx events (see https://www.sphinx-doc.org/en/master/extdev/event_callbacks.html#events) +# to shorten titles in all .rst files after the sphinx autosummary extension has run +# (otherwise the titles would be overwritten by the autosummary extension) +def setup(app): + app.connect("builder-inited", shorten_titles) diff --git a/docs/model_pca.ipynb b/docs/model_pca.ipynb index 1950b0d8..10a7854c 100644 --- a/docs/model_pca.ipynb +++ b/docs/model_pca.ipynb @@ -32,11 +32,11 @@ "from matplotlib.gridspec import GridSpec\n", "import cartopy.feature as cfeature\n", "from cartopy.crs import LambertAzimuthalEqualArea, PlateCarree\n", - "from xeofs.models import EOF\n", + "import xeofs as xe\n", "\n", "t2m = xr.tutorial.load_dataset(\"air_temperature\")[\"air\"]\n", "\n", - "model = EOF(n_modes=20, standardize=True, use_coslat=True)\n", + "model = xe.single.EOF(n_modes=20, standardize=True, use_coslat=True)\n", "model.fit(t2m, dim=\"time\")\n", "\n", "expvar = model.explained_variance_ratio()\n", diff --git a/docs/model_pca.md b/docs/model_pca.md index 
aa3d44cf..cb8e6175 100644 --- a/docs/model_pca.md +++ b/docs/model_pca.md @@ -20,7 +20,7 @@ import cartopy.feature as cfeature import ipywidgets as widgets from IPython.display import display from cartopy.crs import LambertAzimuthalEqualArea, PlateCarree -from xeofs.models import EOF +from xeofs.single import EOF t2m = xr.tutorial.load_dataset('air_temperature')['air'] diff --git a/docs/perf/xeofs_timings.py b/docs/perf/xeofs_timings.py index 9898a233..c06f11c8 100644 --- a/docs/perf/xeofs_timings.py +++ b/docs/perf/xeofs_timings.py @@ -1,11 +1,13 @@ # %% -import numpy as np +import timeit + +import dask import eofs -import xeofs as xe +import numpy as np import xarray as xr -import dask from tqdm import tqdm -import timeit + +import xeofs as xe # %% @@ -14,7 +16,7 @@ def fit_eofs(X, n_modes=2): def fit_xeofs(X, n_modes=2): - model = xe.models.EOF(n_modes=n_modes, random_state=5) + model = xe.single.EOF(n_modes=n_modes, random_state=5) model.fit(X, dim="time") diff --git a/docs/sphinx_custom_filters.py b/docs/sphinx_custom_filters.py new file mode 100644 index 00000000..76c96012 --- /dev/null +++ b/docs/sphinx_custom_filters.py @@ -0,0 +1,7 @@ +def basename(fullname): + """Extract the basename from a full class path.""" + return fullname.split(".")[-1] + + +def setup(app): + app.builder.templates.environment.filters["basename"] = basename diff --git a/docs/user_guide/core_functionalities/dask_support.rst b/docs/user_guide/core_functionalities/dask_support.rst index 6d7e3b73..f3151cb7 100644 --- a/docs/user_guide/core_functionalities/dask_support.rst +++ b/docs/user_guide/core_functionalities/dask_support.rst @@ -37,7 +37,7 @@ then be evaluated later using ``.compute()``. import numpy as np import xarray as xr - from xeofs.models import EOF, EOFRotator + from xeofs.single import EOF, EOFRotator data = xr.DataArray( da.random.random((5000, 720, 360), chunks=(100, 100, 100)), diff --git a/docs/user_guide/core_functionalities/model_serialization.rst b/docs/user_guide/core_functionalities/model_serialization.rst index 3dd425aa..2ab012de 100644 --- a/docs/user_guide/core_functionalities/model_serialization.rst +++ b/docs/user_guide/core_functionalities/model_serialization.rst @@ -7,7 +7,7 @@ fitted models to a portable format. .. code-block:: python - from xeofs.models import EOF + from xeofs.single import EOF model = EOF() model.fit(data, dim="time") diff --git a/docs/user_guide/model_implementation.rst b/docs/user_guide/model_implementation.rst index d9c0b7f6..b38eda0c 100644 --- a/docs/user_guide/model_implementation.rst +++ b/docs/user_guide/model_implementation.rst @@ -4,7 +4,8 @@ Implement Your Own Model The xeofs package has been designed with modularity in mind, allowing you to seamlessly incorporate new methods. For instance, if you'd like to introduce a new dimensionality reduction technique named ``MyModel``, -you can achieve this by inheriting the ``_BaseModel`` class and implementing its ``_fit_algorithm()`` method. +you can achieve this by inheriting of either the ``BaseModelSingleSet`` or ``BaseModelCrossSet`` class and +implementing its ``_fit_algorithm()`` method. Here's a detailed walkthrough on how to incorporate a new model: @@ -12,17 +13,17 @@ Here's a detailed walkthrough on how to incorporate a new model: 1. Inherit the BaseModel -------------------------------------------- -Your new model should inherit from the `_BaseModel` class. This abstract base class enables +Your new model should inherit from the `BaseModel` class. 
This abstract base class enables the transformation of any ND xarray object into a 2D ``xarray.DataArray`` with dimensions (sample, feature) and back. Additionally, it grants access to handy parameters like ``n_modes``, ``standardize``, and ``use_coslat``. .. code-block:: python - from xeofs.models._base_model import _BaseModel - from xeofs.models.decomposer import Decomposer + from xeofs.single.base_model_single_set import BaseModelSingleSet + from xeofs.linalg.decomposer import Decomposer - class MyModel(_BaseModel): + class MyModel(BaseModelSingleSet): def __init__(self, **kwargs): super().__init__(**kwargs) diff --git a/docs/user_guide/quickstart.ipynb b/docs/user_guide/quickstart.ipynb index 440bfd1d..347c63a3 100644 --- a/docs/user_guide/quickstart.ipynb +++ b/docs/user_guide/quickstart.ipynb @@ -70,7 +70,7 @@ "metadata": {}, "outputs": [], "source": [ - "model = xe.models.EOF(use_coslat=True)" + "model = xe.single.EOF(use_coslat=True)" ] }, { diff --git a/examples/1single/eof-tmode.jpg b/examples/1single/eof-tmode.jpg index 3e005db8..0a412a30 100644 Binary files a/examples/1single/eof-tmode.jpg and b/examples/1single/eof-tmode.jpg differ diff --git a/examples/1single/mreof-analysis.jpg b/examples/1single/mreof-analysis.jpg index a75b5fbb..712e17dd 100644 Binary files a/examples/1single/mreof-analysis.jpg and b/examples/1single/mreof-analysis.jpg differ diff --git a/examples/1single/multivariate-eof-analysis.jpg b/examples/1single/multivariate-eof-analysis.jpg index 18a30b92..0dabaf80 100644 Binary files a/examples/1single/multivariate-eof-analysis.jpg and b/examples/1single/multivariate-eof-analysis.jpg differ diff --git a/examples/1single/plot_complex_eof.py b/examples/1single/plot_complex_eof.py index 531dbc0a..1c07cb13 100644 --- a/examples/1single/plot_complex_eof.py +++ b/examples/1single/plot_complex_eof.py @@ -40,7 +40,7 @@ # each grid cell by the square root of the cosine of the latitude # (``use_coslat=True``). -model = xe.models.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) +model = xe.single.ComplexEOF(n_modes=1, use_coslat=True, random_state=7) model.fit(Z, dim="month") # %% diff --git a/examples/1single/plot_eeof.py b/examples/1single/plot_eeof.py index ab804f5b..4605b9fd 100644 --- a/examples/1single/plot_eeof.py +++ b/examples/1single/plot_eeof.py @@ -13,9 +13,10 @@ Let's begin by setting up the required packages and fetching the data: """ +import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe -import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -51,7 +52,7 @@ # With these parameters set, we proceed to instantiate the ``ExtendedEOF`` # model and fit our data. -model = xe.models.ExtendedEOF( +model = xe.single.ExtendedEOF( n_modes=10, tau=4, embedding=40, n_pca_modes=50, use_coslat=True ) model.fit(t2m, dim="time") diff --git a/examples/1single/plot_eeof_trend.py b/examples/1single/plot_eeof_trend.py index d309c138..86fb6526 100644 --- a/examples/1single/plot_eeof_trend.py +++ b/examples/1single/plot_eeof_trend.py @@ -8,9 +8,10 @@ Let's begin by setting up the required packages and fetching the data. """ +import matplotlib.pyplot as plt import xarray as xr + import xeofs as xe -import matplotlib.pyplot as plt xr.set_options(display_expand_data=False) @@ -26,7 +27,7 @@ # %% # We start by performing a standard EOF analysis on the dataset. 
-eof = xe.models.EOF(n_modes=10) +eof = xe.single.EOF(n_modes=10) eof.fit(sst, dim="time") scores = eof.scores() components = eof.components() @@ -49,7 +50,7 @@ # to capture long-term trends. To speed up computation, we apply the EEOF analysis # to the extended (lag) covariance matrix derived from the first 50 PCs. -eeof = xe.models.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) +eeof = xe.single.ExtendedEOF(n_modes=5, tau=1, embedding=120, n_pca_modes=50) eeof.fit(sst, dim="time") components_ext = eeof.components() scores_ext = eeof.scores() @@ -65,13 +66,13 @@ # We can use this to the first mode to remove this nonlinear trend from our original dataset. sst_trends = eeof.inverse_transform(scores_ext.sel(mode=1)) -sst_detrended = sst - sst_trends.drop_vars("mode") +sst_detrended = sst - sst_trends # %% # Reapplying the standard EOF analysis on our now detrended dataset: -eof_model_detrended = xe.models.EOF(n_modes=5) +eof_model_detrended = xe.single.EOF(n_modes=5) eof_model_detrended.fit(sst_detrended, dim="time") scores_detrended = eof_model_detrended.scores() components_detrended = eof_model_detrended.components() diff --git a/examples/1single/plot_eof-smode.py b/examples/1single/plot_eof-smode.py index a81d00c0..479367d7 100644 --- a/examples/1single/plot_eof-smode.py +++ b/examples/1single/plot_eof-smode.py @@ -18,7 +18,7 @@ from cartopy.crs import EqualEarth, PlateCarree from matplotlib.gridspec import GridSpec -from xeofs.models import SparsePCA +import xeofs as xe # %% # We use sea surface temperature data from 1990 to 2017, consistent with the original paper. @@ -29,7 +29,7 @@ # %% # We perform sparse PCA using the `alpha` and `beta` parameters, which define the sparsity imposed by the elastic net (refer to Table 1 in the paper). In our analysis, we set `alpha` to 1e-5, as specified by the authors. Although the authors do not specify a value for `beta`, it appears that the results are not highly sensitive to this parameter. Therefore, we use the default `beta` value of 1e-4. 
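# As a quick sensitivity sketch (this assumes ``beta`` is exposed as a
# constructor argument alongside ``alpha``, and the value 1e-3 is chosen
# arbitrarily rather than taken from the paper), the decomposition could be
# refit with a different ``beta`` and its explained variance ratio compared
# against the default fit below::
#
#     model_alt = xe.single.SparsePCA(n_modes=4, alpha=1e-5, beta=1e-3)
#     model_alt.fit(sst, dim="time")
#     print(model_alt.explained_variance_ratio().values)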
-model = SparsePCA(n_modes=4, alpha=1e-5) +model = xe.single.SparsePCA(n_modes=4, alpha=1e-5) model.fit(sst, dim="time") expvar = model.explained_variance() expvar_ratio = model.explained_variance_ratio() diff --git a/examples/1single/plot_eof-tmode.py b/examples/1single/plot_eof-tmode.py index 6a618e3e..f85b97e9 100644 --- a/examples/1single/plot_eof-tmode.py +++ b/examples/1single/plot_eof-tmode.py @@ -7,19 +7,19 @@ Load packages and data: """ -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import EqualEarth, PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF +import xeofs as xe sst = xr.tutorial.open_dataset("ersstv5")["sst"] # %% # Perform the actual analysis -model = EOF(n_modes=5) +model = xe.single.EOF(n_modes=5) model.fit(sst, dim=("lat", "lon")) expvar = model.explained_variance_ratio() components = model.components() diff --git a/examples/1single/plot_gwpca.py b/examples/1single/plot_gwpca.py index f1542228..2a9462cf 100644 --- a/examples/1single/plot_gwpca.py +++ b/examples/1single/plot_gwpca.py @@ -32,18 +32,18 @@ """ # For the analysis -import numpy as np -import xarray as xr -import xeofs as xe - # For visualization import matplotlib.pyplot as plt -import seaborn as sns +import numpy as np # For accessing R packages import rpy2.robjects as ro -from rpy2.robjects.packages import importr +import seaborn as sns +import xarray as xr from rpy2.robjects import pandas2ri +from rpy2.robjects.packages import importr + +import xeofs as xe # %% # Next, we'll install the R package `mvoutlier `_ @@ -96,7 +96,7 @@ # kilometers. Lastly, we'll standardize the input to ensure consistent scales # for the chemical elements. -gwpca = xe.models.GWPCA( +gwpca = xe.single.GWPCA( n_modes=5, standardize=True, metric="euclidean", diff --git a/examples/1single/plot_hilbert_eof.py b/examples/1single/plot_hilbert_eof.py index 2d7e4438..a2fd42ba 100644 --- a/examples/1single/plot_hilbert_eof.py +++ b/examples/1single/plot_hilbert_eof.py @@ -45,7 +45,7 @@ # options to mitigate potential edge effects, we'll begin with no padding. kwargs = dict(n_modes=4, use_coslat=True, random_state=7) -model = xe.models.HilbertEOF(padding="none", **kwargs) +model = xe.single.HilbertEOF(padding="none", **kwargs) # %% # Now, we fit the model to the data and extract the explained variance. @@ -95,7 +95,7 @@ # controls the decay rate of the exponential function measured in multiples of # the time series length. 
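# The fit below uses ``decay_factor=0.01``; as a sketch (the alternative value
# 0.2 is arbitrary), the same comparison can be repeated with a different decay
# factor to gauge how sensitive the leading modes are to the padding::
#
#     model_alt = xe.single.HilbertEOF(padding="exp", decay_factor=0.2, **kwargs)
#     model_alt.fit(sst, dim="time")
#     scores_alt = model_alt.scores().sel(mode=slice(1, 4))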
Let's see how the decay parameter impacts the results: -model_ext = xe.models.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) +model_ext = xe.single.HilbertEOF(padding="exp", decay_factor=0.01, **kwargs) model_ext.fit(sst, dim="time") scores_ext = model_ext.scores().sel(mode=slice(1, 4)) diff --git a/examples/1single/plot_mreof.py b/examples/1single/plot_mreof.py index 5bbc6a0f..e8c14b50 100644 --- a/examples/1single/plot_mreof.py +++ b/examples/1single/plot_mreof.py @@ -6,12 +6,12 @@ """ # Load packages and data: -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF, EOFRotator +import xeofs as xe # %% # Create four different dataarrayss @@ -25,9 +25,9 @@ # Perform the actual analysis multivariate_data = [subset1, subset2, subset3, subset4] -mpca = EOF(n_modes=100, standardize=False, use_coslat=True) +mpca = xe.single.EOF(n_modes=100, standardize=False, use_coslat=True) mpca.fit(multivariate_data, dim="time") -rotator = EOFRotator(n_modes=20) +rotator = xe.single.EOFRotator(n_modes=20) rotator.fit(mpca) rcomponents = rotator.components() rscores = rotator.scores() diff --git a/examples/1single/plot_multivariate-eof.py b/examples/1single/plot_multivariate-eof.py index 31c6f623..84d0a36b 100644 --- a/examples/1single/plot_multivariate-eof.py +++ b/examples/1single/plot_multivariate-eof.py @@ -6,12 +6,12 @@ """ # Load packages and data: -import xarray as xr import matplotlib.pyplot as plt -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF +import xeofs as xe # Create four different dataarrayss sst = xr.tutorial.open_dataset("ersstv5")["sst"] @@ -24,7 +24,7 @@ # %% # Perform the actual analysis -pca = EOF(n_modes=10, standardize=False, use_coslat=True) +pca = xe.single.EOF(n_modes=10, standardize=False, use_coslat=True) pca.fit(multivariate_data, dim="time") components = pca.components() scores = pca.scores() diff --git a/examples/1single/plot_rotated_eof.py b/examples/1single/plot_rotated_eof.py index 0f8b296b..33f6ab72 100644 --- a/examples/1single/plot_rotated_eof.py +++ b/examples/1single/plot_rotated_eof.py @@ -22,14 +22,13 @@ We'll start by loading the necessary packages and data: """ -import xarray as xr import matplotlib.pyplot as plt import seaborn as sns +import xarray as xr +from cartopy.crs import PlateCarree, Robinson from matplotlib.gridspec import GridSpec -from cartopy.crs import Robinson, PlateCarree - -from xeofs.models import EOF, EOFRotator +import xeofs as xe sns.set_context("paper") @@ -42,17 +41,17 @@ components = [] scores = [] # (1) Standard EOF without regularization -model = EOF(n_modes=100, standardize=True, use_coslat=True) +model = xe.single.EOF(n_modes=100, standardize=True, use_coslat=True) model.fit(sst, dim="time") components.append(model.components()) scores.append(model.scores()) # (2) Varimax-rotated EOF analysis -rot_var = EOFRotator(n_modes=50, power=1) +rot_var = xe.single.EOFRotator(n_modes=50, power=1) rot_var.fit(model) components.append(rot_var.components()) scores.append(rot_var.scores()) # (3) Promax-rotated EOF analysis -rot_pro = EOFRotator(n_modes=50, power=4) +rot_pro = xe.single.EOFRotator(n_modes=50, power=4) rot_pro.fit(model) components.append(rot_pro.components()) scores.append(rot_pro.scores()) diff --git a/examples/1single/plot_weighted-eof.py 
b/examples/1single/plot_weighted-eof.py index 26a88473..6d67964b 100644 --- a/examples/1single/plot_weighted-eof.py +++ b/examples/1single/plot_weighted-eof.py @@ -11,13 +11,13 @@ Load packages and data: """ -import xarray as xr import matplotlib.pyplot as plt import seaborn as sns -from matplotlib.gridspec import GridSpec +import xarray as xr from cartopy.crs import Orthographic, PlateCarree +from matplotlib.gridspec import GridSpec -from xeofs.models import EOF +import xeofs as xe sns.set_context("paper") @@ -29,22 +29,22 @@ components = [] scores = [] # (1) Based on covariance matrix -model_cov = EOF(n_modes=5, standardize=False, use_coslat=False) +model_cov = xe.single.EOF(n_modes=5, standardize=False, use_coslat=False) model_cov.fit(t2m, "time") components.append(model_cov.components()) scores.append(model_cov.scores()) # (2) Based on coslat weighted covariance matrix -model_lat = EOF(n_modes=5, standardize=False, use_coslat=True) +model_lat = xe.single.EOF(n_modes=5, standardize=False, use_coslat=True) model_lat.fit(t2m, "time") components.append(model_lat.components()) scores.append(model_lat.scores()) # (3) Based on correlation matrix -model_cor = EOF(n_modes=5, standardize=True, use_coslat=False) +model_cor = xe.single.EOF(n_modes=5, standardize=True, use_coslat=False) model_cor.fit(t2m, "time") components.append(model_cor.components()) scores.append(model_cor.scores()) # (4) Based on coslat weighted correlation matrix -model_cor_lat = EOF(n_modes=5, standardize=True, use_coslat=True) +model_cor_lat = xe.single.EOF(n_modes=5, standardize=True, use_coslat=True) model_cor_lat.fit(t2m, "time") components.append(model_cor_lat.components()) scores.append(model_cor_lat.scores()) diff --git a/examples/1single/rotated_eof.jpg b/examples/1single/rotated_eof.jpg index 28b6966b..606374db 100644 Binary files a/examples/1single/rotated_eof.jpg and b/examples/1single/rotated_eof.jpg differ diff --git a/examples/1single/sparse_pca.jpg b/examples/1single/sparse_pca.jpg index 33df23cc..fcf8dada 100644 Binary files a/examples/1single/sparse_pca.jpg and b/examples/1single/sparse_pca.jpg differ diff --git a/examples/1single/weighted_eof.jpg b/examples/1single/weighted_eof.jpg index 07008c8f..8d642634 100644 Binary files a/examples/1single/weighted_eof.jpg and b/examples/1single/weighted_eof.jpg differ diff --git a/examples/2multi/README.rst b/examples/2cross/README.rst similarity index 52% rename from examples/2multi/README.rst rename to examples/2cross/README.rst index bb9bcfc7..9fd4a086 100644 --- a/examples/2multi/README.rst +++ b/examples/2cross/README.rst @@ -1,2 +1,2 @@ -2 | Multi-Set Analysis +2 | Cross-Set Analysis ======================== diff --git a/examples/2cross/mca.jpg b/examples/2cross/mca.jpg new file mode 100644 index 00000000..6a3fcd53 Binary files /dev/null and b/examples/2cross/mca.jpg differ diff --git a/examples/2cross/plot_mca.py b/examples/2cross/plot_mca.py new file mode 100644 index 00000000..a93d5546 --- /dev/null +++ b/examples/2cross/plot_mca.py @@ -0,0 +1,112 @@ +""" +Maximum Covariance Analysis +=========================== + +Maximum Covariance Analysis (MCA) between two data sets. 
+""" + +# Load packages and data: +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr +from cartopy.crs import Orthographic, PlateCarree +from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec + +import xeofs as xe + +# %% +# Create 2 different DataArrays + +t2m = xr.tutorial.load_dataset("air_temperature")["air"] +da1 = t2m.isel(lon=slice(0, 26)) +da2 = t2m.isel(lon=slice(27, None)) + +# %% +# Perform MCA + +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) +mca.fit(da1, da2, dim="time") + +# %% +# Get singular vectors, projections (PCs), homogeneous and heterogeneous +# patterns: + +singular_vectors = mca.components() +scores = mca.scores() +hom_pats, pvals_hom = mca.homogeneous_patterns() +het_pats, pvals_het = mca.heterogeneous_patterns() + +# %% +# When two fields are expected, the output of the above methods is a list of +# length 2, with the first and second entry containing the relevant object for +# ``X`` and ``Y``. For example, the p-values obtained from the two-sided t-test +# for the homogeneous patterns of ``X`` are: + +pvals_hom[0] + +# %% +# Create a mask to identifiy where p-values are below 0.05 + +hom_mask = [values < 0.05 for values in pvals_hom] +het_mask = [values < 0.05 for values in pvals_het] + + +# %% +# Plot some relevant quantities of mode 2. + +lonlats = [ + np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values), + np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values), +] +proj = [ + Orthographic(central_latitude=30, central_longitude=-120), + Orthographic(central_latitude=30, central_longitude=-60), +] +kwargs1 = {"cmap": "BrBG", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} +kwargs2 = {"cmap": "RdBu", "vmin": -1, "vmax": 1, "transform": PlateCarree()} + +mode = 2 + +fig = plt.figure(figsize=(7, 14)) +gs = GridSpec(5, 2) +ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)] +ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)] +ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)] +ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)] + +for i, a in enumerate(ax1): + singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1) + +for i, a in enumerate(ax2): + hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + hom_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) +for i, a in enumerate(ax3): + het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + het_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + +for i, a in enumerate(ax4): + scores[i].sel(mode=mode).plot(ax=a) + a.set_xlabel("") + + +for a in np.ravel([ax1, ax2, ax3]): + a.coastlines(color=".5") + a.add_feature(LAND) + +plt.tight_layout() +plt.savefig("mca.jpg") diff --git a/examples/2cross/plot_rotated_mca.py b/examples/2cross/plot_rotated_mca.py new file mode 100644 index 00000000..2ae0c501 --- /dev/null +++ b/examples/2cross/plot_rotated_mca.py @@ -0,0 +1,118 @@ +""" +Rotated Maximum Covariance Analysis +=================================== + +Rotated Maximum Covariance Analysis (MCA) between two data sets. 
+""" + +# Load packages and data: +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr +from cartopy.crs import Orthographic, PlateCarree +from cartopy.feature import LAND +from matplotlib.gridspec import GridSpec + +import xeofs as xe + +# %% +# Create 2 different DataArrays + +t2m = xr.tutorial.load_dataset("air_temperature")["air"] +da1 = t2m.isel(lon=slice(0, 26)) +da2 = t2m.isel(lon=slice(27, None)) + +# %% +# Perform MCA + +mca = xe.cross.MCA(n_modes=20, standardize=False, use_coslat=True) +mca.fit(da1, da2, dim="time") + +# %% +# Apply Varimax-rotation to MCA solution + +rot = xe.cross.MCARotator(n_modes=10) +rot.fit(mca) + +# %% +# Get rotated singular vectors, projections (PCs), homogeneous and heterogeneous +# patterns: + +singular_vectors = rot.components() +scores = rot.scores() +hom_pats, pvals_hom = rot.homogeneous_patterns() +het_pats, pvals_het = rot.heterogeneous_patterns() + +# %% +# When two fields are expected, the output of the above methods is a list of +# length 2, with the first and second entry containing the relevant object for +# ``X`` and ``Y``. For example, the p-values obtained from the two-sided t-test +# for the homogeneous patterns of ``X`` are: + +pvals_hom[0] + +# %% +# Create a mask to identifiy where p-values are below 0.05 + +hom_mask = [values < 0.05 for values in pvals_hom] +het_mask = [values < 0.05 for values in pvals_het] + + +# %% +# Plot some relevant quantities of mode 2. + +lonlats = [ + np.meshgrid(pvals_hom[0].lon.values, pvals_hom[0].lat.values), + np.meshgrid(pvals_hom[1].lon.values, pvals_hom[1].lat.values), +] +proj = [ + Orthographic(central_latitude=30, central_longitude=-120), + Orthographic(central_latitude=30, central_longitude=-60), +] +kwargs1 = {"cmap": "BrBG", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} +kwargs2 = {"cmap": "RdBu", "vmin": -1, "vmax": 1, "transform": PlateCarree()} + +mode = 2 + +fig = plt.figure(figsize=(7, 14)) +gs = GridSpec(5, 2) +ax1 = [fig.add_subplot(gs[0, i], projection=proj[i]) for i in range(2)] +ax2 = [fig.add_subplot(gs[1, i], projection=proj[i]) for i in range(2)] +ax3 = [fig.add_subplot(gs[2, i], projection=proj[i]) for i in range(2)] +ax4 = [fig.add_subplot(gs[3, i]) for i in range(2)] + +for i, a in enumerate(ax1): + singular_vectors[i].sel(mode=mode).plot(ax=a, **kwargs1) + +for i, a in enumerate(ax2): + hom_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + hom_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) +for i, a in enumerate(ax3): + het_pats[i].sel(mode=mode).plot(ax=a, **kwargs2) + a.scatter( + lonlats[i][0], + lonlats[i][1], + het_mask[i].sel(mode=mode).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + +for i, a in enumerate(ax4): + scores[i].sel(mode=mode).plot(ax=a) + a.set_xlabel("") + + +for a in np.ravel([ax1, ax2, ax3]): + a.coastlines(color=".5") + a.add_feature(LAND) + +plt.tight_layout() +plt.savefig("rotated_mca.jpg") diff --git a/examples/2cross/rotated_mca.jpg b/examples/2cross/rotated_mca.jpg new file mode 100644 index 00000000..1e9748a2 Binary files /dev/null and b/examples/2cross/rotated_mca.jpg differ diff --git a/examples/2multi/mca.jpg b/examples/2multi/mca.jpg deleted file mode 100644 index 664242ae..00000000 Binary files a/examples/2multi/mca.jpg and /dev/null differ diff --git a/examples/2multi/rotated_mca.jpg b/examples/2multi/rotated_mca.jpg deleted file mode 100644 index 47e11346..00000000 Binary files 
a/examples/2multi/rotated_mca.jpg and /dev/null differ diff --git a/examples/3multi/README.rst b/examples/3multi/README.rst new file mode 100644 index 00000000..ed4ea2f7 --- /dev/null +++ b/examples/3multi/README.rst @@ -0,0 +1,2 @@ +3 | Multi-Set Analysis +======================== diff --git a/examples/3multi/plot_cca.py b/examples/3multi/plot_cca.py new file mode 100644 index 00000000..70f6a68e --- /dev/null +++ b/examples/3multi/plot_cca.py @@ -0,0 +1,102 @@ +""" +Canonical Correlation Analysis +============================== + +In this example, we're going to perform a Canonical Correlation Analysis (CCA) +on three datasets using the ERSSTv5 monthly sea surface temperature (SST) data +from 1970 to 2022. We divide this data into three areas: the Indian Ocean, +the Pacific Ocean, and the Atlantic Ocean. Our goal is to perform CCA on these +regions. + +First, we'll import the necessary modules. +""" + +import cartopy.crs as ccrs +import matplotlib.pyplot as plt +import xarray as xr +from matplotlib.gridspec import GridSpec + +import xeofs as xe + +# %% +# Next, we load the data and compute the SST anomalies. This removes the +# monthly climatologies, so the seasonal cycle doesn't impact our CCA. + +sst = xr.tutorial.load_dataset("ersstv5").sst +sst = sst.groupby("time.month") - sst.groupby("time.month").mean("time") + + +# %% +# Now, we define the three regions of interest and store them in a list. + +indian = sst.sel(lon=slice(35, 115), lat=slice(30, -30)) +pacific = sst.sel(lon=slice(130, 290), lat=slice(30, -30)) +atlantic = sst.sel(lon=slice(320, 360), lat=slice(70, 10)) + +data_list = [indian, pacific, atlantic] + +# %% +# We now perform CCA. Since we are dealing with a high-dimensional feature space, we first +# perform PCA to reduce the dimensionality (this is kind of a regularized CCA) by setting +# ``pca=True``. By setting the ``variance_fraction`` keyword argument, we specify that we +# want to keep the number of PCA modes that explain 90% of the variance in each of the +# three data sets. +# +# An important parameter is ``init_pca_modes``. It specifies the number +# of PCA modes that are initially compute before truncating them to account for 90 %. If this +# number is small enough, randomized PCAs will be performed instead of the full SVD decomposition +# which is much faster. We can also specify ``init_pca_modes`` as a float (0 < x <= 1), +# in which case the number of PCA modes is given by the fraction of the data matrix's rank +# The default is set to 0.75 which will ensure that randomized PCAs are performed. +# +# Given the nature of SST data, we might lower it to something like 0.3, since +# we expect that most of the variance in the data will be explained by a small +# number of PC modes. +# +# Note that if our initial PCA modes don't hit the 90% variance target, ``xeofs`` +# will give a warning. + +model = xe.multi.CCA( + n_modes=2, + use_coslat=True, + pca=True, + variance_fraction=0.9, + init_pca_modes=0.30, +) +model.fit(data_list, dim="time") +components = model.components() +scores = model.scores() + +# %% +# Let's look at the canonical loadings (components) of the first mode. 
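# The block below draws the loading maps for the three basins side by side with
# a shared colorbar axis, each panel centred on a median longitude (note that
# the third entry of ``central_longitudes`` reuses ``pacific.lon.median()``;
# centring the Atlantic panel on ``atlantic.lon.median()`` may be what is
# intended). For a quick unprojected look at a single field, something like
# ``components[0].sel(mode=1).plot()`` also works.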
+ +mode = 1 + +central_longitudes = [ + indian.lon.median().item(), + pacific.lon.median().item(), + pacific.lon.median().item(), +] +projections = [ccrs.PlateCarree(central_longitude=lon) for lon in central_longitudes] + +fig = plt.figure(figsize=(12, 2.5)) +gs = GridSpec(1, 4, figure=fig, width_ratios=[2, 4, 1, 0.2]) +axes = [fig.add_subplot(gs[0, i], projection=projections[i]) for i in range(3)] +cax = fig.add_subplot(1, 4, 4) +kwargs = dict(transform=ccrs.PlateCarree(), vmin=-1, vmax=1, cmap="RdBu_r", cbar_ax=cax) +components[0].sel(mode=mode).plot(ax=axes[0], **kwargs) +components[1].sel(mode=mode).plot(ax=axes[1], **kwargs) +im = components[2].sel(mode=mode).plot(ax=axes[2], **kwargs) +fig.colorbar(im, cax=cax, orientation="vertical") +for ax in axes: + ax.coastlines() + ax.set_title("") + +# %% +# And lastly, we'll check out the canonical variates (scores) of the first mode. + +fig, ax = plt.subplots(figsize=(12, 4)) +scores[0].sel(mode=mode).plot(ax=ax, label="Indian Ocean") +scores[1].sel(mode=mode).plot(ax=ax, label="Central Pacific") +scores[2].sel(mode=mode).plot(ax=ax, label="North Atlantic") +ax.legend() diff --git a/examples/3validation/bootstrap.jpg b/examples/3validation/bootstrap.jpg deleted file mode 100644 index 6767652c..00000000 Binary files a/examples/3validation/bootstrap.jpg and /dev/null differ diff --git a/examples/3validation/README.rst b/examples/4validation/README.rst similarity index 50% rename from examples/3validation/README.rst rename to examples/4validation/README.rst index ae6a2eef..245eeb5b 100644 --- a/examples/3validation/README.rst +++ b/examples/4validation/README.rst @@ -1,2 +1,2 @@ -3 | Validation +4 | Validation =============== \ No newline at end of file diff --git a/examples/4validation/bootstrap.jpg b/examples/4validation/bootstrap.jpg new file mode 100644 index 00000000..9d36cd14 Binary files /dev/null and b/examples/4validation/bootstrap.jpg differ diff --git a/examples/4validation/plot_bootstrap.py b/examples/4validation/plot_bootstrap.py new file mode 100644 index 00000000..1cce033c --- /dev/null +++ b/examples/4validation/plot_bootstrap.py @@ -0,0 +1,95 @@ +""" +Significance testing of EOF analysis via bootstrap +=================================================== + +Test the significance of individual modes and obtain confidence intervals +for both EOFs and PCs. +""" + +# Load packages and data: +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr +from cartopy.crs import Orthographic, PlateCarree +from matplotlib.gridspec import GridSpec + +import xeofs as xe + +# %% + +t2m = xr.tutorial.load_dataset("air_temperature")["air"] + +# %% +# Perform EOF analysis + +model = xe.single.EOF(n_modes=5, standardize=False) +model.fit(t2m, dim="time") +expvar = model.explained_variance_ratio() +components = model.components() +scores = model.scores() + + +# %% +# Perform bootstrapping of the model to identy the number of significant modes. +# We perform 50 bootstraps. +# Note - if computationallly feasible - you typically want to choose higher +# numbers of bootstraps e.g. 1000. 
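# In the block below, a mode counts as significant when the lower (2.5 %)
# confidence bound of its explained variance lies above the upper (97.5 %)
# bound of the next mode, which is what ``q025 - q975.shift({"mode": -1}) > 0``
# expresses; the cumulative sum then counts the leading run of significant
# modes. Note that ``is_significant.where(is_significant is True)`` compares by
# identity with ``True``; the usual xarray idiom is
# ``is_significant.where(is_significant)``.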
+ +n_boot = 50 + +bs = xe.validation.EOFBootstrapper(n_bootstraps=n_boot) +bs.fit(model) +bs_expvar = bs.explained_variance() +ci_expvar = bs_expvar.quantile([0.025, 0.975], "n") # 95% confidence intervals + +q025 = ci_expvar.sel(quantile=0.025) +q975 = ci_expvar.sel(quantile=0.975) + +is_significant = q025 - q975.shift({"mode": -1}) > 0 +n_significant_modes = ( + is_significant.where(is_significant is True).cumsum(skipna=False).max().fillna(0) +) +print("{:} modes are significant at alpha=0.05".format(n_significant_modes.values)) + +# %% +# The bootstrapping procedure identifies 3 significant modes. We can also +# compute the 95 % confidence intervals of the EOFs/PCs and mask out +# insignificant elements of the obtained EOFs. + +ci_components = bs.components().quantile([0.025, 0.975], "n") +ci_scores = bs.scores().quantile([0.025, 0.975], "n") + +is_sig_comps = np.sign(ci_components).prod("quantile") > 0 + + +# %% +# Summarize the results in a figure. + + +lons, lats = np.meshgrid(is_sig_comps.lon.values, is_sig_comps.lat.values) +proj = Orthographic(central_latitude=30, central_longitude=-80) +kwargs = {"cmap": "RdBu", "vmin": -0.05, "vmax": 0.05, "transform": PlateCarree()} + +fig = plt.figure(figsize=(10, 16)) +gs = GridSpec(5, 2) +ax1 = [fig.add_subplot(gs[i, 0], projection=proj) for i in range(5)] +ax2 = [fig.add_subplot(gs[i, 1]) for i in range(5)] + +for i, (a1, a2) in enumerate(zip(ax1, ax2)): + a1.coastlines(color=".5") + components.isel(mode=i).plot(ax=a1, **kwargs) + a1.scatter( + lons, + lats, + is_sig_comps.isel(mode=i).values * 0.5, + color="k", + alpha=0.5, + transform=PlateCarree(), + ) + ci_scores.isel(mode=i, quantile=0).plot(ax=a2, color=".3", lw=".5", label="2.5%") + ci_scores.isel(mode=i, quantile=1).plot(ax=a2, color=".3", lw=".5", label="97.5%") + scores.isel(mode=i).plot(ax=a2, lw=".5", alpha=0.5, label="PC") + a2.legend(loc=2) + +plt.tight_layout() +plt.savefig("bootstrap.jpg") diff --git a/pyproject.toml b/pyproject.toml index 982b5fc8..1a14943f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,11 +39,11 @@ dev = [ docs = [ "rpy2>=3.5", "sphinx-gallery>=0.14", - "sphinx-design>=0.5", + "sphinx-design>=0.6", "sphinx-copybutton>=0.5", "nbsphinx>=0.9", - "pydata-sphinx-theme>=0.14", - "sphinx>=7.2", + "pydata-sphinx-theme>=0.15", + "sphinx>=8", "nbconvert>=7.9", "myst-parser>=3.0", "matplotlib>=3.4", diff --git a/tests/linalg/__init__.py b/tests/linalg/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/models/test_decomposer.py b/tests/linalg/test_decomposer.py similarity index 99% rename from tests/models/test_decomposer.py rename to tests/linalg/test_decomposer.py index 29009d0b..cc3d074b 100644 --- a/tests/models/test_decomposer.py +++ b/tests/linalg/test_decomposer.py @@ -2,7 +2,7 @@ import pytest from dask.array import Array as DaskArray # type: ignore -from xeofs.models.decomposer import Decomposer +from xeofs.linalg.decomposer import Decomposer from ..utilities import data_is_dask diff --git a/tests/models/cross/__init__.py b/tests/models/cross/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/models/test_cpcca.py b/tests/models/cross/test_cpcca.py similarity index 99% rename from tests/models/test_cpcca.py rename to tests/models/cross/test_cpcca.py index 5ebfe3f7..20035d23 100644 --- a/tests/models/test_cpcca.py +++ b/tests/models/cross/test_cpcca.py @@ -3,7 +3,7 @@ import pytest import xarray as xr -from xeofs.models.cpcca import CPCCA +from xeofs.cross import CPCCA def generate_random_data(shape, 
lazy=False, seed=142): diff --git a/tests/models/test_cpcca_complex_rotator.py b/tests/models/cross/test_cpcca_complex_rotator.py similarity index 93% rename from tests/models/test_cpcca_complex_rotator.py rename to tests/models/cross/test_cpcca_complex_rotator.py index 088e3309..c41c4d87 100644 --- a/tests/models/test_cpcca_complex_rotator.py +++ b/tests/models/cross/test_cpcca_complex_rotator.py @@ -3,14 +3,14 @@ import pytest import xarray as xr -from xeofs.models import HilbertCPCCA, HilbertCPCCARotator +from xeofs.cross import HilbertCPCCA, HilbertCPCCARotator def generate_random_data(shape, lazy=False, seed=142): rng = np.random.default_rng(seed) if lazy: return xr.DataArray( - da.random.random(shape, chunks=(5, 5)), + da.random.random(shape, chunks=(5, 5)), # type: ignore dims=["sample", "feature"], coords={"sample": np.arange(shape[0]), "feature": np.arange(shape[1])}, ) diff --git a/tests/models/test_cpcca_rotator.py b/tests/models/cross/test_cpcca_rotator.py similarity index 98% rename from tests/models/test_cpcca_rotator.py rename to tests/models/cross/test_cpcca_rotator.py index ce1c7c4a..c8770699 100644 --- a/tests/models/test_cpcca_rotator.py +++ b/tests/models/cross/test_cpcca_rotator.py @@ -3,7 +3,7 @@ import pytest import xarray as xr -from xeofs.models import CPCCA, CPCCARotator +from xeofs.cross import CPCCA, CPCCARotator def generate_random_data(shape, lazy=False, seed=142): diff --git a/tests/models/test_hilbert_mca.py b/tests/models/cross/test_hilbert_mca.py similarity index 99% rename from tests/models/test_hilbert_mca.py rename to tests/models/cross/test_hilbert_mca.py index d0cf9bca..5bd07338 100644 --- a/tests/models/test_hilbert_mca.py +++ b/tests/models/cross/test_hilbert_mca.py @@ -1,7 +1,7 @@ import pytest import xarray as xr -from xeofs.models import HilbertMCA +from xeofs.cross import HilbertMCA @pytest.fixture diff --git a/tests/models/test_hilbert_mca_rotator.py b/tests/models/cross/test_hilbert_mca_rotator.py similarity index 99% rename from tests/models/test_hilbert_mca_rotator.py rename to tests/models/cross/test_hilbert_mca_rotator.py index 3b7d066d..7ef22bf6 100644 --- a/tests/models/test_hilbert_mca_rotator.py +++ b/tests/models/cross/test_hilbert_mca_rotator.py @@ -3,7 +3,7 @@ import xarray as xr # Import the classes from your modules -from xeofs.models import HilbertMCA, HilbertMCARotator +from xeofs.cross import HilbertMCA, HilbertMCARotator @pytest.fixture diff --git a/tests/models/test_mca.py b/tests/models/cross/test_mca.py similarity index 99% rename from tests/models/test_mca.py rename to tests/models/cross/test_mca.py index 6216ced1..3d063685 100644 --- a/tests/models/test_mca.py +++ b/tests/models/cross/test_mca.py @@ -2,9 +2,9 @@ import pytest import xarray as xr -from xeofs.models.mca import MCA +from xeofs.cross import MCA -from ..utilities import data_is_dask +from ...utilities import data_is_dask @pytest.fixture diff --git a/tests/models/test_mca_rotator.py b/tests/models/cross/test_mca_rotator.py similarity index 99% rename from tests/models/test_mca_rotator.py rename to tests/models/cross/test_mca_rotator.py index f53db1bb..f467d331 100644 --- a/tests/models/test_mca_rotator.py +++ b/tests/models/cross/test_mca_rotator.py @@ -3,9 +3,9 @@ import xarray as xr # Import the classes from your modules -from xeofs.models import MCA, MCARotator +from xeofs.cross import MCA, MCARotator -from ..utilities import data_is_dask +from ...utilities import data_is_dask @pytest.fixture diff --git a/tests/models/multi/__init__.py 
b/tests/models/multi/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/models/test_cca.py b/tests/models/multi/test_cca.py similarity index 97% rename from tests/models/test_cca.py rename to tests/models/multi/test_cca.py index ea77c2af..038862d0 100644 --- a/tests/models/test_cca.py +++ b/tests/models/multi/test_cca.py @@ -1,6 +1,6 @@ import pytest -from xeofs.models.cca import CCA +from xeofs.multi import CCA @pytest.mark.parametrize( diff --git a/tests/models/single/__init__.py b/tests/models/single/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/models/test_eeof.py b/tests/models/single/test_eeof.py similarity index 99% rename from tests/models/test_eeof.py rename to tests/models/single/test_eeof.py index 1c150a2c..680a01cf 100644 --- a/tests/models/test_eeof.py +++ b/tests/models/single/test_eeof.py @@ -1,8 +1,8 @@ import numpy as np -import xarray as xr import pytest +import xarray as xr -from xeofs.models.eeof import ExtendedEOF +from xeofs.single import ExtendedEOF def test_init(): diff --git a/tests/models/test_eof.py b/tests/models/single/test_eof.py similarity index 99% rename from tests/models/test_eof.py rename to tests/models/single/test_eof.py index d1403e80..c7373152 100644 --- a/tests/models/test_eof.py +++ b/tests/models/single/test_eof.py @@ -1,8 +1,8 @@ import numpy as np -import xarray as xr import pytest +import xarray as xr -from xeofs.models.eof import EOF +from xeofs.single import EOF def test_init(): diff --git a/tests/models/test_eof_rotator.py b/tests/models/single/test_eof_rotator.py similarity index 98% rename from tests/models/test_eof_rotator.py rename to tests/models/single/test_eof_rotator.py index 91eaaa27..b8e84245 100644 --- a/tests/models/test_eof_rotator.py +++ b/tests/models/single/test_eof_rotator.py @@ -1,10 +1,11 @@ -import pytest import numpy as np +import pytest import xarray as xr -from xeofs.models import EOF, EOFRotator from xeofs.data_container import DataContainer -from ..utilities import data_is_dask +from xeofs.single import EOF, EOFRotator + +from ...utilities import data_is_dask @pytest.fixture diff --git a/tests/models/test_gwpca.py b/tests/models/single/test_gwpca.py similarity index 93% rename from tests/models/test_gwpca.py rename to tests/models/single/test_gwpca.py index 8b3fc353..c7431a67 100644 --- a/tests/models/test_gwpca.py +++ b/tests/models/single/test_gwpca.py @@ -1,6 +1,6 @@ import pytest -import xeofs as xe +import xeofs as xe # ============================================================================= # GENERALLY VALID TEST CASES @@ -31,7 +31,7 @@ [("bisquare"), ("gaussian"), ("exponential")], ) def test_fit(mock_data_array, kernel): - gwpca = xe.models.GWPCA( + gwpca = xe.single.GWPCA( n_modes=2, metric="haversine", kernel=kernel, bandwidth=5000 ) gwpca.fit(mock_data_array, dim=("lat", "lon")) @@ -49,4 +49,4 @@ def test_fit(mock_data_array, kernel): ) def test_fit_invalid(mock_data_array, metric, kernel, bandwidth): with pytest.raises(ValueError): - xe.models.GWPCA(n_modes=2, metric=metric, kernel=kernel, bandwidth=bandwidth) + xe.single.GWPCA(n_modes=2, metric=metric, kernel=kernel, bandwidth=bandwidth) diff --git a/tests/models/test_hilbert_eof.py b/tests/models/single/test_hilbert_eof.py similarity index 98% rename from tests/models/test_hilbert_eof.py rename to tests/models/single/test_hilbert_eof.py index 1235df85..ae7b8256 100644 --- a/tests/models/test_hilbert_eof.py +++ b/tests/models/single/test_hilbert_eof.py @@ -3,7 +3,7 @@ import numpy as np import 
pytest -from xeofs.models import HilbertEOF +from xeofs.single import HilbertEOF warnings.filterwarnings("ignore", message="numpy.dtype size changed") warnings.filterwarnings("ignore", message="numpy.ufunc size changed") diff --git a/tests/models/test_hilbert_eof_rotator.py b/tests/models/single/test_hilbert_eof_rotator.py similarity index 98% rename from tests/models/test_hilbert_eof_rotator.py rename to tests/models/single/test_hilbert_eof_rotator.py index e0d46b6a..9fc3bd3c 100644 --- a/tests/models/test_hilbert_eof_rotator.py +++ b/tests/models/single/test_hilbert_eof_rotator.py @@ -2,7 +2,7 @@ import xarray as xr from xeofs.data_container import DataContainer -from xeofs.models import HilbertEOF, HilbertEOFRotator +from xeofs.single import HilbertEOF, HilbertEOFRotator @pytest.fixture diff --git a/tests/models/test_opa.py b/tests/models/single/test_opa.py similarity index 99% rename from tests/models/test_opa.py rename to tests/models/single/test_opa.py index 57991bbb..d1781c93 100644 --- a/tests/models/test_opa.py +++ b/tests/models/single/test_opa.py @@ -1,8 +1,8 @@ import numpy as np -import xarray as xr import pytest +import xarray as xr -from xeofs.models import OPA +from xeofs.single import OPA @pytest.fixture diff --git a/tests/models/test_sparse_pca.py b/tests/models/single/test_sparse_pca.py similarity index 99% rename from tests/models/test_sparse_pca.py rename to tests/models/single/test_sparse_pca.py index ffc594a8..f4a1bc56 100644 --- a/tests/models/test_sparse_pca.py +++ b/tests/models/single/test_sparse_pca.py @@ -2,7 +2,7 @@ import pytest import xarray as xr -from xeofs.models import SparsePCA +from xeofs.single import SparsePCA def test_init(): diff --git a/tests/models/test_orthogonality.py b/tests/models/test_orthogonality.py deleted file mode 100644 index dad46f79..00000000 --- a/tests/models/test_orthogonality.py +++ /dev/null @@ -1,927 +0,0 @@ -import numpy as np -import pytest - -from xeofs.models import ( - EOF, - MCA, - EOFRotator, - HilbertEOF, - HilbertEOFRotator, - HilbertMCA, - HilbertMCARotator, - MCARotator, -) - - -def is_diagonal(matrix, tol=1e-10): - # Check if all off-diagonal elements are close to zero within the specified tolerance - off_diagonal_elements = matrix - np.diag(np.diag(matrix)) - return np.all(np.abs(off_diagonal_elements) < tol) - - -# Orthogonality -# ============================================================================= -# EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_eof_components(dim, use_coslat, mock_data_array): - """Components are orthogonal""" - model = EOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - V = model.data["components"].values - assert np.allclose( - V.T @ V, np.eye(V.shape[1]), atol=1e-5 - ), "Components are not orthogonal" - - -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_eof_scores(dim, use_coslat, mock_data_array): - """Scores are orthogonal""" - model = EOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - U = model.data["scores"].values / model.data["norms"].values - assert np.allclose( - U.T @ U, np.eye(U.shape[1]), atol=1e-5 - ), "Scores are not orthogonal" - - -# Hilbert EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def 
test_ceof_components(dim, use_coslat, mock_data_array): - """Components are unitary""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - V = model.data["components"].values - assert np.allclose( - V.conj().T @ V, np.eye(V.shape[1]), atol=1e-5 - ), "Components are not unitary" - - -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_ceof_scores(dim, use_coslat, mock_data_array): - """Scores are unitary""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - U = model.data["scores"].values / model.data["norms"].values - assert np.allclose( - U.conj().T @ U, np.eye(U.shape[1]), atol=1e-5 - ), "Scores are not unitary" - - -# Rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_reof_components(dim, use_coslat, power, mock_data_array): - """Components are NOT orthogonal""" - model = EOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - rot = EOFRotator(n_modes=5, power=power) - rot.fit(model) - V = rot.data["components"].values - K = V.conj().T @ V - assert np.allclose( - np.diag(K), np.ones(V.shape[1]), atol=1e-5 - ), "Components are not normalized" - # Assert that off-diagonals are not zero - assert not np.allclose(K, np.eye(K.shape[0])), "Rotated components are orthogonal" - - -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_reof_scores(dim, use_coslat, power, mock_data_array): - """Components are orthogonal only for Varimax rotation""" - model = EOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - rot = EOFRotator(n_modes=5, power=power) - rot.fit(model) - U = rot.data["scores"].values / rot.data["norms"].values - K = U.conj().T @ U - if power == 1: - # Varimax rotation does guarantee orthogonality - assert np.allclose( - K, np.eye(K.shape[1]), atol=1e-5 - ), "Components are not orthogonal" - else: - assert not np.allclose(K, np.eye(K.shape[1])), "Components are orthogonal" - - -# Hilbert rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_creof_components(dim, use_coslat, power, mock_data_array): - """Components are NOT unitary""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - rot = HilbertEOFRotator(n_modes=5, power=power) - rot.fit(model) - V = rot.data["components"].values - K = V.conj().T @ V - assert np.allclose( - np.diag(K), np.ones(V.shape[1]), atol=1e-5 - ), "Components are not normalized" - # Assert that off-diagonals are not zero - assert not np.allclose(K, np.eye(K.shape[0])), "Rotated components are unitary" - - -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), 
False, 2), - ], -) -def test_creof_scores(dim, use_coslat, power, mock_data_array): - """Components are unitary only for Varimax rotation""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - rot = HilbertEOFRotator(n_modes=5, power=power) - rot.fit(model) - U = rot.data["scores"].values / rot.data["norms"].values - K = U.conj().T @ U - if power == 1: - # Varimax rotation does guarantee unitarity - assert np.allclose( - K, np.eye(K.shape[1]), atol=1e-5 - ), "Components are not unitary" - else: - assert not np.allclose(K, np.eye(K.shape[1])), "Components are unitary" - - -# MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_mca_components(dim, use_coslat, mock_data_array): - """Components are orthogonal""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - V1 = model.data["components1"].values - V2 = model.data["components2"].values - K1 = V1.T @ V1 - K2 = V2.T @ V2 - assert np.allclose( - K1, np.eye(K1.shape[0]), rtol=1e-8 - ), "Left components are not orthogonal" - assert np.allclose( - K2, np.eye(K2.shape[0]), rtol=1e-8 - ), "Right components are not orthogonal" - - -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_mca_scores(dim, use_coslat, mock_data_array): - """Scores are orthogonal""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - U1 = model.data["scores1"].values - U2 = model.data["scores2"].values - s = model.data["singular_values"].values - K = U1.T @ U2 / (U1.shape[0] - 1) - target = np.diag(s) - assert np.allclose(K, target, atol=1e-5), "Scores are not orthogonal" - - -# Hilbert MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_cmca_components(dim, use_coslat, mock_data_array): - """Components are unitary""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - V1 = model.data["components1"].values - V2 = model.data["components2"].values - K1 = V1.conj().T @ V1 - K2 = V2.conj().T @ V2 - assert np.allclose( - K1, np.eye(K1.shape[0]), atol=1e-5 - ), "Left components are not unitary" - assert np.allclose( - K2, np.eye(K2.shape[0]), atol=1e-5 - ), "Right components are not unitary" - - -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_cmca_scores(dim, use_coslat, mock_data_array): - """Scores are unitary""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA(n_modes=6, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - U1 = model.data["scores1"].values - U2 = model.data["scores2"].values - s = model.data["singular_values"].values - K = U1.conj().T @ U2 / (U1.shape[0] - 1) - target = np.diag(s) - assert np.allclose(K, target, atol=1e-5), "Scores are not unitary" - - -# Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - 
(("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_rmca_components(dim, use_coslat, power, mock_data_array): - """Components are NOT orthogonal""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA( - n_modes=19, - standardize=True, - use_coslat=use_coslat, - use_pca=True, - n_pca_modes=19, - ) - model.fit(data1, data2, dim=dim) - rot = MCARotator(n_modes=5, power=power) - rot.fit(model) - V1 = rot.data["components1"].values - V2 = rot.data["components2"].values - K1 = V1.conj().T @ V1 - K2 = V2.conj().T @ V2 - assert np.allclose( - np.diag(K1), np.ones(K1.shape[0]), rtol=1e-5 - ), "Components are not normalized" - assert np.allclose( - np.diag(K2), np.ones(K2.shape[0]), rtol=1e-5 - ), "Components are not normalized" - # Assert that off-diagonals are not zero - assert not np.allclose(K1, np.eye(K1.shape[0])), "Rotated components are orthogonal" - assert not np.allclose(K2, np.eye(K2.shape[0])), "Rotated components are orthogonal" - - -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_rmca_scores(dim, use_coslat, power, mock_data_array): - """Components are orthogonal only for Varimax rotation""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA( - n_modes=5, - standardize=True, - use_coslat=use_coslat, - use_pca=False, - n_pca_modes=19, - ) - model.fit(data1, data2, dim=dim) - rot = MCARotator(n_modes=5, power=power) - rot.fit(model) - W1 = rot.data["norm1"].values - W2 = rot.data["norm2"].values - U1 = rot.data["scores1"].values - U2 = rot.data["scores2"].values - K = U1.T @ U2 / (U1.shape[0] - 1) - target = np.diag(W1 * W2) - if power == 1: - # Varimax rotation does guarantee orthogonality - np.testing.assert_allclose(K, target, atol=1e-5, rtol=1e-5) - else: - assert not is_diagonal(K), "Components are orthogonal" - - -# Hilbert Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_crmca_components(dim, use_coslat, power, mock_data_array): - """Components are NOT orthogonal""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA( - n_modes=19, standardize=True, use_coslat=use_coslat, use_pca=False - ) - model.fit(data1, data2, dim=dim) - rot = HilbertMCARotator(n_modes=5, power=power) - rot.fit(model) - V1 = rot.data["components1"].values - V2 = rot.data["components2"].values - K1 = V1.conj().T @ V1 - K2 = V2.conj().T @ V2 - assert np.allclose( - np.diag(K1), np.ones(K1.shape[0]), rtol=1e-5 - ), "Components are not normalized" - assert np.allclose( - np.diag(K2), np.ones(K2.shape[0]), rtol=1e-5 - ), "Components are not normalized" - # Assert that off-diagonals are not zero - assert not np.allclose(K1, np.eye(K1.shape[0])), "Rotated components are orthogonal" - assert not np.allclose(K2, np.eye(K2.shape[0])), "Rotated components are orthogonal" - - -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_crmca_scores(dim, use_coslat, power, mock_data_array): - """Components are orthogonal only for Varimax 
rotation""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA( - n_modes=5, standardize=True, use_coslat=use_coslat, use_pca=False - ) - model.fit(data1, data2, dim=dim) - rot = HilbertMCARotator(n_modes=5, power=power) - rot.fit(model) - W1 = rot.data["norm1"].values - W2 = rot.data["norm2"].values - U1 = rot.data["scores1"].values - U2 = rot.data["scores2"].values - K = U1.conj().T @ U2 / (U1.shape[0] - 1) - target = np.diag(W1 * W2) - if power == 1: - # Varimax rotation does guarantee orthogonality - np.testing.assert_allclose(K, target, atol=1e-5, rtol=1e-5) - else: - assert not np.allclose(K, target), "Components are orthogonal" - - -# Transform -# ============================================================================= -# EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_eof_transform(dim, use_coslat, mock_data_array, normalized): - """Transforming the original data results in the model scores""" - model = EOF( - n_modes=5, - standardize=True, - use_coslat=use_coslat, - random_state=5, - ) - model.fit(mock_data_array, dim=dim) - scores = model.scores(normalized=normalized) - pseudo_scores = model.transform(mock_data_array, normalized=normalized) - assert np.allclose( - scores, pseudo_scores, atol=1e-4 - ), "Transformed data does not match the scores" - - -# Hilbert EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_ceof_transform(dim, use_coslat, mock_data_array, normalized): - """Not implemented yet""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - model.scores(normalized=normalized) - with pytest.raises(NotImplementedError): - model.transform(mock_data_array, normalized=normalized) - - -# Rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_reof_transform(dim, use_coslat, power, mock_data_array, normalized): - """Transforming the original data results in the model scores""" - model = EOF(n_modes=5, standardize=True, use_coslat=use_coslat, random_state=5) - model.fit(mock_data_array, dim=dim) - rot = EOFRotator(n_modes=5, power=power) - rot.fit(model) - scores = rot.scores(normalized=normalized) - pseudo_scores = rot.transform(mock_data_array, normalized=normalized) - np.testing.assert_allclose( - scores, - pseudo_scores, - rtol=5e-3, - err_msg="Transformed data does not match the scores", - ) - - -# Hilbert Rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_creof_transform(dim, use_coslat, power, mock_data_array, normalized): - """not implemented yet""" - model = HilbertEOF(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(mock_data_array, dim=dim) - rot = HilbertEOFRotator(n_modes=5, power=power) - rot.fit(model) - rot.scores(normalized=normalized) - with 
pytest.raises(NotImplementedError): - rot.transform(mock_data_array, normalized=normalized) - - -# MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_mca_transform(dim, use_coslat, mock_data_array): - """Transforming the original data results in the model scores""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - scores1, scores2 = model.scores() - pseudo_scores1, pseudo_scores2 = model.transform(X=data1, Y=data2) - assert np.allclose( - scores1, pseudo_scores1, atol=1e-4 - ), "Transformed data does not match the scores" - assert np.allclose( - scores2, pseudo_scores2, atol=1e-4 - ), "Transformed data does not match the scores" - - -# Hilbert MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_cmca_transform(dim, use_coslat, mock_data_array): - """Transforming the original data results in the model scores""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - scores1, scores2 = model.scores() - with pytest.raises(NotImplementedError): - pseudo_scores1, pseudo_scores2 = model.transform(X=data1, Y=data2) - - -# Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_rmca_transform(dim, use_coslat, power, mock_data_array): - """Transforming the original data results in the model scores""" - X = mock_data_array.copy() - Y = X.copy() ** 2 - model = MCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(X, Y, dim=dim) - rot = MCARotator(n_modes=5, power=power) - rot.fit(model) - scores1, scores2 = rot.scores() - pseudo_scores1, pseudo_scores2 = rot.transform(X=X, Y=Y) - assert np.allclose( - scores1, pseudo_scores1, atol=1e-5 - ), "Transformed data does not match the scores" - assert np.allclose( - scores2, pseudo_scores2, atol=1e-5 - ), "Transformed data does not match the scores" - - -# Hilbert Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_crmca_transform(dim, use_coslat, power, mock_data_array): - """Transforming the original data results in the model scores""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA(n_modes=5, standardize=True, use_coslat=use_coslat) - model.fit(data1, data2, dim=dim) - rot = HilbertMCARotator(n_modes=5, power=power) - rot.fit(model) - scores1, scores2 = rot.scores() - with pytest.raises(NotImplementedError): - pseudo_scores1, pseudo_scores2 = rot.transform(X=data1, Y=data2) - - -# Reconstruct -# ============================================================================= -def r2_score(x, y, dim=None): - """Compute the R2 score between two DataArrays - - Parameters - ---------- - x : xr.DataArray - Reference data - y : xr.DataArray - Testing data to be compared with the reference data - dim : str or sequence of str, optional - Dimension(s) over which to compute the R2 score (the default is None, 
which - means that the R2 score is computed over all dimensions) - - Returns - ------- - r2_score : xr.DataArray - R2 score between x and y - - """ - ssres = ((x - y) ** 2).sum(dim) - sstot = ((x - x.mean(dim)) ** 2).sum(dim) - return 1 - (ssres / sstot) - - -# EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_eof_inverse_transform(dim, use_coslat, mock_data_array, normalized): - """Inverse transform produces an approximate reconstruction of the original data""" - data = mock_data_array - model = EOF(n_modes=19, standardize=True, use_coslat=use_coslat) - model.fit(data, dim=dim) - scores = model.scores(normalized=normalized) - data_rec = model.inverse_transform(scores, normalized=normalized) - r2 = r2_score(data, data_rec, dim=dim) - r2 = r2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert r2 > 0.95, "Inverse transform does not produce a good reconstruction" - - -# Hilbert EOF -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_ceof_inverse_transform(dim, use_coslat, mock_data_array, normalized): - """Inverse transform produces an approximate reconstruction of the original data""" - data = mock_data_array - model = HilbertEOF(n_modes=19, standardize=True, use_coslat=use_coslat) - model.fit(data, dim=dim) - scores = model.scores(normalized=normalized) - data_rec = model.inverse_transform(scores, normalized=normalized).real - r2 = r2_score(data, data_rec, dim=dim) - r2 = r2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert r2 > 0.95, "Inverse transform does not produce a good reconstruction" - - -# Rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_reof_inverse_transform(dim, use_coslat, power, mock_data_array, normalized): - """Inverse transform produces an approximate reconstruction of the original data""" - data = mock_data_array - model = EOF(n_modes=19, standardize=True, use_coslat=use_coslat) - model.fit(data, dim=dim) - rot = EOFRotator(n_modes=19, power=power) - rot.fit(model) - scores = rot.scores(normalized=normalized) - data_rec = rot.inverse_transform(scores, normalized=normalized).real - r2 = r2_score(data, data_rec, dim=dim) - r2 = r2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction (R2={r2.values:.2f})" - - -# Hilbert Rotated EOF -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -@pytest.mark.parametrize("normalized", [True, False]) -def test_creof_inverse_transform(dim, use_coslat, power, mock_data_array, normalized): - """Inverse transform produces an approximate reconstruction of the original data""" - data = mock_data_array - model = HilbertEOF(n_modes=19, standardize=True, use_coslat=use_coslat) - model.fit(data, dim=dim) - rot = HilbertEOFRotator(n_modes=19, power=power) - rot.fit(model) - scores = 
rot.scores(normalized=normalized) - data_rec = rot.inverse_transform(scores, normalized=normalized).real - r2 = r2_score(data, data_rec, dim=dim) - r2 = r2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction (R2={r2.values:.2f})" - - -# MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_mca_inverse_transform(dim, use_coslat, mock_data_array): - """Inverse transform produces an approximate reconstruction of the original data""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA(n_modes=19, standardize=True, use_coslat=use_coslat, n_pca_modes="all") - model.fit(data1, data2, dim=dim) - scores1 = model.data["scores1"] - scores2 = model.data["scores2"] - data1_rec, data2_rec = model.inverse_transform(scores1, scores2) - r2_1 = r2_score(data1, data1_rec, dim=dim) - r2_2 = r2_score(data2, data2_rec, dim=dim) - r2_1 = r2_1.mean() - r2_2 = r2_2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2_1 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of left field (R2={r2_1.values:.2f})" - assert ( - r2_2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of right field (R2={r2_2.values:.2f})" - - -# Hilbert MCA -@pytest.mark.parametrize( - "dim, use_coslat", - [ - (("time",), True), - (("lat", "lon"), False), - (("lon", "lat"), False), - ], -) -def test_cmca_inverse_transform(dim, use_coslat, mock_data_array): - """Inverse transform produces an approximate reconstruction of the original data""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA( - n_modes=19, standardize=True, use_coslat=use_coslat, n_pca_modes="all" - ) - model.fit(data1, data2, dim=dim) - scores1 = model.data["scores1"] - scores2 = model.data["scores2"] - data1_rec, data2_rec = model.inverse_transform(scores1, scores2) - r2_1 = r2_score(data1, data1_rec, dim=dim) - r2_2 = r2_score(data2, data2_rec, dim=dim) - r2_1 = r2_1.mean() - r2_2 = r2_2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2_1 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of left field (R2={r2_1.values:.2f})" - assert ( - r2_2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of right field (R2={r2_2.values:.2f})" - - -# Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_rmca_inverse_transform(dim, use_coslat, power, mock_data_array): - """Inverse transform produces an approximate reconstruction of the original data""" - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = MCA(n_modes=15, standardize=True, use_coslat=use_coslat, n_pca_modes="all") - model.fit(data1, data2, dim=dim) - rot = MCARotator(n_modes=15, power=power) - rot.fit(model) - scores1 = rot.data["scores1"] - scores2 = rot.data["scores2"] - data1_rec, data2_rec = rot.inverse_transform(scores1, scores2) - r2_1 = r2_score(data1, data1_rec, dim=dim) - r2_2 = r2_score(data2, data2_rec, dim=dim) - r2_1 = r2_1.mean() - r2_2 = r2_2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2_1 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of left field (R2={r2_1.values:.2f})" - assert ( - 
r2_2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of right field (R2={r2_2.values:.2f})" - - -# Hilbert Rotated MCA -@pytest.mark.parametrize( - "dim, use_coslat, power", - [ - (("time",), True, 1), - (("lat", "lon"), False, 1), - (("lon", "lat"), False, 1), - (("time",), True, 2), - (("lat", "lon"), False, 2), - (("lon", "lat"), False, 2), - ], -) -def test_crmca_inverse_transform(dim, use_coslat, power, mock_data_array): - """Inverse transform produces an approximate reconstruction of the original data""" - # NOTE: The lobpcg SVD solver for Hilbert matrices requires a small number of modes - # compared to the actual data size. Since we have a small test set here we only use - # 10 modes for the test. Therefore, the threshold for the R2 score is lower than for - # the other tests. - data1 = mock_data_array.copy() - data2 = data1.copy() ** 2 - model = HilbertMCA( - n_modes=10, standardize=True, use_coslat=use_coslat, use_pca=False - ) - model.fit(data1, data2, dim=dim) - rot = HilbertMCARotator(n_modes=10, power=power) - rot.fit(model) - scores1 = rot.data["scores1"] - scores2 = rot.data["scores2"] - data1_rec, data2_rec = rot.inverse_transform(scores1, scores2) - r2_1 = r2_score(data1, data1_rec, dim=dim) - r2_2 = r2_score(data2, data2_rec, dim=dim) - r2_1 = r2_1.mean() - r2_2 = r2_2.mean() - # Choose a threshold of 0.95; a bit arbitrary - assert ( - r2_1 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of left field (R2={r2_1.values:.2f})" - assert ( - r2_2 > 0.95 - ), f"Inverse transform does not produce a good reconstruction of right field (R2={r2_2.values:.2f})" diff --git a/tests/models/test_rotator.py b/tests/models/test_rotator_factory.py similarity index 88% rename from tests/models/test_rotator.py rename to tests/models/test_rotator_factory.py index f092a268..78ade121 100644 --- a/tests/models/test_rotator.py +++ b/tests/models/test_rotator_factory.py @@ -1,16 +1,8 @@ import pytest -from xeofs.models import ( - EOF, - MCA, - EOFRotator, - HilbertEOF, - HilbertEOFRotator, - HilbertMCA, - HilbertMCARotator, - MCARotator, -) -from xeofs.models.rotator_factory import RotatorFactory +from xeofs.cross import MCA, HilbertMCA, HilbertMCARotator, MCARotator +from xeofs.rotator_factory import RotatorFactory +from xeofs.single import EOF, EOFRotator, HilbertEOF, HilbertEOFRotator # RotatorFactory should be imported from its module # from module import RotatorFactory diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/validation/test_eof_bootstrapper.py b/tests/validation/test_eof_bootstrapper.py index c4501c03..cb6a8ee7 100644 --- a/tests/validation/test_eof_bootstrapper.py +++ b/tests/validation/test_eof_bootstrapper.py @@ -1,8 +1,8 @@ +import numpy as np import pytest import xarray as xr -import numpy as np -from xeofs.models import EOF +from xeofs.single import EOF from xeofs.validation import EOFBootstrapper diff --git a/xeofs/__init__.py b/xeofs/__init__.py index ac3e5d18..b4d99e2e 100644 --- a/xeofs/__init__.py +++ b/xeofs/__init__.py @@ -1,4 +1,5 @@ -from xeofs import models, validation +from xeofs import cross, multi, single, validation from xeofs._version import __version__ +from xeofs.rotator_factory import RotatorFactory -__all__ = ["models", "validation", "__version__"] +__all__ = ["single", "cross", "multi", "RotatorFactory", "validation", "__version__"] diff --git a/xeofs/models/_base_model.py b/xeofs/base_model.py similarity index 95% rename from 
xeofs/models/_base_model.py rename to xeofs/base_model.py index 70ccf603..7f13899e 100644 --- a/xeofs/models/_base_model.py +++ b/xeofs/base_model.py @@ -7,12 +7,10 @@ import xarray as xr from typing_extensions import Self -from .._version import __version__ -from ..utils.data_types import DataArray -from ..utils.io import insert_placeholders, open_model_tree, write_model_tree -from ..utils.xarray_utils import ( - data_is_dask, -) +from ._version import __version__ +from .utils.data_types import DataArray +from .utils.io import insert_placeholders, open_model_tree, write_model_tree +from .utils.xarray_utils import data_is_dask try: from xarray.core.datatree import DataTree # type: ignore @@ -25,7 +23,7 @@ xr.set_options(keep_attrs=True) -class _BaseModel(ABC): +class BaseModel(ABC): """ Abstract base class for an xeofs model. @@ -178,7 +176,7 @@ def load( Returns ------- - model : _BaseModel + model : BaseModel The loaded model. """ diff --git a/xeofs/models/__init__.py b/xeofs/cross/__init__.py similarity index 71% rename from xeofs/models/__init__.py rename to xeofs/cross/__init__.py index c05aef1b..830df258 100644 --- a/xeofs/models/__init__.py +++ b/xeofs/cross/__init__.py @@ -1,35 +1,16 @@ import warnings -from .cca import CCA from .cpcca import CPCCA, ComplexCPCCA, HilbertCPCCA from .cpcca_rotator import ComplexCPCCARotator, CPCCARotator, HilbertCPCCARotator -from .eeof import ExtendedEOF -from .eof import EOF, ComplexEOF, HilbertEOF -from .eof_rotator import ComplexEOFRotator, EOFRotator, HilbertEOFRotator -from .gwpca import GWPCA from .mca import MCA, ComplexMCA, HilbertMCA from .mca_rotator import ComplexMCARotator, HilbertMCARotator, MCARotator -from .opa import OPA -from .rotator_factory import RotatorFactory -from .sparse_pca import SparsePCA __all__ = [ - "EOF", - "ExtendedEOF", - "SparsePCA", - "OPA", - "GWPCA", - "ComplexEOF", "ComplexMCA", "ComplexCPCCA", - "HilbertEOF", "HilbertMCA", "HilbertCPCCA", - "EOFRotator", - "ComplexEOFRotator", - "HilbertEOFRotator", "MCA", - "CCA", "CPCCA", "MCARotator", "CPCCARotator", @@ -37,7 +18,6 @@ "ComplexCPCCARotator", "HilbertMCARotator", "HilbertCPCCARotator", - "RotatorFactory", ] diff --git a/xeofs/models/_base_model_cross_set.py b/xeofs/cross/base_model_cross_set.py similarity index 99% rename from xeofs/models/_base_model_cross_set.py rename to xeofs/cross/base_model_cross_set.py index 1939435b..f3cf5472 100644 --- a/xeofs/models/_base_model_cross_set.py +++ b/xeofs/cross/base_model_cross_set.py @@ -4,16 +4,16 @@ from numpy.random import Generator from typing_extensions import Self +from ..base_model import BaseModel from ..data_container import DataContainer from ..preprocessing.preprocessor import Preprocessor from ..preprocessing.whitener import Whitener from ..utils.data_types import DataArray, DataObject, GenericType from ..utils.sanity_checks import validate_input_type from ..utils.xarray_utils import convert_to_dim_type -from ._base_model import _BaseModel -class _BaseModelCrossSet(_BaseModel): +class BaseModelCrossSet(BaseModel): """ Abstract base class for cross-decomposition models. 
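The two renames above move the abstract base classes out of the private `xeofs.models` namespace: `_BaseModel` becomes `xeofs.base_model.BaseModel` and `_BaseModelCrossSet` becomes `xeofs.cross.base_model_cross_set.BaseModelCrossSet`. A minimal import sketch of the new layout, assuming the refactored package from this diff is installed (the `issubclass` check simply restates the class definition shown above):

```python
# Minimal sketch of the post-refactor import paths (assumption: the reorganized
# xeofs package from this diff is installed).
#
# Old (removed) locations:
#   from xeofs.models._base_model import _BaseModel
#   from xeofs.models._base_model_cross_set import _BaseModelCrossSet
from xeofs.base_model import BaseModel
from xeofs.cross.base_model_cross_set import BaseModelCrossSet

# BaseModelCrossSet subclasses BaseModel, as in the class definition above.
assert issubclass(BaseModelCrossSet, BaseModel)
```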
diff --git a/xeofs/models/cpcca.py b/xeofs/cross/cpcca.py similarity index 99% rename from xeofs/models/cpcca.py rename to xeofs/cross/cpcca.py index 9bf767ef..ec8c0da2 100644 --- a/xeofs/models/cpcca.py +++ b/xeofs/cross/cpcca.py @@ -5,16 +5,16 @@ import xarray as xr from typing_extensions import Self +from ..linalg._numpy import _fractional_matrix_power +from ..linalg.decomposer import Decomposer from ..utils.data_types import DataArray, DataObject from ..utils.hilbert_transform import hilbert_transform -from ..utils.linalg import fractional_matrix_power from ..utils.statistics import pearson_correlation from ..utils.xarray_utils import argsort_dask -from ._base_model_cross_set import _BaseModelCrossSet -from .decomposer import Decomposer +from .base_model_cross_set import BaseModelCrossSet -class CPCCA(_BaseModelCrossSet): +class CPCCA(BaseModelCrossSet): """Continuum Power CCA (CPCCA). CPCCA extends continuum power regression to isolate pairs of coupled @@ -637,7 +637,7 @@ def fraction_variance_Y_explained_by_X(self) -> DataArray: def _compute_total_variance_numpy(X, Y): Cx = X.conj().T @ X / (X.shape[0] - 1) - Tinv = fractional_matrix_power(Cx, -0.5) + Tinv = _fractional_matrix_power(Cx, -0.5) return np.linalg.norm(Tinv @ X.conj().T @ Y / (X.shape[0] - 1)) ** 2 def _compute_residual_variance_numpy(X, Y, Xrec, Yrec): @@ -645,7 +645,7 @@ def _compute_residual_variance_numpy(X, Y, Xrec, Yrec): dY = Y - Yrec Cx = X.conj().T @ X / (X.shape[0] - 1) - Tinv = fractional_matrix_power(Cx, -0.5) + Tinv = _fractional_matrix_power(Cx, -0.5) return np.linalg.norm(Tinv @ dX.conj().T @ dY / (dX.shape[0] - 1)) ** 2 sample_name_x = "sample_dim_x" diff --git a/xeofs/models/cpcca_rotator.py b/xeofs/cross/cpcca_rotator.py similarity index 97% rename from xeofs/models/cpcca_rotator.py rename to xeofs/cross/cpcca_rotator.py index c0157aa9..162157cd 100644 --- a/xeofs/models/cpcca_rotator.py +++ b/xeofs/cross/cpcca_rotator.py @@ -4,18 +4,17 @@ import xarray as xr from typing_extensions import Self +from ..base_model import BaseModel from ..data_container import DataContainer -from ..preprocessing.preprocessor import Preprocessor -from ..preprocessing.whitener import Whitener +from ..linalg.rotation import promax +from ..preprocessing import Preprocessor, Whitener from ..utils.data_types import DataArray, DataObject -from ..utils.rotation import promax from ..utils.xarray_utils import argsort_dask, get_deterministic_sign_multiplier -from ._base_model import _BaseModel from .cpcca import CPCCA, ComplexCPCCA, HilbertCPCCA class CPCCARotator(CPCCA): - """Rotate a solution obtained from ``xe.models.CPCCA``. + """Rotate a solution obtained from ``xe.cross.CPCCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. The algorithm here is based on the approach of Cheng & @@ -75,7 +74,7 @@ def __init__( rtol: float = 1e-8, compute: bool = True, ): - _BaseModel.__init__(self) + BaseModel.__init__(self) if max_iter is None: max_iter = 1000 if compute else 100 @@ -289,11 +288,11 @@ def _fit_algorithm(self, model) -> Self: return self def fit(self, model: CPCCA) -> Self: - """Rotate the solution obtained from ``xe.models.CPCCA``. + """Rotate the solution obtained from ``xe.cross.CPCCA``. Parameters ---------- - model : ``xe.models.CPCCA`` + model : ``xe.cross.CPCCA`` The CPCCA model to be rotated. """ @@ -455,7 +454,7 @@ def _get_feature_name(self): class ComplexCPCCARotator(CPCCARotator, ComplexCPCCA): - """Rotate a solution obtained from ``xe.models.ComplexCPCCA``. 
+ """Rotate a solution obtained from ``xe.cross.ComplexCPCCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. The algorithm here is based on the approach of Cheng & @@ -520,7 +519,7 @@ def __init__(self, **kwargs): class HilbertCPCCARotator(ComplexCPCCARotator, HilbertCPCCA): - """Rotate a solution obtained from ``xe.models.HilbertCPCCA``. + """Rotate a solution obtained from ``xe.cross.HilbertCPCCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. The algorithm here is based on the approach of Cheng & diff --git a/xeofs/models/mca.py b/xeofs/cross/mca.py similarity index 100% rename from xeofs/models/mca.py rename to xeofs/cross/mca.py diff --git a/xeofs/models/mca_rotator.py b/xeofs/cross/mca_rotator.py similarity index 97% rename from xeofs/models/mca_rotator.py rename to xeofs/cross/mca_rotator.py index 4ee02055..a47146c4 100644 --- a/xeofs/models/mca_rotator.py +++ b/xeofs/cross/mca_rotator.py @@ -3,7 +3,7 @@ class MCARotator(CPCCARotator, MCA): - """Rotate a solution obtained from ``xe.models.MCA``. + """Rotate a solution obtained from ``xe.cross.MCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. The algorithm here is based on the approach of Cheng & @@ -76,7 +76,7 @@ def __init__( class ComplexMCARotator(ComplexCPCCARotator, ComplexMCA): - """Rotate a solution obtained from ``xe.models.ComplexMCA``. + """Rotate a solution obtained from ``xe.cross.ComplexMCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. The algorithm here is based on the approach of Cheng & @@ -153,7 +153,7 @@ def __init__( class HilbertMCARotator(HilbertCPCCARotator, HilbertMCA): - """Rotate a solution obtained from ``xe.models.HilbertMCA``. + """Rotate a solution obtained from ``xe.cross.HilbertMCA``. Rotate the obtained components and scores of a CPCCA model to increase interpretability. 
The algorithm here is based on the approach of Cheng & diff --git a/xeofs/linalg/__init__.py b/xeofs/linalg/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/xeofs/linalg/_numpy/__init__.py b/xeofs/linalg/_numpy/__init__.py new file mode 100644 index 00000000..cd53e976 --- /dev/null +++ b/xeofs/linalg/_numpy/__init__.py @@ -0,0 +1,5 @@ +from ._rotation import _promax +from ._svd import _SVD +from ._utils import _fractional_matrix_power + +__all__ = ["_fractional_matrix_power", "_promax", "_SVD"] diff --git a/xeofs/utils/rotation.py b/xeofs/linalg/_numpy/_rotation.py similarity index 90% rename from xeofs/utils/rotation.py rename to xeofs/linalg/_numpy/_rotation.py index 0f1b10cc..7f7f6f42 100644 --- a/xeofs/utils/rotation.py +++ b/xeofs/linalg/_numpy/_rotation.py @@ -1,25 +1,6 @@ -import dask.array import numpy as np -import xarray as xr - -from .data_types import DataArray - - -def promax(loadings: DataArray, feature_dim, **kwargs): - rotated, rot_mat, phi_mat = xr.apply_ufunc( - _promax, - loadings, - input_core_dims=[[feature_dim, "mode"]], - output_core_dims=[ - [feature_dim, "mode"], - ["mode_m", "mode_n"], - ["mode_m", "mode_n"], - ], - kwargs=kwargs, - dask="allowed", - ) - - return rotated, rot_mat, phi_mat +from dask.array import Array as DaskArray # type: ignore +from dask.array.linalg import svd_compressed def _promax( @@ -154,9 +135,9 @@ def _varimax( X = X.copy() n_samples, n_modes = X.shape - if isinstance(X, dask.array.Array): + if isinstance(X, DaskArray): # Use svd_compressed if dask to allow chunking in both dimensions - svd_func = dask.array.linalg.svd_compressed + svd_func = svd_compressed svd_args = (n_modes,) else: svd_func = np.linalg.svd diff --git a/xeofs/models/_np_classes/_svd.py b/xeofs/linalg/_numpy/_svd.py similarity index 100% rename from xeofs/models/_np_classes/_svd.py rename to xeofs/linalg/_numpy/_svd.py diff --git a/xeofs/utils/linalg.py b/xeofs/linalg/_numpy/_utils.py similarity index 91% rename from xeofs/utils/linalg.py rename to xeofs/linalg/_numpy/_utils.py index 2863850c..f5fd50cb 100644 --- a/xeofs/utils/linalg.py +++ b/xeofs/linalg/_numpy/_utils.py @@ -1,9 +1,9 @@ import numpy as np -from ..models._np_classes._svd import _SVD +from ._svd import _SVD -def fractional_matrix_power(C, power, **kwargs): +def _fractional_matrix_power(C, power, **kwargs): """Compute the fractional matrix power of a symmetric matrix using SVD. 
Note: This function is a simplified version of the fractional_matrix_power diff --git a/xeofs/models/decomposer.py b/xeofs/linalg/decomposer.py similarity index 100% rename from xeofs/models/decomposer.py rename to xeofs/linalg/decomposer.py diff --git a/xeofs/linalg/rotation.py b/xeofs/linalg/rotation.py new file mode 100644 index 00000000..74360ee1 --- /dev/null +++ b/xeofs/linalg/rotation.py @@ -0,0 +1,21 @@ +import xarray as xr + +from ..utils.data_types import DataArray +from ._numpy._rotation import _promax + + +def promax(loadings: DataArray, feature_dim, **kwargs): + rotated, rot_mat, phi_mat = xr.apply_ufunc( + _promax, + loadings, + input_core_dims=[[feature_dim, "mode"]], + output_core_dims=[ + [feature_dim, "mode"], + ["mode_m", "mode_n"], + ["mode_m", "mode_n"], + ], + kwargs=kwargs, + dask="allowed", + ) + + return rotated, rot_mat, phi_mat diff --git a/xeofs/models/svd.py b/xeofs/linalg/svd.py similarity index 98% rename from xeofs/models/svd.py rename to xeofs/linalg/svd.py index 09b15f31..137331a4 100644 --- a/xeofs/models/svd.py +++ b/xeofs/linalg/svd.py @@ -3,7 +3,7 @@ from dask.base import compute as dask_compute from ..utils.data_types import DataArray -from ._np_classes._svd import _SVD +from ._numpy import _SVD class SVD: diff --git a/xeofs/multi/__init__.py b/xeofs/multi/__init__.py new file mode 100644 index 00000000..c47cd4bc --- /dev/null +++ b/xeofs/multi/__init__.py @@ -0,0 +1,27 @@ +import warnings + +from .cca import CCA + +__all__ = ["CCA"] + + +DEPRECATED_NAMES = [ + # ("OldClass", "NewClass"), +] + + +def __dir__(): + return sorted(__all__ + [names[0] for names in DEPRECATED_NAMES]) + + +def __getattr__(name): + for old_name, new_name in DEPRECATED_NAMES: + if name == old_name: + msg = ( + f"Class '{old_name}' is deprecated and will be renamed to '{new_name}' in the next major release. " + f"In that release, '{old_name}' will refer to a different class. " + f"Please switch to '{new_name}' to maintain compatibility." 
+ ) + warnings.warn(msg, DeprecationWarning, stacklevel=2) + return globals()[new_name] + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/xeofs/models/cca.py b/xeofs/multi/cca.py similarity index 99% rename from xeofs/models/cca.py rename to xeofs/multi/cca.py index a3785054..cfb5bea6 100644 --- a/xeofs/models/cca.py +++ b/xeofs/multi/cca.py @@ -21,9 +21,9 @@ from .._version import __version__ from ..preprocessing.preprocessor import Preprocessor +from ..single import EOF from ..utils.data_types import DataArray, DataList, DataObject from ..utils.sanity_checks import assert_not_complex -from .eof import EOF def _check_parameter_number(parameter_name: str, parameter, n_views: int): @@ -272,7 +272,7 @@ class CCA(CCABaseModel): Examples -------- - >>> from xe.models import CCA + >>> from xe.cross import CCA >>> model = CCA(n_modes=5) >>> model.fit(data) >>> can_loadings = model.canonical_loadings() diff --git a/xeofs/preprocessing/__init__.py b/xeofs/preprocessing/__init__.py index 34883ac9..826949bf 100644 --- a/xeofs/preprocessing/__init__.py +++ b/xeofs/preprocessing/__init__.py @@ -1,17 +1,19 @@ from .concatenator import Concatenator from .dimension_renamer import DimensionRenamer from .multi_index_converter import MultiIndexConverter +from .preprocessor import Preprocessor from .sanitizer import Sanitizer from .scaler import Scaler from .stacker import Stacker from .whitener import Whitener __all__ = [ - "Scaler", - "Sanitizer", - "MultiIndexConverter", - "Stacker", "Concatenator", "DimensionRenamer", + "MultiIndexConverter", + "Preprocessor", + "Sanitizer", + "Scaler", + "Stacker", "Whitener", ] diff --git a/xeofs/preprocessing/whitener.py b/xeofs/preprocessing/whitener.py index 952f44ae..4bf76600 100644 --- a/xeofs/preprocessing/whitener.py +++ b/xeofs/preprocessing/whitener.py @@ -4,13 +4,13 @@ import xarray as xr from typing_extensions import Self -from ..models.svd import SVD +from ..linalg._numpy import _fractional_matrix_power +from ..linalg.svd import SVD from ..utils.data_types import ( DataArray, Dims, DimsList, ) -from ..utils.linalg import fractional_matrix_power from ..utils.sanity_checks import assert_single_dataarray from .transformer import Transformer @@ -184,7 +184,7 @@ def _compute_whitener_transform_numpy(self, X): C = X.conj().T @ X / nc power = (self.alpha - 1) / 2 svd_kwargs = {"random_state": self.random_state} - T = fractional_matrix_power(C, power, **svd_kwargs) + T = _fractional_matrix_power(C, power, **svd_kwargs) Tinv = np.linalg.inv(T) return T, Tinv diff --git a/xeofs/models/rotator_factory.py b/xeofs/rotator_factory.py similarity index 90% rename from xeofs/models/rotator_factory.py rename to xeofs/rotator_factory.py index 5a7a04f0..dda8d0ab 100644 --- a/xeofs/models/rotator_factory.py +++ b/xeofs/rotator_factory.py @@ -1,7 +1,5 @@ -from .eof import EOF, HilbertEOF -from .eof_rotator import EOFRotator, HilbertEOFRotator -from .mca import MCA, HilbertMCA -from .mca_rotator import HilbertMCARotator, MCARotator +from .cross import MCA, HilbertMCA, HilbertMCARotator, MCARotator +from .single import EOF, EOFRotator, HilbertEOF, HilbertEOFRotator class RotatorFactory: diff --git a/xeofs/single/__init__.py b/xeofs/single/__init__.py new file mode 100644 index 00000000..daf30d80 --- /dev/null +++ b/xeofs/single/__init__.py @@ -0,0 +1,43 @@ +import warnings + +from .eeof import ExtendedEOF +from .eof import EOF, ComplexEOF, HilbertEOF +from .eof_rotator import ComplexEOFRotator, EOFRotator, HilbertEOFRotator +from .gwpca import GWPCA 
+from .opa import OPA +from .sparse_pca import SparsePCA + +__all__ = [ + "EOF", + "ExtendedEOF", + "SparsePCA", + "OPA", + "GWPCA", + "ComplexEOF", + "HilbertEOF", + "EOFRotator", + "ComplexEOFRotator", + "HilbertEOFRotator", +] + + +DEPRECATED_NAMES = [ + # ("OldClass", "NewClass"), +] + + +def __dir__(): + return sorted(__all__ + [names[0] for names in DEPRECATED_NAMES]) + + +def __getattr__(name): + for old_name, new_name in DEPRECATED_NAMES: + if name == old_name: + msg = ( + f"Class '{old_name}' is deprecated and will be renamed to '{new_name}' in the next major release. " + f"In that release, '{old_name}' will refer to a different class. " + f"Please switch to '{new_name}' to maintain compatibility." + ) + warnings.warn(msg, DeprecationWarning, stacklevel=2) + return globals()[new_name] + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/xeofs/single/_numpy/__init__.py b/xeofs/single/_numpy/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/xeofs/models/_np_classes/_sparse_pca.py b/xeofs/single/_numpy/_sparse_pca.py similarity index 100% rename from xeofs/models/_np_classes/_sparse_pca.py rename to xeofs/single/_numpy/_sparse_pca.py diff --git a/xeofs/models/_base_model_single_set.py b/xeofs/single/base_model_single_set.py similarity index 98% rename from xeofs/models/_base_model_single_set.py rename to xeofs/single/base_model_single_set.py index 3530e05a..b6d0ec7b 100644 --- a/xeofs/models/_base_model_single_set.py +++ b/xeofs/single/base_model_single_set.py @@ -7,19 +7,17 @@ import xarray as xr from typing_extensions import Self +from ..base_model import BaseModel from ..data_container import DataContainer from ..preprocessing.preprocessor import Preprocessor from ..utils.data_types import DataArray, DataObject from ..utils.sanity_checks import validate_input_type -from ..utils.xarray_utils import ( - convert_to_dim_type, -) -from ._base_model import _BaseModel +from ..utils.xarray_utils import convert_to_dim_type xr.set_options(keep_attrs=True) -class _BaseModelSingleSet(_BaseModel): +class BaseModelSingleSet(BaseModel): """ Abstract base class for single-set models. diff --git a/xeofs/models/eeof.py b/xeofs/single/eeof.py similarity index 99% rename from xeofs/models/eeof.py rename to xeofs/single/eeof.py index 6f081222..fe007b3e 100644 --- a/xeofs/models/eeof.py +++ b/xeofs/single/eeof.py @@ -43,7 +43,7 @@ class ExtendedEOF(EOF): Examples -------- - >>> from xeofs.models import EEOF + >>> from xeofs.single import EEOF >>> model = EEOF(n_modes=5, tau=1, embedding=20, n_pca_modes=20) >>> model.fit(data, dim=("time")) diff --git a/xeofs/models/eof.py b/xeofs/single/eof.py similarity index 99% rename from xeofs/models/eof.py rename to xeofs/single/eof.py index 7d89f82a..0d94be62 100644 --- a/xeofs/models/eof.py +++ b/xeofs/single/eof.py @@ -4,14 +4,14 @@ import xarray as xr from typing_extensions import Self +from ..linalg.decomposer import Decomposer from ..utils.data_types import DataArray, DataObject from ..utils.hilbert_transform import hilbert_transform from ..utils.xarray_utils import total_variance as compute_total_variance -from ._base_model_single_set import _BaseModelSingleSet -from .decomposer import Decomposer +from .base_model_single_set import BaseModelSingleSet -class EOF(_BaseModelSingleSet): +class EOF(BaseModelSingleSet): """EOF analysis. 
Empirical Orthogonal Functions (EOF) analysis, more commonly known @@ -45,7 +45,7 @@ class EOF(_BaseModelSingleSet): Examples -------- - >>> model = xe.models.EOF(n_modes=5) + >>> model = xe.single.EOF(n_modes=5) >>> model.fit(X) >>> scores = model.scores() diff --git a/xeofs/models/eof_rotator.py b/xeofs/single/eof_rotator.py similarity index 95% rename from xeofs/models/eof_rotator.py rename to xeofs/single/eof_rotator.py index bdfcf4e2..b321189c 100644 --- a/xeofs/models/eof_rotator.py +++ b/xeofs/single/eof_rotator.py @@ -6,15 +6,15 @@ from .._version import __version__ from ..data_container import DataContainer -from ..preprocessing.preprocessor import Preprocessor +from ..linalg.rotation import promax +from ..preprocessing import Preprocessor from ..utils.data_types import DataArray -from ..utils.rotation import promax from ..utils.xarray_utils import argsort_dask, get_deterministic_sign_multiplier from .eof import EOF, ComplexEOF, HilbertEOF class EOFRotator(EOF): - """Rotate a solution obtained from ``xe.models.EOF``. + """Rotate a solution obtained from ``xe.single.EOF``. Rotated EOF analysis (e.g. [1]_) is a variation of standard EOF analysis, which uses a rotation technique (Varimax or Promax) on the extracted modes to maximize the variance explained by @@ -46,9 +46,9 @@ class EOFRotator(EOF): Examples -------- - >>> model = xe.models.EOF(n_modes=10) + >>> model = xe.single.EOF(n_modes=10) >>> model.fit(X, "time") - >>> rotator = xe.models.EOFRotator(n_modes=10) + >>> rotator = xe.single.EOFRotator(n_modes=10) >>> rotator.fit(model) >>> rotator.components() @@ -101,11 +101,11 @@ def get_serialization_attrs(self) -> dict: ) def fit(self, model) -> Self: - """Rotate the solution obtained from ``xe.models.EOF``. + """Rotate the solution obtained from ``xe.single.EOF``. Parameters ---------- - model : ``xe.models.EOF`` + model : ``xe.single.EOF`` The EOF model to be rotated. """ @@ -287,7 +287,7 @@ def _compute_rot_mat_inv_trans(self, rotation_matrix, input_dims) -> DataArray: class ComplexEOFRotator(EOFRotator, ComplexEOF): - """Rotate a solution obtained from ``xe.models.ComplexEOF``. + """Rotate a solution obtained from ``xe.single.ComplexEOF``. Parameters ---------- @@ -308,9 +308,9 @@ class ComplexEOFRotator(EOFRotator, ComplexEOF): Examples -------- - >>> model = xe.models.ComplexEOF(n_modes=10) + >>> model = xe.single.ComplexEOF(n_modes=10) >>> model.fit(data) - >>> rotator = xe.models.ComplexEOFRotator(n_modes=10) + >>> rotator = xe.single.ComplexEOFRotator(n_modes=10) >>> rotator.fit(model) >>> rotator.components() @@ -333,7 +333,7 @@ def __init__( class HilbertEOFRotator(EOFRotator, HilbertEOF): - """Rotate a solution obtained from ``xe.models.HilbertEOF``. + """Rotate a solution obtained from ``xe.single.HilbertEOF``. Hilbert Rotated EOF analysis [1]_ [2]_ [3]_ extends EOF analysis by incorporating both amplitude and phase information using a Hilbert transform prior to performing MCA and subsequent Varimax or Promax rotation. 
@@ -364,9 +364,9 @@ class HilbertEOFRotator(EOFRotator, HilbertEOF): Examples -------- - >>> model = xe.models.HilbertEOF(n_modes=10) + >>> model = xe.single.HilbertEOF(n_modes=10) >>> model.fit(data) - >>> rotator = xe.models.HilbertEOFRotator(n_modes=10) + >>> rotator = xe.single.HilbertEOFRotator(n_modes=10) >>> rotator.fit(model) >>> rotator.components() diff --git a/xeofs/models/gwpca.py b/xeofs/single/gwpca.py similarity index 99% rename from xeofs/models/gwpca.py rename to xeofs/single/gwpca.py index ae99ccd4..df087a3d 100644 --- a/xeofs/models/gwpca.py +++ b/xeofs/single/gwpca.py @@ -13,10 +13,10 @@ from ..utils.distance_metrics import VALID_METRICS from ..utils.kernels import VALID_KERNELS from ..utils.sanity_checks import assert_not_complex -from ._base_model_single_set import _BaseModelSingleSet +from .base_model_single_set import BaseModelSingleSet -class GWPCA(_BaseModelSingleSet): +class GWPCA(BaseModelSingleSet): """Geographically weighted PCA. Geographically weighted PCA (GWPCA) [1]_ uses a geographically weighted approach to perform PCA for diff --git a/xeofs/models/opa.py b/xeofs/single/opa.py similarity index 98% rename from xeofs/models/opa.py rename to xeofs/single/opa.py index 85611966..bb0edf34 100644 --- a/xeofs/models/opa.py +++ b/xeofs/single/opa.py @@ -2,14 +2,14 @@ import xarray as xr from typing_extensions import Self +from ..linalg.decomposer import Decomposer from ..utils.data_types import DataArray, DataObject from ..utils.sanity_checks import assert_not_complex -from ._base_model_single_set import _BaseModelSingleSet -from .decomposer import Decomposer +from .base_model_single_set import BaseModelSingleSet from .eof import EOF -class OPA(_BaseModelSingleSet): +class OPA(BaseModelSingleSet): """Optimal Persistence Analysis. Optimal Persistence Analysis (OPA) [1]_ [2]_ identifies the patterns with the @@ -51,7 +51,7 @@ class OPA(_BaseModelSingleSet): Examples -------- - >>> from xeofs.models import OPA + >>> from xeofs.single import OPA >>> model = OPA(n_modes=10, tau_max=50, n_pca_modes=100) >>> model.fit(X, dim=("time")) diff --git a/xeofs/models/sparse_pca.py b/xeofs/single/sparse_pca.py similarity index 98% rename from xeofs/models/sparse_pca.py rename to xeofs/single/sparse_pca.py index e6dbcf26..f4d2e209 100644 --- a/xeofs/models/sparse_pca.py +++ b/xeofs/single/sparse_pca.py @@ -8,11 +8,11 @@ from ..utils.sanity_checks import assert_not_complex from ..utils.xarray_utils import get_matrix_rank from ..utils.xarray_utils import total_variance as compute_total_variance -from ._base_model_single_set import _BaseModelSingleSet -from ._np_classes._sparse_pca import compute_rspca, compute_spca +from ._numpy._sparse_pca import compute_rspca, compute_spca +from .base_model_single_set import BaseModelSingleSet -class SparsePCA(_BaseModelSingleSet): +class SparsePCA(BaseModelSingleSet): """ Sparse PCA via Variable Projection. @@ -84,7 +84,7 @@ class SparsePCA(_BaseModelSingleSet): Examples -------- - >>> model = xe.models.SparsePCA(n_modes=2, alpha=1e-4) + >>> model = xe.single.SparsePCA(n_modes=2, alpha=1e-4) >>> model.fit(data, "time") >>> components = model.components() """ diff --git a/xeofs/validation/bootstrapper.py b/xeofs/validation/bootstrapper.py index 34c4acb6..71765978 100644 --- a/xeofs/validation/bootstrapper.py +++ b/xeofs/validation/bootstrapper.py @@ -8,7 +8,7 @@ from .._version import __version__ from ..data_container import DataContainer -from ..models import EOF +from ..single import EOF from ..utils.data_types import DataArray
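The new subpackage `__init__` modules above (`xeofs/multi/__init__.py`, `xeofs/single/__init__.py`) install a module-level `__getattr__` so that, once `DEPRECATED_NAMES` is populated, renamed classes stay importable under their old names with a `DeprecationWarning`. A standalone sketch of that PEP 562 pattern follows; the `("OldEOF", "EOF")` entry is hypothetical, since both `DEPRECATED_NAMES` lists are empty in this diff.

```python
# Sketch of the PEP 562 deprecation shim used in xeofs/single/__init__.py and
# xeofs/multi/__init__.py. The ("OldEOF", "EOF") pair is a hypothetical example;
# the DEPRECATED_NAMES lists in the diff are currently empty.
import warnings


class EOF:  # stand-in for the class actually re-exported by the subpackage
    pass


__all__ = ["EOF"]
DEPRECATED_NAMES = [("OldEOF", "EOF")]


def __dir__():
    # Advertise deprecated names alongside the public API for tab completion.
    return sorted(__all__ + [names[0] for names in DEPRECATED_NAMES])


def __getattr__(name):
    # Only called when normal module attribute lookup fails (PEP 562),
    # so access to current names resolves with no extra cost.
    for old_name, new_name in DEPRECATED_NAMES:
        if name == old_name:
            warnings.warn(
                f"Class '{old_name}' is deprecated and will be renamed to "
                f"'{new_name}' in the next major release.",
                DeprecationWarning,
                stacklevel=2,
            )
            return globals()[new_name]
    raise AttributeError(f"module {__name__} has no attribute {name}")
```

If this module body lived in `xeofs/single/__init__.py`, client code accessing `xeofs.single.OldEOF` would emit the warning and receive the current `EOF` class, while `xeofs.single.EOF` continues to resolve normally.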