diff --git a/docs/workflows/README.MD b/docs/workflows/README.MD
index fb44709..50c8cbb 100644
--- a/docs/workflows/README.MD
+++ b/docs/workflows/README.MD
@@ -1,4 +1,49 @@
-## Workflows
+# Workflows
 The repository will handle multiple workflows and backends, some of which have specific requirements.
 
-- [pyroSAR + GAMMA](pyrosar_gamma.md)
\ No newline at end of file
+## Workflows
+- pyroSAR + GAMMA
+    - [Requirements](pyrosar_gamma.md)
+
+## Command line interface
+> **_NOTE:_**  At this time, pyroSAR + GAMMA is the only workflow, and the command line interfaces have not yet been generalised to other workflows.
+
+The package has a number of useful command line interfaces:
+
+### Finding the location of a scene on the NCI
+The `find-scene` command will display the location of a given scene on the NCI.
+The full path to the scene is required as the input to other commands.
+
+Example usage:
+```
+$ find-scene S1A_EW_GRDM_1SDH_20240129T091735_20240129T091828_052319_065379_0F1E
+
+/path/to/scene/S1A_EW_GRDM_1SDH_20240129T091735_20240129T091828_052319_065379_0F1E.zip
+```
+
+### Submit a workflow
+This will submit a job request to the NCI based on the job parameters and file paths in the supplied config. 
+The [default config](../../sar_antarctica/nci/configs/default.toml) will be used if no other config is provided.
+
+Example usage:
+```
+$ submit-pyrosar-gamma-workflow /path/to/scene/S1A_EW_GRDM_1SDH_20240129T091735_20240129T091828_052319_065379_0F1E.zip
+```
+This will submit a job to the NCI with the default config.
+To use a different config, run the command and supply the `--config` option
+```
+--config /path/to/config.toml
+```
+
+### Run a workflow interactively
+If you are still testing a workflow, it is best to run it in an interactive session.
+While in an interactive session, you can run the workflow directly. 
+
+Example usage:
+```
+$ run-pyrosar-gamma-workflow /path/to/scene/S1A_EW_GRDM_1SDH_20240129T091735_20240129T091828_052319_065379_0F1E.zip
+```
+To use a different config, run the command and supply the `--config` option
+```
+--config /path/to/config.toml
+```
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 7bb7b36..66ff531 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,6 +21,12 @@ version = "0.1" # TODO base this on files in proje t
 Homepage = "https://github.com/GeoscienceAustralia/sar-antarctica.git"
 # Documentation = "XXX"
 
+[project.scripts]
+find-scene = "sar_antarctica.nci.cli:find_scene_file"
+find-orbits = "sar_antarctica.nci.cli:find_orbits_for_scene"
+run-pyrosar-gamma-workflow = "sar_antarctica.nci.cli:run_pyrosar_gamma_workflow"
+submit-pyrosar-gamma-workflow = "sar_antarctica.nci.cli:submit_pyrosar_gamma_workflow"
+
 [tool.pytest.ini_options]
 testpaths = ["tests/*"]
 
diff --git a/sar_antarctica/nci/cli.py b/sar_antarctica/nci/cli.py
new file mode 100644
index 0000000..ef344a5
--- /dev/null
+++ b/sar_antarctica/nci/cli.py
@@ -0,0 +1,174 @@
+import click
+from pathlib import Path
+import tomli
+
+from sar_antarctica.nci.filesystem import get_orbits_nci
+from sar_antarctica.nci.submission.pyrosar_gamma.prepare_input import (
+    get_orbit_and_dem,
+)
+from sar_antarctica.nci.preparation.orbits import (
+    filter_orbits_to_cover_time_window,
+)
+from sar_antarctica.nci.preparation.scenes import (
+    parse_scene_file_sensor,
+    parse_scene_file_dates,
+    find_scene_file_from_id,
+)
+from sar_antarctica.nci.processing.pyroSAR.pyrosar_geocode import (
+    run_pyrosar_gamma_geocode,
+)
+from sar_antarctica.nci.submission.pyrosar_gamma.submit_job import submit_job
+
+
+@click.command()
+@click.argument("scene_name", type=str)
+def find_scene_file(scene_name):
+    scene_file = find_scene_file_from_id(scene_name)
+
+    click.echo(scene_file)
+
+
+DEFAULT_CONFIGURATION = Path(__file__).resolve().parent / "configs/default.toml"
+
+
+def configure(ctx, param, filename):
+    with open(filename, "rb") as f:
+        configuration_dictionary = tomli.load(f)
+    ctx.default_map = configuration_dictionary
+
+
+@click.command()
+@click.argument(
+    "scene",
+    type=click.Path(exists=True, dir_okay=False, path_type=Path),
+)
+@click.option(
+    "-c",
+    "--config",
+    type=click.Path(dir_okay=False),
+    default=DEFAULT_CONFIGURATION,
+    callback=configure,
+    is_eager=True,
+    expose_value=False,
+    help="Read option defaults from the specified .toml file",
+    show_default=True,
+)
+@click.option("--spacing", type=int)
+@click.option("--scaling", type=click.Choice(["linear", "db"]))
+@click.option("--ncpu", type=str, default="4")
+@click.option("--mem", type=str, default="32")
+@click.option("--queue", type=str, default="normal")
+@click.option("--project", type=str, default="u46")
+@click.option("--walltime", type=str, default="02:00:00")
+@click.option(
+    "--output-dir",
+    type=click.Path(exists=True, file_okay=False, path_type=Path),
+    default="/g/data/yp75/projects/sar-antractica-processing/pyrosar_gamma/",
+)
+def submit_pyrosar_gamma_workflow(
+    scene, spacing, scaling, ncpu, mem, queue, project, walltime, output_dir
+):
+
+    pbs_parameters = {
+        "ncpu": ncpu,
+        "mem": mem,
+        "queue": queue,
+        "project": project,
+        "walltime": walltime,
+    }
+
+    log_dir = output_dir / "submission/logs"
+    log_dir.mkdir(parents=True, exist_ok=True)
+
+    submit_job(scene, spacing, scaling, pbs_parameters, log_dir)
+
+
+@click.command()
+@click.argument(
+    "scene",
+    type=click.Path(exists=True, dir_okay=False, path_type=Path),
+)
+@click.option(
+    "-c",
+    "--config",
+    type=click.Path(dir_okay=False),
+    default=DEFAULT_CONFIGURATION,
+    callback=configure,
+    is_eager=True,
+    expose_value=False,
+    help="Read option defaults from the specified .toml file",
+    show_default=True,
+)
+@click.option("--spacing", type=int)
+@click.option("--scaling", type=click.Choice(["linear", "db"]))
+@click.option(
+    "--orbit-dir", type=click.Path(exists=True, file_okay=False, path_type=Path)
+)
+@click.option("--orbit-type", type=click.Choice(["POE", "RES", "either"]))
+@click.option(
+    "--output-dir",
+    type=click.Path(exists=True, file_okay=False, path_type=Path),
+    default="/g/data/yp75/projects/sar-antractica-processing/pyrosar_gamma/",
+)
+@click.option(
+    "--gamma-lib-dir",
+    type=click.Path(exists=True, file_okay=False, path_type=Path),
+    default="/g/data/dg9/GAMMA/GAMMA_SOFTWARE-20230712",
+)
+@click.option(
+    "--gamma-env-var",
+    type=str,
+    default="/g/data/yp75/projects/pyrosar_processing/sar-pyrosar-nci:/apps/fftw3/3.3.10/lib:/apps/gdal/3.6.4/lib64",
+)
+def run_pyrosar_gamma_workflow(
+    scene,
+    spacing,
+    scaling,
+    orbit_dir,
+    orbit_type,
+    output_dir,
+    gamma_lib_dir,
+    gamma_env_var,
+):
+
+    click.echo("Preparing orbit and DEM")
+    dem_output_dir = output_dir / "data/dem"
+
+    orbit, dem = get_orbit_and_dem(scene, dem_output_dir, orbit_dir, orbit_type)
+
+    click.echo(f"    Identified orbit: {orbit}")
+    click.echo(f"    Identified DEM: {dem}")
+
+    click.echo("Running processing")
+    print(scene, spacing, scaling, output_dir, gamma_lib_dir, gamma_env_var)
+    run_pyrosar_gamma_geocode(
+        scene=scene,
+        orbit=orbit,
+        dem=dem,
+        output=output_dir,
+        gamma_library=gamma_lib_dir,
+        gamma_env=gamma_env_var,
+        geocode_spacing=spacing,
+        geocode_scaling=scaling,
+    )
+
+
+@click.command()
+@click.argument("scene")
+def find_orbits_for_scene(scene: str):
+    sensor = parse_scene_file_sensor(scene)
+    start_time, stop_time = parse_scene_file_dates(scene)
+
+    poe_paths = get_orbits_nci("POE", sensor)
+    relevant_poe_paths = filter_orbits_to_cover_time_window(
+        poe_paths, start_time, stop_time
+    )
+    for orbit in relevant_poe_paths:
+        print(orbit["orbit"])
+
+    res_paths = get_orbits_nci("RES", sensor)
+    relevant_res_paths = filter_orbits_to_cover_time_window(
+        res_paths, start_time, stop_time
+    )
+    for orbit in relevant_res_paths:
+        print(orbit["orbit"])
diff --git a/sar_antarctica/nci/configs/EW.toml b/sar_antarctica/nci/configs/EW.toml
new file mode 100644
index 0000000..59bb9b4
--- /dev/null
+++ b/sar_antarctica/nci/configs/EW.toml
@@ -0,0 +1,12 @@
+spacing = 40
+scaling = "linear"
+ncpu = "4"
+mem = "64"
+queue = "normal"
+project = "u46"
+walltime = "02:00:00"
+orbit_dir = "/g/data/fj7/Copernicus/Sentinel-1/"
+orbit_type = "POE"
+output_dir = "/g/data/yp75/projects/sar-antractica-processing/pyrosar_gamma/"
+gamma_lib_dir = "/g/data/dg9/GAMMA/GAMMA_SOFTWARE-20230712"
+gamma_env_var = "/g/data/yp75/projects/pyrosar_processing/sar-pyrosar-nci:/apps/fftw3/3.3.10/lib:/apps/gdal/3.6.4/lib64"
\ No newline at end of file
diff --git a/sar_antarctica/nci/configs/IW.toml b/sar_antarctica/nci/configs/IW.toml
new file mode 100644
index 0000000..73e9501
--- /dev/null
+++ b/sar_antarctica/nci/configs/IW.toml
@@ -0,0 +1,12 @@
+spacing = 10
+scaling = "linear"
+ncpu = "4"
+mem = "128"
+queue = "normal"
+project = "u46"
+walltime = "02:00:00"
+orbit_dir = "/g/data/fj7/Copernicus/Sentinel-1/"
+orbit_type = "POE"
+output_dir = "/g/data/yp75/projects/sar-antractica-processing/pyrosar_gamma_IW/"
+gamma_lib_dir = "/g/data/dg9/GAMMA/GAMMA_SOFTWARE-20230712"
+gamma_env_var = "/g/data/yp75/projects/pyrosar_processing/sar-pyrosar-nci:/apps/fftw3/3.3.10/lib:/apps/gdal/3.6.4/lib64"
\ No newline at end of file
diff --git a/sar_antarctica/nci/configs/default.toml b/sar_antarctica/nci/configs/default.toml
new file mode 100644
index 0000000..4231cc4
--- /dev/null
+++ b/sar_antarctica/nci/configs/default.toml
@@ -0,0 +1,12 @@
+spacing = 40
+scaling = "linear"
+ncpu = "4"
+mem = "32"
+queue = "normal"
+project = "u46"
+walltime = "02:00:00"
+orbit_dir = "/g/data/fj7/Copernicus/Sentinel-1/"
+orbit_type = "POE"
+output_dir = "/g/data/yp75/projects/sar-antractica-processing/pyrosar_gamma/"
+gamma_lib_dir = "/g/data/dg9/GAMMA/GAMMA_SOFTWARE-20230712"
+gamma_env_var = "/g/data/yp75/projects/pyrosar_processing/sar-pyrosar-nci:/apps/fftw3/3.3.10/lib:/apps/gdal/3.6.4/lib64"
\ No newline at end of file
diff --git a/sar_antarctica/nci/filesystem.py b/sar_antarctica/nci/filesystem.py
index e4ae90f..5f1a8de 100644
--- a/sar_antarctica/nci/filesystem.py
+++ b/sar_antarctica/nci/filesystem.py
@@ -1,9 +1,14 @@
 from pathlib import Path
 
 from sar_antarctica.nci.preparation.orbits import find_orbits
+from sar_antarctica.nci.preparation.dem import get_cop30_dem_for_bounds
 
 
-def get_orbits_nci(orbit_type: str | None, sensor: str) -> list[Path]:
+def get_orbits_nci(
+    orbit_type: str | None,
+    sensor: str,
+    nci_orbit_dir: Path = Path("/g/data/fj7/Copernicus/Sentinel-1/"),
+) -> list[Path]:
     """For a given orbit type and sensor, compile the relevant orbit files
 
     Parameters
@@ -12,7 +17,8 @@ def get_orbits_nci(orbit_type: str | None, sensor: str) -> list[Path]:
         One of 'POE', 'RES', or None. If None, both POE and RES orbits will be included
     sensor : str
         Sensor (e.g. S1A or S1B) to search. Typically extracted from the scene ID
-
+    nci_orbit_dir : Path, optional
+        The path containing orbit files on the NCI, by default Path("/g/data/fj7/Copernicus/Sentinel-1/")
     Returns
     -------
     list[Path]
@@ -25,7 +31,6 @@ def get_orbits_nci(orbit_type: str | None, sensor: str) -> list[Path]:
     """
 
     # Constants for NCI
-    S1_DIR = Path("/g/data/fj7/Copernicus/Sentinel-1/")
     POE_DIR = "POEORB"
     RES_DIR = "RESORB"
 
@@ -39,9 +44,22 @@ def get_orbits_nci(orbit_type: str | None, sensor: str) -> list[Path]:
         raise ValueError("orbit_type must be one of 'POE', 'RES', or None")
 
     nci_orbit_directories = [
-        S1_DIR / orbit_dir / sensor for orbit_dir in orbit_type_directories
+        nci_orbit_dir / orbit_dir / sensor for orbit_dir in orbit_type_directories
     ]
 
     orbits = find_orbits(nci_orbit_directories)
 
     return orbits
+
+
+def get_dem_nci(
+    scene: Path, scene_bounds: tuple[float, float, float, float], output_dir: Path
+) -> Path:
+    if not output_dir.exists():
+        output_dir.mkdir(parents=True, exist_ok=True)
+    dem_file = output_dir / f"{scene.stem}.tif"
+
+    if not dem_file.exists():
+        _, _ = get_cop30_dem_for_bounds(scene_bounds, dem_file, ellipsoid_heights=True)
+
+    return dem_file
diff --git a/sar_antarctica/nci/preparation/create_config.py b/sar_antarctica/nci/preparation/create_config.py
deleted file mode 100644
index bfd2b87..0000000
--- a/sar_antarctica/nci/preparation/create_config.py
+++ /dev/null
@@ -1,103 +0,0 @@
-import click
-from pathlib import Path
-from pyroSAR import identify
-import rasterio
-
-from sar_antarctica.nci.filesystem import get_orbits_nci
-
-from sar_antarctica.nci.preparation.scenes import (
-    find_scene_file_from_id,
-    parse_scene_file_sensor,
-)
-from sar_antarctica.nci.preparation.orbits import find_latest_orbit_for_scene
-from sar_antarctica.nci.preparation.dem import get_cop30_dem_for_bounds
-
-
-def write_file_paths(
-    config_file: Path,
-    scene_file: Path,
-    orbit_file: Path,
-    dem_file: Path,
-    data_dir: Path,
-    ancillary_dir="ancillary",
-    processed_dir="processed_scene",
-):
-    inputs_header = "[inputs]\n"
-    scene_setting = f"scene = '{str(scene_file)}'\n"
-    orbit_setting = f"orbit = '{str(orbit_file)}'\n"
-    dem_setting = f"dem = '{str(dem_file)}'\n"
-
-    outputs_header = "[outputs]\n"
-    data_path_setting = f"data = '{str(data_dir)}'\n"
-    ancillary_setting = f"ancillary = '{ancillary_dir}'\n"
-    processed_setting = f"processed = '{processed_dir}'\n"
-
-    with open(config_file, "w") as cf:
-        cf.writelines(
-            [
-                inputs_header,
-                scene_setting,
-                orbit_setting,
-                dem_setting,
-                "\n",
-                outputs_header,
-                data_path_setting,
-                ancillary_setting,
-                processed_setting,
-            ]
-        )
-
-
-@click.command()
-@click.argument("scene_id", nargs=1)
-@click.argument("scene_config", nargs=1)
-def main(scene_id: str, scene_config: str):
-    """Generate a configuration file for a scene ID
-
-    Parameters
-    ----------
-    scene_id : str
-        ID of scene to process
-    scene_config : str
-        where to store the output configuration file
-    """
-    print(f"Processing scene: {scene_id} \n")
-
-    # Set the data path for outputs
-    data_dir = Path("/g/data/yp75/projects/sar-antractica-processing/data")
-
-    # Path to configuration file for scene
-    config_file = Path(scene_config)
-
-    # Identify location of scene on GADI
-    scene_file = find_scene_file_from_id(scene_id)
-
-    # Identify location of latest orbit file on GADI
-    scene_sensor = parse_scene_file_sensor(scene_id)
-    poe_orbits = get_orbits_nci("POE", scene_sensor)
-    latest_poe_file = find_latest_orbit_for_scene(scene_id, poe_orbits)
-
-    # Identify bounds of scene and use bounding box to build DEM
-    scene = identify(str(scene_file))
-    scene_bbox = scene.bbox().extent
-    scene_bounds = (
-        scene_bbox["xmin"],
-        scene_bbox["ymin"],
-        scene_bbox["xmax"],
-        scene_bbox["ymax"],
-    )
-
-    # Set path for dem and create
-    dem_dir = data_dir / "dem"
-    dem_file = dem_dir / f"{scene_id}_dem.tif"
-    _, _ = get_cop30_dem_for_bounds(
-        bounds=scene_bounds, save_path=dem_file, ellipsoid_heights=True
-    )
-
-    # Write to config file
-    write_file_paths(config_file, scene_file, latest_poe_file, dem_file, data_dir)
-
-
-if __name__ == "__main__":
-
-    main()
diff --git a/sar_antarctica/nci/preparation/scenes.py b/sar_antarctica/nci/preparation/scenes.py
index f7f4b35..849a667 100644
--- a/sar_antarctica/nci/preparation/scenes.py
+++ b/sar_antarctica/nci/preparation/scenes.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 import re
 
-SCENE_DIR = Path("/g/data/fj7/Copernicus/Sentinel-1/C-SAR/GRD/")
+SCENE_DIR = Path("/g/data/fj7/Copernicus/Sentinel-1/C-SAR/")
 
 
 def parse_scene_file_sensor(scene_id: str) -> str:
@@ -37,6 +37,55 @@ def parse_scene_file_sensor(scene_id: str) -> str:
     return match.group(1)
 
 
+def parse_scene_file_mode(scene_id: str) -> str:
+    raise NotImplementedError
+
+
+def parse_scene_file_product(scene_id: str) -> str:
+    """Extract Sentinel-1 product string (GRDM or SLC_) from scene ID and return
+    shortened version (either GRD or SLC)
+
+    Parameters
+    ----------
+    scene_id : str
+        Sentinel-1 scene ID
+        e.g. S1A_EW_GRDM_1SDH_20220612T120348_20220612T120452_043629_053582_0F6
+
+    Returns
+    -------
+    str
+        Product string, either GRD or SLC
+
+    Raises
+    ------
+    ValueError
+        Could not find expected match of four characters containing letters/underscores.
+    ValueError
+        Identified string did not match either "GRDM" or "SLC_"
+    """
+
+    pattern = r"^S1[A-Z]_[A-Z]{2}_([A-Z_]{4})_"
+
+    match = re.match(pattern, scene_id)
+
+    if not match:
+        raise ValueError(
+            "No product string was found. Looking for S1X_YY_ZZZZ_ where ZZZZ can be letters or underscores."
+        )
+
+    product_string = match.group(1)
+    if product_string == "GRDM":
+        product = "GRD"
+    elif product_string == "SLC_":
+        product = "SLC"
+    else:
+        raise ValueError(
+            f"Expected product string to be either GRDM or SLC_, but got {product_string}."
+        )
+
+    return product
+
+
 def parse_scene_file_dates(scene_id: str) -> tuple[datetime, datetime]:
     """Extracts start_date and end_date from the given scene ID.
 
@@ -93,6 +142,8 @@ def find_scene_file_from_id(scene_id: str) -> Path:
         Found no files -- expects one. Or another Error
     """
 
+    scene_product = parse_scene_file_product(scene_id)
+
     # Parse the scene dates -- only start date is needed for search
     scene_start, _ = parse_scene_file_dates(scene_id)
 
@@ -101,7 +152,7 @@ def find_scene_file_from_id(scene_id: str) -> Path:
     month = scene_start.strftime("%m")
 
     # Set path on GADI and search
-    search_path = SCENE_DIR.joinpath(f"{year}/{year}-{month}/")
+    search_path = SCENE_DIR.joinpath(f"{scene_product}/{year}/{year}-{month}/")
     file_path = list(search_path.rglob(f"{scene_id}.zip"))
 
     # Identify file
diff --git a/sar_antarctica/nci/processing/pyroSAR/pyrosar_geocode.py b/sar_antarctica/nci/processing/pyroSAR/pyrosar_geocode.py
index 31052ec..1b7f96d 100644
--- a/sar_antarctica/nci/processing/pyroSAR/pyrosar_geocode.py
+++ b/sar_antarctica/nci/processing/pyroSAR/pyrosar_geocode.py
@@ -6,7 +6,6 @@
 from pyroSAR.gamma.dem import dem_import
 import shutil
 import sys
-import tomli
 
 from sar_antarctica.nci.processing.GAMMA.GAMMA_utils import set_gamma_env_variables
 
@@ -19,65 +18,37 @@
 log.setLevel(logging.INFO)
 
 
-@click.command()
-@click.argument("workflow_config", nargs=1)
-@click.argument("scene_config", nargs=1)
-def cli(workflow_config: str, scene_config: str):
+def prepare_directories(processing_root: Path, scene_full_name: str, scene_outname):
 
-    # Read in config file
-    with open(workflow_config, "rb") as f:
-        workflow_config_dict = tomli.load(f)
+    # Set directories under the processing root
+    SCENE_DIR = f"data/processed_scene/{scene_full_name}"
+    TEMP_DIR = f"data/temp/{scene_outname}"
+    LOG_DIR = f"data/temp/{scene_outname}/logfiles"
 
-    with open(scene_config, "rb") as f:
-        scene_config_dict = tomli.load(f)
+    # Construct a dictionary for use
+    processing_directories = {
+        "scene": processing_root / SCENE_DIR,
+        "temp": processing_root / TEMP_DIR,
+        "logs": processing_root / LOG_DIR,
+    }
 
-    # Split config dicts up to ease readability
-    config_inputs = scene_config_dict["inputs"]
-    config_outputs = scene_config_dict["outputs"]
-    config_gamma = workflow_config_dict["gamma"]
-    config_geocode = workflow_config_dict["geocode"]
+    # Create directories if not exist
+    log.info("Setting directories:")
+    for dir_name, dir_path in processing_directories.items():
+        log.info(f"    {dir_name}: {dir_path}")
+        dir_path.mkdir(parents=True, exist_ok=True)
 
-    # Environment variables for GAMMA must be set
-    set_gamma_env_variables(
-        config_gamma["software_env_var"], config_gamma["libs_env_var"]
-    )
-
-    # Identify scene
-    scene_zip = Path(config_inputs["scene"])
-    scene_id = scene_zip.stem
-    log.info(f"Scene ID: {scene_id} has the following metadata:\n{scene_zip}")
-    if scene_zip.exists():
-        pyrosar_scene_id = identify(scene_zip)
-
-    # Construct output scenes
-    data_dir = Path(config_outputs["data"])
-    processed_scene_dir = (
-        data_dir
-        / config_outputs["processed"]
-        / pyrosar_scene_id.outname_base(extensions=None)
-    )
-    pyrosar_temp_dir = (
-        data_dir / "temp" / pyrosar_scene_id.outname_base(extensions=None)
-    )
-    pyrosar_temp_log_dir = pyrosar_temp_dir / "logfiles"
+    return processing_directories
 
-    log.info("creating directories:")
-    for dir in [processed_scene_dir, pyrosar_temp_dir, pyrosar_temp_log_dir]:
-        dir.mkdir(parents=True, exist_ok=True)
-        log.info(f"    {dir}")
 
-    # Copy over orbit file
-    orbit_file = Path(config_inputs["orbit"])
-    orbit_filename = orbit_file.name
-    shutil.copy(orbit_file, pyrosar_temp_dir / orbit_filename)
+def prepare_dem_for_gamma(dem_tif: Path, temp_dir: Path, log_dir: Path) -> Path:
 
-    # Create DEM in GAMMA format
-    dem_tif = Path(config_inputs["dem"])
-    dem_gamma = pyrosar_temp_dir / dem_tif.stem
+    dem_dir = dem_tif.parent
+    dem_name = dem_tif.stem
+    dem_gamma = temp_dir / dem_name
 
     if dem_gamma.exists():
         log.info("DEM exists")
-        pass
     else:
         log.info("running DEM")
 
@@ -85,27 +56,66 @@ def cli(workflow_config: str, scene_config: str):
             src=str(dem_tif),
             dst=str(dem_gamma),
             geoid=None,
-            logpath=str(pyrosar_temp_log_dir),
-            outdir=str(dem_tif.parent),
+            logpath=str(log_dir),
+            outdir=str(dem_dir),
         )
 
         log.info("finished DEM")
 
-    # Run geocode process
-    # Note that GAMMA geocode from pyrosar produces gamma_0 RTC backscatter
+    return dem_gamma
+
+
+def run_pyrosar_gamma_geocode(
+    scene: Path,
+    orbit: Path,
+    dem: Path,
+    output: Path,
+    gamma_library: Path,
+    gamma_env: str,
+    geocode_spacing: int,
+    geocode_scaling: str,
+):
+
+    # Set up environment variables for GAMMA
+    set_gamma_env_variables(str(gamma_library), gamma_env)
+
+    # Identify scene
+    scene_name = scene.stem
+    pyrosar_scene_id = identify(scene)
+
+    # Create processing directories if required
+    processing_directories = prepare_directories(
+        output, scene_name, pyrosar_scene_id.outname_base(extensions=None)
+    )
+
+    # Prepare orbit file
+    # Copy to temp dir to prevent pyroSAR modifying in-place
+    orbit_dir = processing_directories["temp"]
+    shutil.copy(orbit, orbit_dir / orbit.name)
+
+    dem_gamma = prepare_dem_for_gamma(
+        dem, processing_directories["temp"], processing_directories["logs"]
+    )
+
     log.info("running geocode")
 
+    # Set the border removal step to pyroSAR for GRD products. Ignore otherwise
+    if pyrosar_scene_id.product == "GRD":
+        border_removal_method = "pyroSAR"
+    else:
+        border_removal_method = None
+
     geocode(
         scene=pyrosar_scene_id,
         dem=str(dem_gamma),
-        tmpdir=str(pyrosar_temp_dir),
-        outdir=str(processed_scene_dir),
-        spacing=config_geocode["spacing"],
-        scaling=config_geocode["scaling"],
+        tmpdir=str(processing_directories["temp"]),
+        outdir=str(processing_directories["scene"]),
+        spacing=geocode_spacing,
+        scaling=geocode_scaling,
         func_geoback=1,
         nodata=(0, -99),
         update_osv=False,
-        osvdir=str(pyrosar_temp_dir),
+        osvdir=str(orbit_dir),
         allow_RES_OSV=False,
         cleanup=False,
         export_extra=[
@@ -116,7 +126,7 @@ def cli(workflow_config: str, scene_config: str):
             "pix_ratio_geo",
         ],
         basename_extensions=None,
-        removeS1BorderNoiseMethod="pyroSAR",
+        removeS1BorderNoiseMethod=border_removal_method,
         refine_lut=False,
         rlks=None,
         azlks=None,
@@ -124,8 +134,3 @@ def cli(workflow_config: str, scene_config: str):
     )
 
     log.info("finished geocode")
-
-
-if __name__ == "__main__":
-
-    cli()
diff --git a/sar_antarctica/nci/submission/pbs_template.txt b/sar_antarctica/nci/submission/pbs_template.txt
new file mode 100644
index 0000000..64d44af
--- /dev/null
+++ b/sar_antarctica/nci/submission/pbs_template.txt
@@ -0,0 +1,11 @@
+#!/bin/bash
+#PBS -l ncpus=<NCPU>
+#PBS -l mem=<MEM>GB
+#PBS -q <QUEUE>
+#PBS -P <PROJECT>
+#PBS -l walltime=<WALLTIME>
+#PBS -l storage=<STORAGE>
+#PBS -l wd
+#PBS -o <LOGDIR>/<JOBNAME>
+#PBS -e <LOGDIR>/<JOBNAME>
+#PBS -N <JOBNAME>
\ No newline at end of file
diff --git a/sar_antarctica/nci/submission/pyrosar_gamma/prepare_input.py b/sar_antarctica/nci/submission/pyrosar_gamma/prepare_input.py
new file mode 100644
index 0000000..79c3b8c
--- /dev/null
+++ b/sar_antarctica/nci/submission/pyrosar_gamma/prepare_input.py
@@ -0,0 +1,57 @@
+from datetime import datetime
+from pathlib import Path
+from pyroSAR import identify
+
+from sar_antarctica.nci.preparation.orbits import find_latest_orbit_covering_window
+from sar_antarctica.nci.filesystem import get_orbits_nci, get_dem_nci
+
+
+def get_orbit_and_dem(
+    scene_file: Path,
+    dem_output_dir: Path,
+    orbit_dir: Path = Path("/g/data/fj7/Copernicus/Sentinel-1/"),
+    orbit_type: str | None = "POE",
+) -> tuple[Path, Path]:
+    """For a given Sentinel-1 scene, find the relevant orbit path and DEM path.
+    The DEM will be created if no DEM path is found.
+
+    Parameters
+    ----------
+    scene_file : Path
+        Full path to the scene
+        e.g. "path/to/scene/scene_id.zip"
+    orbit_type : str, optional
+        The orbit type to get. Any of "POE", "RES" or None, by default "POE"
+
+    Returns
+    -------
+    tuple[Path, Path]
+        A tuple containing the path to the orbit file and a path to the DEM file.
+        e.g. ("path/to/orbit/orbitfile.EOF", "path/to/dem/demfile.tif")
+    """
+
+    # Extract metadata
+    scene = identify(scene_file)
+
+    # Isolate metadata for finding orbit
+    scene_sensor = scene.sensor
+    scene_start = datetime.strptime(scene.start, "%Y%m%dT%H%M%S")
+    scene_stop = datetime.strptime(scene.stop, "%Y%m%dT%H%M%S")
+
+    # Find orbit
+    orbit_files = get_orbits_nci(orbit_type, scene_sensor, orbit_dir)
+    orbit_file = find_latest_orbit_covering_window(orbit_files, scene_start, scene_stop)
+
+    # Isolate metadata for creating DEM
+    scene_bbox = scene.bbox().extent
+    scene_bounds = (
+        scene_bbox["xmin"],
+        scene_bbox["ymin"],
+        scene_bbox["xmax"],
+        scene_bbox["ymax"],
+    )
+
+    # Build DEM
+    dem_file = get_dem_nci(scene_file, scene_bounds, dem_output_dir)
+
+    return (orbit_file, dem_file)
diff --git a/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.py b/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.py
deleted file mode 100644
index 015761a..0000000
--- a/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.py
+++ /dev/null
@@ -1,173 +0,0 @@
-import click
-import os
-from pathlib import Path
-import tomli
-from typing import Any
-
-WORKFLOW = "pyrosar_gamma"
-PROCESSING_DIR = "/g/data/yp75/projects/sar-antractica-processing"
-
-
-def get_list_of_scenes(scene_source: str) -> list[str]:
-    """Convert script input to list.
-    If a .zip file, produce a list with that.
-    If a .txt file, open the file, and produce a list of all .zip files.
-
-    Parameters
-    ----------
-    scene_source : str
-        The file to be processed. Either a single .zip or a .txt containing multiple .zip files
-
-    Returns
-    -------
-    list[str]
-        List of files to process
-    """
-
-    # Process a single .zip file
-    if scene_source.endswith(".zip"):
-        scene_list = [scene_source]
-    # Process a .txt file containing .zip files
-    elif scene_source.endswith(".txt"):
-        with open(scene_source, "r") as f:
-            scene_list = [line.strip() for line in f if line.strip().endswith(".zip")]
-    else:
-        scene_list = []
-
-    if scene_list is not None:
-        return scene_list
-    else:
-        raise RuntimeError(
-            "No valid scenes were found for processing. Expected single .zip file or .txt file containing at least one .zip file."
-        )
-
-
-def update_pbs_template(
-    pbs_template: str, scene_id: str, job_config: dict[str, str | dict[str, Any]]
-) -> str:
-    """_summary_
-
-    Parameters
-    ----------
-    pbs_template : str
-        A string containing a PBS jobscript
-    scene_id : str
-        The scene ID for the job
-    job_config : dict[str, str  |  dict[str, Any]]
-        Dictionary containing information on the job, main keys are
-        root, submission, configuration, and settings
-
-    Returns
-    -------
-    str
-        The updated PBS jobscript string with specified values replaced
-    """
-
-    """For a given PBS jobscript template, replace specified values with jobscript settings
-
-    Parameters
-    ----------
-    pbs_template : str
-        A string containing a PBS jobscript
-    jobscript_settings: dict
-
-
-    Returns
-    -------
-    str
-        The updated PBS jobscript string with specified values replaced
-    """
-
-    processing_path = Path(job_config["root"])
-    log_path = (
-        processing_path
-        / job_config["submission"]["root"]
-        / job_config["submission"]["logs"]
-    )
-    config_path = processing_path / job_config["configuration"]["root"]
-
-    job_configuration = job_config["configuration"]
-    job_settings = job_config["settings"]
-
-    workflow_config = job_settings["workflow_config"]
-    # Dictionary to replace placeholders in PBS text with values from configurations
-    replace_dict = {
-        "<SCENE_ID>": scene_id,
-        "<NCPU>": job_settings["ncpu"],
-        "<MEM>": job_settings["mem"],
-        "<QUEUE>": job_settings["queue"],
-        "<PROJECT>": job_settings["project"],
-        "<WALLTIME>": job_settings["walltime"],
-        "<STORAGE>": job_settings["storage"],
-        "<LOG_DIR>": log_path,
-        "<WORKFLOW_CONFIG>": config_path
-        / job_configuration["workflow"]
-        / f"{workflow_config}.toml",
-        "<SCENE_CONFIG>": config_path / job_configuration["scene"] / f"{scene_id}.toml",
-    }
-
-    for key, value in replace_dict.items():
-        pbs_template = pbs_template.replace(
-            key, value if isinstance(value, str) else str(value)
-        )
-
-    return pbs_template
-
-
-@click.command()
-@click.argument("config_file", nargs=1)
-@click.argument("scene_source", nargs=1)
-def pyrosar_gamma_workflow(
-    config_file: str | os.PathLike, scene_source: str | os.PathLike
-) -> None:
-    """Take an input of a single scene or file with multiple scenes and submit pyroSAR+GAMMA jobs
-
-    Parameters
-    ----------
-    processing_dir : str
-        The directory to store configuration and jobscript files
-    scene_source : str
-        The file to be processed. Either a single .zip or a .txt containing multiple .zip files
-    """
-
-    current_file_directory = Path(__file__).resolve().parent
-
-    with open(config_file, "rb") as f:
-        config = tomli.load(f)
-
-    # Extract specific configuration dictionaries
-    job_config = config["job"]
-    submission_config = job_config["submission"]
-    configuration_config = job_config["configuration"]
-    settings_config = job_config["settings"]
-
-    # Get folder structure
-    processing_dir = Path(job_config["root"])
-    log_dir = processing_dir / submission_config["root"] / submission_config["logs"]
-
-    # Get scenes from source
-    scene_list = get_list_of_scenes(scene_source)
-
-    for scene_path in scene_list:
-        # Determine scene ID from command line input and create submission script
-        scene_id = Path(scene_path).stem
-        scene_script = log_dir / scene_id / f"{scene_id}.sh"
-        scene_script.parent.mkdir(exist_ok=True, parents=True)
-
-        # Read the workflow template and replace values
-        workflow_name = settings_config["workflow_config"]
-        template_file = current_file_directory / f"{workflow_name}.txt"
-        print(template_file)
-        pbs_template = template_file.read_text()
-        pbs_template = update_pbs_template(pbs_template, scene_id, job_config)
-
-        # Write updated text to pbs script
-        scene_script.write_text(pbs_template)
-
-        # Submit script
-        qsub_command = f"qsub {scene_script}"
-        os.system(qsub_command)
-
-
-if __name__ == "__main__":
-    pyrosar_gamma_workflow()
diff --git a/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.txt b/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.txt
deleted file mode 100644
index a33d8b5..0000000
--- a/sar_antarctica/nci/submission/pyrosar_gamma/pyrosar_gamma.txt
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-#PBS -l ncpus=<NCPU>
-#PBS -l mem=<MEM>GB
-#PBS -q <QUEUE>
-#PBS -P <PROJECT>
-#PBS -l walltime=<WALLTIME>
-#PBS -l storage=<STORAGE>
-#PBS -l wd
-#PBS -o <LOG_DIR>/<SCENE_ID>
-#PBS -e <LOG_DIR>/<SCENE_ID>
-#PBS -N <SCENE_ID>
-
-# Load the module
-module use /g/data/yp75/modules/modulefiles
-module load sar-antarctica/v0.1
-
-# Activate micromamba environment
-micromamba activate sar-antarctica
-
-# Run preparation script
-cd /g/data/yp75/modules/sar-antarctica/v0.1/sar-antarctica/sar_antarctica/nci/preparation/
-python create_config.py <SCENE_ID> <SCENE_CONFIG>
-
-# Run preocessing script
-cd /g/data/yp75/modules/sar-antarctica/v0.1/sar-antarctica/sar_antarctica/nci/processing/pyroSAR/
-python pyrosar_geocode.py <WORKFLOW_CONFIG> <SCENE_CONFIG>
\ No newline at end of file
diff --git a/sar_antarctica/nci/submission/pyrosar_gamma/submit_job.py b/sar_antarctica/nci/submission/pyrosar_gamma/submit_job.py
new file mode 100644
index 0000000..e21c3d6
--- /dev/null
+++ b/sar_antarctica/nci/submission/pyrosar_gamma/submit_job.py
@@ -0,0 +1,51 @@
+import os
+from pathlib import Path
+from sar_antarctica.nci.submission.utils import populate_pbs_template
+
+
+ENVIRONMENT_COMMAND = """
+
+export MAMBA_EXE=/g/data/yp75/ca6983/micromamba/bin/micromamba
+export MAMBA_ROOT_PREFIX=/g/data/yp75/ca6983/micromamba
+source $MAMBA_ROOT_PREFIX/etc/profile.d/mamba.sh
+
+micromamba activate sar-antarctica
+
+"""
+
+
+def submit_job(
+    scene: Path,
+    spacing: int,
+    scaling: str,
+    pbs_parameters: dict[str, str],
+    log_dir: Path,
+):
+    """Write a PBS job script for *scene* under *log_dir* and submit it via qsub."""
+    scene_name = scene.stem
+
+    scene_script = log_dir / scene_name / f"{scene_name}.sh"
+    scene_script.parent.mkdir(exist_ok=True, parents=True)
+
+    pbs_script = populate_pbs_template(
+        pbs_parameters["ncpu"],
+        pbs_parameters["mem"],
+        pbs_parameters["queue"],
+        pbs_parameters["project"],
+        pbs_parameters["walltime"],
+        scene_name,
+        log_dir,
+    )
+
+    job_command = (
+        f"run-pyrosar-gamma-workflow {scene} --spacing {spacing} --scaling {scaling}"
+    )
+
+    job_script = pbs_script + ENVIRONMENT_COMMAND + job_command
+
+    # Write updated text to pbs script
+    scene_script.write_text(job_script)
+
+    # Submit script
+    qsub_command = f"qsub {scene_script}"
+    os.system(qsub_command)
diff --git a/sar_antarctica/nci/submission/utils.py b/sar_antarctica/nci/submission/utils.py
new file mode 100644
index 0000000..0033a12
--- /dev/null
+++ b/sar_antarctica/nci/submission/utils.py
@@ -0,0 +1,36 @@
+from pathlib import Path
+
+
+SUBMISSION_DIR = Path(__file__).resolve().parent
+SUBMISSION_TEMPLATE = SUBMISSION_DIR / "pbs_template.txt"
+STORAGE = "gdata/yp75+gdata/dg9+gdata/fj7+gdata/v10"
+
+
+def populate_pbs_template(
+    ncpu: int,
+    mem: int,
+    queue: str,
+    project: str,
+    walltime: str,
+    jobname: str,
+    log_dir: str,
+):
+    pbs_template = SUBMISSION_TEMPLATE.read_text()
+
+    replace_dict = {
+        "<NCPU>": ncpu,
+        "<MEM>": mem,
+        "<QUEUE>": queue,
+        "<PROJECT>": project,
+        "<WALLTIME>": walltime,
+        "<STORAGE>": STORAGE,
+        "<LOGDIR>": log_dir,
+        "<JOBNAME>": jobname,
+    }
+
+    for key, value in replace_dict.items():
+        pbs_template = pbs_template.replace(
+            key, value if isinstance(value, str) else str(value)
+        )
+
+    return pbs_template