diff --git a/gnssanalysis/gn_io/igslog.py b/gnssanalysis/gn_io/igslog.py index fcda523..e769d1a 100644 --- a/gnssanalysis/gn_io/igslog.py +++ b/gnssanalysis/gn_io/igslog.py @@ -1,8 +1,10 @@ """IGS log files parser""" +import logging import glob as _glob import re as _re from multiprocessing import Pool as _Pool +from typing import Union, List, Tuple import numpy as _np import pandas as _pd @@ -11,9 +13,10 @@ from .. import gn_frame as _gn_frame from .. import gn_io as _gn_io from .. import gn_transform as _gn_transform -from tqdm import tqdm -_REGEX_ID = _re.compile( +logger = logging.getLogger(__name__) + +_REGEX_ID_V1 = _re.compile( rb""" (?:Four\sCharacter\sID|Site\sID)\s+\:\s*(\w{4}).*\W+ .*\W+ @@ -23,7 +26,17 @@ _re.IGNORECASE | _re.VERBOSE, ) -_REGEX_LOC = _re.compile( +_REGEX_ID_V2 = _re.compile( + rb""" + (?:Nine\sCharacter\sID|Site\sID)\s+\:\s*(\w{4}).*\W+ + .*\W+ + (?:\s{25}.+\W+|) + IERS.+\:\s*(\w{9}|) + """, + _re.IGNORECASE | _re.VERBOSE, +) + +_REGEX_LOC_V1 = _re.compile( rb""" 2.+\W+City\sor\sTown\s+\:\s*(\w[^\(\n\,/\?]+|).*\W+ State.+\W+Country\s+\:\s*([^\(\n\,\d]+|).*\W+(?:\s{25}.+\W+|) @@ -38,6 +51,21 @@ _re.IGNORECASE | _re.VERBOSE, ) +_REGEX_LOC_V2 = _re.compile( + rb""" + 2.+\W+City\sor\sTown\s+\:\s*(\w[^\(\n\,/\?]+|).*\W+ + State.+\W+Country\sor\sRegion\s+\:\s*([^\(\n\,\d]+|).*\W+(?:\s{25}.+\W+|) + Tectonic.+\W+(?:\s{25}.+\W+|).+\W+ + X.{22}\:?\s*([\d\-\+\.\,]+|).*\W+ + Y.{22}\:?\s*([\d\-\+\.\,]+|).*\W+ + Z.{22}\:?\s*([\d\-\+\.\,]+|).*\W+ + Latitude.+\:\s*([\d\.\,\-\+]+|).*\W+ + Longitud.+\:\s*([\d\.\,\-\+]+|).*\W+ + Elevatio.+\:\s*([\d\.\,\-\+]+|).* + """, + _re.IGNORECASE | _re.VERBOSE, +) + _REGEX_REC = _re.compile( rb""" @@ -77,12 +105,21 @@ _REGEX_LOGNAME = r"(?:.*\/)(\w{4})(?:\w+_(\d{8})|_(\d{8})\-?\w?|(\d{8})|_.*|\d+|)" -def find_recent_logs(logs_glob_path: str, rnx_glob_path=None) -> _pd.DataFrame: - """Takes glob expression to get the list of log files, - parses names into site and date and selects the ones - with most recent date - /data/station_logs/station_logs_IGS/*/*.log - /data/acs/pea/proc/exs/data/*.rnx""" +class LogVersionError(Exception): + """ + Log file does not conform to known IGS version standard + """ + + pass + + +def find_recent_logs(logs_glob_path: str, rnx_glob_path: str = None) -> _pd.DataFrame: + """Takes glob expression to create list of logs, parses names into site and date and selects most recent ones + + :param str logs_glob_path: A glob expression for log files, e.g. /data/station_logs_IGS/*/*.log + :param str rnx_glob_path: A glob expression for rnx files, e.g. 
/data/pea/exs/data/*.rnx, defaults to None + :return _pd.DataFrame: Returns a dataframe containing information from all station logs processed + """ paths = _pd.Series(_glob.glob(pathname=logs_glob_path, recursive=False), name="PATH") logs_df = paths.str.extract(expand=True, pat=_REGEX_LOGNAME) @@ -101,39 +138,136 @@ def find_recent_logs(logs_glob_path: str, rnx_glob_path=None) -> _pd.DataFrame: return recent_logs_df -def parse_igs_log(filename_array: _np.ndarray) -> _np.ndarray: - """Parses igs log and outputs ndarray with parsed data - Expects ndarray of the form [CODE DATE PATH]""" - file_code, __, file_path = filename_array +_REGEX_VERSION_1 = _re.compile(rb"""(site log\))""") +_REGEX_VERSION_2 = _re.compile(rb"""(site log v2)""") - with open(file_path, "rb") as file: - data = file.read() - blk_id = _REGEX_ID.search(data) - if blk_id is None: - tqdm.write(f"ID rejected from {file_path}") +def determine_log_version(data: bytes) -> str: + """Given the bytes object that results from reading an IGS log file, determine the version ("v1.0" or "v2.0") + + :param bytes data: IGS log file bytes object to determine the version of + :return str: Return the version number: "v1.0" or "v2.0" + :raises LogVersionError: Raised if the file does not conform to a known IGS version standard + """ + + result_v1 = _REGEX_VERSION_1.search(data) + if result_v1: + return "v1.0" + + result_v2 = _REGEX_VERSION_2.search(data) + if result_v2: + return "v2.0" + + raise LogVersionError("Log file does not conform to any known IGS version") + + +def extract_id_block(data: bytes, file_path: str, file_code: str, version: str = None) -> Union[List[str], _np.array]: + """Extract the site ID block given the bytes object read from an IGS site log file + + :param bytes data: The bytes object returned from an open() call on an IGS site log in "rb" mode + :param str file_path: The path to the file from which the "data" bytes object was obtained + :param str file_code: Code from the filename_array passed to the parse_igs_log() function + :param str version: Version number of log file (e.g. 
"v2.0") - determined if version=None, defaults to None + :raises LogVersionError: Raises an error if an unknown version string is passed in + :return bytes: The site ID block of the IGS site log + """ + if version == None: + version = determine_log_version(data) + + if version == "v1.0": + _REGEX_ID = _REGEX_ID_V1 + elif version == "v2.0": + _REGEX_ID = _REGEX_ID_V2 + else: + raise LogVersionError("Incorrect version string passed to the extract_id_block() function") + + id_block = _REGEX_ID.search(data) + if id_block is None: + logger.warning(f"ID rejected from {file_path}") return _np.array([]).reshape(0, 12) - blk_id = [blk_id[1].decode().upper(), blk_id[2].decode().upper()] # no .groups() thus 1 and 2 - code = blk_id[0] + id_block = [id_block[1].decode().upper(), id_block[2].decode().upper()] # no .groups() thus 1 and 2 + code = id_block[0] if code != file_code: - tqdm.write(f"{code}!={file_code} at {file_path}") + logger.warning(f"{code}!={file_code} at {file_path}") return _np.array([]).reshape(0, 12) + return id_block - blk_loc = _REGEX_LOC.search(data) - if blk_loc is None: - tqdm.write(f"LOC rejected from {file_path}") + +def extract_location_block(data: bytes, file_path: str, version: str = None) -> Union[_re.Match[bytes], _np.array]: + """Extract the location block given the bytes object read from an IGS site log file + + :param bytes data: The bytes object returned from an open() call on a IGS site log in "rb" mode + :param str file_path: The path to the file from which the "data" bytes object was obtained + :param str version: Version number of log file (e.g. "v2.0") - determined if version=None, defaults to None + :raises LogVersionError: Raises an error if an unknown version string is passed in + :return bytes: The location block of the IGS site log + """ + if version == None: + version = determine_log_version(data) + + if version == "v1.0": + _REGEX_LOC = _REGEX_LOC_V1 + elif version == "v2.0": + _REGEX_LOC = _REGEX_LOC_V2 + else: + raise LogVersionError("Incorrect version string passed to extract_location_block() function") + + location_block = _REGEX_LOC.search(data) + if location_block is None: + logger.warning(f"LOC rejected from {file_path}") return _np.array([]).reshape(0, 12) + return location_block + - blk_rec = _REGEX_REC.findall(data) - if blk_rec == []: - tqdm.write(f"REC rejected from {file_path}") +def extract_receiver_block(data: bytes, file_path: str) -> Union[List[Tuple[bytes]], _np.array]: + """Extract the location block given the bytes object read from an IGS site log file + + :param bytes data: The bytes object returned from an open() call on a IGS site log in "rb" mode + :param str file_path: The path to the file from which the "data" bytes object was obtained + :return List[Tuple[bytes]]: The receiver block of the data. 
Each list element specifies a receiver + """ + receiver_block = _REGEX_REC.findall(data) + if receiver_block == []: + logger.warning(f"REC rejected from {file_path}") return _np.array([]).reshape(0, 12) + return receiver_block + - blk_ant = _REGEX_ANT.findall(data) - if blk_ant == []: - tqdm.write(f"ANT rejected from {file_path}") +def extract_antenna_block(data: bytes, file_path: str) -> Union[List[Tuple[bytes]], _np.array]: + """Extract the antenna block given the bytes object read from an IGS site log file + + :param bytes data: The bytes object returned from an open() call on an IGS site log in "rb" mode + :param str file_path: The path to the file from which the "data" bytes object was obtained + :return List[Tuple[bytes]]: The antenna block of the data. Each list element specifies an antenna + """ + antenna_block = _REGEX_ANT.findall(data) + if antenna_block == []: + logger.warning(f"ANT rejected from {file_path}") return _np.array([]).reshape(0, 12) + return antenna_block + + +def parse_igs_log(filename_array: _np.ndarray) -> Union[_np.ndarray, None]: + """Parses IGS log and outputs ndarray with parsed data + + :param _np.ndarray filename_array: Metadata on input log file. Expects ndarray of the form [CODE DATE PATH] + :return _np.ndarray: Returns array with data parsed from the IGS log file (None if the version cannot be determined) + """ + file_code, _, file_path = filename_array + + with open(file_path, "rb") as file: + data = file.read() + + try: + version = determine_log_version(data) + except LogVersionError as e: + logger.warning(f"Error: {e}, skipping parsing the log file") + return + + blk_id = extract_id_block(data, file_path, file_code, version) + blk_loc = extract_location_block(data, file_path, version) + blk_rec = extract_receiver_block(data, file_path) + blk_ant = extract_antenna_block(data, file_path) blk_loc = [group.decode(encoding="utf8", errors="ignore") for group in blk_loc.groups()] blk_rec = _np.asarray(blk_rec, dtype=str) @@ -166,9 +300,12 @@ def parse_igs_log(filename_array: _np.ndarray) -> _np.ndarray: return _np.concatenate([blk_uni, file_path_arr], axis=1) -def igslogdate2datetime64(stacked_rec_ant_dt: _np.ndarray): - """2010-01-01T00:00 - - can be any non-space character. If parsing fails - None""" +def igslogdate2datetime64(stacked_rec_ant_dt: _np.ndarray) -> _np.datetime64: + """Function to convert datetimes for IGS log files to np.datetime64 objects, e.g. 
2010-01-01T00:00 + + :param _np.ndarray stacked_rec_ant_dt: Array of IGS log datetime strings to convert; field separators may be any non-space character + :return _np.datetime64: Return datetime64 values; where parsing fails, None is returned + """ dt_array_float = ( _pd.Series(stacked_rec_ant_dt) .str.extract(pat=r"(\d{4})\S?(\d{2})\S?(\d+)\D?(?:(\d{1,2})\:(\d{1,2})\D?|)") @@ -212,28 +349,39 @@ def igslogdate2datetime64(stacked_rec_ant_dt: _np.ndarray): return dt_datetime64 -def translate_series(series, translation): - """changes values in the series according to the dictionary of old_value-new_value""" +def translate_series(series: _pd.Series, translation: dict) -> _pd.Series: + """Changes values in the series according to the dictionary of input_value:output_value + + :param _pd.Series series: _pd.Series to translate + :param dict translation: Dictionary that defines the translation (mapping) to carry out + :return _pd.Series: Return a _pd.Series with the resultant translation (mapping) + """ series = series.copy() series.index = series.values series.update(translation) return series -def gather_metadata(logs_glob_path="/data/station_logs/station_logs_IGS/*/*.log", rnx_glob_path=None, num_threads=1): - """parses logiles found with glob expression""" +def gather_metadata( + logs_glob_path: str = "/data/station_logs/station_logs_IGS/*/*.log", rnx_glob_path: str = None, num_threads: int = 1 +) -> Tuple[_pd.DataFrame, _pd.DataFrame, _pd.DataFrame]: + """Parses log files found with glob expressions into pd.DataFrames + + :param str logs_glob_path: A glob expression for log files, defaults to "/data/station_logs/station_logs_IGS/*/*.log" + :param str rnx_glob_path: A glob expression for rnx files, e.g. /data/pea/exs/data/*.rnx, defaults to None + :param int num_threads: Number of threads to run, defaults to 1 + :return Tuple[_pd.DataFrame, _pd.DataFrame, _pd.DataFrame]: Tuple of DataFrames with [ID, Receiver, Antenna] data + """ parsed_filenames = find_recent_logs(logs_glob_path=logs_glob_path, rnx_glob_path=rnx_glob_path).values total = parsed_filenames.shape[0] if num_threads == 1: gather = [] - for file in tqdm(parsed_filenames, miniters=total // 100, total=total): + for file in parsed_filenames: gather.append(parse_igs_log(file)) else: with _Pool(processes=num_threads) as pool: - gather = list( - tqdm(pool.imap_unordered(parse_igs_log, parsed_filenames), total=total, miniters=total // 100) - ) + gather = list(pool.imap_unordered(parse_igs_log, parsed_filenames)) gather_raw = _np.concatenate(gather) @@ -356,8 +504,12 @@ def gather_metadata(logs_glob_path="/data/station_logs/station_logs_IGS/*/*.log" return id_loc_df, rec_df, ant_df -def frame2snx_string(frame_of_day): - """frame_of_day dataframe to ESTIMATE sinex block""" +def frame2snx_string(frame_of_day: _pd.DataFrame) -> str: + """Convert frame_of_day dataframe to ESTIMATE sinex block + + :param _pd.DataFrame frame_of_day: Dataframe defining the reference frame of the day of interest + :return str: Returns a sinex block string from the frame definition + """ code_pt = frame_of_day.index.to_series().str.split("_", expand=True) # .to_frame().values code_pt.columns = ["CODE", "PT"] frame_dt = _gn_datetime.j20002datetime(frame_of_day.attrs["REF_EPOCH"]) @@ -409,8 +561,14 @@ def frame2snx_string(frame_of_day): return buf -def meta2sting(id_loc_df, rec_df, ant_df): - """Converts three metadata dataframe to sinex blocks (string)""" +def meta2string(id_loc_df: _pd.DataFrame, rec_df: _pd.DataFrame, ant_df: _pd.DataFrame) -> str: + """Converts the three metadata dataframes (Site ID, Receiver, Antenna) to sinex blocks + + :param _pd.DataFrame 
id_loc_df: Dataframe detailing Site IDs / Locations + :param _pd.DataFrame rec_df: Dataframe detailing Receiver information + :param _pd.DataFrame ant_df: Dataframe detailing Antenna information + :return str: Returns a Sinex block str (in standard IGS Sinex format) + """ rec_df["S/N"] = rec_df["S/N"].str.slice(0, 5) rec_df["FW"] = rec_df["FW"].str.slice(0, 11) @@ -505,15 +663,26 @@ def meta2sting(id_loc_df, rec_df, ant_df): def write_meta_gather_master( - logs_glob_path="/data/station_logs/*/*.log", - rnx_glob_path="/data/acs/pea/proc/exs/data/*.rnx", - frame_datetime=None, - frame_snx_path="/data/ITRF/itrf2014/ITRF2014-IGS-TRF.SNX.gz", - frame_soln_path="/data/ITRF/itrf2014/ITRF2014-soln-gnss.snx", - frame_psd_path="/data/ITRF/itrf2014/ITRF2014-psd-gnss.snx", - out_path="/data/meta_gather.snx", - num_threads=None, -): + logs_glob_path: str, + rnx_glob_path: str, + frame_snx_path: str, + frame_soln_path: str, + frame_psd_path: str, + frame_datetime: _np.datetime64 = None, + out_path: str = "/data/meta_gather.snx", + num_threads: int = 1, +) -> None: + """Create a SNX file of stations, based on given reference frame projected to a datetime using site logs + rnxs + + :param str logs_glob_path: A glob path to find desired log files, e.g. "/data/site_logs/*/*.log" + :param str rnx_glob_path: A glob path to find desired RNX files, e.g. "/data/rinex-files/*.rnx" + :param str frame_snx_path: Path to reference frame sinex file, e.g. "/data/itrf2014/ITRF2014-IGS-TRF.SNX.gz" + :param str frame_soln_path: Path to solution file of reference frame, e.g. "/data/itrf2014/ITRF2014-soln-gnss.snx" + :param str frame_psd_path: Path to post-seismic deformation file, e.g. "/data/itrf2014/ITRF2014-psd-gnss.snx" + :param _np.datetime64 frame_datetime: Datetime to project the frame to, defaults to None (today's date is used) + :param str out_path: Path of file to output, defaults to "/data/meta_gather.snx" + :param int num_threads: Number of threads to run on parsing log / rnx files, defaults to 1 + """ if frame_datetime is None: frame_datetime = _np.datetime64("today") else: @@ -568,7 +737,7 @@ def write_meta_gather_master( ] ) # ant/rec - buf.extend(meta2sting(id_loc_df, rec_df, ant_df)) + buf.extend(meta2string(id_loc_df, rec_df, ant_df)) # projected coordinates if gather_itrf is not None: buf.extend(frame2snx_string(gather_itrf)) diff --git a/gnssanalysis/gn_io/sp3.py b/gnssanalysis/gn_io/sp3.py index 29251ef..bc3542e 100644 --- a/gnssanalysis/gn_io/sp3.py +++ b/gnssanalysis/gn_io/sp3.py @@ -347,6 +347,7 @@ def read_sp3( pOnly: bool = True, nodata_to_nan: bool = True, drop_offline_sats: bool = False, + continue_on_ep_ev_encountered: bool = True, ) -> _pd.DataFrame: """Reads an SP3 file and returns the data as a pandas DataFrame. @@ -357,6 +358,9 @@ def read_sp3( and converts 999999* (indicating nodata) to NaN in the SP3 CLK column. Defaults to True. :param bool drop_offline_sats: If True, drops satellites from the DataFrame if they have ANY missing (nodata) values in the SP3 POS column. + :param bool continue_on_ep_ev_encountered: If True, logs a warning and continues if EV or EP rows are found in + the input SP3. These are currently unsupported by this function and will be ignored. Set to False to + raise a NotImplementedError instead. :return pandas.DataFrame: The SP3 data as a DataFrame. :raise FileNotFoundError: If the SP3 file specified by sp3_path_or_bytes does not exist. 
:raise Exception: For other errors reading SP3 file/bytes @@ -400,18 +404,37 @@ def read_sp3( sp3_pos_nodata_to_nan(sp3_df) # Convert 999999* (which indicates nodata in the SP3 CLK column) to NaN sp3_clock_nodata_to_nan(sp3_df) + + # P/V/EP/EV flag handling is currently incomplete. The current implementation truncates to the first letter, + # so it can't parse or differentiate between EP and EV! + if "E" in sp3_df.index.get_level_values("PV_FLAG").unique(): + if not continue_on_ep_ev_encountered: + raise NotImplementedError("EP and EV flag rows are currently not supported") + logger.warning("EP / EV flag rows encountered. These are not yet supported, and will be ignored!") + + # Check very top of the header to see if this SP3 is Position only, or also contains Velocities if pOnly or parsed_header.HEAD.loc["PV_FLAG"] == "P": sp3_df = sp3_df.loc[sp3_df.index.get_level_values("PV_FLAG") == "P"] sp3_df.index = sp3_df.index.droplevel("PV_FLAG") - # TODO consider exception handling if EP rows encountered else: + # DF contains interlaced Position & Velocity measurements for each sat. Split the data based on this, and + # recombine, turning Pos and Vel into separate columns. position_df = sp3_df.xs("P", level="PV_FLAG") velocity_df = sp3_df.xs("V", level="PV_FLAG") - # TODO consider exception handling if EV rows encountered + + # NOTE: care must now be taken to ensure this split and merge operation does not duplicate the FLAGS columns! + + # Remove the (per sat per epoch, not per pos / vel section) FLAGS from one of our DFs so when we concat them + # back together we don't have duplicated flags. + # The param axis=1 removes from columns rather than indexes (i.e. we want to drop the column from the data, + # not drop all the data to which the column previously applied!) + # We drop from pos rather than vel, because vel is on the right hand side, so the layout resembles the + # layout of an SP3 file better. Functionally, this shouldn't make a difference. + position_df = position_df.drop(axis=1, columns="FLAGS") + velocity_df.columns = SP3_VELOCITY_COLUMNS sp3_df = _pd.concat([position_df, velocity_df], axis=1) - # sp3_df.drop(columns="PV_FLAG", inplace=True) # Check for duplicate epochs, dedupe and log warning if sp3_df.index.has_duplicates: # a literally free check # This typically runs in sub ms time. Marks all but first instance as duped: @@ -691,6 +714,17 @@ def gen_sp3_content( :param io.TextIOBase buf: The buffer to write the SP3 content to. Defaults to None. :return str or None: The SP3 content if `buf` is None, otherwise None. """ + + # TODO ensure we correctly handle outputting Velocity data! I.e. does this need to be interlaced back in, + # not printed as additional columns?! + # E.g. do we need: + # PG01... X Y Z CLK ... + # VG01... VX VY VZ ... + # + # Rather than: + # PG01... X Y Z CLK ... VX VY VZ ... + # ? + out_buf = buf if buf is not None else _io.StringIO() if sort_outputs: # If we need to do particular sorting/ordering of satellites and constellations we can use some of the @@ -699,7 +733,8 @@ def gen_sp3_content( out_df = sp3_df["EST"] flags_df = sp3_df["FLAGS"] # Prediction, maneuver, etc. 
- # Validate that all flags have valid values + # Valid values for the respective flags are 'E' 'P' 'M' 'P' (or blank), as per page 11-12 of the SP3d standard: + # https://files.igs.org/pub/data/format/sp3d.pdf if not ( flags_df["Clock_Event"].astype(str).isin(["E", " "]).all() and flags_df["Clock_Pred"].astype(str).isin(["P", " "]).all() diff --git a/tests/test_datasets/sitelog_test_data.py b/tests/test_datasets/sitelog_test_data.py new file mode 100644 index 0000000..42545ee --- /dev/null +++ b/tests/test_datasets/sitelog_test_data.py @@ -0,0 +1,329 @@ +# Central record of IGS site log test data sets to be shared across unit tests + +# first dataset is a truncated version of file abmf_20240710.log + +abmf_site_log_v1 = bytes( + """ + ABMF Site Information Form (site log) + International GNSS Service + See Instructions at: + https://files.igs.org/pub/station/general/sitelog_instr.txt + +0. Form + + Prepared by (full name) : RGP TEAM + Date Prepared : 2024-07-10 + Report Type : UPDATE + If Update: + Previous Site Log : (ssss_ccyymmdd.log) + Modified/Added Sections : (n.n,n.n,...) + + +1. Site Identification of the GNSS Monument + + Site Name : Aeroport du Raizet -LES ABYMES - Météo France + Four Character ID : ABMF + Monument Inscription : NONE + IERS DOMES Number : 97103M001 + CDP Number : NONE + Monument Description : INOX TRIANGULAR PLATE ON TOP OF METALLIC PILAR + Height of the Monument : 2.0 m + Monument Foundation : ROOF + Foundation Depth : 4.0 m + Marker Description : TOP AND CENTRE OF THE TRIANGULAR PLATE + Date Installed : 2008-07-15T00:00Z + Geologic Characteristic : + Bedrock Type : + Bedrock Condition : + Fracture Spacing : 11-50 cm + Fault zones nearby : + Distance/activity : + Additional Information : + + +2. Site Location Information + + City or Town : Les Abymes + State or Province : Guadeloupe (971) + Country : Guadeloupe + Tectonic Plate : CARIBBEAN + Approximate Position (ITRF) + X coordinate (m) : 2919786.0 + Y coordinate (m) : -5383745.0 + Z coordinate (m) : 1774604.0 + Latitude (N is +) : +161544.30 + Longitude (E is +) : -0613139.11 + Elevation (m,ellips.) : -25.0 + Additional Information : + + +3. GNSS Receiver Information + +3.1 Receiver Type : LEICA GR25 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 1830399 + Firmware Version : 4.31 + Elevation Cutoff Setting : 3 deg + Date Installed : 2019-03-13T17:00Z + Date Removed : 2019-04-15T12:00Z + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.2 Receiver Type : SEPT POLARX5 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 3013312 + Firmware Version : 5.2.0 + Elevation Cutoff Setting : 0 deg + Date Installed : 2019-04-15T12:00Z + Date Removed : 2019-10-01T16:00Z + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.3 Receiver Type : SEPT POLARX5 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 3013312 + Firmware Version : 5.3.0 + Elevation Cutoff Setting : 0 deg + Date Installed : 2019-10-01T16:00Z + Date Removed : (CCYY-MM-DDThh:mmZ) + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.x Receiver Type : (A20, from rcvr_ant.tab; see instructions) + Satellite System : (GPS+GLO+GAL+BDS+QZSS+SBAS) + Serial Number : (A20, but note the first A5 is used in SINEX) + Firmware Version : (A11) + Elevation Cutoff Setting : (deg) + Date Installed : (CCYY-MM-DDThh:mmZ) + Date Removed : (CCYY-MM-DDThh:mmZ) + Temperature Stabiliz. : (none or tolerance in degrees C) + Additional Information : (multiple lines) + + +4. 
GNSS Antenna Information + +4.1 Antenna Type : AERAT2775_43 SPKE + Serial Number : 5546 + Antenna Reference Point : TOP + Marker->ARP Up Ecc. (m) : 000.0500 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : SPKE + Radome Serial Number : NONE + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2008-07-15T00:00Z + Date Removed : 2009-10-15T20:00Z + Additional Information : + +4.2 Antenna Type : TRM55971.00 NONE + Serial Number : 1440911917 + Antenna Reference Point : BAM + Marker->ARP Up Ecc. (m) : 000.0000 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : NONE + Radome Serial Number : + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2009-10-15T20:00Z + Date Removed : 2012-01-24T12:00Z + Additional Information : + +4.3 Antenna Type : TRM57971.00 NONE + Serial Number : 1441112501 + Antenna Reference Point : BAM + Marker->ARP Up Ecc. (m) : 000.0000 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : NONE + Radome Serial Number : + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2012-01-24T12:00Z + Date Removed : (CCYY-MM-DDThh:mmZ) + Additional Information : + +4.x Antenna Type : (A20, from rcvr_ant.tab; see instructions) + Serial Number : (A*, but note the first A5 is used in SINEX) + Antenna Reference Point : (BPA/BCR/XXX from "antenna.gra"; see instr.) + Marker->ARP Up Ecc. (m) : (F8.4) + Marker->ARP North Ecc(m) : (F8.4) + Marker->ARP East Ecc(m) : (F8.4) + Alignment from True N : (deg; + is clockwise/east) + Antenna Radome Type : (A4 from rcvr_ant.tab; see instructions) + Radome Serial Number : + Antenna Cable Type : (vendor & type number) + Antenna Cable Length : (m) + Date Installed : (CCYY-MM-DDThh:mmZ) + Date Removed : (CCYY-MM-DDThh:mmZ) + Additional Information : (multiple lines) + """, + "utf-8", +) + +abmf_site_log_v2 = bytes( + """ + ABMF00GLP Site Information Form (site log v2.0) + International GNSS Service + See Instructions at: + https://files.igs.org/pub/station/general/sitelog_instr_v2.0.txt + +0. Form + + Prepared by (full name) : RGP TEAM + Date Prepared : 2024-07-10 + Report Type : UPDATE + If Update: + Previous Site Log : (ssssmrccc_ccyymmdd.log) + Modified/Added Sections : (n.n,n.n,...) + + +1. Site Identification of the GNSS Monument + + Site Name : Aeroport du Raizet -LES ABYMES - Météo France + Nine Character ID : ABMF00GLP + Monument Inscription : NONE + IERS DOMES Number : 97103M001 + CDP Number : NONE + Monument Description : INOX TRIANGULAR PLATE ON TOP OF METALLIC PILAR + Height of the Monument : 2.0 m + Monument Foundation : ROOF + Foundation Depth : 4.0 m + Marker Description : TOP AND CENTRE OF THE TRIANGULAR PLATE + Date Installed : 2008-07-15T00:00Z + Geologic Characteristic : + Bedrock Type : + Bedrock Condition : + Fracture Spacing : 11-50 cm + Fault zones nearby : + Distance/activity : + Additional Information : + + +2. Site Location Information + + City or Town : Les Abymes + State or Province : Guadeloupe (971) + Country or Region : GLP + Tectonic Plate : CARIBBEAN + Approximate Position (ITRF) + X coordinate (m) : 2919786.0 + Y coordinate (m) : -5383745.0 + Z coordinate (m) : 1774604.0 + Latitude (N is +) : +161544.30 + Longitude (E is +) : -0613139.11 + Elevation (m,ellips.) : -25.0 + Additional Information : + + +3. 
GNSS Receiver Information + +3.1 Receiver Type : LEICA GR25 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 1830399 + Firmware Version : 4.31 + Elevation Cutoff Setting : 3 deg + Date Installed : 2019-03-13T17:00Z + Date Removed : 2019-04-15T12:00Z + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.2 Receiver Type : SEPT POLARX5 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 3013312 + Firmware Version : 5.2.0 + Elevation Cutoff Setting : 0 deg + Date Installed : 2019-04-15T12:00Z + Date Removed : 2019-10-01T16:00Z + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.3 Receiver Type : SEPT POLARX5 + Satellite System : GPS+GLO+GAL+BDS+SBAS + Serial Number : 3013312 + Firmware Version : 5.3.0 + Elevation Cutoff Setting : 0 deg + Date Installed : 2019-10-01T16:00Z + Date Removed : (CCYY-MM-DDThh:mmZ) + Temperature Stabiliz. : none + Additional Information : L2C disabled + +3.x Receiver Type : (A20, from rcvr_ant.tab; see instructions) + Satellite System : (GPS+GLO+GAL+BDS+QZSS+SBAS) + Serial Number : (A20, but note the first A5 is used in SINEX) + Firmware Version : (A11) + Elevation Cutoff Setting : (deg) + Date Installed : (CCYY-MM-DDThh:mmZ) + Date Removed : (CCYY-MM-DDThh:mmZ) + Temperature Stabiliz. : (none or tolerance in degrees C) + Additional Information : (multiple lines) + + +4. GNSS Antenna Information + +4.1 Antenna Type : AERAT2775_43 SPKE + Serial Number : 5546 + Antenna Reference Point : TOP + Marker->ARP Up Ecc. (m) : 000.0500 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : SPKE + Radome Serial Number : NONE + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2008-07-15T00:00Z + Date Removed : 2009-10-15T20:00Z + Additional Information : + +4.2 Antenna Type : TRM55971.00 NONE + Serial Number : 1440911917 + Antenna Reference Point : BAM + Marker->ARP Up Ecc. (m) : 000.0000 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : NONE + Radome Serial Number : + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2009-10-15T20:00Z + Date Removed : 2012-01-24T12:00Z + Additional Information : + +4.3 Antenna Type : TRM57971.00 NONE + Serial Number : 1441112501 + Antenna Reference Point : BAM + Marker->ARP Up Ecc. (m) : 000.0000 + Marker->ARP North Ecc(m) : 000.0000 + Marker->ARP East Ecc(m) : 000.0000 + Alignment from True N : 0 deg + Antenna Radome Type : NONE + Radome Serial Number : + Antenna Cable Type : + Antenna Cable Length : 30.0 m + Date Installed : 2012-01-24T12:00Z + Date Removed : (CCYY-MM-DDThh:mmZ) + Additional Information : + +4.x Antenna Type : (A20, from rcvr_ant.tab; see instructions) + Serial Number : (A*, but note the first A5 is used in SINEX) + Antenna Reference Point : (BPA/BCR/XXX from "antenna.gra"; see instr.) + Marker->ARP Up Ecc. 
(m) : (F8.4) + Marker->ARP North Ecc(m) : (F8.4) + Marker->ARP East Ecc(m) : (F8.4) + Alignment from True N : (deg; + is clockwise/east) + Antenna Radome Type : (A4 from rcvr_ant.tab; see instructions) + Radome Serial Number : + Antenna Cable Type : (vendor & type number) + Antenna Cable Length : (m) + Date Installed : (CCYY-MM-DDThh:mmZ) + Date Removed : (CCYY-MM-DDThh:mmZ) + Additional Information : (multiple lines) + """, + "utf-8", +) diff --git a/tests/test_datasets/sp3_test_data.py b/tests/test_datasets/sp3_test_data.py index 83f97d6..e13d831 100644 --- a/tests/test_datasets/sp3_test_data.py +++ b/tests/test_datasets/sp3_test_data.py @@ -41,6 +41,60 @@ EOF """ +# Manual test dataset for EV and EP flags (currently just checking that exceptions are raised because we don't handle them yet) +sp3_test_data_ep_ev_rows = b"""#dV2007 4 12 0 0 0.00000000 1 ORBIT IGS14 BHN ESOC +## 1422 345600.00000000 900.00000000 54202 0.0000000000000 ++ 1 G01 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 ++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +++ 8 8 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +++ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +%c M cc GPS ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc +%c cc cc ccc ccc cccc cccc cccc cccc ccccc ccccc ccccc ccccc +%f 0.0000000 0.000000000 0.00000000000 0.000000000000000 +%f 0.0000000 0.000000000 0.00000000000 0.000000000000000 +%i 0 0 0 0 0 0 0 0 0 +%i 0 0 0 0 0 0 0 0 0 +/* EUROPEAN SPACE OPERATIONS CENTRE - DARMSTADT, GERMANY +/* --------------------------------------------------------- +/* SP3 FILE GENERATED BY NAPEOS BAHN TOOL (DETERMINATION) +/* PCV:IGS14_2022 OL/AL:EOT11A NONE YN ORB:CoN CLK:CoN +* 2007 4 12 0 0 0.00000000 +PG01 -6114.801556 -13827.040252 22049.171610 999999.999999 +EP 55 55 55 222 1234567 -1234567 5999999 -30 21 -1230000 +VG01 27184.457428 -3548.055474 5304.058806 999999.999999 +EV 22 22 22 111 1234567 1234567 1234567 1234567 1234567 1234567 +EOF +""" +# NOTE: copied from SP3d PDF.. alignment of EP and EV seem to be quite wrong. +# TODO update with a proper source. Will do for now just to check exceptions on these flags being seen. 
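+# A minimal usage sketch (illustrative only, not part of the shipped tests): read_sp3() accepts bytes as well
+# as a path, so the EP/EV handling added in this change can be exercised directly against this dataset:
+#
+#   from gnssanalysis.gn_io import sp3
+#   sp3.read_sp3(sp3_test_data_ep_ev_rows, pOnly=False, continue_on_ep_ev_encountered=False)
+#   # -> raises NotImplementedError("EP and EV flag rows are currently not supported")
+#
+# With the default continue_on_ep_ev_encountered=True, the EP/EV rows are instead ignored with a logged warning.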
+ + +# Expected content output for gnssanalysis gen_sp3_content() when reading in then writing +# out sp3_test_data_igs_benchmark_null_clock: +expected_sp3_output_igs_benchmark_null_clock = b"""* 2007 4 12 0 0 0.00000000 +PG01 -6114.801556 -13827.040252 22049.171610 999999.999999 +VG01 27184.457428 -3548.055474 5304.058806 999999.999999 +PG02 12947.223282 22448.220655 6215.570741 999999.999999 +VG02 -7473.756152 -4355.288568 29939.333728 999999.999999 +* 2007 4 12 0 15 0.00000000 +PG01 -3659.032812 -14219.662913 22339.175481 123456.999999 +VG01 27295.435569 -5170.061971 1131.227754 999999.999999 +PG02 12163.580358 21962.803659 8849.429007 999999.999999 +VG02 -9967.334764 -6367.969150 28506.683280 999999.999999 +* 2007 4 12 0 30 0.00000000 +PG01 -1218.171155 -14755.013599 22252.168480 999999.999999 +VG01 26855.435366 -6704.236117 -3062.394499 999999.999999 +PG02 11149.555664 21314.099837 11331.977499 123456.999999 +VG02 -12578.915944 -7977.396362 26581.116225 999999.999999 +""" +# NOTE 'EOF' is added in write_sp3() so we don't expect it here + # second dataset is a truncated version of file COD0OPSFIN_20242010000_01D_05M_ORB.SP3 sp3_test_data_truncated_cod_final = b"""#dP2024 7 19 0 0 0.00000000 2 d+D IGS20 FIT AIUB ## 2323 432000.00000000 300.00000000 60510 0.0000000000000 @@ -168,3 +222,5 @@ PG19 0.000000 0.000000 0.000000 999999.999999 EOF """ + +# TODO add some test data that actually has flags!! And write tests for those. diff --git a/tests/test_igslog.py b/tests/test_igslog.py new file mode 100644 index 0000000..05c0487 --- /dev/null +++ b/tests/test_igslog.py @@ -0,0 +1,95 @@ +import unittest +import numpy as _np +import pandas as _pd + +from gnssanalysis.gn_io import igslog +from test_datasets.sitelog_test_data import abmf_site_log_v1 as v1_data, abmf_site_log_v2 as v2_data + + +class Testregex(unittest.TestCase): + def test_determine_log_version(self): + # Ensure version 1 and 2 strings are produced as expected + self.assertEqual(igslog.determine_log_version(v1_data), "v1.0") + self.assertEqual(igslog.determine_log_version(v2_data), "v2.0") + + # Check that LogVersionError is raised on wrong data + self.assertRaises(igslog.LogVersionError, igslog.determine_log_version, b"Wrong data") + + def test_extract_id_block(self): + # Ensure the extract of ID information works and gives correct dome number: + self.assertEqual(igslog.extract_id_block(v1_data, "/example/path", "ABMF", "v1.0"), ["ABMF", "97103M001"]) + self.assertEqual(igslog.extract_id_block(v2_data, "/example/path", "ABMF", "v2.0"), ["ABMF", "97103M001"]) + # Check automatic version determination works as expected: + self.assertEqual(igslog.extract_id_block(v1_data, "/example/path", "ABMF"), ["ABMF", "97103M001"]) + + # Check LogVersionError is raised on no data: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_id_block(data=b"", file_path="/example/path", file_code="ABMF") + # Check LogVersionError is raised on wrong data: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_id_block(data=b"Wrong data", file_path="/example/path", file_code="ABMF") + # Check LogVersionError is raised on wrong version number: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_id_block(data=v1_data, file_path="/example/path", file_code="ABMF", version="v3.0") + + def test_extract_location_block(self): + # Version 1 Location description results: + v1_location_block = igslog.extract_location_block(v1_data, "/example/path", "v1.0") + self.assertEqual(v1_location_block.group(1), b"Les Abymes") + 
self.assertEqual(v1_location_block.group(2), b"Guadeloupe") + + # Version 2 Location description results: + v2_location_block = igslog.extract_location_block(v2_data, "/example/path", "v2.0") + self.assertEqual(v2_location_block.group(1), b"Les Abymes") + self.assertEqual(v2_location_block.group(2), b"GLP") + + # Coordinate information remains the same: + self.assertEqual(v2_location_block.group(3), v1_location_block.group(3)) + + # Check LogVersionError is raised on no data: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_location_block(data=b"", file_path="/example/path") + # Check LogVersionError is raised on wrong data: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_location_block(data=b"Wrong data", file_path="/example/path") + # Check LogVersionError raised on wrong version number: + with self.assertRaises(igslog.LogVersionError): + igslog.extract_location_block(data=v1_data, file_path="/example/path", version="v3.0") + + def test_extract_receiver_block(self): + # Testing version 1: + v1_receiver_block = igslog.extract_receiver_block(v1_data, "/example/path") + self.assertEqual(v1_receiver_block[0][0], b"LEICA GR25") + self.assertEqual( + v1_receiver_block[1][0], v1_receiver_block[2][0] + ) # Testing that entries [1] and [2] are the same receiver: "SEPT POLARX5" + self.assertEqual(v1_receiver_block[1][3], b"5.2.0") # Difference between entries is a Firmware change + self.assertEqual(v1_receiver_block[2][3], b"5.3.0") # Difference between entries is a Firmware change + # Last receiver should not have an end date assigned (i.e. current): + self.assertEqual(v1_receiver_block[-1][-1], b"") + + # Same as above, but for version 2: + v2_receiver_block = igslog.extract_receiver_block(v2_data, "/example/path") + self.assertEqual(v2_receiver_block[0][0], b"LEICA GR25") + self.assertEqual( + v2_receiver_block[1][0], v2_receiver_block[2][0] + ) # Testing that entries 2 and 3 are "SEPT POLARX5" + self.assertEqual(v2_receiver_block[1][3], b"5.2.0") # Difference between entries 2 and 3 is in Firmware change + self.assertEqual(v2_receiver_block[2][3], b"5.3.0") + # Last receiver should not have an end date assigned (i.e. current): + self.assertEqual(v2_receiver_block[-1][-1], b"") + + def test_extract_antenna_block(self): + # Testing version 1: + v1_antenna_block = igslog.extract_antenna_block(v1_data, "/example/path") + self.assertEqual(v1_antenna_block[0][0], b"AERAT2775_43") # Check antenna type of first entry + self.assertEqual(v1_antenna_block[0][8], b"2009-10-15T20:00Z") # Check end date of first entry + # Last antenna should not have an end date assigned (i.e. current): + self.assertEqual(v1_antenna_block[-1][-1], b"") + + # Testing version 2: + v2_antenna_block = igslog.extract_antenna_block(v2_data, "/example/path") + self.assertEqual(v2_antenna_block[0][0], b"AERAT2775_43") # Check antenna type of first entry + self.assertEqual(v2_antenna_block[0][8], b"2009-10-15T20:00Z") # Check end date of first entry + # Last antenna should not have an end date assigned (i.e. 
current): + self.assertEqual(v2_antenna_block[-1][-1], b"") diff --git a/tests/test_sp3.py b/tests/test_sp3.py index 1906cb4..e357519 100644 --- a/tests/test_sp3.py +++ b/tests/test_sp3.py @@ -12,6 +12,10 @@ from test_datasets.sp3_test_data import ( # first dataset is part of the IGS benchmark (modified to include non null data on clock): sp3_test_data_igs_benchmark_null_clock as input_data, + # Expected content section we want gnssanalysis to write out + expected_sp3_output_igs_benchmark_null_clock, + # Test exception raising when encountering EP, EV rows + sp3_test_data_ep_ev_rows, # second dataset is a truncated version of file COD0OPSFIN_20242010000_01D_05M_ORB.SP3: sp3_test_data_truncated_cod_final as input_data2, sp3_test_data_partially_offline_sat as offline_sat_test_data, @@ -53,26 +57,25 @@ def test_read_sp3_pv(self, mock_file): self.assertEqual(len(result), 6) # Ensure first epoch is correct / not skipped by incorrect detection of data start. # Check output of both header and data section. - self.assertEqual( - result.attrs["HEADER"]["HEAD"]["DATETIME"], "2007 4 12 0 0 0.00000000" - ) + self.assertEqual(result.attrs["HEADER"]["HEAD"]["DATETIME"], "2007 4 12 0 0 0.00000000") self.assertEqual(result.index[0][0], 229608000) # Same date, as J2000 + @patch("builtins.open", new_callable=mock_open, read_data=sp3_test_data_ep_ev_rows) + def test_read_sp3_pv_with_ev_ep_rows(self, mock_file): + # Expect exception relating to the EV and EP rows, as we can't currently handle them properly. + self.assertRaises( + NotImplementedError, sp3.read_sp3, "mock_path", pOnly=False, continue_on_ep_ev_encountered=False + ) + @patch("builtins.open", new_callable=mock_open, read_data=input_data) def test_read_sp3_header_svs_basic(self, mock_file): """ Minimal test of reading SVs from header """ result = sp3.read_sp3("mock_path", pOnly=False) - self.assertEqual( - result.attrs["HEADER"]["SV_INFO"].shape[0], 2, "Should be two SVs in data" - ) - self.assertEqual( - result.attrs["HEADER"]["SV_INFO"].index[1], "G02", "Second SV should be G02" - ) - self.assertEqual( - result.attrs["HEADER"]["SV_INFO"].iloc[1], 8, "Second ACC should be 8" - ) + self.assertEqual(result.attrs["HEADER"]["SV_INFO"].shape[0], 2, "Should be two SVs in data") + self.assertEqual(result.attrs["HEADER"]["SV_INFO"].index[1], "G02", "Second SV should be G02") + self.assertEqual(result.attrs["HEADER"]["SV_INFO"].iloc[1], 8, "Second ACC should be 8") def test_read_sp3_header_svs_detailed(self): """ @@ -83,16 +86,12 @@ def test_read_sp3_header_svs_detailed(self): """ # We check that negative values parse correctly, but override the default behaviour of warning about them, # to keep the output clean. - result = sp3.parse_sp3_header( - sample_header_svs, warn_on_negative_sv_acc_values=False - ) + result = sp3.parse_sp3_header(sample_header_svs, warn_on_negative_sv_acc_values=False) # Pull out SV info header section, which contains SVs and their accuracy codes # Note: .attrs['HEADER'] nesting gets added by parent function. 
sv_info = result["SV_INFO"] sv_count = sv_info.shape[0] # Effectively len() - self.assertEqual( - sv_count, 30, msg="There should be 30 SVs parsed from the test data" - ) + self.assertEqual(sv_count, 30, msg="There should be 30 SVs parsed from the test data") # Ensure no SVs are read as empty self.assertFalse( @@ -104,23 +103,15 @@ def test_read_sp3_header_svs_detailed(self): first_sv = sv_info.index[0] self.assertEqual(first_sv, "G02", msg="First SV in test data should be G02") end_line1_sv = sv_info.index[16] - self.assertEqual( - end_line1_sv, "G18", msg="Last SV on test line 1 (pos 17) should be G18" - ) + self.assertEqual(end_line1_sv, "G18", msg="Last SV on test line 1 (pos 17) should be G18") start_line2_sv = sv_info.index[17] - self.assertEqual( - start_line2_sv, "G19", msg="First SV on test line 2 (pos 18) should be G19" - ) + self.assertEqual(start_line2_sv, "G19", msg="First SV on test line 2 (pos 18) should be G19") end_line2_sv = sv_info.index[29] - self.assertEqual( - end_line2_sv, "G32", msg="Last SV on test line 2 (pos 30) should be G32" - ) + self.assertEqual(end_line2_sv, "G32", msg="Last SV on test line 2 (pos 30) should be G32") # Ensure first, wrap around, and last accuracy codes came out correctly. Data is artificial to differentiate. first_acc = sv_info.iloc[0] - self.assertEqual( - first_acc, 10, msg="First accuracy code in test data should be 10" - ) + self.assertEqual(first_acc, 10, msg="First accuracy code in test data should be 10") end_line1_acc = sv_info.iloc[16] self.assertEqual( end_line1_acc, @@ -128,26 +119,18 @@ def test_read_sp3_header_svs_detailed(self): msg="Accuracy code end line 1 in test data should be -14", ) start_line2_acc = sv_info.iloc[17] - self.assertEqual( - start_line2_acc, 11, msg="First ACC on test line 2 (pos 18) should be 11" - ) + self.assertEqual(start_line2_acc, 11, msg="First ACC on test line 2 (pos 18) should be 11") end_line2_acc = sv_info.iloc[29] - self.assertEqual( - end_line2_acc, 18, msg="Last ACC on test line 2 (pos 30) should be 18" - ) + self.assertEqual(end_line2_acc, 18, msg="Last ACC on test line 2 (pos 30) should be 18") # TODO Add test(s) for correctly reading header fundamentals (ACC, ORB_TYPE, etc.) # TODO add tests for correctly reading the actual content of the SP3 in addition to the header. # TODO add tests for correctly generating sp3 output content with gen_sp3_content() and gen_sp3_header() def test_sp3_clock_nodata_to_nan(self): - sp3_df = pd.DataFrame( - {("EST", "CLK"): [999999.999999, 123456.789, 999999.999999, 987654.321]} - ) + sp3_df = pd.DataFrame({("EST", "CLK"): [999999.999999, 123456.789, 999999.999999, 987654.321]}) sp3.sp3_clock_nodata_to_nan(sp3_df) - expected_result = pd.DataFrame( - {("EST", "CLK"): [np.nan, 123456.789, np.nan, 987654.321]} - ) + expected_result = pd.DataFrame({("EST", "CLK"): [np.nan, 123456.789, np.nan, 987654.321]}) self.assertTrue(sp3_df.equals(expected_result)) def test_sp3_pos_nodata_to_nan(self): @@ -311,6 +294,7 @@ def test_trim_df(self, mock_file): # - write (generate content) # TODO notes added above to implement those bits separately + class TestMergeSP3(TestCase): def setUp(self): self.setUpPyfakefs()
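A minimal usage sketch of how the new version-aware site log helpers compose (the log path and site code below are hypothetical; each helper determines the version itself when version=None is passed):

    from gnssanalysis.gn_io import igslog

    log_path = "/data/station_logs/abmf_20240710.log"  # hypothetical path
    with open(log_path, "rb") as f:
        data = f.read()

    try:
        version = igslog.determine_log_version(data)  # "v1.0" or "v2.0"
    except igslog.LogVersionError:
        version = None  # not a known IGS site log format

    if version is not None:
        id_block = igslog.extract_id_block(data, log_path, "ABMF", version)  # [site code, DOMES number]
        location_block = igslog.extract_location_block(data, log_path, version)  # regex match over section 2
        receiver_block = igslog.extract_receiver_block(data, log_path)  # list of receiver entry tuples
        antenna_block = igslog.extract_antenna_block(data, log_path)  # list of antenna entry tuples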