From 5656fde6a4d28d7a51d9cda19ce04681bbc05226 Mon Sep 17 00:00:00 2001 From: 0x4A616B65 Date: Thu, 8 Aug 2024 15:00:16 -0400 Subject: [PATCH] Added host list detections sql upload! --- README.md | 2 + docs/index.md | 2 + docs/sql.md | 34 ++++++--- qualyspy/sql/__init__.py | 1 + qualyspy/sql/base.py | 16 +++- qualyspy/sql/supported_uploads.py | 91 ++++++++++++++++++++--- qualyspy/vmdr/data_classes/detection.py | 33 ++++---- qualyspy/vmdr/data_classes/hosts.py | 28 +++---- qualyspy/vmdr/get_host_list_detections.py | 89 ++++++++++++++++++++-- 9 files changed, 240 insertions(+), 56 deletions(-) diff --git a/README.md b/README.md index 3d23c14..941adf9 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ hosts = get_host_list(auth, details="All/AGs", show_tags=True, page_count=4) ## Current Supported Modules +>**Head's Up!:** SQL DB uploading is currently in development! 🎉 + |Module| Status | |--|--| | GAV (Global AssetView) |✅| diff --git a/docs/index.md b/docs/index.md index fce48a5..e3f2c1b 100644 --- a/docs/index.md +++ b/docs/index.md @@ -28,6 +28,8 @@ hosts = get_host_list(auth, details="All/AGs", show_tags=True, page_count=4) >>>[VMDRHost(12345), ...] ``` +>**Head's Up!:** SQL DB uploading is currently in development! 🎉 + ## Current Supported Modules |Module| Status | diff --git a/docs/sql.md b/docs/sql.md index e4d8f3e..1d0e0d5 100644 --- a/docs/sql.md +++ b/docs/sql.md @@ -2,7 +2,7 @@ >**Head's Up!:** ```qualyspy.sql``` is currently in development and has been tested using Microsoft SQL Server only. Other DBs will be tested at some point. -```qualyspy``` supports uploading data it has pulled to a SQL Database using various ```upload__*``` functions. Thanks to the [Pandas library](https://pandas.pydata.org) and qualyspy's ```BaseList``` class, uploading is rather easy. +```qualyspy``` supports uploading data it has pulled to a SQL Database using various ```upload__*``` functions. 
Thanks to the [Pandas library](https://pandas.pydata.org) and qualyspy's ```BaseList``` class, uploading is rather easy. ```qualyspy``` automatically will create the table for you if it does not exist, and will append data to the table if it does exist. The ```import_datetime``` field is also added to each table to track when the data was uploaded. ## Steps to Get Going @@ -18,11 +18,24 @@ Next, build your connection object. ```qualyspy``` supports username/password au ```py -# Get a sqlalchemy.Connection using trusted_connection: +# Get a sqlalchemy.Connection using trusted_connection to SQL Server. +# since driver defaults to "ODBC Driver 17 for SQL Server" and db_type defaults to "mssql", you can omit them. cnxn = db_connect(host='10.0.0.1', db='qualysdata', trusted_cnxn=True) -# Get a sqlalchemy.Connection with username/password auth: -cnxn = db_connect(host='10.0.0.1', db='qualysdata', username='Jane', password='SuperSecretPassword!') +# Get a sqlalchemy.Connection with username/password auth to an oracle DB: +cnxn = db_connect(host='10.0.0.1', db='qualysdata', username='Jane', password='SuperSecretPassword!', db_type='oracle', driver='Some Driver for Oracle') +``` + +Note that you are required to call ```.close()``` on the connection object when you are done with it to close the connection to the DB. + +```py + +cnxn = db_connect(host='10.0.0.1', db='qualysdata', trusted_cnxn=True) + +# Do some stuff with the connection +... + +cnxn.close() ``` ### Step 3: Fire Away! @@ -33,12 +46,13 @@ And finally, you can use the following supported functions: Each upload function takes 2 parameters. The first is the ```BaseList``` of data, and the second is the ```sqlalchemy.Connection``` object you built above. 
-| Function Name | Module | ```qualyspy``` Function Data Source |
-| -- | -- | -- |
-| ```upload_vmdr_ags``` | VMDR | ```vmdr.get_ag_list()```|
-| ```upload_vmdr_kb``` | VMDR | ```vmdr.query_kb()```|
-| ```upload_vmdr_hosts``` | VMDR | ```vmdr.get_host_list()```|
-| ```upload_vmdr_ips``` | VMDR | ```vmdr.get_ip_list()```|
+| Function Name | Module | ```qualyspy``` Function Data Source | Resulting Table Name |
+| -- | -- | -- | -- |
+| ```upload_vmdr_ags``` | VMDR | ```vmdr.get_ag_list()```| ```vmdr_assetgroups``` |
+| ```upload_vmdr_kb``` | VMDR | ```vmdr.query_kb()```| ```vmdr_knowledgebase``` |
+| ```upload_vmdr_hosts``` | VMDR | ```vmdr.get_host_list()```| ```vmdr_hosts_list``` |
+| ```upload_vmdr_hld``` | VMDR | ```vmdr.get_hld()```| ```vmdr_hld_hosts_list``` for hosts and ```vmdr_hld_detections``` for detections |
+| ```upload_vmdr_ips``` | VMDR | ```vmdr.get_ip_list()```| ```vmdr_ips``` |
 
 ## A Friendly Recommendation For Getting Data
 
diff --git a/qualyspy/sql/__init__.py b/qualyspy/sql/__init__.py
index 6ccd2f4..d146be6 100644
--- a/qualyspy/sql/__init__.py
+++ b/qualyspy/sql/__init__.py
@@ -8,4 +8,5 @@
     upload_vmdr_kb,
     upload_vmdr_hosts,
     upload_vmdr_ips,
+    upload_vmdr_hld,
 )
diff --git a/qualyspy/sql/base.py b/qualyspy/sql/base.py
index 2da33ae..a88c500 100644
--- a/qualyspy/sql/base.py
+++ b/qualyspy/sql/base.py
@@ -65,7 +65,12 @@ def db_connect(
             f"{db_type}+pyodbc://{username}:{password}@{host}/{db}?driver={driver}"
         )
 
-    engine = create_engine(conn_str)
+    if db_type == "mssql":
+        # We can enable fast_executemany for mssql to speed up inserts:
+        print("Enabling fast_executemany for mssql...")
+        engine = create_engine(conn_str, fast_executemany=True)
+    else:
+        engine = create_engine(conn_str)
 
     return engine.connect()
 
@@ -90,9 +95,9 @@ def upload_data(df: DataFrame, table: str, cnxn: Connection, dtype: dict) -> int
 
     # For any string values in the DataFrame, make sure it doesn't
    # exceed VARCHAR(MAX) length:
-    for col 
in df.columns: - if df[col].dtype == "object": - df[col] = df[col].str.slice(0, 2147483647) + # for col in df.columns: + # if df[col].dtype == "object": + # df[col] = df[col].str.slice(0, 2147483647) # Upload the data: print(f"Uploading {len(df)} rows to {table}...") @@ -136,7 +141,10 @@ def prepare_dataclass(dataclass: dataclass) -> dict: "TRURISK_SCORE_FACTORS", "IP", "IPV6", + "QDS", + "QDS_FACTORS", ] + DICT_FIELDS = [ "CORRELATION", "CVSS", diff --git a/qualyspy/sql/supported_uploads.py b/qualyspy/sql/supported_uploads.py index e5bfda9..232bfd6 100644 --- a/qualyspy/sql/supported_uploads.py +++ b/qualyspy/sql/supported_uploads.py @@ -63,7 +63,7 @@ def upload_vmdr_ags(ags: BaseList, cnxn: Connection) -> int: df = DataFrame([prepare_dataclass(ag) for ag in ags]) # Upload the data: - return upload_data(df, "AssetGroups", cnxn, dtype=COLS) + return upload_data(df, "vmdr_assetgroups", cnxn, dtype=COLS) def upload_vmdr_kb(kbs: BaseList, cnxn: Connection) -> int: @@ -115,16 +115,17 @@ def upload_vmdr_kb(kbs: BaseList, cnxn: Connection) -> int: df = DataFrame([prepare_dataclass(kb) for kb in kbs]) # Upload the data: - return upload_data(df, "knowledgebase", cnxn, dtype=COLS) + return upload_data(df, "vmdr_knowledgebase", cnxn, dtype=COLS) -def upload_vmdr_hosts(hosts: BaseList, cnxn: Connection) -> int: +def upload_vmdr_hosts(hosts: BaseList, cnxn: Connection, is_hld: bool = False) -> int: """ Upload data from vmdr.get_host_list() to SQL. Parameters: hosts (BaseList): The Host List to upload. cnxn (Connection): The Connection object to the SQL database. + is_hld (bool): If the data is from a Host List Detail pull. You can ignore this. Returns: int: The number of rows uploaded. 
@@ -189,8 +190,10 @@ def upload_vmdr_hosts(hosts: BaseList, cnxn: Connection) -> int:
 
         df.drop(columns=["METADATA", "DNS_DATA", "DETECTION_LIST"], inplace=True)
 
-    # Upload the data:
-    return upload_data(df, "vmdr_hosts_list", cnxn, dtype=COLS)
+    # Upload the data, with the table depending on whether it is a Host List Detail pull or not:
+    return upload_data(
+        df, "vmdr_hosts_list" if not is_hld else "vmdr_hld_hosts_list", cnxn, dtype=COLS
+    )
 
 
 def upload_vmdr_ips(ips: BaseList, cnxn: Connection) -> int:
@@ -206,17 +209,85 @@ def upload_vmdr_ips(ips: BaseList, cnxn: Connection) -> int:
     """
 
     COLS = {
-        "IP_OBJ": types.String(),
+        "IP": types.String(),
         "TYPE": types.String(),
     }
 
     # Convert the BaseList to a DataFrame:
-    df = DataFrame([prepare_dataclass(ip) for ip in ips], columns=["IP"])
+    df = DataFrame([str(ip) for ip in ips], columns=["IP"])
 
     # Add the TYPE column, which shows if it is a single IP or a range:
-    df["TYPE"] = df["IP"].apply(
-        lambda x: "Single IP" if "/" not in str(x) else "IP Range"
-    )
+    df["TYPE"] = df["IP"].apply(lambda x: "Single IP" if "/" not in x else "IP Range")
 
     # Upload the data:
     return upload_data(df, "vmdr_ips", cnxn, dtype=COLS)
+
+
+def upload_vmdr_hld(hld: BaseList, cnxn: Connection) -> int:
+    """
+    Upload data from vmdr.get_hld() to SQL.
+
+    Parameters:
+        hld (BaseList): The Host List to upload.
+        cnxn (Connection): The Connection object to the SQL database.
+
+    Returns:
+        int: The number of rows uploaded.
+
+    """
+
+    """
+    Get_hld and get_host_list technically return the same data. get_hld just
+    includes the DETECTION_LIST attribute. We can use the same upload function
+    for the host part, and then snip off the DETECTION_LIST attribute to upload
+    to a detections table.
+    """
+
+    # Isolate the detection lists. Since the Detection objects themselves
+    # have an ID attribute, we can use that to link them back to the host. 
+ detections = BaseList() + for host in hld: + if host.DETECTION_LIST: + for detection in host.DETECTION_LIST: + detections.append(detection) + + # upload_vmdr_hosts automatically ignores the DETECTION_LIST attribute, + # so we can use it here with the is_hld flag set to True to put the hosts + # in a different table than get_host_list. + hosts_uploaded = upload_vmdr_hosts(hld, cnxn, is_hld=True) + print( + f"Uploaded {hosts_uploaded} hosts to vmdr_hld_hosts_list. Moving to detections..." + ) + + COLS = { + "UNIQUE_VULN_ID": types.BigInteger(), + "QID": types.BigInteger(), + "TYPE": types.String(), + "SEVERITY": types.Integer(), + "STATUS": types.String(), + "SSL": types.Boolean(), + "RESULTS": types.String(), + "FIRST_FOUND_DATETIME": types.DateTime(), + "LAST_FOUND_DATETIME": types.DateTime(), + "QDS": types.Integer(), + "QDS_FACTORS": types.String(), + "TIMES_FOUND": types.Integer(), + "LAST_TEST_DATETIME": types.DateTime(), + "LAST_UPDATE_DATETIME": types.DateTime(), + "IS_IGNORED": types.Boolean(), + "IS_DISABLED": types.Boolean(), + "LAST_PROCESSED_DATETIME": types.DateTime(), + "LAST_FIXED_DATETIME": types.DateTime(), + "PORT": types.Integer(), + "PROTOCOL": types.String(), + "FQDN": types.String(), + } + + # Convert the BaseList to a DataFrame: + df = DataFrame([prepare_dataclass(detection) for detection in detections]) + + # Set QDS to an integer: + df["QDS"] = df["QDS"].apply(lambda x: int(x) if x else None) + + # Upload the data: + return upload_data(df, "vmdr_hld_detections", cnxn, dtype=COLS) diff --git a/qualyspy/vmdr/data_classes/detection.py b/qualyspy/vmdr/data_classes/detection.py index b7484a6..bc1117a 100644 --- a/qualyspy/vmdr/data_classes/detection.py +++ b/qualyspy/vmdr/data_classes/detection.py @@ -11,6 +11,7 @@ from .qds_factor import QDSFactor from .qds import QDS as qds +from ..data_classes.lists import BaseList @dataclass(order=True) @@ -117,6 +118,11 @@ class Detection: compare=False, ) + ID: int = field( + metadata={"description": "The 
host ID of host the detection is on."}, + default=None, + ) + def __post_init__(self): # convert the datetimes to datetime objects DATETIME_FIELDS = [ @@ -132,7 +138,7 @@ def __post_init__(self): BOOL_FIELDS = ["IS_IGNORED", "IS_DISABLED", "SSL"] - INT_FIELDS = ["UNIQUE_VULN_ID", "QID", "SEVERITY", "TIMES_FOUND", "PORT"] + INT_FIELDS = ["UNIQUE_VULN_ID", "QID", "SEVERITY", "TIMES_FOUND", "PORT", "ID"] for field in DATETIME_FIELDS: if ( @@ -173,20 +179,19 @@ def __post_init__(self): # convert the QDS factors to QDSFactor objects if self.QDS_FACTORS: - # if [QDS_FACTORS][QDS_FACTOR] is a list of dictionaries, itereate through each dictionary and convert it to a QDSFactor object - # if it is just one dictionary, convert it to a QDSFactor object - if isinstance(self.QDS_FACTORS["QDS_FACTOR"], list): - self.QDS_FACTORS = [ + factors_bl = BaseList() + data = self.QDS_FACTORS["QDS_FACTOR"] + + # Normalize QDS factors to a list for easier processing + if isinstance(data, dict): + data = [data] + + for factor in data: + factors_bl.append( QDSFactor(NAME=factor["@name"], VALUE=factor["#text"]) - for factor in self.QDS_FACTORS["QDS_FACTOR"] - ] - else: - self.QDS_FACTORS = [ - QDSFactor( - NAME=self.QDS_FACTORS["QDS_FACTOR"]["@name"], - VALUE=self.QDS_FACTORS["QDS_FACTOR"]["#text"], - ) - ] + ) + + self.QDS_FACTORS = factors_bl def __str__(self): # return str(self.UNIQUE_VULN_ID) diff --git a/qualyspy/vmdr/data_classes/hosts.py b/qualyspy/vmdr/data_classes/hosts.py index d0b3c3f..6f3ff93 100644 --- a/qualyspy/vmdr/data_classes/hosts.py +++ b/qualyspy/vmdr/data_classes/hosts.py @@ -205,7 +205,7 @@ class VMDRHost: compare=False, ) - DETECTION_LIST: Optional[Union[list[Detection], BaseList[Detection]]] = field( + DETECTION_LIST: Optional[BaseList[Detection]] = field( default=None, metadata={"description": "The detection list of the host."}, compare=False, @@ -328,7 +328,6 @@ def __post_init__(self): "ASSET_RISK_SCORE", "TRURISK_SCORE", "ASSET_CRITICALITY_SCORE", - 
"CLOUD_ACCOUNT_ID", ] if self.DNS_DATA: @@ -450,17 +449,20 @@ def __post_init__(self): # check for a detections list and convert it to a BaseList of Detection objects (used in hld): if self.DETECTION_LIST: - if isinstance(self.DETECTION_LIST["DETECTION"], list): - self.DETECTION_LIST = BaseList( - [ - Detection.from_dict(detection) - for detection in self.DETECTION_LIST["DETECTION"] - ] - ) - else: - self.DETECTION_LIST = BaseList( - [Detection.from_dict(self.DETECTION_LIST["DETECTION"])] - ) + + detections_bl = BaseList() + data = self.DETECTION_LIST["DETECTION"] + + if isinstance(data, dict): + data = [data] + + for detection in data: + # Append the host's ID attr to the detection dictionary + # to allow for a relationship: + detection["ID"] = self.ID + detections_bl.append(Detection.from_dict(detection)) + + self.DETECTION_LIST = detections_bl def __str__(self) -> str: """ diff --git a/qualyspy/vmdr/get_host_list_detections.py b/qualyspy/vmdr/get_host_list_detections.py index 9ab50ac..70edfca 100644 --- a/qualyspy/vmdr/get_host_list_detections.py +++ b/qualyspy/vmdr/get_host_list_detections.py @@ -77,8 +77,8 @@ def hld_backend( page_count (Union[int, "all"]): The number of pages to retrieve. Defaults to "all". **kwargs: Additional keyword arguments to pass to the API. See below. - :Kwargs: - ``` + Kwargs: + action (Optional[str]) #The action to perform. Default is 'list'. WARNING: any value you pass is overwritten with 'list'. It is just recognized as valid for the sake of completeness. echo_request (Optional[bool]) #Whether to echo the request. Default is False. Ends up being passed to the API as 0 or 1. WARNING: this SDK does not include this field in the data. show_asset_id (Optional[bool]) #Whether to show the asset IDs. Default is 'False'. ends up being passed to API as 0 or 1. @@ -157,8 +157,6 @@ def hld_backend( host_metadata_fields (Optional[str]) #A comma-separated string of metadata fields to include. Use carefully. 
show_cloud_tags (Optional[bool]) #Whether to show cloud tags. Default is False. Ends up being passed to the API as 0 or 1. cloud_tag_fields (Optional[str]) #A comma-separated string of cloud tag fields to include. Use carefully. - ``` - Returns: List: A list of VMDRHost objects, with their DETECTIONS attribute populated. @@ -314,7 +312,88 @@ def get_hld( threads (int): The number of threads to use. Defaults to 5. page_count (Union[int, "all"]): The number of pages to retrieve. Defaults to "all". chunk_count (Union[int, "all"]): The number of chunks to retrieve. Defaults to "all". - **kwargs: Additional keyword arguments to pass to the API. See qualyspy.vmdr.get_host_list_detections.hld_backend() for details. + **kwargs: Additional keyword arguments to pass to the API. + + Kwargs: + + action (Optional[str]) #The action to perform. Default is 'list'. WARNING: any value you pass is overwritten with 'list'. It is just recognized as valid for the sake of completeness. + echo_request (Optional[bool]) #Whether to echo the request. Default is False. Ends up being passed to the API as 0 or 1. WARNING: this SDK does not include this field in the data. + show_asset_id (Optional[bool]) #Whether to show the asset IDs. Default is 'False'. ends up being passed to API as 0 or 1. + include_vuln_type (Optional[Literal["confirmed", "potential"]]) #The type of vulnerability to include. If not specified, both types are included. + + DETECTION FILTERS: + show_results (Optional[bool]) #Whether to show the results. Default is True. Ends up being passed to the API as 0 or 1. WARNING: this SDK overwrites any value you pass with '1'. It is just recognized as valid for the sake of completeness. + arf_kernel_filter (Optional[Literal[0,1,2,3,4]]) #Specify vulns for Linux kernels. 0 = don't filter, 1 = exclude kernel vulns that are not exploitable, 2 = only include kernel related vulns that are not exploitable, 3 = only include exploitable kernel vulns, 4 = only include kernel vulns. 
If specified, results are in a host's tag. + arf_service_filter (Optional[Literal[0,1,2,3,4]]) #Specify vulns found on running or nonrunning ports/services. 0 = don't filter, 1 = exclude service related vulns that are exploitable, 2 = only include service vulns that are exploitable, 3 = only include service vulns that are not exploitable, 4 = only include service vulns. If specified, results are in a host's tag. + arf_config_filter (Optional[Literal[0,1,2,3,4]]) #Specify vulns that can be vulnerable based on host config. 0 = don't filter, 1 = exclude vulns that are exploitable due to host config, 2 = only include config related vulns that are exploitable, 3 = only include config related vulns that are not exploitable, 4 = only include config related vulns. If specified, results are in a host's tag. + active_kernels_only (Optional[Literal[0,1,2,3]]) #Specify vulns related to running or non-running kernels. 0 = don't filter, 1 = exclude non-running kernels, 2 = only include vulns on non-running kernels, 3 = only include vulns with running kernels. If specified, results are in a host's tag. + output_format (Optional[Literal["XML", "CSV"]]) #The format of the output. Default is 'XML'. WARNING: this SDK will overwrite any value you pass with 'XML'. It is just recognized as valid for the sake of completeness. + supress_duplicated_data_from_csv (Optional[bool]) #Whether to suppress duplicated data from CSV. Default is False. Ends up being passed to the API as 0 or 1. WARNING: this SDK does not include this field in the data. + truncation_limit (Optional[int]) #The truncation limit for a page. Default is 1000. + max_days_since_detection_updated (Optional[int]) #The maximum number of days since the detection was last updated. For detections that have never changed, the value is applied to the last detection date. + detection_updated_since (Optional[str]) #The date and time since the detection was updated. 
+ detection_updated_before (Optional[str]) #The date and time before the detection was updated. + detection_processed_before (Optional[str]) #The date and time before the detection was processed. + detection_processed_after (Optional[str]) #The date and time after the detection was processed. + detection_last_tested_since (Optional[str]) #The date and time since the detection was last tested. + detection_last_tested_since_days (Optional[int]) #The number of days since the detection was last tested. + detection_last_tested_before (Optional[str]) #The date and time before the detection was last tested. + detection_last_tested_before_days (Optional[int]) #The number of days before the detection was last tested. + include_ignored (Optional[bool]) #Whether to include ignored detections. Default is False. Ends up being passed to the API as 0 or 1. + include_disabled (Optional[bool]) #Whether to include disabled detections. Default is False. Ends up being passed to the API as 0 or 1. + + HOST FILTERS: + ids (Optional[str]) #A comma-separated string of host IDs to include. + id_min (Optional[Union[int,str]]) #The minimum host ID to include. + id_max (Optional[Union[int,str]]) #The maximum host ID to include. + ips (Optional[str]) #The IP address of the host to include. Can be a comma-separated string, and also supports ranges with a hyphen: 10.0.0.0-10.0.0.255. + ipv6 (Optional[str]) #The IPv6 address of the host to include. Can be a comma-separated string. Does not support ranges. + ag_ids (Optional[str]) #Show only hosts belonging to the specified asset group IDs. Can be a comma-separated string, and also supports ranges with a hyphen: 1-5. + ag_titles (Optional[str]) #Show only hosts belonging to the specified asset group titles. Can be a comma-separated string. + network_ids (Optional[str]) #Show only hosts belonging to the specified network IDs. Can be a comma-separated string. 
+ network_names (Optional[str]) #displays the name of the network corresponding to the network ID. + vm_scan_since (Optional[str]) #The date and time since the last VM scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + no_vm_scan_since (Optional[str]) #The date and time since the last VM scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + max_days_since_last_vm_scan (Optional[int]) #The maximum number of days since the last VM scan. + vm_processed_before (Optional[str]) #The date and time before the VM scan was processed. Format is 'YYYY-MM-DD[THH:MM:SS]'. + vm_processed_after (Optional[str]) #The date and time after the VM scan was processed. Format is 'YYYY-MM-DD[THH:MM:SS]'. + vm_scan_date_before (Optional[str]) #The date and time before the VM scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + vm_scan_date_after (Optional[str]) #The date and time after the VM scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + vm_auth_scan_date_before (Optional[str]) #The date and time before the VM authenticated scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + vm_auth_scan_date_after (Optional[str]) #The date and time after the VM authenticated scan. Format is 'YYYY-MM-DD[THH:MM:SS]'. + status (Optional[Literal["New", "Active", "Fixed", "Re-Opened"]]) #The status of the detection. + compliance_enabled (Optional[bool]) #Whether compliance is enabled. Default is False. Ends up being passed to the API as 0 or 1. + os_pattern (Optional[str]) #PCRE Regex to match operating systems. + + QID FILTERS: + qids (Optional[str]) #A comma-separated string of QIDs to include. + severities (Optional[str]) #A comma-separated string of severities to include. Can also be a hyphenated range, i.e. '2-4'. + filter_superseded_qids (Optional[bool]) #Whether to filter superseded QIDs. Default is False. Ends up being passed to the API as 0 or 1. + include_search_list_titles (Optional[str]) #A comma-separated string of search list titles to include. 
+        exclude_search_list_titles (Optional[str]) #A comma-separated string of search list titles to exclude.
+        include_search_list_ids (Optional[str]) #A comma-separated string of search list IDs to include.
+        exclude_search_list_ids (Optional[str]) #A comma-separated string of search list IDs to exclude.
+
+        ASSET TAG FILTERS:
+        use_tags (Optional[bool]) #Whether to use tags. Default is False. Ends up being passed to the API as 0 or 1.
+        tag_set_by (Optional[Literal['id','name']]) #When filtering on tags, whether to filter by tag ID or tag name.
+        tag_include_selector (Optional[Literal['any','all']]) #When filtering on tags, choose if asset has to match any or all tags specified.
+        tag_exclude_selector (Optional[Literal['any','all']]) #When filtering on tags, choose if asset has to match any or all tags specified.
+        tag_set_include (Optional[str]) #A comma-separated string of tag IDs or names to include.
+        tag_set_exclude (Optional[str]) #A comma-separated string of tag IDs or names to exclude.
+        show_tags (Optional[bool]) #Whether to show tags. Default is False. Ends up being passed to the API as 0 or 1.
+
+        QDS FILTERS:
+        show_qds (Optional[bool]) #Whether to show QDS. Default is False. Ends up being passed to the API as 0 or 1.
+        qds_min (Optional[int]) #The minimum QDS to include.
+        qds_max (Optional[int]) #The maximum QDS to include.
+        show_qds_factors (Optional[bool]) #Whether to show QDS factors. Default is False. Ends up being passed to the API as 0 or 1.
+
+        EC2/AZURE METADATA FILTERS:
+        host_metadata (Optional[Literal['all', 'ec2', 'azure']]) #The type of metadata to include. Default is 'all'.
+        host_metadata_fields (Optional[str]) #A comma-separated string of metadata fields to include. Use carefully.
+        show_cloud_tags (Optional[bool]) #Whether to show cloud tags. Default is False. Ends up being passed to the API as 0 or 1.
+        cloud_tag_fields (Optional[str]) #A comma-separated string of cloud tag fields to include. Use carefully. 
Returns: BaseList: A list of VMDRHost objects, with their DETECTIONS attribute populated.