Skip to content

Commit

Permalink
Merge pull request #343 from demisto/hod_validate_files
Browse files Browse the repository at this point in the history
demisto/etc#23233 added all files validation
  • Loading branch information
hod-alpert authored Apr 21, 2020
2 parents 1df3412 + 8f16641 commit 6ff1b7c
Show file tree
Hide file tree
Showing 16 changed files with 804 additions and 56 deletions.
2 changes: 1 addition & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# Changelog

* Added *-a, --validate-all* option in *validate* to run all validations on all files.

#### 0.5.2
* Added *-c, --command* option in *generate-docs* to generate a specific command from an integration.
Expand Down
10 changes: 8 additions & 2 deletions demisto_sdk/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,6 +204,10 @@ def unify(**kwargs):
@click.option(
'-p', '--path', help='Path of file to validate specifically, outside of a git directory.'
)
@click.option(
'-a', '--validate-all', is_flag=True, show_default=True, default=False,
help='Whether to run all validation on all files or not'
)
@pass_config
def validate(config, **kwargs):
sys.path.append(config.configuration.env_dir)
Expand All @@ -218,7 +222,8 @@ def validate(config, **kwargs):
is_backward_check=not kwargs['no_backward_comp'],
is_circle=kwargs['post_commit'], prev_ver=kwargs['prev_ver'],
validate_conf_json=kwargs['conf_json'], use_git=kwargs['use_git'],
file_path=kwargs.get('path'))
file_path=kwargs.get('path'),
validate_all=kwargs.get('validate_all'))
return validator.run()


Expand Down Expand Up @@ -301,7 +306,8 @@ def secrets(config, **kwargs):
@click.option("-kc", "--keep-container", is_flag=True, help="Keep the test container")
@click.option("--test-xml", help="Path to store pytest xml results", type=click.Path(exists=True, resolve_path=True))
@click.option("--json-report", help="Path to store json results", type=click.Path(exists=True, resolve_path=True))
@click.option("-lp", "--log-path", help="Path to store all levels of logs", type=click.Path(exists=True, resolve_path=True))
@click.option("-lp", "--log-path", help="Path to store all levels of logs",
type=click.Path(exists=True, resolve_path=True))
def lint(input: str, git: bool, all_packs: bool, verbose: int, quiet: bool, parallel: int, no_flake8: bool,
no_bandit: bool, no_mypy: bool, no_vulture: bool, no_pylint: bool, no_test: bool, no_pwsh_analyze: bool,
no_pwsh_test: bool, keep_container: bool, test_xml: str, json_report: str, log_path: str):
Expand Down
2 changes: 2 additions & 0 deletions demisto_sdk/commands/common/schemas/playbook.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
type: map
mapping:
beta:
type: bool
elasticcommonfields:
type: map
allowempty: True
Expand Down
15 changes: 9 additions & 6 deletions demisto_sdk/commands/common/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -605,13 +605,16 @@ def find_type(path: str):
return 'layout'
else:
return 'dashboard'

# When validating all files, the 'id' field can sometimes be an integer rather than a string
elif 'id' in _dict:
_id = _dict['id'].lower()
if _id.startswith('incident'):
return 'incidentfield'
elif _id.startswith('indicator'):
return 'indicatorfield'
if isinstance(_dict.get('id'), str):
_id = _dict['id'].lower()
if _id.startswith('incident'):
return 'incidentfield'
elif _id.startswith('indicator'):
return 'indicatorfield'
else:
print(f'The file {path} could not be recognized, please update the "id" to be a string')

return ''

Expand Down
5 changes: 5 additions & 0 deletions demisto_sdk/commands/validate/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ Whether the validation is done after you committed your files, this will help th
should check in its run. Before you commit the files it should not be used. Mostly for build validations.
* **-p, --path**
Path of file to validate specifically.
* **-a, --validate-all**
Whether to run all validations on all files or not.

**Examples**:
`demisto-sdk validate`
Expand All @@ -58,3 +60,6 @@ This indicates that the command runs post commit.
`demisto-sdk validate -p Integrations/Pwned-V2/Pwned-V2.yml`
This will validate the file Integrations/Pwned-V2/Pwned-V2.yml only.
<br><br>
`demisto-sdk validate -a`
This will validate all files under the `Packs` and `Beta_Integrations` directories.
<br><br>
151 changes: 138 additions & 13 deletions demisto_sdk/commands/validate/file_validator.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,19 +13,21 @@

import os
import re
from glob import glob

from demisto_sdk.commands.common.configuration import Configuration
from demisto_sdk.commands.common.constants import (
BETA_INTEGRATION_REGEX, BETA_INTEGRATION_YML_REGEX, CHECKED_TYPES_REGEXES,
CODE_FILES_REGEX, DIR_LIST_FOR_REGULAR_ENTETIES, IGNORED_TYPES_REGEXES,
IMAGE_REGEX, INTEGRATION_REGEX, JSON_ALL_DASHBOARDS_REGEXES,
JSON_ALL_INCIDENT_TYPES_REGEXES, JSON_ALL_LAYOUT_REGEXES,
JSON_INDICATOR_AND_INCIDENT_FIELDS, KNOWN_FILE_STATUSES,
OLD_YML_FORMAT_FILE, PACKAGE_SCRIPTS_REGEXES,
BETA_INTEGRATION_REGEX, BETA_INTEGRATION_YML_REGEX, BETA_INTEGRATIONS_DIR,
CHECKED_TYPES_REGEXES, CODE_FILES_REGEX, DIR_LIST_FOR_REGULAR_ENTETIES,
IGNORED_TYPES_REGEXES, IMAGE_REGEX, INTEGRATION_REGEX, INTEGRATION_REGXES,
JSON_ALL_DASHBOARDS_REGEXES, JSON_ALL_INCIDENT_TYPES_REGEXES,
JSON_ALL_LAYOUT_REGEXES, JSON_INDICATOR_AND_INCIDENT_FIELDS,
KNOWN_FILE_STATUSES, OLD_YML_FORMAT_FILE, PACKAGE_SCRIPTS_REGEXES,
PACKAGE_SUPPORTING_DIRECTORIES, PACKS_DIR, PACKS_DIRECTORIES,
PLAYBOOK_REGEX, PLAYBOOKS_REGEXES_LIST, REPUTATION_REGEX, SCHEMA_REGEX,
SCRIPT_REGEX, TEST_PLAYBOOK_REGEX, TESTS_DIRECTORIES,
YML_BETA_INTEGRATIONS_REGEXES, YML_INTEGRATION_REGEXES, Errors)
SCRIPT_REGEX, TEST_PLAYBOOK_REGEX, TEST_PLAYBOOKS_DIR, TESTS_DIRECTORIES,
YML_ALL_SCRIPTS_REGEXES, YML_BETA_INTEGRATIONS_REGEXES,
YML_INTEGRATION_REGEXES, Errors)
from demisto_sdk.commands.common.hook_validations.conf_json import \
ConfJsonValidator
from demisto_sdk.commands.common.hook_validations.dashboard import \
Expand Down Expand Up @@ -75,12 +77,14 @@ class FilesValidator:
validate_conf_json (bool): Whether to validate conf.json or not.
validate_id_set (bool): Whether to validate id_set or not.
file_path (string): If validating a specific file, golds it's path.
validate_all (bool): Whether to validate all files or not.
configuration (Configuration): Configurations for IDSetValidator.
"""

def __init__(self, is_backward_check=True, prev_ver=None, use_git=False, is_circle=False,
print_ignored_files=False, validate_conf_json=True, validate_id_set=False, file_path=None,
configuration=Configuration()):
validate_all=False, configuration=Configuration()):
self.validate_all = validate_all
self.branch_name = ''
self.use_git = use_git
if self.use_git:
Expand Down Expand Up @@ -514,22 +518,139 @@ def validate_committed_files(self):
# Ensure schema change did not break BC
if schema_changed:
print("Schema changed, validating all files")
self.validate_all_files()
self.validate_all_files_schema()
else:
self.validate_modified_files(modified_files)
self.validate_added_files(added_files)
self.validate_no_old_format(old_format_files)
self.validate_pack_unique_files(packs)

def validate_pack_unique_files(self, packs):
def validate_pack_unique_files(self, packs: set) -> None:
"""
Runs validations on the following pack files:
* .secret-ignore: Validates that the file exist and that the file's secrets can be parsed as a list delimited by '\n'
* .pack-ignore: Validates that the file exists and that all regexes in it can be compiled
* README.md file: Validates that the file exists
* pack_metadata.json: Validates that the file exists and that it has a valid structure
Args:
packs: A set of pack paths i.e {Packs/<pack-name1>, Packs/<pack-name2>}
"""
for pack in packs:
print(f'Validating {pack} unique pack files')
pack_unique_files_validator = PackUniqueFilesValidator(pack)
pack_errors = pack_unique_files_validator.validate_pack_unique_files()
if pack_errors:
print_error(pack_errors)
self._is_valid = False

def run_all_validations_on_file(self, file_path: str, file_type: str = None) -> None:
    """
    Runs all validations on file specified in 'file_path'.

    Only '.yml', '.json' and '.md' files are considered; changelog and description
    files are skipped since those are checked as part of the integration validation.
    README files are handled by ReadMeValidator alone. Every other file is first
    checked with StructureValidator and, only if its structure is valid, dispatched
    to the matching content validator — chosen by path regexes or by 'file_type'.
    Failures are recorded by setting self._is_valid to False; nothing is returned.

    Args:
        file_path: A relative content path to a file to be validated
        file_type: The output of 'find_type' method
    """
    file_extension = os.path.splitext(file_path)[-1]
    # We validate only yml json and .md files
    if file_extension not in ['.yml', '.json', '.md']:
        return

    # Ignoring changelog and description files since these are checked on the integration validation
    if 'changelog' in file_path.lower() or 'description' in file_path.lower():
        return
    print(f'Validating {file_path}')

    # README files get only the README validation — no structure check applies to them.
    if 'README' in file_path:
        readme_validator = ReadMeValidator(file_path)
        if not readme_validator.is_valid_file():
            self._is_valid = False
        return

    structure_validator = StructureValidator(file_path, predefined_scheme=file_type)
    # If the basic structure is invalid, record the failure and skip the content
    # validators below — the elif chain only runs when is_valid_file() returned True.
    if not structure_validator.is_valid_file():
        self._is_valid = False

    elif re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
        playbook_validator = PlaybookValidator(structure_validator)
        if not playbook_validator.is_valid_playbook():
            self._is_valid = False

    elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE) or file_type == 'playbook':
        playbook_validator = PlaybookValidator(structure_validator)
        # validate_rn=False: release-notes validation is skipped on this all-files path.
        if not playbook_validator.is_valid_playbook(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, INTEGRATION_REGXES) or file_type == 'integration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, YML_ALL_SCRIPTS_REGEXES) or file_type == 'script':
        # Set file path to the yml file
        # NOTE(review): this assigns file_path back onto the validator unchanged —
        # looks like a no-op here; confirm whether a different (package yml) path was intended.
        structure_validator.file_path = file_path
        script_validator = ScriptValidator(structure_validator)

        if not script_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES) or file_type == 'betaintegration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_beta_integration():
            self._is_valid = False

    # incident fields and indicator fields are using the same scheme.
    elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \
            file_type in ('incidentfield', 'indicatorfield'):
        incident_field_validator = IncidentFieldValidator(structure_validator)
        if not incident_field_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, [REPUTATION_REGEX]) or file_type == 'reputation':
        reputation_validator = ReputationValidator(structure_validator)
        if not reputation_validator.is_valid_file(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout':
        layout_validator = LayoutValidator(structure_validator)
        if not layout_validator.is_valid_layout(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard':
        dashboard_validator = DashboardValidator(structure_validator)
        if not dashboard_validator.is_valid_dashboard(validate_rn=False):
            self._is_valid = False

    elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES) or file_type == 'incidenttype':
        incident_type_validator = IncidentTypeValidator(structure_validator)
        if not incident_type_validator.is_valid_incident_type(validate_rn=False):
            self._is_valid = False

    # A recognized content type with no dedicated validator — report and continue.
    elif checked_type(file_path, CHECKED_TYPES_REGEXES):
        print(f'Could not find validations for file {file_path}')

    else:
        # Completely unrecognized file type: fail the run.
        print_error('The file type of {} is not supported in validate command'.format(file_path))
        print_error('validate command supports: Integrations, Scripts, Playbooks, dashboards, incident types, '
                    'reputations, Incident fields, Indicator fields, Images, Release notes, '
                    'Layouts and Descriptions')
        self._is_valid = False

def validate_all_files(self):
    """Run the full validation suite over the whole content repository.

    Validates conf.json, then the unique files of every pack found directly
    under the Packs directory, and finally every non-directory file under the
    Packs, Beta_Integrations and test-playbooks directories. Failures are
    recorded on self._is_valid.
    """
    print('Validating all files')

    # conf.json is validated once, up front.
    print('Validating conf.json')
    if not ConfJsonValidator().is_valid_conf_json():
        self._is_valid = False

    # Pack-level unique files (README, metadata, ignore files).
    pack_names = set()
    for pack_path in glob(f'{PACKS_DIR}/*'):
        pack_names.add(os.path.basename(pack_path))
    self.validate_pack_unique_files(pack_names)

    # Collect every regular file (skip directories) under the content directories.
    files_to_validate = set()
    for directory in (PACKS_DIR, BETA_INTEGRATIONS_DIR, TEST_PLAYBOOKS_DIR):
        for path in glob(fr'{directory}/**', recursive=True):
            if not os.path.isdir(path):
                files_to_validate.add(path)

    print('Validating all Pack and Beta Integration files')
    for path in files_to_validate:
        self.run_all_validations_on_file(path, file_type=find_type(path))

def validate_all_files_schema(self):
"""Validate all files in the repo are in the right format."""
# go over packs
for root, dirs, _ in os.walk(PACKS_DIR):
Expand Down Expand Up @@ -590,6 +711,9 @@ def is_valid_structure(self):
Returns:
(bool). Whether the structure is valid or not.
"""
if self.validate_all:
self.validate_all_files()
return self._is_valid
if self.validate_conf_json:
if not self.conf_json_validator.is_valid_conf_json():
self._is_valid = False
Expand All @@ -601,15 +725,16 @@ def is_valid_structure(self):
else:
self.validate_against_previous_version(no_error=True)
print('Validates all of Content repo directories according to their schemas')
self.validate_all_files()
self.validate_all_files_schema()

else:
if self.file_path:
print('Not using git, validating file: {}'.format(self.file_path))
self.is_backward_check = False # if not using git, no need for BC checks
self.validate_added_files({self.file_path}, file_type=find_type(self.file_path))
else:
print('Not using git, validating all files.')
self.validate_all_files()
self.validate_all_files_schema()

return self._is_valid

Expand Down
Loading

0 comments on commit 6ff1b7c

Please sign in to comment.