1 change: 1 addition & 0 deletions README.md
@@ -169,6 +169,7 @@ Run `python core.py validate --help` to see the list of validation options.
--snomed-url TEXT Base url of snomed api to use. (ex. https://snowstorm.snomedtools.org/snowstorm/snomed-ct)
--snomed-edition TEXT Edition of snomed to use. (ex. SNOMEDCT-US)
-r, --rules TEXT Specify rule core ID ex. CORE-000001. Can be specified multiple times.
-er, --exclude-rules TEXT Specify rule core ID to exclude, ex. CORE-000001. Can be specified multiple times.
-lr, --local-rules TEXT Specify relative path to directory or file containing
local rule yml and/or json rule files.
-cs, --custom-standard Adding this flag tells engine to use a custom standard specified with -s and -v
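A sketch of how the new flag could be invoked (the dataset path and rule ID are illustrative, borrowed from the regression tests added below); multiple -er options accumulate, mirroring -r:

python core.py validate -s sdtmig -v 3.4 -dp tests/resources/datasets/ae.xpt -er CORE-000473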
1 change: 1 addition & 0 deletions cdisc_rules_engine/models/validation_args.py
@@ -18,6 +18,7 @@
"define_version",
"external_dictionaries",
"rules",
"exclude_rules",
"local_rules",
"custom_standard",
"progress",
22 changes: 17 additions & 5 deletions core.py
@@ -170,6 +170,12 @@ def cli():
multiple=True,
help="Specify rule core ID ex. CORE-000001. Can be specified multiple times",
)
@click.option(
"--exclude-rules",
"-er",
multiple=True,
help="Specify rule core ID to exclude, ex. CORE-000001. Can be specified multiple times",
)
@click.option(
"--local-rules",
"-lr",
@@ -230,6 +236,7 @@ def validate(
snomed_edition: str,
snomed_url: str,
rules: Tuple[str],
exclude_rules: Tuple[str],
local_rules: str,
custom_standard: bool,
progress: str,
@@ -252,7 +259,11 @@
logger.error(
"Flag --raw-report can be used only when --output-format is JSON"
)
ctx.exit()
ctx.exit(2)

if exclude_rules and rules:
logger.error("Cannot use both --rules and --exclude-rules flags together.")
ctx.exit(2)

cache_path: str = os.path.join(os.path.dirname(__file__), cache)

@@ -276,28 +287,28 @@
logger.error(
"Argument --dataset-path cannot be used together with argument --data"
)
ctx.exit()
ctx.exit(2)
dataset_paths, found_formats = valid_data_file(
[str(Path(data).joinpath(fn)) for fn in os.listdir(data)]
)
if len(found_formats) > 1:
logger.error(
f"Argument --data contains more than one allowed file format ({', '.join(found_formats)})." # noqa: E501
)
ctx.exit()
ctx.exit(2)
elif dataset_path:
dataset_paths, found_formats = valid_data_file([dp for dp in dataset_path])
if len(found_formats) > 1:
logger.error(
f"Argument --dataset-path contains more than one allowed file format ({', '.join(found_formats)})." # noqa: E501
)
ctx.exit()
ctx.exit(2)
else:
logger.error(
"You must pass one of the following arguments: --dataset-path, --data"
)
# no need to define dataset_paths here, the program execution will stop
ctx.exit()
ctx.exit(2)
run_validation(
Validation_args(
cache_path,
@@ -315,6 +326,7 @@
define_version,
external_dictionaries,
rules,
exclude_rules,
local_rules,
custom_standard,
progress,
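A minimal sketch, assuming it is run from the repository root with the test dataset in place, of how a caller can observe the new non-zero exit status produced by ctx.exit(2) when --rules and --exclude-rules are combined (rule IDs are illustrative):

import subprocess

result = subprocess.run(
    [
        "python", "core.py", "validate",
        "-s", "sdtmig", "-v", "3.4",
        "-dp", "tests/resources/datasets/ae.xpt",
        "-r", "CORE-000001",
        "-er", "CORE-000001",
    ],
    capture_output=True,
    text=True,
)
# click propagates ctx.exit(2) as the process exit code, so argument errors
# are now distinguishable from a clean run (exit code 0).
print(result.returncode)  # expected: 2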
68 changes: 52 additions & 16 deletions scripts/script_utils.py
@@ -49,16 +49,18 @@ def get_library_metadata_from_cache(args) -> LibraryMetadataContainer: # noqa
define_version.model_package == "define_2_1"
and len(args.controlled_terminology_package) > 0
):
raise ValueError(
engine_logger.error(
"Cannot use -ct controlled terminology package command with Define-XML2.1 submission"
)
raise SystemError(2)
elif (
define_version.model_package == "define_2_0"
and len(args.controlled_terminology_package) > 1
):
raise ValueError(
engine_logger.error(
"Cannot provide multiple controlled terminology packages with Define-XML2.0 submission"
)
raise SystemError(2)
standards_file = os.path.join(args.cache, "standards_details.pkl")
models_file = os.path.join(args.cache, "standards_models.pkl")
variables_codelist_file = os.path.join(args.cache, "variable_codelist_maps.pkl")
@@ -245,18 +247,37 @@ def load_custom_rules(custom_data, cdisc_data, standard, version, rules, standar


def load_specified_rules(
rules_data, rule_ids, standard, version, standard_dict, substandard
rules_data,
rule_ids,
excluded_rule_ids,
standard,
version,
standard_dict,
substandard,
):
key = get_rules_cache_key(standard, version, substandard)
standard_rules = standard_dict.get(key, {})
valid_rule_ids = set()
for rule in rule_ids:
if rule in standard_rules:

# Determine valid rules based on inclusion and exclusion lists
for rule in standard_rules:
if (rule_ids is None or rule in rule_ids) and (
excluded_rule_ids is None or rule not in excluded_rule_ids
):
valid_rule_ids.add(rule)
else:
raise ValueError(
f"The rule specified '{rule}' is not in the standard {standard} and version {version}"
)
# Check that all specified rules are valid
if rule_ids:
for rule in rule_ids:
if rule not in standard_rules:
raise ValueError(
f"The rule specified to include '{rule}' is not in the standard {standard} and version {version}"
)
else:
for rule in excluded_rule_ids:
if rule not in standard_rules:
raise ValueError(
f"The rule specified to exclude '{rule}' is not in the standard {standard} and version {version}"
)
rules = []
for rule_id in valid_rule_ids:
rule_data = rules_data.get(rule_id)
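A standalone sketch (not the engine's code) of the selection rule implemented above: a rule from the standard is kept when it passes the include filter, if one is given, and is absent from the exclude filter, if one is given. The CLI rejects supplying both flags together, so in practice at most one filter is active.

from typing import Iterable, Optional, Set


def select_rules(
    standard_rules: Iterable[str],
    rule_ids: Optional[Set[str]] = None,
    excluded_rule_ids: Optional[Set[str]] = None,
) -> Set[str]:
    # None means "no filter of this kind was supplied".
    return {
        rule
        for rule in standard_rules
        if (rule_ids is None or rule in rule_ids)
        and (excluded_rule_ids is None or rule not in excluded_rule_ids)
    }


# Excluding one rule keeps the rest of the standard's rules:
print(select_rules({"CORE-000001", "CORE-000002"}, excluded_rule_ids={"CORE-000001"}))
# -> {'CORE-000002'}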
@@ -329,10 +350,11 @@ def load_rules_from_cache(args) -> List[dict]:
args.rules,
standard_dict,
)
elif args.rules:
elif args.rules or args.exclude_rules:
return load_specified_rules(
rules_data,
args.rules,
args.exclude_rules,
args.standard,
args.version.replace(".", "-"),
standard_dict,
@@ -365,17 +387,25 @@ def load_rules_from_local(args) -> List[dict]:
get_rules_cache_key(args.standard, args.version.replace(".", "-"), rule)
for rule in args.rules
)
excluded_keys = None
elif args.exclude_rules:
excluded_keys = set(
get_rules_cache_key(args.standard, args.version.replace(".", "-"), rule)
for rule in args.exclude_rules
)
keys = None
else:
engine_logger.info(
"No rules specified with -r rules flag. "
"No rules specified with -r or -er rules flags. "
"Validating with rules in local directory"
)
excluded_keys = None
keys = None

for rule_file in rule_files:
rule = load_and_parse_rule(rule_file)
if rule:
process_rule(rule, args, rule_data, rules, keys)
process_rule(rule, args, rule_data, rules, keys, excluded_keys)

missing_keys = set()
if keys:
@@ -428,7 +458,7 @@ def rule_matches_standard_version(rule, standard, version, substandard=None):
return False


def process_rule(rule, args, rule_data, rules, keys):
def process_rule(rule, args, rule_data, rules, keys, excluded_keys):
"""Process a rule and add it to the rules list if applicable."""
core_id = rule.get("core_id")
if not core_id:
Expand All @@ -440,7 +470,11 @@ def process_rule(rule, args, rule_data, rules, keys):
if rule_identifier in rule_data:
engine_logger.error(f"Duplicate rule {core_id} in local directory. Skipping...")
return
if rule.get("status", "").lower() == "draft":
if (
rule.get("status", "").lower() == "draft"
and (keys is None or rule_identifier in keys)
and (excluded_keys is None or rule_identifier not in excluded_keys)
):
rule_data[rule_identifier] = rule
rules.append(rule)
elif rule.get("status", None).lower() == "published":
Expand All @@ -455,13 +489,15 @@ def process_rule(rule, args, rule_data, rules, keys):
f"version '{args.version}'{substandard_msg}. Skipping..."
)
return
if keys is None or rule_identifier in keys:
if (keys is None or rule_identifier in keys) and (
excluded_keys is None or rule_identifier not in excluded_keys
):
rule_data[rule_identifier] = rule
rules.append(rule)
else:
engine_logger.info(
f"Rule {core_id} not specified with "
"-r rule flag and in local directory. Skipping..."
"-r rule flag or excluded with -er rule flag and in local directory. Skipping..."
)


49 changes: 47 additions & 2 deletions tests/QARegressionTests/test_core/test_validate.py
@@ -192,6 +192,51 @@ def test_validate_local_rule(self):
self.assertEqual(stderr, "")
self.assertFalse(self.error_keyword in stdout)

def test_validate_local_exclude_rule(self):
args = [
"python",
"core.py",
"validate",
"-s",
"sdtmig",
"-v",
"3.4",
"-dp",
os.path.join("tests", "resources", "datasets", "ae.xpt"),
"-lr",
os.path.join("tests", "resources", "rules"),
"-er",
"CORE-000473",
]
exit_code, stdout, stderr = run_command(args, False)
self.assertEqual(exit_code, 0)
self.assertEqual(stderr, "")
self.assertFalse(self.error_keyword in stdout)

def test_validate_include_exclude(self):
args = [
"python",
"core.py",
"validate",
"-s",
"sdtmig",
"-v",
"3.4",
"-dp",
os.path.join("tests", "resources", "datasets", "ae.xpt"),
"-lr",
os.path.join("tests", "resources", "rules"),
"-r",
"CORE-000470",
"-er",
"CORE-000473",
]
exit_code, stdout, stderr = run_command(args, False)
self.assertEqual(exit_code, 2)
self.assertIn(
"cannot use both --rules and --exclude-rules flags together.", stderr
)

def test_validate_minimum_options(self):
args = [
"python",
@@ -489,7 +534,7 @@ def test_validate_dummy_with_all_options(self):
f"-l error"
)
exit_code, stdout, stderr = run_command(args, True)
self.assertEqual(exit_code, 0)
self.assertEqual(exit_code, 2)
self.assertFalse(self.error_keyword in stdout)
self.assertFalse(self.error_keyword in stdout)
expected_pattern = (
@@ -513,7 +558,7 @@ def test_validate_dummy_without_dataset_path(self):
f"-v 3.4 "
)
exit_code, stdout, stderr = run_command(args, True)
self.assertEqual(exit_code, 0)
self.assertEqual(exit_code, 2)
expected_pattern = (
r"\[error \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3} - "
r"core\.py:\d+\] - you must pass one of the following arguments: "