diff --git a/features/environment.py b/features/environment.py
index f0d37c54..93792d24 100644
--- a/features/environment.py
+++ b/features/environment.py
@@ -71,8 +71,9 @@ def after_scenario(context, scenario):
     execution_mode = context.config.userdata.get('execution_mode')
     if execution_mode and execution_mode == 'ExecutionMode.TESTING':
         if context.failed:
-            if context.step.error_message and not 'Behave errors' in context.step.error_message: #exclude behave output from exception logging
+            if context.step.error_message and not getattr(context, 'intentional_error_occured', False): #exclude intentionally raised errors from exception logging
                 context.caught_exceptions.append(ExceptionSummary.from_context(context))
+        context.intentional_error_occured = False
         context.scenario_outcome_state.append((len(context.gherkin_outcomes)-1, {'scenario': context.scenario.name, 'last_step': context.scenario.steps[-1]}))
     elif execution_mode and execution_mode == 'ExecutionMode.PRODUCTION':
         if context.failed:
diff --git a/features/steps/validation_handling.py b/features/steps/validation_handling.py
index 45090a20..fe3816cc 100644
--- a/features/steps/validation_handling.py
+++ b/features/steps/validation_handling.py
@@ -68,6 +68,7 @@ def generate_error_message(context, errors):
     """
     Function to trigger the behave error mechanism by raising an exception so that errors are printed to the console. 
     """
+    context.intentional_error_occured = True
     assert not errors, "Errors occured:" + ''.join(f'\n - {error}' for error in errors)
 
 
diff --git a/test/test_main.py b/test/test_main.py
index 0b27f91c..12434468 100644
--- a/test/test_main.py
+++ b/test/test_main.py
@@ -109,11 +109,11 @@ def wrap_text(text, width):
     print(tabulate.tabulate(table_data, headers=headers, tablefmt='fancy_grid'))
 
     if base.startswith('fail'):
-        assert len(error_outcomes) > 0 or caught_exceptions
+        assert len(error_outcomes) > 0
     elif base.startswith('pass'):
-        assert len(error_outcomes) == 0 and len(activating_outcomes) > 0
+        assert len(error_outcomes) == 0 and len(activating_outcomes) and not caught_exceptions
     elif base.startswith('na'):
-        assert len(error_outcomes) == 0 and len(activating_outcomes) == 0
+        assert len(error_outcomes) == 0 and len(activating_outcomes) == 0 and not caught_exceptions
 
     if error_outcomes:
         tabulate_results = [