Make test case result errors work with WorkflowTaskException and use them in ErrorForDisplay

jasquat 2023-05-23 10:33:37 -04:00
parent 4e359e6574
commit ed42c6c399
4 changed files with 157 additions and 13 deletions


@@ -2,12 +2,14 @@ import glob
import json
import os
import re
import traceback
from dataclasses import dataclass
from typing import Any
from typing import Callable
from typing import Optional
from lxml import etree # type: ignore
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException # type: ignore
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore
from SpiffWorkflow.task import Task as SpiffTask # type: ignore
from SpiffWorkflow.task import TaskState
@@ -43,12 +45,23 @@ class MissingInputTaskData(Exception):
pass
@dataclass
class TestCaseErrorDetails:
error_messages: list[str]
task_error_line: Optional[str] = None
task_trace: Optional[list[str]] = None
task_bpmn_identifier: Optional[str] = None
task_bpmn_name: Optional[str] = None
task_line_number: Optional[int] = None
stacktrace: Optional[list[str]] = None
@dataclass
class TestCaseResult:
passed: bool
bpmn_file: str
test_case_identifier: str
error_messages: Optional[list[str]] = None
test_case_error_details: Optional[TestCaseErrorDetails] = None
DEFAULT_NSMAP = {
@@ -127,8 +140,8 @@ class ProcessModelTestRunner:
formatted_tests = ["FAILING TESTS:"]
for failing_test in self.failing_tests():
msg = ""
if failing_test.error_messages:
msg = "\n\t\t".join(failing_test.error_messages)
if failing_test.test_case_error_details is not None:
msg = "\n\t\t".join(failing_test.test_case_error_details.error_messages)
formatted_tests.append(f"\t{failing_test.bpmn_file}: {failing_test.test_case_identifier}: {msg}")
return "\n".join(formatted_tests)
@@ -147,8 +160,7 @@ class ProcessModelTestRunner:
try:
self.run_test_case(bpmn_file, test_case_identifier, test_case_contents)
except Exception as ex:
ex_as_array = str(ex).split("\n")
self._add_test_result(False, bpmn_file, test_case_identifier, ex_as_array)
self._add_test_result(False, bpmn_file, test_case_identifier, exception=ex)
def run_test_case(self, bpmn_file: str, test_case_identifier: str, test_case_contents: dict) -> None:
bpmn_process_instance = self._instantiate_executer(bpmn_file)
@@ -329,15 +341,40 @@ class ProcessModelTestRunner:
def _get_relative_path_of_bpmn_file(self, bpmn_file: str) -> str:
return os.path.relpath(bpmn_file, start=self.process_model_directory_path)
def _exception_to_test_case_error_details(self, exception: Exception) -> TestCaseErrorDetails:
error_messages = str(exception).split("\n")
test_case_error_details = TestCaseErrorDetails(error_messages=error_messages)
if isinstance(exception, WorkflowTaskException):
test_case_error_details.task_error_line = exception.error_line
test_case_error_details.task_trace = exception.task_trace
test_case_error_details.task_line_number = exception.line_number
test_case_error_details.task_bpmn_identifier = exception.task_spec.bpmn_id
test_case_error_details.task_bpmn_name = exception.task_spec.bpmn_name
else:
test_case_error_details.stacktrace = traceback.format_exc().split("\n")
return test_case_error_details
def _add_test_result(
self, passed: bool, bpmn_file: str, test_case_identifier: str, error_messages: Optional[list[str]] = None
self,
passed: bool,
bpmn_file: str,
test_case_identifier: str,
error_messages: Optional[list[str]] = None,
exception: Optional[Exception] = None,
) -> None:
test_case_error_details = None
if exception is not None:
test_case_error_details = self._exception_to_test_case_error_details(exception)
elif error_messages:
test_case_error_details = TestCaseErrorDetails(error_messages=error_messages)
bpmn_file_relative = self._get_relative_path_of_bpmn_file(bpmn_file)
test_result = TestCaseResult(
passed=passed,
bpmn_file=bpmn_file_relative,
test_case_identifier=test_case_identifier,
error_messages=error_messages,
test_case_error_details=test_case_error_details,
)
self.test_case_results.append(test_result)
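
For reference, here is a hypothetical example of the payload this now produces when run_test_case raises a WorkflowTaskException, typed against the TestCaseResult and TestCaseErrorDetails interfaces added to interfaces.ts later in this commit (the file name, identifiers, and messages are illustrative, not taken from a real run):

import { TestCaseResult } from '../interfaces';

// Illustrative failing result for a script task that threw a NameError,
// surfaced on the backend as a WorkflowTaskException.
export const exampleFailingResult: TestCaseResult = {
  passed: false,
  bpmn_file: 'script_task_with_bad_data.bpmn',
  test_case_identifier: 'test_case_1',
  test_case_error_details: {
    error_messages: ["NameError: name 'a' is not defined"],
    task_bpmn_identifier: 'script_task_one',
    task_bpmn_name: 'Script Task One',
    task_line_number: 2,
    task_trace: ["Script Task 'Script Task One' (script_task_one)"],
  },
};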


@@ -4,6 +4,7 @@ import {
ErrorForDisplay,
ProcessInstanceEventErrorDetail,
ProcessInstanceLogEntry,
TestCaseErrorDetails,
} from '../interfaces';
function errorDetailDisplay(
@@ -40,6 +41,22 @@ export const errorForDisplayFromProcessInstanceErrorDetail = (
return errorForDisplay;
};
export const errorForDisplayFromTestCaseErrorDetails = (
testCaseErrorDetails: TestCaseErrorDetails
) => {
const errorForDisplay: ErrorForDisplay = {
message: testCaseErrorDetails.error_messages.join('\n'),
messageClassName: 'failure-string',
task_name: testCaseErrorDetails.task_bpmn_name,
task_id: testCaseErrorDetails.task_bpmn_identifier,
line_number: testCaseErrorDetails.task_line_number,
error_line: testCaseErrorDetails.task_error_line,
task_trace: testCaseErrorDetails.task_trace,
stacktrace: testCaseErrorDetails.stacktrace,
};
return errorForDisplay;
};
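
A minimal usage sketch for the new helper (a hypothetical wrapper, mirroring how ProcessModelTestRun consumes it further down in this commit):

import { TestCaseResult } from '../interfaces';
import {
  childrenForErrorObject,
  errorForDisplayFromTestCaseErrorDetails,
} from './ErrorDisplay';

// Turn a failing TestCaseResult into the renderable children used by the error display.
export const errorChildrenForFailingTestCase = (testCaseResult: TestCaseResult) => {
  if (!testCaseResult.test_case_error_details) {
    return null;
  }
  const errorForDisplay = errorForDisplayFromTestCaseErrorDetails(
    testCaseResult.test_case_error_details
  );
  return childrenForErrorObject(errorForDisplay);
};
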
export const childrenForErrorObject = (errorObject: ErrorForDisplay) => {
let sentryLinkTag = null;
if (errorObject.sentry_link) {


@@ -3,19 +3,24 @@ import { Button, Modal } from '@carbon/react';
import { useState } from 'react';
import { useUriListForPermissions } from '../hooks/UriListForPermissions';
import HttpService from '../services/HttpService';
import { ProcessFile } from '../interfaces';
import { ProcessFile, TestCaseResult, TestCaseResults } from '../interfaces';
import {
childrenForErrorObject,
errorForDisplayFromTestCaseErrorDetails,
} from './ErrorDisplay';
type OwnProps = {
processModelFile: ProcessFile;
};
export default function ProcessModelTestRun({ processModelFile }: OwnProps) {
const [testCaseResults, setTestCaseResults] = useState<any>(null);
const [testCaseResults, setTestCaseResults] =
useState<TestCaseResults | null>(null);
const [showTestCaseResultsModal, setShowTestCaseResultsModal] =
useState<boolean>(false);
const { targetUris } = useUriListForPermissions();
const onProcessModelTestRunSuccess = (result: any) => {
const onProcessModelTestRunSuccess = (result: TestCaseResults) => {
setTestCaseResults(result);
};
@@ -60,18 +65,80 @@ export default function ProcessModelTestRun({ processModelFile }: OwnProps) {
});
};
const testCaseFormattedResultTag = () => {
if (!testCaseResults) {
return null;
}
const passingRows: any[] = [];
const failingRows: any[] = [];
testCaseResults.passing.forEach((testCaseResult: TestCaseResult) => {
passingRows.push(<p>{testCaseResult.test_case_identifier}</p>);
});
testCaseResults.failing
.slice(0, 2)
.forEach((testCaseResult: TestCaseResult) => {
if (testCaseResult.test_case_error_details) {
const errorForDisplay = errorForDisplayFromTestCaseErrorDetails(
testCaseResult.test_case_error_details
);
const errorChildren = childrenForErrorObject(errorForDisplay);
failingRows.push(
<>
<br />
<p>
Test Case:{' '}
<strong>{testCaseResult.test_case_identifier}</strong>
</p>
{errorChildren}
</>
);
}
});
return (
<>
<p>Passing: {testCaseResults.passing.length}</p>
<p>Failing: {testCaseResults.failing.length}</p>
<br />
{failingRows.length > 0 ? (
<>
<p>Failure Details:</p>
{failingRows}
</>
) : null}
{passingRows.length > 0 ? (
<>
<p>Successful Test Cases:</p>
{passingRows}
</>
) : null}
</>
);
};
const testCaseResultsModal = () => {
if (!testCaseResults) {
return null;
}
let modalHeading = 'All Tests PASSED';
if (!testCaseResults.all_passed) {
modalHeading = 'Some Tests FAILED';
}
return (
<Modal
open={showTestCaseResultsModal}
data-qa="test-case-results-modal"
modalHeading="RESULT FOR"
modalLabel="LABLE"
modalHeading={modalHeading}
modalLabel="Test Case Results"
primaryButtonText="OK"
onRequestSubmit={() => setShowTestCaseResultsModal(false)}
onRequestClose={() => setShowTestCaseResultsModal(false)}
>
{JSON.stringify(testCaseResults)}
{testCaseFormattedResultTag()}
</Modal>
);
};
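
For context, a hypothetical parent (not part of this commit) would mount the component per process model test file roughly like this; the ProcessModelFileTestRunRow name, the file prop, and the './ProcessModelTestRun' path are assumptions for illustration:

import { ProcessFile } from '../interfaces';
import ProcessModelTestRun from './ProcessModelTestRun';

// Render the test-run trigger and its results modal for a single BPMN test file.
export default function ProcessModelFileTestRunRow({ file }: { file: ProcessFile }) {
  return <ProcessModelTestRun processModelFile={file} />;
}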


@@ -365,3 +365,26 @@ export interface InterstitialPageResponse {
task?: ProcessInstanceTask;
process_instance?: ProcessInstance;
}
export interface TestCaseErrorDetails {
error_messages: string[];
stacktrace?: string[];
task_bpmn_identifier?: string;
task_bpmn_name?: string;
task_error_line?: string;
task_line_number?: number;
task_trace?: string[];
}
export interface TestCaseResult {
bpmn_file: string;
passed: boolean;
test_case_identifier: string;
test_case_error_details?: TestCaseErrorDetails;
}
export interface TestCaseResults {
all_passed: boolean;
failing: TestCaseResult[];
passing: TestCaseResult[];
}
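
Tying the pieces together, here is a hypothetical TestCaseResults payload of the shape these interfaces describe, with one passing and one failing case; the failing case shows the generic-exception branch, where the backend attaches a Python stacktrace instead of task details (all values are illustrative):

import { TestCaseResults } from '../interfaces';

// all_passed drives the modal heading in ProcessModelTestRun;
// passing/failing drive the formatted result rows.
export const exampleTestCaseResults: TestCaseResults = {
  all_passed: false,
  passing: [
    {
      passed: true,
      bpmn_file: 'basic_manual_task.bpmn',
      test_case_identifier: 'test_case_1',
    },
  ],
  failing: [
    {
      passed: false,
      bpmn_file: 'basic_script_task.bpmn',
      test_case_identifier: 'test_case_2',
      test_case_error_details: {
        error_messages: ['Expected task data did not match actual task data'],
        stacktrace: ['Traceback (most recent call last):', '  ...'],
      },
    },
  ],
};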