quality_control: factor out duplicate annotation summary creation logic (#7632)

In addition, remove code creating an unused annotation summary object
from `generate_report`.
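
For illustration, a minimal standalone sketch of the metric derivation that the factored-out `_generate_annotations_summary` helper centralizes. The toy confusion matrix and the `_arr_div` definition below are assumptions made for this example only; the real helper also wraps the results in `ComparisonReportAnnotationsSummary` and `ConfusionMatrix`:

```python
import numpy as np

# Assumption for this sketch: the last row/column of the matrix collects unmatched annotations.
UNMATCHED_IDX = -1

def _arr_div(a, b):
    # Assumption: element-wise division that yields 0 where the divisor is 0.
    return np.divide(a, b, out=np.zeros_like(a, dtype=float), where=b != 0)

# Toy matrix: rows = dataset (ds) labels + unmatched, columns = ground-truth (gt) labels + unmatched.
confusion_matrix = np.array([
    [5, 1, 0],
    [0, 7, 2],
    [1, 0, 0],
])

matched_ann_counts = np.diag(confusion_matrix)    # per-label matches
ds_ann_counts = np.sum(confusion_matrix, axis=1)  # per-label ds totals
gt_ann_counts = np.sum(confusion_matrix, axis=0)  # per-label gt totals

label_precisions = _arr_div(matched_ann_counts, ds_ann_counts)
label_recalls = _arr_div(matched_ann_counts, gt_ann_counts)
label_accuracies = _arr_div(
    matched_ann_counts, ds_ann_counts + gt_ann_counts - matched_ann_counts
)

valid_count = np.sum(matched_ann_counts)
missing_count = np.sum(confusion_matrix[UNMATCHED_IDX, :])  # gt annotations with no ds match
extra_count = np.sum(confusion_matrix[:, UNMATCHED_IDX])    # ds annotations with no gt match
ds_count = np.sum(ds_ann_counts[:UNMATCHED_IDX])            # excludes the unmatched bucket
gt_count = np.sum(gt_ann_counts[:UNMATCHED_IDX])
```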

<!-- Raise an issue to propose your change (https://github.com/opencv/cvat/issues).
It helps to avoid duplication of efforts from multiple independent contributors.
Discuss your ideas with maintainers to be sure that changes will be approved and merged.
Read the [Contribution guide](https://opencv.github.io/cvat/docs/contributing/). -->

<!-- Provide a general summary of your changes in the Title above -->

### Motivation and context
<!-- Why is this change required? What problem does it solve? If it fixes an open
issue, please link to the issue here. Describe your changes in detail, add screenshots. -->

### How has this been tested?
<!-- Please describe in detail how you tested your changes.
Include details of your testing environment, and the tests you ran to
see how your change affects other areas of the code, etc. -->
Manual testing.

### Checklist
<!-- Go over all the following points, and put an `x` in all the boxes that apply.
If an item isn't applicable for some reason, then ~~explicitly strikethrough~~ the whole line.
If you don't do that, GitHub will show incorrect progress for the pull request.
If you're unsure about any of these, don't hesitate to ask. We're here to help! -->
- [x] I submit my changes into the `develop` branch
- ~~[ ] I have created a changelog fragment~~ <!-- see top comment in CHANGELOG.md -->
- ~~[ ] I have updated the documentation accordingly~~
- ~~[ ] I have added tests to cover my changes~~
- ~~[ ] I have linked related issues (see [GitHub docs](https://help.github.com/en/github/managing-your-work-on-github/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword))~~
- ~~[ ] I have increased versions of npm packages if it is necessary ([cvat-canvas](https://github.com/opencv/cvat/tree/develop/cvat-canvas#versioning), [cvat-core](https://github.com/opencv/cvat/tree/develop/cvat-core#versioning), [cvat-data](https://github.com/opencv/cvat/tree/develop/cvat-data#versioning) and [cvat-ui](https://github.com/opencv/cvat/tree/develop/cvat-ui#versioning))~~

### License

- [x] I submit _my code changes_ under the same [MIT License](https://github.com/opencv/cvat/blob/develop/LICENSE) that covers the project.
  Feel free to contact the maintainers if that's a concern.
SpecLad authored Mar 19, 2024
1 parent acb68a5 commit 53bf350
Showing 1 changed file with 41 additions and 74 deletions.
115 changes: 41 additions & 74 deletions cvat/apps/quality_control/quality_reports.py
@@ -1886,37 +1886,9 @@ def _find_closest_unmatched_shape(shape: dm.Annotation):
gt_label_idx = gt_ann.label if gt_ann else self._UNMATCHED_IDX
confusion_matrix[ds_label_idx, gt_label_idx] += 1

matched_ann_counts = np.diag(confusion_matrix)
ds_ann_counts = np.sum(confusion_matrix, axis=1)
gt_ann_counts = np.sum(confusion_matrix, axis=0)
label_accuracies = _arr_div(
matched_ann_counts, ds_ann_counts + gt_ann_counts - matched_ann_counts
)
label_precisions = _arr_div(matched_ann_counts, ds_ann_counts)
label_recalls = _arr_div(matched_ann_counts, gt_ann_counts)

valid_annotations_count = np.sum(matched_ann_counts)
missing_annotations_count = np.sum(confusion_matrix[self._UNMATCHED_IDX, :])
extra_annotations_count = np.sum(confusion_matrix[:, self._UNMATCHED_IDX])
total_annotations_count = np.sum(confusion_matrix)
ds_annotations_count = np.sum(ds_ann_counts[: self._UNMATCHED_IDX])
gt_annotations_count = np.sum(gt_ann_counts[: self._UNMATCHED_IDX])

self._frame_results[frame_id] = ComparisonReportFrameSummary(
annotations=ComparisonReportAnnotationsSummary(
valid_count=valid_annotations_count,
missing_count=missing_annotations_count,
extra_count=extra_annotations_count,
total_count=total_annotations_count,
ds_count=ds_annotations_count,
gt_count=gt_annotations_count,
confusion_matrix=ConfusionMatrix(
labels=confusion_matrix_labels,
rows=confusion_matrix,
precision=label_precisions,
recall=label_recalls,
accuracy=label_accuracies,
),
annotations=self._generate_annotations_summary(
confusion_matrix, confusion_matrix_labels
),
annotation_components=ComparisonReportAnnotationComponentsSummary(
shape=ComparisonReportAnnotationShapeSummary(
@@ -1955,21 +1927,49 @@ def _make_zero_confusion_matrix(self) -> Tuple[List[str], np.ndarray]:

return label_names, confusion_matrix

@classmethod
def _generate_annotations_summary(
cls, confusion_matrix: np.ndarray, confusion_matrix_labels: List[str]
) -> ComparisonReportAnnotationsSummary:
matched_ann_counts = np.diag(confusion_matrix)
ds_ann_counts = np.sum(confusion_matrix, axis=1)
gt_ann_counts = np.sum(confusion_matrix, axis=0)

label_accuracies = _arr_div(
matched_ann_counts, ds_ann_counts + gt_ann_counts - matched_ann_counts
)
label_precisions = _arr_div(matched_ann_counts, ds_ann_counts)
label_recalls = _arr_div(matched_ann_counts, gt_ann_counts)

valid_annotations_count = np.sum(matched_ann_counts)
missing_annotations_count = np.sum(confusion_matrix[cls._UNMATCHED_IDX, :])
extra_annotations_count = np.sum(confusion_matrix[:, cls._UNMATCHED_IDX])
total_annotations_count = np.sum(confusion_matrix)
ds_annotations_count = np.sum(ds_ann_counts[: cls._UNMATCHED_IDX])
gt_annotations_count = np.sum(gt_ann_counts[: cls._UNMATCHED_IDX])

return ComparisonReportAnnotationsSummary(
valid_count=valid_annotations_count,
missing_count=missing_annotations_count,
extra_count=extra_annotations_count,
total_count=total_annotations_count,
ds_count=ds_annotations_count,
gt_count=gt_annotations_count,
confusion_matrix=ConfusionMatrix(
labels=confusion_matrix_labels,
rows=confusion_matrix,
precision=label_precisions,
recall=label_recalls,
accuracy=label_accuracies,
),
)

def generate_report(self) -> ComparisonReport:
self._find_gt_conflicts()

# accumulate stats
intersection_frames = []
conflicts = []
annotations = ComparisonReportAnnotationsSummary(
valid_count=0,
missing_count=0,
extra_count=0,
total_count=0,
ds_count=0,
gt_count=0,
confusion_matrix=None,
)
annotation_components = ComparisonReportAnnotationComponentsSummary(
shape=ComparisonReportAnnotationShapeSummary(
valid_count=0,
@@ -1992,11 +1992,6 @@ def generate_report(self) -> ComparisonReport:
for frame_id, frame_result in self._frame_results.items():
intersection_frames.append(frame_id)
conflicts += frame_result.conflicts

if annotations is None:
annotations = deepcopy(frame_result.annotations)
else:
annotations.accumulate(frame_result.annotations)
confusion_matrix += frame_result.annotations.confusion_matrix.rows

if annotation_components is None:
@@ -2005,22 +2000,6 @@ def generate_report(self) -> ComparisonReport:
annotation_components.accumulate(frame_result.annotation_components)
mean_ious.append(frame_result.annotation_components.shape.mean_iou)

matched_ann_counts = np.diag(confusion_matrix)
ds_ann_counts = np.sum(confusion_matrix, axis=1)
gt_ann_counts = np.sum(confusion_matrix, axis=0)
label_accuracies = _arr_div(
matched_ann_counts, ds_ann_counts + gt_ann_counts - matched_ann_counts
)
label_precisions = _arr_div(matched_ann_counts, ds_ann_counts)
label_recalls = _arr_div(matched_ann_counts, gt_ann_counts)

valid_annotations_count = np.sum(matched_ann_counts)
missing_annotations_count = np.sum(confusion_matrix[self._UNMATCHED_IDX, :])
extra_annotations_count = np.sum(confusion_matrix[:, self._UNMATCHED_IDX])
total_annotations_count = np.sum(confusion_matrix)
ds_annotations_count = np.sum(ds_ann_counts[: self._UNMATCHED_IDX])
gt_annotations_count = np.sum(gt_ann_counts[: self._UNMATCHED_IDX])

return ComparisonReport(
parameters=self.settings,
comparison_summary=ComparisonReportComparisonSummary(
@@ -2036,20 +2015,8 @@ def generate_report(self) -> ComparisonReport:
[c for c in conflicts if c.severity == AnnotationConflictSeverity.ERROR]
),
conflicts_by_type=Counter(c.type for c in conflicts),
annotations=ComparisonReportAnnotationsSummary(
valid_count=valid_annotations_count,
missing_count=missing_annotations_count,
extra_count=extra_annotations_count,
total_count=total_annotations_count,
ds_count=ds_annotations_count,
gt_count=gt_annotations_count,
confusion_matrix=ConfusionMatrix(
labels=confusion_matrix_labels,
rows=confusion_matrix,
precision=label_precisions,
recall=label_recalls,
accuracy=label_accuracies,
),
annotations=self._generate_annotations_summary(
confusion_matrix, confusion_matrix_labels
),
annotation_components=ComparisonReportAnnotationComponentsSummary(
shape=ComparisonReportAnnotationShapeSummary(
