-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdebug_bbq.py
More file actions
34 lines (27 loc) · 1.28 KB
/
debug_bbq.py
File metadata and controls
34 lines (27 loc) · 1.28 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
"""
Debug BBQ uncertainty calculation
"""
import sys
from pathlib import Path
# Prepend the script's directory to sys.path so the `src.` package resolves
# when this debug script is run directly (outside an installed package).
sys.path.insert(0, str(Path(__file__).parent))
from src.presentation_eval.bayesian_bradley_terry import BayesianBradleyTerryEvaluator
# NOTE(review): numpy appears unused in this script — confirm before removing.
import numpy as np
def debug_uncertainty():
    """Print posterior uncertainties for sparse vs. dense comparison data.

    Fits the Bayesian Bradley-Terry model twice — once on a single
    'Expert says A beats B' judgement and once on that same judgement
    repeated 20 times — then prints both uncertainty estimates so the
    sparse case can be eyeballed against the dense one.
    """
    model = BayesianBradleyTerryEvaluator()
    model.add_presentations(['A', 'B'])
    model.add_raters(['Expert'])

    # One judgement vs. twenty identical judgements of the same pair.
    few = [('Expert', 'A', 'B', 1)]
    many = [('Expert', 'A', 'B', 1)] * 20

    print("=== Sparse comparisons ===")
    sparse_fit = model.fit_bayesian(few, n_samples=100)
    print("Sparse uncertainties:", sparse_fit['uncertainties'])
    print("Sparse n_comparisons:", len(few))

    print("\n=== Dense comparisons ===")
    dense_fit = model.fit_bayesian(many, n_samples=100)
    print("Dense uncertainties:", dense_fit['uncertainties'])
    print("Dense n_comparisons:", len(many))

    # More data should shrink the posterior; the last line makes the
    # expected ordering explicit.
    sparse_u = sparse_fit['uncertainties']['A']
    dense_u = dense_fit['uncertainties']['A']
    print(f"\nSparse A uncertainty: {sparse_u}")
    print(f"Dense A uncertainty: {dense_u}")
    print(f"Is sparse > dense? {sparse_u > dense_u}")
# Entry point: run the sparse-vs-dense uncertainty comparison as a script.
if __name__ == "__main__":
    debug_uncertainty()