Skip to content

Commit fd3cddc

Browse files
rkritika1508 authored and claude committed
fix: use textdetox/xlmr-large-toxicity-classifier as default nsfw_text model
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
1 parent 5b2fe3b commit fd3cddc

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

backend/app/core/validators/config/nsfw_text_safety_validator_config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ class NSFWTextSafetyValidatorConfig(BaseValidatorConfig):
1010
threshold: float = 0.8
1111
validation_method: str = "sentence"
1212
device: Optional[str] = "cpu"
13-
model_name: Optional[str] = "michellejieli/NSFW_text_classifier"
13+
model_name: Optional[str] = "textdetox/xlmr-large-toxicity-classifier"
1414

1515
def build(self):
1616
return NSFWText(

backend/app/tests/test_toxicity_hub_validators.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,7 @@ def test_build_with_defaults(self):
304304
assert kwargs["threshold"] == 0.8
305305
assert kwargs["validation_method"] == "sentence"
306306
assert kwargs["device"] == "cpu"
307-
assert kwargs["model_name"] == "michellejieli/NSFW_text_classifier"
307+
assert kwargs["model_name"] == "textdetox/xlmr-large-toxicity-classifier"
308308

309309
def test_build_with_custom_params(self):
310310
config = NSFWTextSafetyValidatorConfig(

0 commit comments

Comments (0)