# NSFW (Not Safe for Work) classification
This example demonstrates how to use the NSFW (Not Safe for Work) classification tool.
```python
from PIL import Image

from vision_agent_tools.models.nsfw_classification import NSFWClassification

# (replace this path with your own!)
test_image = "path/to/your/image.jpg"

# Load the image
image = Image.open(test_image)

# Initialize the NSFW model
nsfw_classification = NSFWClassification()

# Run the inference
results = nsfw_classification(image)

# Let's print the predicted label
print(results.label)
```
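The result also exposes a confidence score (documented under `NSFWInferenceData` below). Here is a minimal continuation of the example, assuming the model returns an `"nsfw"` label for unsafe images and using an arbitrary 0.8 cutoff; both the label string and the threshold are illustrative assumptions, not library defaults:

```python
# `results` is an NSFWInferenceData instance with `label` and `score` fields.
# The "nsfw" label string and the 0.8 cutoff are example assumptions.
if results.label == "nsfw" and results.score > 0.8:
    print(f"Flagged as NSFW with confidence {results.score:.2f}")
else:
    print(f"Label: {results.label} (score: {results.score:.2f})")
```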
## NSFWClassification

Bases: `BaseMLModel`
The primary intended use of this model is for the classification of NSFW (Not Safe for Work) images.
### `__call__(image)`
Performs the NSFW inference on an image using the NSFWClassification model.
Parameters:

| Name | Type | Description | Default |
| --- | --- | --- | --- |
| `image` | `Image` | The input image for NSFW classification. | required |
Returns:

| Name | Type | Description |
| --- | --- | --- |
| `NSFWInferenceData` | `NSFWInferenceData` | The inference result from the NSFWClassification model. `label` (str): the label for the unsafe content detected in the image. `score` (float): the score for the unsafe content detected in the image. |
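As a sketch of how `__call__` might be used in practice, the snippet below screens a list of image paths based on the returned label. The `image_paths` list and the `"normal"` label string are illustrative assumptions, not part of the documented API:

```python
from PIL import Image

from vision_agent_tools.models.nsfw_classification import NSFWClassification

# Hypothetical list of images to screen; replace with your own paths.
image_paths = ["photo_1.jpg", "photo_2.jpg", "photo_3.jpg"]

nsfw_classification = NSFWClassification()

safe_images = []
for path in image_paths:
    result = nsfw_classification(Image.open(path))
    # The "normal" label string is an assumption about the underlying model's
    # output; adjust it to the labels your model actually returns.
    if result.label == "normal":
        safe_images.append(path)

print(f"{len(safe_images)} of {len(image_paths)} images passed the check")
```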
### `__init__()`
Initializes the NSFW (Not Safe for Work) classification tool.
## NSFWInferenceData

Bases: `BaseModel`
Represents an inference result from the NSFWClassification model.
Attributes:

| Name | Type | Description |
| --- | --- | --- |
| `label` | `str` | The predicted label for the image. |
| `score` | `float` | The confidence score associated with the prediction (between 0 and 1). |
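For reference, here is a minimal sketch of what this schema looks like, assuming `BaseModel` refers to Pydantic's `BaseModel`; the class name and the field constraint are inferred from the attribute descriptions above, not taken from the library source:

```python
from pydantic import BaseModel, Field


class NSFWInferenceDataSketch(BaseModel):
    """Illustrative stand-in for NSFWInferenceData based on the documented attributes."""

    label: str                            # predicted label for the image
    score: float = Field(ge=0.0, le=1.0)  # confidence score between 0 and 1


# Constructing and reading the sketch works like any Pydantic model:
example = NSFWInferenceDataSketch(label="normal", score=0.97)
print(example.label, example.score)
```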