TIEGSafeSearchAnnotation = record
  // Likelihood that the image contains adult content. Adult content may
  // contain elements such as nudity, pornographic images or cartoons, or
  // sexual activities.
  adult    : TIEGLikelihood;
  // Spoof likelihood: the likelihood that a modification was made to the
  // image's canonical version to make it appear funny or offensive.
  spoof    : TIEGLikelihood;
  // Likelihood that this is a medical image.
  medical  : TIEGLikelihood;
  // Likelihood that this image contains violent content.
  violence : TIEGLikelihood;
  // Likelihood that the request image contains racy content. Racy content
  // may include (but is not limited to) skimpy or sheer clothing,
  // strategically covered nudity, lewd or provocative poses, or close-ups
  // of sensitive body areas.
  racy     : TIEGLikelihood;
end;
Description
Results array for SafeSearch: Explicit content detection (iegrSafeSearchDetection request)
Detects explicit content such as adult content or violent content within an image.
Provides likelihood ratings for the following explicit content categories: adult, spoof, medical, violence, and racy.
// Show the web detection's best-guess description of the image,
// provided at least one label was returned.
if Length( GVision.WebDetection.bestGuessLabels ) > 0 then
  lbxResults.Items.Add( 'This image is likely to be: ' +
                        GVision.WebDetection.bestGuessLabels[ 0 ].webLabel );
// Report the first detected landmark along with its geographic
// coordinates (latitude, then longitude).
if GVision.HasLandmarkAnnotations then
begin
  lbxResults.Items.Add( 'This image is the landmark: ' +
                        GVision.LandmarkAnnotations[ 0 ].description );
  lbxResults.Items.Add( 'Latitude: ' +
                        LatLongToStr( GVision.LandmarkAnnotations[ 0 ].latitude, True ));
  lbxResults.Items.Add( 'Longitude: ' +
                        LatLongToStr( GVision.LandmarkAnnotations[ 0 ].longitude, False ));
end;
// Show how many fully matching images were found on the web,
// then list each one.
lbxResults.Items.Add( Format( 'Found %d matching images on the web',
                              [ GVision.WebDetection.fullMatchingImages.Count ] ));
lbxResults.Items.AddStrings( GVision.WebDetection.fullMatchingImages );
// Show how many web pages contain this image, then list each one.
lbxResults.Items.Add( Format( 'Found %d pages on the web containing this image',
                              [ GVision.WebDetection.pagesWithMatchingImages.Count ] ));
lbxResults.Items.AddStrings( GVision.WebDetection.pagesWithMatchingImages );
// Flag the image when the adult-content rating is Likely or VeryLikely.
if GVision.SafeSearchAnnotation.adult in [ ieglLikely, ieglVeryLikely ] then
  lbxResults.Items.Add( 'Likely to be an ADULT image' );
// Flag the image when the violence rating is Likely or VeryLikely.
if GVision.SafeSearchAnnotation.violence in [ ieglLikely, ieglVeryLikely ] then
  lbxResults.Items.Add( 'Likely to be a VIOLENT image' );