- commit
- f30b32f
- parent
- 4b9763a
- author
- Eric Bower
- date
- 2024-09-27 01:57:37 +0000 UTC
chore: nsfw detector
2 files changed,
+61,
-0
+55,
-0
1@@ -0,0 +1,55 @@
2+import sys
3+import glob
4+from PIL import Image
5+from transformers import pipeline
6+import torch
7+
# ANSI escape sequences used to color the per-image verdict lines.
CGREEN = '\033[92m'   # green: image passed (score at or below threshold)
CYELLOW = '\033[93m'  # yellow: classifier raised an error for the image
CRED = '\033[91m'     # red: image failed (score above threshold) / fatal errors
CEND = '\033[0m'      # reset terminal color back to default

def images(root_dir):
    """Yield up to 10 ``(PIL.Image, filename)`` pairs for .jpg files under root_dir.

    Recursively scans ``root_dir``. Files that fail to open are reported
    and skipped, but still consume one slot of the 10-file budget
    (``count`` increments whether or not the open succeeded, matching the
    original scan behavior).
    """
    import os  # local import: avoids touching the file-level import block

    # Build the pattern with os.path.join: the previous
    # ``root_dir + '**/*.jpg'`` concatenation broke recursion whenever
    # root_dir lacked a trailing separator ("imgs" -> "imgs**/*.jpg",
    # where "**" is no longer a standalone path component).
    pattern = os.path.join(root_dir, '**', '*.jpg')

    count = 0
    for filename in glob.iglob(pattern, recursive=True):
        if count == 10:
            return
        try:
            img = Image.open(filename)
            yield img, filename
        except Exception as err:
            # Best-effort: report unreadable/corrupt files and move on.
            print("failed to open file", err)
        count += 1
if __name__ == '__main__':
    if len(sys.argv) < 2:
        raise Exception(f"{CRED}error!: please provide root image folder{CEND}")
    root_dir = sys.argv[1]
    print(f"root_dir {root_dir}")
    # Images scoring above this NSFW probability are reported as failed.
    threshold = 0.3

    print(f"failure threshold is set to {threshold:.4f}")

    print("loading model")
    device = 'cuda:0' if torch.cuda.is_available() else 'cpu'
    classify = pipeline(
        "image-classification",
        model="Falconsai/nsfw_image_detection",
        device=device,
    )

    print("scanning images")
    for img, filename in images(root_dir):
        try:
            result = classify(img)
        except Exception as err:
            print(f"{CYELLOW}err{CEND} (score:n/a) ({filename}) {err}")
            continue

        # BUG FIX: the pipeline returns [{label, score}, ...] sorted by
        # score *descending*, so result[1] was always the lower-scoring
        # label — for a genuinely NSFW image that entry is "normal", and
        # every NSFW image passed. Select the "nsfw" entry by label.
        nsfw_score = next(
            (entry["score"] for entry in result if entry["label"] == "nsfw"),
            0.0,
        )
        score_read = '%.4f' % nsfw_score
        # Report the actual filename instead of the "(unknown)" placeholder.
        if nsfw_score > threshold:
            print(f"{CRED}failed{CEND} (score:{score_read}) ({filename})")
        else:
            print(f"{CGREEN}passed{CEND} (score:{score_read}) ({filename})")
1@@ -0,0 +1,6 @@
2+Pillow==10.4.0
3+pycryptonight==0.0.1
4+pyrx==0.3.0
5+Requests==2.32.3
6+torch==2.4.1
7+transformers==4.45.1