Fixed support for SD WebUI v1.10.1 #206
@@ -1,11 +1,11 @@
|
||||
cython
|
||||
ifnude
|
||||
insightface==0.7.3
|
||||
onnx>=1.14.0
|
||||
protobuf>=3.20.2
|
||||
opencv-python
|
||||
pandas
|
||||
albumentations==1.3.1
|
||||
pydantic
|
||||
safetensors
|
||||
onnxruntime>=1.15.0
|
||||
onnxruntime-gpu>=1.15.0
|
||||
onnxruntime-gpu>=1.15.0
|
||||
|
||||
+2
-2
@@ -1,10 +1,10 @@
|
||||
protobuf>=3.20.2
|
||||
cython
|
||||
ifnude
|
||||
insightface==0.7.3
|
||||
onnx>=1.14.0
|
||||
onnxruntime>=1.15.0
|
||||
opencv-python
|
||||
pandas
|
||||
albumentations==1.3.1
|
||||
pydantic
|
||||
safetensors
|
||||
safetensors
|
||||
|
||||
@@ -22,7 +22,6 @@ from scripts.faceswaplab_swapping import upscaled_inswapper
|
||||
from scripts.faceswaplab_swapping.upcaled_inswapper_options import InswappperOptions
|
||||
from scripts.faceswaplab_utils.imgutils import (
|
||||
pil_to_cv2,
|
||||
check_against_nsfw,
|
||||
)
|
||||
from scripts.faceswaplab_utils.faceswaplab_logging import logger, save_img_debug
|
||||
from scripts import faceswaplab_globals
|
||||
@@ -713,8 +712,6 @@ def process_image_unit(
|
||||
if unit.enable:
|
||||
faces = get_faces(pil_to_cv2(image))
|
||||
|
||||
if check_against_nsfw(image):
|
||||
return [(image, info)]
|
||||
if not unit.blend_faces and not force_blend:
|
||||
src_faces = unit.faces
|
||||
logger.info(f"will generate {len(src_faces)} images")
|
||||
|
||||
@@ -13,37 +13,6 @@ from scripts.faceswaplab_utils.typing import BoxCoords, CV2ImgU8, PILImage
|
||||
from scripts.faceswaplab_utils.faceswaplab_logging import logger
|
||||
|
||||
|
||||
def check_against_nsfw(img: PILImage) -> bool:
    """
    Check if an image exceeds the Not Safe for Work (NSFW) score.

    Parameters:
        img (PILImage): The image to be checked.

    Returns:
        bool: True if any part of the image is considered NSFW, False otherwise.
    """

    NSFW_SCORE_THRESHOLD = get_sd_option("faceswaplab_nsfw_threshold", 0.7)

    # A threshold of 1.0 (or more) disables the check entirely — used for testing.
    if NSFW_SCORE_THRESHOLD >= 1:
        return False

    # Imported lazily so the detector is only loaded when the check is active.
    from ifnude import detect

    chunks: List[Dict[str, Union[int, float]]] = detect(img)

    # Log every detected region before deciding, then report whether any
    # region's score crossed the threshold.
    exceeded = False
    for chunk in chunks:
        logger.debug(
            f"chunck score {chunk['score']}, threshold : {NSFW_SCORE_THRESHOLD}"
        )
        exceeded = exceeded or chunk["score"] > NSFW_SCORE_THRESHOLD

    return exceeded
|
||||
|
||||
|
||||
def pil_to_cv2(pil_img: PILImage) -> CV2ImgU8:
|
||||
"""
|
||||
Convert a PIL Image into an OpenCV image (cv2).
|
||||
|
||||
Reference in New Issue
Block a user