From fb62ee246c0ae68547f7ffca626d4e03fa08a762 Mon Sep 17 00:00:00 2001
From: harisreedhar
Date: Tue, 5 Nov 2024 21:29:43 +0530
Subject: [PATCH] changes

---
 facefusion/processors/modules/deep_swapper.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/facefusion/processors/modules/deep_swapper.py b/facefusion/processors/modules/deep_swapper.py
index 8b7ec1f1..117c79be 100755
--- a/facefusion/processors/modules/deep_swapper.py
+++ b/facefusion/processors/modules/deep_swapper.py
@@ -3,7 +3,6 @@ from typing import List, Tuple
 
 import cv2
 import numpy
-from cv2.typing import Size
 
 import facefusion.jobs.job_manager
 import facefusion.jobs.job_store
@@ -44,6 +43,7 @@ MODEL_SET : ModelSet =\
 			}
 		},
 		'template': 'arcface_128_v2',
+		'size': (224, 224)
 	}
 }
 
@@ -64,12 +64,6 @@ def get_model_options() -> ModelOptions:
 	return MODEL_SET.get(deep_swapper_model)
 
 
-def get_model_size() -> Size:
-	deep_swapper = get_inference_pool().get('deep_swapper')
-	model_size = deep_swapper.get_outputs()[-1].shape[1:3]
-	return model_size
-
-
 def register_args(program : ArgumentParser) -> None:
 	group_processors = find_argument_group(program, 'processors')
 	if group_processors:
@@ -117,7 +111,7 @@ def post_process() -> None:
 
 def swap_face(target_face : Face, temp_vision_frame : VisionFrame) -> VisionFrame:
 	model_template = get_model_options().get('template')
-	model_size = get_model_size()
+	model_size = get_model_options().get('size')
 	crop_vision_frame, affine_matrix = warp_face_by_face_landmark_5(temp_vision_frame, target_face.landmark_set.get('5/68'), model_template, model_size)
 	crop_vision_frame_raw = crop_vision_frame.copy()
 	box_mask = create_static_box_mask(crop_vision_frame.shape[:2][::-1], state_manager.get_item('face_mask_blur'), state_manager.get_item('face_mask_padding'))
@@ -171,7 +165,7 @@ def normalize_crop_frame(crop_vision_frame : VisionFrame) -> VisionFrame:
 
 
 def prepare_crop_mask(crop_source_mask : Mask, crop_target_mask : Mask) -> Mask:
-	model_size = get_model_size()
+	model_size = get_model_options().get('size')
 	crop_mask = numpy.maximum.reduce([ crop_source_mask, crop_target_mask ])
 	crop_mask = crop_mask.reshape(model_size).clip(0, 1)
 	crop_mask = cv2.erode(crop_mask, numpy.ones((5, 5), numpy.uint8), iterations = 1)
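
Note for reviewers: the patch replaces a runtime probe of the ONNX session's output shape (the deleted get_model_size(), which needed the inference pool loaded just to learn the crop size) with a static 'size' entry in MODEL_SET, read through get_model_options() at both call sites. A minimal standalone sketch of the new lookup pattern follows. The model key 'example_model' and the trimmed-down MODEL_SET are illustrative placeholders, and get_model_options() here takes the model name as a parameter for self-containment, whereas the module reads it from state_manager.

# Minimal sketch of the lookup pattern this patch introduces; the real
# MODEL_SET entry also carries 'hashes' and 'sources' as shown in the diff.
from typing import Any, Dict

MODEL_SET : Dict[str, Dict[str, Any]] =\
{
	'example_model':
	{
		'template': 'arcface_128_v2',
		'size': (224, 224)
	}
}


def get_model_options(deep_swapper_model : str) -> Dict[str, Any]:
	return MODEL_SET.get(deep_swapper_model)


# Both patched call sites now resolve the crop size from static metadata
# instead of probing deep_swapper.get_outputs()[-1].shape[1:3] at runtime.
model_options = get_model_options('example_model')
model_template = model_options.get('template')
model_size = model_options.get('size')
print(model_template, model_size) # arcface_128_v2 (224, 224)

The trade-off: the size is available before any model is loaded and the cv2.typing.Size import goes away, but the hard-coded (224, 224) must now be kept in sync with the actual model output shape by hand.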