From aea4c34a08ac6196904f018802987dd0bd3b6b4f Mon Sep 17 00:00:00 2001
From: Gremlation <192086133+gremlation@users.noreply.github.com>
Date: Sat, 28 Dec 2024 02:25:24 +0800
Subject: [PATCH] Add filename and sha256 outputs to UnetLoaderGGUF

---
 nodes.py | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/nodes.py b/nodes.py
index e24d291..2fe0ea0 100644
--- a/nodes.py
+++ b/nodes.py
@@ -2,6 +2,8 @@
 import torch
 import logging
 import collections
+import hashlib
+from functools import lru_cache
 
 import comfy.sd
 import comfy.utils
@@ -125,11 +127,21 @@ def INPUT_TYPES(s):
             }
         }
 
-    RETURN_TYPES = ("MODEL",)
+    RETURN_TYPES = ("MODEL", "STRING", "STRING")
+    RETURN_NAMES = ("MODEL", "filename", "sha256")
     FUNCTION = "load_unet"
     CATEGORY = "bootleg"
     TITLE = "Unet Loader (GGUF)"
 
+    @staticmethod
+    @lru_cache(maxsize=None)
+    def hash_file(file_path):
+        hash_func = hashlib.sha256()
+        with open(file_path, "rb") as file:
+            while chunk := file.read(256 * 1024):
+                hash_func.update(chunk)
+        return hash_func.hexdigest()
+
     def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_device=None):
         ops = GGMLOps()
 
@@ -158,7 +170,11 @@ def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_de
             raise RuntimeError("ERROR: Could not detect model type of: {}".format(unet_path))
         model = GGUFModelPatcher.clone(model)
         model.patch_on_device = patch_on_device
-        return (model,)
+
+        hash = self.hash_file(unet_path)
+
+        return (model, unet_name, hash)
+
 
 class UnetLoaderGGUFAdvanced(UnetLoaderGGUF):
     @classmethod
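
Note (not part of the patch): because hash_file is wrapped in functools.lru_cache, a given checkpoint is hashed only once per process and later loads of the same path return the memoized digest; calling it through self still works because staticmethod hands back the underlying cached function. A minimal standalone sketch of that behaviour, with "model.gguf" as a hypothetical path:

    import hashlib
    from functools import lru_cache

    class Demo:
        @staticmethod
        @lru_cache(maxsize=None)
        def hash_file(file_path):
            # Stream the file in 256 KiB chunks so large checkpoints
            # are never held in memory all at once.
            hash_func = hashlib.sha256()
            with open(file_path, "rb") as file:
                while chunk := file.read(256 * 1024):
                    hash_func.update(chunk)
            return hash_func.hexdigest()

    d = Demo()
    print(d.hash_file("model.gguf"))    # hypothetical path: reads and hashes the file
    print(d.hash_file("model.gguf"))    # same digest, served from the cache
    print(Demo.hash_file.cache_info())  # hits=1, misses=1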