From 5f715d6fda151d21f621d9ec801975d938332305 Mon Sep 17 00:00:00 2001
From: Vantage with AI
Date: Wed, 7 Jan 2026 18:50:30 +0530
Subject: [PATCH 1/3] Implement GGUF metadata extraction function

Added function to extract metadata from GGUF files.
---
 loader.py | 27 ++++++++++++++++++++++++---
 1 file changed, 24 insertions(+), 3 deletions(-)

diff --git a/loader.py b/loader.py
index 1948027..0c9dd13 100644
--- a/loader.py
+++ b/loader.py
@@ -48,6 +48,25 @@ def get_list_field(reader, field_name, field_type):
     else:
         raise TypeError(f"Unknown field type {field_type}")
 
+def get_gguf_metadata(reader):
+    """Extract all simple metadata fields like safetensors"""
+    metadata = {}
+    for field_name in reader.fields:
+        try:
+            field = reader.get_field(field_name)
+            if len(field.types) == 1: # Simple scalar fields only
+                if field.types[0] == gguf.GGUFValueType.STRING:
+                    metadata[field_name] = str(field.parts[field.data[-1]], "utf-8")
+                elif field.types[0] == gguf.GGUFValueType.INT32:
+                    metadata[field_name] = int(field.parts[field.data[-1]])
+                elif field.types[0] == gguf.GGUFValueType.F32:
+                    metadata[field_name] = float(field.parts[field.data[-1]])
+                elif field.types[0] == gguf.GGUFValueType.BOOL:
+                    metadata[field_name] = bool(field.parts[field.data[-1]])
+        except:
+            continue
+    return metadata
+
 def gguf_sd_loader(path, handle_prefix="model.diffusion_model.", return_arch=False, is_text_model=False):
     """
     Read state dict as fake tensors
@@ -135,10 +154,11 @@ def gguf_sd_loader(path, handle_prefix="model.diffusion_model.", return_arch=Fal
     if len(qsd) > 0:
         max_key = max(qsd.keys(), key=lambda k: qsd[k].numel())
         state_dict[max_key].is_largest_weight = True
-
+
+    metadata = get_gguf_metadata(reader)
     if return_arch:
-        return (state_dict, arch_str)
-    return state_dict
+        return (state_dict, arch_str, metadata)
+    return (state_dict, metadata)
 
 # for remapping llama.cpp -> original key names
 T5_SD_MAP = {
@@ -404,3 +424,4 @@ def gguf_clip_loader(path):
     else:
         pass
     return sd
+

From f083506720f2f049631ed6b6e937440f5579f6c7 Mon Sep 17 00:00:00 2001
From: Vantage with AI
Date: Wed, 7 Jan 2026 18:52:40 +0530
Subject: [PATCH 2/3] Updated the GGUF model loading and patching classes to
 include metadata handling.

---
 nodes.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/nodes.py b/nodes.py
index ff5aaf0..15dfa17 100644
--- a/nodes.py
+++ b/nodes.py
@@ -165,9 +165,9 @@ def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_de
 
         # init model
         unet_path = folder_paths.get_full_path("unet", unet_name)
-        sd = gguf_sd_loader(unet_path)
+        sd, metadata = gguf_sd_loader(unet_path)
         model = comfy.sd.load_diffusion_model_state_dict(
-            sd, model_options={"custom_operations": ops}
+            sd, model_options={"custom_operations": ops}, metadata=metadata
         )
         if model is None:
             logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path))
@@ -319,3 +319,4 @@ def load_clip(self, clip_name1, clip_name2, clip_name3, clip_name4, type="stable
     "QuadrupleCLIPLoaderGGUF": QuadrupleCLIPLoaderGGUF,
     "UnetLoaderGGUFAdvanced": UnetLoaderGGUFAdvanced,
 }
+

From e9963a6f72d373d64679ed3a331f1feef69cb643 Mon Sep 17 00:00:00 2001
From: City <125218114+city96@users.noreply.github.com>
Date: Sun, 11 Jan 2026 19:30:22 +0100
Subject: [PATCH 3/3] Clean up return logic for extra metadata

This should be more future proof in case we need to return other attributes
in the future. Possible breaking change for anyone using `gguf_sd_loader`
directly either way, though.
---
 loader.py | 22 ++++++++++++----------
 nodes.py  |  4 ++--
 2 files changed, 14 insertions(+), 12 deletions(-)

diff --git a/loader.py b/loader.py
index 0c9dd13..a17c8ab 100644
--- a/loader.py
+++ b/loader.py
@@ -66,8 +66,8 @@ def get_gguf_metadata(reader):
         except:
             continue
     return metadata
-
-def gguf_sd_loader(path, handle_prefix="model.diffusion_model.", return_arch=False, is_text_model=False):
+
+def gguf_sd_loader(path, handle_prefix="model.diffusion_model.", is_text_model=False):
     """
     Read state dict as fake tensors
     """
@@ -154,11 +154,13 @@ def gguf_sd_loader(path, handle_prefix="model.diffusion_model.", return_arch=Fal
     if len(qsd) > 0:
         max_key = max(qsd.keys(), key=lambda k: qsd[k].numel())
         state_dict[max_key].is_largest_weight = True
-
-    metadata = get_gguf_metadata(reader)
-    if return_arch:
-        return (state_dict, arch_str, metadata)
-    return (state_dict, metadata)
+
+    # extra info to return
+    extra = {
+        "arch_str": arch_str,
+        "metadata": get_gguf_metadata(reader)
+    }
+    return (state_dict, extra)
 
 # for remapping llama.cpp -> original key names
 T5_SD_MAP = {
@@ -266,7 +268,7 @@ def gguf_mmproj_loader(path):
             logging.info(f"Using mmproj '{target[0]}' for text encoder '{tenc_fname}'.")
             target = os.path.join(root, target[0])
 
-    vsd = gguf_sd_loader(target, is_text_model=True)
+    vsd, _ = gguf_sd_loader(target, is_text_model=True)
 
     # concat 4D to 5D
     if "v.patch_embd.weight.1" in vsd:
@@ -395,7 +397,8 @@ def gguf_tekken_tokenizer_loader(path, temb_shape):
     return torch.ByteTensor(list(json.dumps(data).encode('utf-8')))
 
 def gguf_clip_loader(path):
-    sd, arch = gguf_sd_loader(path, return_arch=True, is_text_model=True)
+    sd, extra = gguf_sd_loader(path, is_text_model=True)
+    arch = extra.get("arch_str", None)
     if arch in {"t5", "t5encoder"}:
         temb_key = "token_embd.weight"
         if temb_key in sd and sd[temb_key].shape == (256384, 4096):
@@ -424,4 +427,3 @@ def gguf_clip_loader(path):
     else:
         pass
     return sd
-
diff --git a/nodes.py b/nodes.py
index 15dfa17..50d3a60 100644
--- a/nodes.py
+++ b/nodes.py
@@ -165,9 +165,9 @@ def load_unet(self, unet_name, dequant_dtype=None, patch_dtype=None, patch_on_de
 
         # init model
         unet_path = folder_paths.get_full_path("unet", unet_name)
-        sd, metadata = gguf_sd_loader(unet_path)
+        sd, extra = gguf_sd_loader(unet_path)
         model = comfy.sd.load_diffusion_model_state_dict(
-            sd, model_options={"custom_operations": ops}, metadata=metadata
+            sd, model_options={"custom_operations": ops}, metadata=extra.get("metadata", {})
         )
         if model is None:
             logging.error("ERROR UNSUPPORTED UNET {}".format(unet_path))
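
For anyone calling `gguf_sd_loader` directly (the possible breaking change noted
in PATCH 3/3): after this series the loader always returns a `(state_dict, extra)`
tuple, where `extra` carries `arch_str` and `metadata`. Below is a minimal sketch
of an adapted caller, assuming the final state of the series; the import path,
file path, and fallback defaults are illustrative assumptions, not part of the diffs.

    # Hypothetical direct caller of the post-patch API; "model.gguf" is a placeholder path.
    from loader import gguf_sd_loader  # import path assumed from the patched file name

    sd, extra = gguf_sd_loader("model.gguf")  # previously: sd = gguf_sd_loader(path)
    arch = extra.get("arch_str", None)        # replaces the old return_arch=True tuple element
    metadata = extra.get("metadata", {})      # simple scalar fields from get_gguf_metadata()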