>>107866929
I was thinking of something like this:
"""Export a HF causal-LM checkpoint to a single .safetensors file."""
import torch
from transformers import AutoModelForCausalLM
from safetensors.torch import save_model

MODEL_ID = "YanLabs/gemma-3-4b-it-abliterated-normpreserve"
OUT_FILE = "gemma-3-4b-it-abliterated-text-encoder.safetensors"

# Load model fully on CPU in fp16; device_map="cpu" avoids GPU OOM
# during a pure weight-export job.
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float16,
    device_map="cpu",  # safest option
)

# Save as a single safetensors file.
# NOTE: save_model (not save_file) is required here — Gemma-style models
# tie input/output embeddings, and save_file(model.state_dict(), ...)
# raises RuntimeError on such shared tensors; save_model deduplicates
# them before writing.
save_model(model, OUT_FILE)
print(f"Saved {OUT_FILE}")
Actually, I'll just try this.