méthode à suivre

This commit is contained in:
Alex
2026-01-14 18:12:25 +01:00
parent c5d372e98d
commit a4296d012e
3 changed files with 16 additions and 8 deletions

View File

@@ -1 +1,2 @@
# Les modèles générés
qwen2.5*/

View File

@@ -17,7 +17,7 @@ print("=== LoRA merge script started ===")
# ----------------------------
# Load base model
# ----------------------------
- print("[1/4] Loading base model...")
+ print(f"{80 * '_'}\n[1/4] Loading base model...")
base_model = AutoModelForCausalLM.from_pretrained(
BASE_MODEL,
torch_dtype=DTYPE,
@@ -29,7 +29,7 @@ print("Base model loaded.")
# ----------------------------
# Load tokenizer
# ----------------------------
- print("[2/4] Loading tokenizer...")
+ print(f"{80 * '_'}\n[2/4] Loading tokenizer...")
tokenizer = AutoTokenizer.from_pretrained(
BASE_MODEL,
trust_remote_code=True
@@ -40,7 +40,7 @@ print("Tokenizer loaded.")
# ----------------------------
# Load LoRA adapter
# ----------------------------
- print("[3/4] Loading LoRA adapter...")
+ print(f"{80 * '_'}\n[3/4] Loading LoRA adapter...")
model = PeftModel.from_pretrained(
base_model,
LORA_DIR,
@@ -50,7 +50,7 @@ print("LoRA adapter loaded.")
# ----------------------------
# Merge LoRA into base model
# ----------------------------
- print("[4/4] Merging LoRA into base model...")
+ print(f"{80 * '_'}\n[4/4] Merging LoRA into base model...")
model = model.merge_and_unload()
print("LoRA successfully merged.")