From 71e595a9662bf82039d04ac2d49d91b4a609fb34 Mon Sep 17 00:00:00 2001
From: Alex
Date: Wed, 11 Feb 2026 17:02:29 +0100
Subject: [PATCH] fine tuning

---
 Finetunning/finetunning.py | 2 +-
 Finetunning/validation.py  | 2 +-
 Traduction/main.py         | 2 --
 3 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/Finetunning/finetunning.py b/Finetunning/finetunning.py
index f7dbbff..ddfa009 100644
--- a/Finetunning/finetunning.py
+++ b/Finetunning/finetunning.py
@@ -145,7 +145,7 @@ training_args = TrainingArguments(
     per_device_train_batch_size=1,
     gradient_accumulation_steps=16,
     learning_rate=1e-4,
-    num_train_epochs=3,
+    num_train_epochs=2,
     max_steps=1000,
     fp16=False,  # ⚠ disable AMP
 
diff --git a/Finetunning/validation.py b/Finetunning/validation.py
index 030c66b..b2d4306 100644
--- a/Finetunning/validation.py
+++ b/Finetunning/validation.py
@@ -8,7 +8,7 @@ from nltk.translate.bleu_score import corpus_bleu
 # Configuration
 # ----------------------------
 BASE_MODEL = "Qwen/Qwen2.5-7B-Instruct"  # base model
-LORA_DIR = "./qwen2.5-7b-uk-fr-lora"  # fine-tuned LoRA
+LORA_DIR = "./qwen2.5-7b-uk-fr-lora-2epoch"  # fine-tuned LoRA
 VALIDATION_FILE = "validation.jsonl"  # small validation subset
 MAX_INPUT_LENGTH = 1024
 DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
diff --git a/Traduction/main.py b/Traduction/main.py
index 973c678..fe9a59d 100644
--- a/Traduction/main.py
+++ b/Traduction/main.py
@@ -11,7 +11,6 @@ from reportlab.pdfbase.ttfonts import TTFont
 import os, time
 
 # Configuration
-DEBUG = True
 PDF_PATH = "Traduction/TaniaBorecMemoir(Ukr).pdf"
 OLLAMA_MODEL = "traductionUkrainienVersFrancais:latest"
 OLLAMA_URL = "http://localhost:11434/api/generate"
@@ -174,7 +173,6 @@ def load_checkpoint():
             return json.load(f)
     return {"last_processed_index": -1, "results": {}}
 
-# Sauvegarde le checkpoint
 # Sauvegarde le checkpoint
 def save_checkpoint(last_index, results):
     # Trier les clés du dictionnaire results