Buckets:

rtrm's picture
download
raw
14.5 kB
<meta charset="utf-8" /><meta name="hf:doc:metadata" content="{&quot;title&quot;:&quot;Fine-tuning supervizat&quot;,&quot;local&quot;:&quot;fine-tuning-supervizat&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;1️⃣ Template-uri de chat&quot;,&quot;local&quot;:&quot;1-template-uri-de-chat&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;2️⃣ Fine-tuning supervizat&quot;,&quot;local&quot;:&quot;2-fine-tuning-supervizat&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;3️⃣ Adaptarea de rang scăzut (LoRA)&quot;,&quot;local&quot;:&quot;3-adaptarea-de-rang-scăzut-lora&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;4️⃣ Evaluarea&quot;,&quot;local&quot;:&quot;4-evaluarea&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;Referințe&quot;,&quot;local&quot;:&quot;referințe&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2}],&quot;depth&quot;:1}">
<link href="/docs/course/pr_1069/rum/_app/immutable/assets/0.e3b0c442.css" rel="modulepreload">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/entry/start.1de7c3d2.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/scheduler.37c15a92.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/singletons.e13b7dfd.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/index.18351ede.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/paths.e130b7b0.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/entry/app.1f82014c.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/index.2bf4358c.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/nodes/0.3c83e1ab.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/each.e59479a4.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/nodes/20.c1410147.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/Tip.363c041f.js">
<link rel="modulepreload" href="/docs/course/pr_1069/rum/_app/immutable/chunks/getInferenceSnippets.24b50994.js"><!-- HEAD_svelte-u9bgzb_START --><meta name="hf:doc:metadata" content="{&quot;title&quot;:&quot;Fine-tuning supervizat&quot;,&quot;local&quot;:&quot;fine-tuning-supervizat&quot;,&quot;sections&quot;:[{&quot;title&quot;:&quot;1️⃣ Template-uri de chat&quot;,&quot;local&quot;:&quot;1-template-uri-de-chat&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;2️⃣ Fine-tuning supervizat&quot;,&quot;local&quot;:&quot;2-fine-tuning-supervizat&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;3️⃣ Adaptarea de rang scăzut (LoRA)&quot;,&quot;local&quot;:&quot;3-adaptarea-de-rang-scăzut-lora&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;4️⃣ Evaluarea&quot;,&quot;local&quot;:&quot;4-evaluarea&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2},{&quot;title&quot;:&quot;Referințe&quot;,&quot;local&quot;:&quot;referințe&quot;,&quot;sections&quot;:[],&quot;depth&quot;:2}],&quot;depth&quot;:1}"><!-- HEAD_svelte-u9bgzb_END --> <p></p> <h1 class="relative group"><a id="fine-tuning-supervizat" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#fine-tuning-supervizat"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 
0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Fine-tuning supervizat</span></h1> <p data-svelte-h="svelte-1652gkr">În <a href="/course/chapter2/2">Capitolul 2 Secțiunea 2</a>, am văzut că modelele de limbaj generativ pot fi ajustate pentru sarcini specifice, cum ar fi rezumarea și răspunsul la întrebări. Cu toate acestea, în zilele noastre este mult mai frecvent să ajustăm modelele de limbaj pe o gamă largă de sarcini simultan; o metodă cunoscută sub numele de fine-tuning supervizat (SFT). Acest proces ajută modelele să devină mai versatile și capabile să gestioneze diverse cazuri de utilizare. Majoritatea LLM-urilor cu care oamenii interacționează pe platforme precum ChatGPT au trecut prin SFT pentru a fi mai utile și aliniate cu preferințele umane. Vom împărți acest capitol în patru secțiuni:</p> <h2 class="relative group"><a id="1-template-uri-de-chat" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#1-template-uri-de-chat"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>1️⃣ Template-uri de chat</span></h2> <p 
data-svelte-h="svelte-1mvxiax">Template-urile de chat structurează interacțiunile dintre utilizatori și modelele AI, asigurând răspunsuri consecvente și adecvate contextual. Acestea includ componente precum prompturi de sistem și mesaje bazate pe roluri.</p> <h2 class="relative group"><a id="2-fine-tuning-supervizat" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#2-fine-tuning-supervizat"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>2️⃣ Fine-tuning supervizat</span></h2> <p data-svelte-h="svelte-145y4ih">Fine-tuningul supervizat (SFT) este un proces critic pentru adaptarea modelelor de limbaj pre-antrenate la sarcini specifice. Aceasta implică antrenarea modelului pe un set de date specific sarcinii cu exemple etichetate. 
Pentru un ghid detaliat despre SFT, inclusiv pașii cheie și cele mai bune practici, consultați <a href="https://huggingface.co/docs/trl/en/sft_trainer" rel="nofollow">secțiunea de fine-tuning supervizat din documentația TRL</a>.</p> <h2 class="relative group"><a id="3-adaptarea-de-rang-scăzut-lora" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#3-adaptarea-de-rang-scăzut-lora"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>3️⃣ Adaptarea de rang scăzut (LoRA)</span></h2> <p data-svelte-h="svelte-yz8k3d">Adaptarea de rang scăzut (LoRA) este o tehnică pentru fine-tuningul modelelor de limbaj prin adăugarea de matrice de rang scăzut la straturile modelului. Aceasta permite un fine-tuning eficient păstrând în același timp cunoștințele pre-antrenate ale modelului. 
Unul dintre beneficiile cheie ale LoRA este economia semnificativă de memorie pe care o oferă, făcând posibilă ajustarea modelelor mari pe hardware cu resurse limitate.</p> <h2 class="relative group"><a id="4-evaluarea" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#4-evaluarea"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>4️⃣ Evaluarea</span></h2> <p data-svelte-h="svelte-1bykd8x">Evaluarea este un pas crucial în procesul de fine-tuning. 
Ne permite să măsurăm performanța modelului pe un set de date specific sarcinii.</p> <div class="course-tip bg-gradient-to-br dark:bg-gradient-to-r before:border-green-500 dark:before:border-green-800 from-green-50 dark:from-gray-900 to-white dark:to-gray-950 border border-green-50 text-green-700 dark:text-gray-400">⚠️ Pentru a beneficia de toate funcționalitățile disponibile cu Model Hub și 🤗 Transformers, recomandăm <a href="https://huggingface.co/join" data-svelte-h="svelte-1vx364w">crearea unui cont</a>.</div> <h2 class="relative group"><a id="referințe" class="header-link block pr-1.5 text-lg no-hover:hidden with-hover:absolute with-hover:p-1.5 with-hover:opacity-0 with-hover:group-hover:opacity-100 with-hover:right-full" href="#referințe"><span><svg class="" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" width="1em" height="1em" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 256"><path d="M167.594 88.393a8.001 8.001 0 0 1 0 11.314l-67.882 67.882a8 8 0 1 1-11.314-11.315l67.882-67.881a8.003 8.003 0 0 1 11.314 0zm-28.287 84.86l-28.284 28.284a40 40 0 0 1-56.567-56.567l28.284-28.284a8 8 0 0 0-11.315-11.315l-28.284 28.284a56 56 0 0 0 79.196 79.197l28.285-28.285a8 8 0 1 0-11.315-11.314zM212.852 43.14a56.002 56.002 0 0 0-79.196 0l-28.284 28.284a8 8 0 1 0 11.314 11.314l28.284-28.284a40 40 0 0 1 56.568 56.567l-28.285 28.285a8 8 0 0 0 11.315 11.314l28.284-28.284a56.065 56.065 0 0 0 0-79.196z" fill="currentColor"></path></svg></span></a> <span>Referințe</span></h2> <ul data-svelte-h="svelte-wueumt"><li><a href="https://huggingface.co/docs/transformers/main/en/chat_templating" rel="nofollow">Documentația Transformers despre template-urile de chat</a></li> <li><a href="https://github.com/huggingface/trl/blob/main/trl/scripts/sft.py" rel="nofollow">Script pentru fine-tuning supervizat în TRL</a></li> <li><a href="https://huggingface.co/docs/trl/main/en/sft_trainer" rel="nofollow"><code>SFTTrainer</code> în 
TRL</a></li> <li><a href="https://arxiv.org/abs/2305.18290" rel="nofollow">Lucrarea despre optimizarea directă a preferințelor</a></li> <li><a href="https://huggingface.co/docs/trl/sft_trainer" rel="nofollow">Fine-tuning supervizat cu TRL</a></li> <li><a href="https://github.com/huggingface/alignment-handbook" rel="nofollow">Cum să faceți fine-tuning la Google Gemma cu ChatML și Hugging Face TRL</a></li> <li><a href="https://huggingface.co/learn/cookbook/en/fine_tuning_llm_to_generate_persian_product_catalogs_in_json_format" rel="nofollow">Fine-tuning LLM pentru a genera cataloage de produse persane în format JSON</a></li></ul> <a class="!text-gray-400 !no-underline text-sm flex items-center not-prose mt-4" href="https://github.com/huggingface/course/blob/main/chapters/rum/chapter11/1.mdx" target="_blank"><span data-svelte-h="svelte-1kd6by1">&lt;</span> <span data-svelte-h="svelte-x0xyl0">&gt;</span> <span data-svelte-h="svelte-1dajgef"><span class="underline ml-1.5">Update</span> on GitHub</span></a> <p></p>
<script>
{
	// SvelteKit client bootstrap: publish the app's path configuration under the
	// build-specific global name, then kick off client-side hydration.
	__sveltekit_1ftlxhy = {
		assets: "/docs/course/pr_1069/rum",
		base: "/docs/course/pr_1069/rum",
		env: {}
	};

	// Hydrate into the element that contains this inline <script> tag.
	const host = document.currentScript.parentElement;

	// Serialized load data for the layout (node 0) and page (node 20);
	// neither node shipped server data, hence two nulls.
	const initialData = [null, null];

	// Load the runtime ("start") and the application manifest ("app") in
	// parallel, then start the client with the hydration options.
	Promise.all([
		import("/docs/course/pr_1069/rum/_app/immutable/entry/start.1de7c3d2.js"),
		import("/docs/course/pr_1069/rum/_app/immutable/entry/app.1f82014c.js")
	]).then(([kit, app]) => {
		kit.start(app, host, {
			node_ids: [0, 20],
			data: initialData,
			form: null,
			error: null
		});
	});
}
</script>

Xet Storage Details

Size:
14.5 kB
·
Xet hash:
d4d1e85f521d7c649891c9bf0ab632030537a6019dc1ce6dade3d05c5530f247

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.