Buckets:

rtrm's picture
download
raw
38.4 kB
import{s as _e,n as he,o as je}from"../chunks/scheduler.23542ac5.js";import{S as Ce,i as Ze,e as i,s as a,c as m,h as Ie,a as p,d as e,b as n,f as Te,g as o,j as M,k as be,l as Ne,m as s,n as c,t as r,o as f,p as u}from"../chunks/index.9b1f405b.js";import{C as $e,H as y,E as ge}from"../chunks/MermaidChart.svelte_svelte_type_style_lang.99afe1ab.js";import{C as w}from"../chunks/CodeBlock.1c9c6858.js";function We(xl){let d,Nt,Zt,$t,U,gt,J,Wt,T,Bl=`<a href="https://huggingface.co/papers/2212.04488" rel="nofollow">커스텀 Diffusion</a>은 피사체의 이미지 몇 장(4~5장)만 주어지면 Stable Diffusion처럼 text-to-image 모델을 커스터마이징하는 방법입니다.
‘train_custom_diffusion.py’ 스크립트는 학습 과정을 구현하고 이를 Stable Diffusion에 맞게 조정하는 방법을 보여줍니다.`,Xt,b,Ll='이 교육 사례는 <a href="https://nupurkmr9.github.io/" rel="nofollow">Nupur Kumari</a>가 제공하였습니다. (Custom Diffusion의 저자 중 한명).',Rt,_,Vt,h,Et,j,zl="스크립트를 실행하기 전에 라이브러리의 학습 dependencies를 설치해야 합니다:",Gt,C,Yl="<strong>중요</strong>",vt,Z,kl="예제 스크립트의 최신 버전을 성공적으로 실행하려면 <strong>소스로부터 설치</strong>하는 것을 매우 권장하며, 예제 스크립트를 자주 업데이트하는 만큼 일부 예제별 요구 사항을 설치하고 설치를 최신 상태로 유지하는 것이 좋습니다. 이를 위해 새 가상 환경에서 다음 단계를 실행하세요:",Ft,I,xt,N,Sl='<a href="https://github.com/huggingface/diffusers/tree/main/examples/custom_diffusion" rel="nofollow">example folder</a>로 cd하여 이동하세요.',Bt,$,Lt,g,Ql="이제 실행",zt,W,Yt,X,Al='그리고 <a href="https://github.com/huggingface/accelerate/" rel="nofollow">🤗Accelerate</a> 환경을 초기화:',kt,R,St,V,Hl="또는 사용자 환경에 대한 질문에 답하지 않고 기본 가속 구성을 사용하려면 다음과 같이 하세요.",Qt,E,At,G,ql="또는 사용 중인 환경이 대화형 셸을 지원하지 않는 경우(예: jupyter notebook)",Ht,v,qt,F,Dt,x,Dl='이제 데이터셋을 가져옵니다. <a href="https://www.cs.cmu.edu/~custom-diffusion/assets/data.zip" rel="nofollow">여기</a>에서 데이터셋을 다운로드하고 압축을 풉니다. 직접 데이터셋을 사용하려면 <a href="create_dataset">학습용 데이터셋 생성하기</a> 가이드를 참고하세요.',Pt,B,Pl="또한 ‘clip-retrieval’을 사용하여 200개의 실제 이미지를 수집하고, regularization으로서 이를 학습 데이터셋의 타겟 이미지와 결합합니다. 이렇게 하면 주어진 타겟 이미지에 대한 과적합을 방지할 수 있습니다. 다음 플래그를 사용하면 <code>prior_loss_weight=1.</code>로 <code>prior_preservation</code>, <code>real_prior</code> regularization을 활성화할 수 있습니다.\n클래스_프롬프트<code>는 대상 이미지와 동일한 카테고리 이름이어야 합니다. 수집된 실제 이미지에는 </code>class_prompt<code>와 유사한 텍스트 캡션이 있습니다. 검색된 이미지는 </code>class_data_dir<code>에 저장됩니다. 생성된 이미지를 regularization으로 사용하기 위해 </code>real_prior`를 비활성화할 수 있습니다. 실제 이미지를 수집하려면 훈련 전에 이 명령을 먼저 사용하십시오.",Ot,L,Kt,z,Ol='<strong><strong><em>참고: <a href="https://huggingface.co/stabilityai/stable-diffusion-2" rel="nofollow">stable-diffusion-2</a> 768x768 모델을 사용하는 경우 ‘해상도’를 768로 변경하세요.</em></strong></strong>',tl,Y,Kl="스크립트는 모델 체크포인트와 <code>pytorch_custom_diffusion_weights.bin</code> 파일을 생성하여 저장소에 저장합니다.",ll,k,el,S,te='<strong>더 낮은 VRAM 요구 사항(GPU당 16GB)으로 더 빠르게 훈련하려면 <code>--enable_xformers_memory_efficient_attention</code>을 사용하세요. 설치 방법은 <a href="https://github.com/facebookresearch/xformers" rel="nofollow">가이드</a>를 따르세요.</strong>',sl,Q,le="가중치 및 편향(<code>wandb</code>)을 사용하여 실험을 추적하고 중간 결과를 저장하려면(강력히 권장합니다) 다음 단계를 따르세요:",al,A,ee=`<li><code>wandb</code> 설치: <code>pip install wandb</code>.</li> <li>로그인 : <code>wandb login</code>.</li> <li>그런 다음 트레이닝을 시작하는 동안 <code>validation_prompt</code>를 지정하고 <code>report_to</code>를 <code>wandb</code>로 설정합니다. 다음과 같은 관련 인수를 구성할 수도 있습니다:
<ul><li><code>num_validation_images</code></li> <li><code>validation_steps</code></li></ul></li>`,nl,H,il,q,se='다음은 <a href="https://wandb.ai/sayakpaul/custom-diffusion/runs/26ghrcau" rel="nofollow">Weights and Biases page</a>의 예시이며, 여러 학습 세부 정보와 함께 중간 결과들을 확인할 수 있습니다.',pl,D,ae='<code>--push_to_hub</code>를 지정하면 학습된 파라미터가 허깅 페이스 허브의 리포지토리에 푸시됩니다. 다음은 <a href="https://huggingface.co/sayakpaul/custom-diffusion-cat" rel="nofollow">예제 리포지토리</a>입니다.',Ml,P,ml,O,ne='<a href="https://github.com/ShivamShrirao/diffusers/blob/main/examples/dreambooth/train_dreambooth.py" rel="nofollow">this</a>와 유사하게 각 컨셉에 대한 정보가 포함된 <a href="https://github.com/adobe-research/custom-diffusion/blob/main/assets/concept_list.json" rel="nofollow">json</a> 파일을 제공합니다.',ol,K,ie="실제 이미지를 수집하려면 json 파일의 각 컨셉에 대해 이 명령을 실행합니다.",cl,tt,rl,lt,pe="그럼 우리는 학습시킬 준비가 되었습니다!",fl,et,ul,st,Me='다음은 <a href="https://wandb.ai/sayakpaul/custom-diffusion/runs/3990tzkg" rel="nofollow">Weights and Biases page</a>의 예시이며, 다른 학습 세부 정보와 함께 중간 결과들을 확인할 수 있습니다.',wl,at,yl,nt,me="사람 얼굴에 대한 파인튜닝을 위해 다음과 같은 설정이 더 효과적이라는 것을 확인했습니다: <code>learning_rate=5e-6</code>, <code>max_train_steps=1000 to 2000</code>, <code>freeze_model=crossattn</code>을 최소 15~20개의 이미지로 설정합니다.",dl,it,oe="실제 이미지를 수집하려면 훈련 전에 이 명령을 먼저 사용하십시오.",Ul,pt,Jl,Mt,ce="이제 학습을 시작하세요!",Tl,mt,bl,ot,_l,ct,re="위 프롬프트를 사용하여 모델을 학습시킨 후에는 아래 프롬프트를 사용하여 추론을 실행할 수 있습니다. 프롬프트에 ‘modifier token’(예: 위 예제에서는 \\<new1>)을 반드시 포함해야 합니다.",hl,rt,jl,ft,fe="허브 리포지토리에서 이러한 매개변수를 직접 로드할 수 있습니다:",Cl,ut,Zl,wt,ue="다음은 여러 컨셉으로 추론을 수행하는 예제입니다:",Il,yt,Nl,dt,we="여기서 ‘고양이’와 ‘나무 냄비’는 여러 컨셉을 말합니다.",$l,Ut,gl,Jt,ye="<code>--checkpointing_steps</code> 인수를 사용한 경우 학습 과정에서 저장된 전체 체크포인트 중 하나에서 추론을 수행할 수도 있습니다.",Wl,Tt,Xl,bt,de="더 많은 메모리를 절약하려면 스크립트에 <code>--set_grads_to_none</code> 인수를 전달하세요. 이렇게 하면 성적이 0이 아닌 없음으로 설정됩니다. 그러나 특정 동작이 변경되므로 문제가 발생하면 이 인수를 제거하세요.",Rl,_t,Ue='자세한 정보: <a href="https://pytorch.org/docs/stable/generated/torch.optim.Optimizer.zero_grad.html" rel="nofollow">https://pytorch.org/docs/stable/generated/torch.optim.Optimizer.zero_grad.html</a>',Vl,ht,El,jt,Je='실험에 대한 자세한 내용은 <a href="https://www.cs.cmu.edu/~custom-diffusion/" rel="nofollow">당사 웹페이지</a>를 참조하세요.',Gl,Ct,vl,It,Fl;return U=new $e({props:{containerStyle:"float: right; margin-left: 10px; display: inline-flex; position: relative; z-index: 10;"}}),J=new y({props:{title:"커스텀 Diffusion 학습 예제",local:"커스텀-diffusion-학습-예제",headingTag:"h1"}}),_=new y({props:{title:"로컬에서 PyTorch로 실행하기",local:"로컬에서-pytorch로-실행하기",headingTag:"h2"}}),h=new y({props:{title:"Dependencies 설치하기",local:"dependencies-설치하기",headingTag:"h3"}}),I=new w({props:{code:"Z2l0JTIwY2xvbmUlMjBodHRwcyUzQSUyRiUyRmdpdGh1Yi5jb20lMkZodWdnaW5nZmFjZSUyRmRpZmZ1c2VycyUwQWNkJTIwZGlmZnVzZXJzJTBBcGlwJTIwaW5zdGFsbCUyMC1lJTIwLg==",highlighted:`git <span class="hljs-built_in">clone</span> https://github.com/huggingface/diffusers
<span class="hljs-built_in">cd</span> diffusers
pip install -e .`,wrap:!1}}),$=new w({props:{code:"Y2QlMjBleGFtcGxlcyUyRmN1c3RvbV9kaWZmdXNpb24=",highlighted:'<span class="hljs-built_in">cd</span> examples/custom_diffusion',wrap:!1}}),W=new w({props:{code:"cGlwJTIwaW5zdGFsbCUyMC1yJTIwcmVxdWlyZW1lbnRzLnR4dCUwQXBpcCUyMGluc3RhbGwlMjBjbGlwLXJldHJpZXZhbA==",highlighted:`pip install -r requirements.txt
pip install clip-retrieval`,wrap:!1}}),R=new w({props:{code:"YWNjZWxlcmF0ZSUyMGNvbmZpZw==",highlighted:"accelerate config",wrap:!1}}),E=new w({props:{code:"YWNjZWxlcmF0ZSUyMGNvbmZpZyUyMGRlZmF1bHQ=",highlighted:"accelerate config default",wrap:!1}}),v=new w({props:{code:"ZnJvbSUyMGFjY2VsZXJhdGUudXRpbHMlMjBpbXBvcnQlMjB3cml0ZV9iYXNpY19jb25maWclMEElMEF3cml0ZV9iYXNpY19jb25maWcoKQ==",highlighted:`<span class="hljs-keyword">from</span> accelerate.utils <span class="hljs-keyword">import</span> write_basic_config
write_basic_config()`,wrap:!1}}),F=new y({props:{title:"고양이 예제 😺",local:"고양이-예제-",headingTag:"h3"}}),L=new w({props:{code:"cGlwJTIwaW5zdGFsbCUyMGNsaXAtcmV0cmlldmFsJTBBcHl0aG9uJTIwcmV0cmlldmUucHklMjAtLWNsYXNzX3Byb21wdCUyMGNhdCUyMC0tY2xhc3NfZGF0YV9kaXIlMjByZWFsX3JlZyUyRnNhbXBsZXNfY2F0JTIwLS1udW1fY2xhc3NfaW1hZ2VzJTIwMjAw",highlighted:`pip install clip-retrieval
python retrieve.py --class_prompt <span class="hljs-built_in">cat</span> --class_data_dir real_reg/samples_cat --num_class_images 200`,wrap:!1}}),k=new w({props:{code:"ZXhwb3J0JTIwTU9ERUxfTkFNRSUzRCUyMkNvbXBWaXMlMkZzdGFibGUtZGlmZnVzaW9uLXYxLTQlMjIlMEFleHBvcnQlMjBPVVRQVVRfRElSJTNEJTIycGF0aC10by1zYXZlLW1vZGVsJTIyJTBBZXhwb3J0JTIwSU5TVEFOQ0VfRElSJTNEJTIyLiUyRmRhdGElMkZjYXQlMjIlMEElMEFhY2NlbGVyYXRlJTIwbGF1bmNoJTIwdHJhaW5fY3VzdG9tX2RpZmZ1c2lvbi5weSUyMCU1QyUwQSUyMCUyMC0tcHJldHJhaW5lZF9tb2RlbF9uYW1lX29yX3BhdGglM0QlMjRNT0RFTF9OQU1FJTIwJTIwJTVDJTBBJTIwJTIwLS1pbnN0YW5jZV9kYXRhX2RpciUzRCUyNElOU1RBTkNFX0RJUiUyMCU1QyUwQSUyMCUyMC0tb3V0cHV0X2RpciUzRCUyNE9VVFBVVF9ESVIlMjAlNUMlMEElMjAlMjAtLWNsYXNzX2RhdGFfZGlyJTNELiUyRnJlYWxfcmVnJTJGc2FtcGxlc19jYXQlMkYlMjAlNUMlMEElMjAlMjAtLXdpdGhfcHJpb3JfcHJlc2VydmF0aW9uJTIwLS1yZWFsX3ByaW9yJTIwLS1wcmlvcl9sb3NzX3dlaWdodCUzRDEuMCUyMCU1QyUwQSUyMCUyMC0tY2xhc3NfcHJvbXB0JTNEJTIyY2F0JTIyJTIwLS1udW1fY2xhc3NfaW1hZ2VzJTNEMjAwJTIwJTVDJTBBJTIwJTIwLS1pbnN0YW5jZV9wcm9tcHQlM0QlMjJwaG90byUyMG9mJTIwYSUyMCUzQ25ldzElM0UlMjBjYXQlMjIlMjAlMjAlNUMlMEElMjAlMjAtLXJlc29sdXRpb24lM0Q1MTIlMjAlMjAlNUMlMEElMjAlMjAtLXRyYWluX2JhdGNoX3NpemUlM0QyJTIwJTIwJTVDJTBBJTIwJTIwLS1sZWFybmluZ19yYXRlJTNEMWUtNSUyMCUyMCU1QyUwQSUyMCUyMC0tbHJfd2FybXVwX3N0ZXBzJTNEMCUyMCU1QyUwQSUyMCUyMC0tbWF4X3RyYWluX3N0ZXBzJTNEMjUwJTIwJTVDJTBBJTIwJTIwLS1zY2FsZV9sciUyMC0taGZsaXAlMjAlMjAlNUMlMEElMjAlMjAtLW1vZGlmaWVyX3Rva2VuJTIwJTIyJTNDbmV3MSUzRSUyMiUyMCU1QyUwQSUyMCUyMC0tcHVzaF90b19odWI=",highlighted:`<span class="hljs-built_in">export</span> MODEL_NAME=<span class="hljs-string">&quot;CompVis/stable-diffusion-v1-4&quot;</span>
<span class="hljs-built_in">export</span> OUTPUT_DIR=<span class="hljs-string">&quot;path-to-save-model&quot;</span>
<span class="hljs-built_in">export</span> INSTANCE_DIR=<span class="hljs-string">&quot;./data/cat&quot;</span>
accelerate launch train_custom_diffusion.py \\
--pretrained_model_name_or_path=<span class="hljs-variable">$MODEL_NAME</span> \\
--instance_data_dir=<span class="hljs-variable">$INSTANCE_DIR</span> \\
--output_dir=<span class="hljs-variable">$OUTPUT_DIR</span> \\
--class_data_dir=./real_reg/samples_cat/ \\
--with_prior_preservation --real_prior --prior_loss_weight=1.0 \\
--class_prompt=<span class="hljs-string">&quot;cat&quot;</span> --num_class_images=200 \\
--instance_prompt=<span class="hljs-string">&quot;photo of a &lt;new1&gt; cat&quot;</span> \\
--resolution=512 \\
--train_batch_size=2 \\
--learning_rate=1e-5 \\
--lr_warmup_steps=0 \\
--max_train_steps=250 \\
--scale_lr --hflip \\
--modifier_token <span class="hljs-string">&quot;&lt;new1&gt;&quot;</span> \\
--push_to_hub`,wrap:!1}}),H=new w({props:{code:"YWNjZWxlcmF0ZSUyMGxhdW5jaCUyMHRyYWluX2N1c3RvbV9kaWZmdXNpb24ucHklMjAlNUMlMEElMjAlMjAtLXByZXRyYWluZWRfbW9kZWxfbmFtZV9vcl9wYXRoJTNEJTI0TU9ERUxfTkFNRSUyMCUyMCU1QyUwQSUyMCUyMC0taW5zdGFuY2VfZGF0YV9kaXIlM0QlMjRJTlNUQU5DRV9ESVIlMjAlNUMlMEElMjAlMjAtLW91dHB1dF9kaXIlM0QlMjRPVVRQVVRfRElSJTIwJTVDJTBBJTIwJTIwLS1jbGFzc19kYXRhX2RpciUzRC4lMkZyZWFsX3JlZyUyRnNhbXBsZXNfY2F0JTJGJTIwJTVDJTBBJTIwJTIwLS13aXRoX3ByaW9yX3ByZXNlcnZhdGlvbiUyMC0tcmVhbF9wcmlvciUyMC0tcHJpb3JfbG9zc193ZWlnaHQlM0QxLjAlMjAlNUMlMEElMjAlMjAtLWNsYXNzX3Byb21wdCUzRCUyMmNhdCUyMiUyMC0tbnVtX2NsYXNzX2ltYWdlcyUzRDIwMCUyMCU1QyUwQSUyMCUyMC0taW5zdGFuY2VfcHJvbXB0JTNEJTIycGhvdG8lMjBvZiUyMGElMjAlM0NuZXcxJTNFJTIwY2F0JTIyJTIwJTIwJTVDJTBBJTIwJTIwLS1yZXNvbHV0aW9uJTNENTEyJTIwJTIwJTVDJTBBJTIwJTIwLS10cmFpbl9iYXRjaF9zaXplJTNEMiUyMCUyMCU1QyUwQSUyMCUyMC0tbGVhcm5pbmdfcmF0ZSUzRDFlLTUlMjAlMjAlNUMlMEElMjAlMjAtLWxyX3dhcm11cF9zdGVwcyUzRDAlMjAlNUMlMEElMjAlMjAtLW1heF90cmFpbl9zdGVwcyUzRDI1MCUyMCU1QyUwQSUyMCUyMC0tc2NhbGVfbHIlMjAtLWhmbGlwJTIwJTIwJTVDJTBBJTIwJTIwLS1tb2RpZmllcl90b2tlbiUyMCUyMiUzQ25ldzElM0UlMjIlMjAlNUMlMEElMjAlMjAtLXZhbGlkYXRpb25fcHJvbXB0JTNEJTIyJTNDbmV3MSUzRSUyMGNhdCUyMHNpdHRpbmclMjBpbiUyMGElMjBidWNrZXQlMjIlMjAlNUMlMEElMjAlMjAtLXJlcG9ydF90byUzRCUyMndhbmRiJTIyJTIwJTVDJTBBJTIwJTIwLS1wdXNoX3RvX2h1Yg==",highlighted:`accelerate launch train_custom_diffusion.py \\
--pretrained_model_name_or_path=<span class="hljs-variable">$MODEL_NAME</span> \\
--instance_data_dir=<span class="hljs-variable">$INSTANCE_DIR</span> \\
--output_dir=<span class="hljs-variable">$OUTPUT_DIR</span> \\
--class_data_dir=./real_reg/samples_cat/ \\
--with_prior_preservation --real_prior --prior_loss_weight=1.0 \\
--class_prompt=<span class="hljs-string">&quot;cat&quot;</span> --num_class_images=200 \\
--instance_prompt=<span class="hljs-string">&quot;photo of a &lt;new1&gt; cat&quot;</span> \\
--resolution=512 \\
--train_batch_size=2 \\
--learning_rate=1e-5 \\
--lr_warmup_steps=0 \\
--max_train_steps=250 \\
--scale_lr --hflip \\
--modifier_token <span class="hljs-string">&quot;&lt;new1&gt;&quot;</span> \\
--validation_prompt=<span class="hljs-string">&quot;&lt;new1&gt; cat sitting in a bucket&quot;</span> \\
--report_to=<span class="hljs-string">&quot;wandb&quot;</span> \\
--push_to_hub`,wrap:!1}}),P=new y({props:{title:"멀티 컨셉에 대한 학습 🐱🪵",local:"멀티-컨셉에-대한-학습-",headingTag:"h3"}}),tt=new w({props:{code:"cGlwJTIwaW5zdGFsbCUyMGNsaXAtcmV0cmlldmFsJTBBcHl0aG9uJTIwcmV0cmlldmUucHklMjAtLWNsYXNzX3Byb21wdCUyMCU3QiU3RCUyMC0tY2xhc3NfZGF0YV9kaXIlMjAlN0IlN0QlMjAtLW51bV9jbGFzc19pbWFnZXMlMjAyMDA=",highlighted:`pip install clip-retrieval
python retrieve.py --class_prompt {} --class_data_dir {} --num_class_images 200`,wrap:!1}}),et=new w({props:{code:"ZXhwb3J0JTIwTU9ERUxfTkFNRSUzRCUyMkNvbXBWaXMlMkZzdGFibGUtZGlmZnVzaW9uLXYxLTQlMjIlMEFleHBvcnQlMjBPVVRQVVRfRElSJTNEJTIycGF0aC10by1zYXZlLW1vZGVsJTIyJTBBJTBBYWNjZWxlcmF0ZSUyMGxhdW5jaCUyMHRyYWluX2N1c3RvbV9kaWZmdXNpb24ucHklMjAlNUMlMEElMjAlMjAtLXByZXRyYWluZWRfbW9kZWxfbmFtZV9vcl9wYXRoJTNEJTI0TU9ERUxfTkFNRSUyMCUyMCU1QyUwQSUyMCUyMC0tb3V0cHV0X2RpciUzRCUyNE9VVFBVVF9ESVIlMjAlNUMlMEElMjAlMjAtLWNvbmNlcHRzX2xpc3QlM0QuJTJGY29uY2VwdF9saXN0Lmpzb24lMjAlNUMlMEElMjAlMjAtLXdpdGhfcHJpb3JfcHJlc2VydmF0aW9uJTIwLS1yZWFsX3ByaW9yJTIwLS1wcmlvcl9sb3NzX3dlaWdodCUzRDEuMCUyMCU1QyUwQSUyMCUyMC0tcmVzb2x1dGlvbiUzRDUxMiUyMCUyMCU1QyUwQSUyMCUyMC0tdHJhaW5fYmF0Y2hfc2l6ZSUzRDIlMjAlMjAlNUMlMEElMjAlMjAtLWxlYXJuaW5nX3JhdGUlM0QxZS01JTIwJTIwJTVDJTBBJTIwJTIwLS1scl93YXJtdXBfc3RlcHMlM0QwJTIwJTVDJTBBJTIwJTIwLS1tYXhfdHJhaW5fc3RlcHMlM0Q1MDAlMjAlNUMlMEElMjAlMjAtLW51bV9jbGFzc19pbWFnZXMlM0QyMDAlMjAlNUMlMEElMjAlMjAtLXNjYWxlX2xyJTIwLS1oZmxpcCUyMCUyMCU1QyUwQSUyMCUyMC0tbW9kaWZpZXJfdG9rZW4lMjAlMjIlM0NuZXcxJTNFJTJCJTNDbmV3MiUzRSUyMiUyMCU1QyUwQSUyMCUyMC0tcHVzaF90b19odWI=",highlighted:`<span class="hljs-built_in">export</span> MODEL_NAME=<span class="hljs-string">&quot;CompVis/stable-diffusion-v1-4&quot;</span>
<span class="hljs-built_in">export</span> OUTPUT_DIR=<span class="hljs-string">&quot;path-to-save-model&quot;</span>
accelerate launch train_custom_diffusion.py \\
--pretrained_model_name_or_path=<span class="hljs-variable">$MODEL_NAME</span> \\
--output_dir=<span class="hljs-variable">$OUTPUT_DIR</span> \\
--concepts_list=./concept_list.json \\
--with_prior_preservation --real_prior --prior_loss_weight=1.0 \\
--resolution=512 \\
--train_batch_size=2 \\
--learning_rate=1e-5 \\
--lr_warmup_steps=0 \\
--max_train_steps=500 \\
--num_class_images=200 \\
--scale_lr --hflip \\
--modifier_token <span class="hljs-string">&quot;&lt;new1&gt;+&lt;new2&gt;&quot;</span> \\
--push_to_hub`,wrap:!1}}),at=new y({props:{title:"사람 얼굴에 대한 학습",local:"사람-얼굴에-대한-학습",headingTag:"h3"}}),pt=new w({props:{code:"cGlwJTIwaW5zdGFsbCUyMGNsaXAtcmV0cmlldmFsJTBBcHl0aG9uJTIwcmV0cmlldmUucHklMjAtLWNsYXNzX3Byb21wdCUyMHBlcnNvbiUyMC0tY2xhc3NfZGF0YV9kaXIlMjByZWFsX3JlZyUyRnNhbXBsZXNfcGVyc29uJTIwLS1udW1fY2xhc3NfaW1hZ2VzJTIwMjAw",highlighted:`pip install clip-retrieval
python retrieve.py --class_prompt person --class_data_dir real_reg/samples_person --num_class_images 200`,wrap:!1}}),mt=new w({props:{code:"ZXhwb3J0JTIwTU9ERUxfTkFNRSUzRCUyMkNvbXBWaXMlMkZzdGFibGUtZGlmZnVzaW9uLXYxLTQlMjIlMEFleHBvcnQlMjBPVVRQVVRfRElSJTNEJTIycGF0aC10by1zYXZlLW1vZGVsJTIyJTBBZXhwb3J0JTIwSU5TVEFOQ0VfRElSJTNEJTIycGF0aC10by1pbWFnZXMlMjIlMEElMEFhY2NlbGVyYXRlJTIwbGF1bmNoJTIwdHJhaW5fY3VzdG9tX2RpZmZ1c2lvbi5weSUyMCU1QyUwQSUyMCUyMC0tcHJldHJhaW5lZF9tb2RlbF9uYW1lX29yX3BhdGglM0QlMjRNT0RFTF9OQU1FJTIwJTIwJTVDJTBBJTIwJTIwLS1pbnN0YW5jZV9kYXRhX2RpciUzRCUyNElOU1RBTkNFX0RJUiUyMCU1QyUwQSUyMCUyMC0tb3V0cHV0X2RpciUzRCUyNE9VVFBVVF9ESVIlMjAlNUMlMEElMjAlMjAtLWNsYXNzX2RhdGFfZGlyJTNELiUyRnJlYWxfcmVnJTJGc2FtcGxlc19wZXJzb24lMkYlMjAlNUMlMEElMjAlMjAtLXdpdGhfcHJpb3JfcHJlc2VydmF0aW9uJTIwLS1yZWFsX3ByaW9yJTIwLS1wcmlvcl9sb3NzX3dlaWdodCUzRDEuMCUyMCU1QyUwQSUyMCUyMC0tY2xhc3NfcHJvbXB0JTNEJTIycGVyc29uJTIyJTIwLS1udW1fY2xhc3NfaW1hZ2VzJTNEMjAwJTIwJTVDJTBBJTIwJTIwLS1pbnN0YW5jZV9wcm9tcHQlM0QlMjJwaG90byUyMG9mJTIwYSUyMCUzQ25ldzElM0UlMjBwZXJzb24lMjIlMjAlMjAlNUMlMEElMjAlMjAtLXJlc29sdXRpb24lM0Q1MTIlMjAlMjAlNUMlMEElMjAlMjAtLXRyYWluX2JhdGNoX3NpemUlM0QyJTIwJTIwJTVDJTBBJTIwJTIwLS1sZWFybmluZ19yYXRlJTNENWUtNiUyMCUyMCU1QyUwQSUyMCUyMC0tbHJfd2FybXVwX3N0ZXBzJTNEMCUyMCU1QyUwQSUyMCUyMC0tbWF4X3RyYWluX3N0ZXBzJTNEMTAwMCUyMCU1QyUwQSUyMCUyMC0tc2NhbGVfbHIlMjAtLWhmbGlwJTIwLS1ub2F1ZyUyMCU1QyUwQSUyMCUyMC0tZnJlZXplX21vZGVsJTIwY3Jvc3NhdHRuJTIwJTVDJTBBJTIwJTIwLS1tb2RpZmllcl90b2tlbiUyMCUyMiUzQ25ldzElM0UlMjIlMjAlNUMlMEElMjAlMjAtLWVuYWJsZV94Zm9ybWVyc19tZW1vcnlfZWZmaWNpZW50X2F0dGVudGlvbiUyMCU1QyUwQSUyMCUyMC0tcHVzaF90b19odWI=",highlighted:`<span class="hljs-built_in">export</span> MODEL_NAME=<span class="hljs-string">&quot;CompVis/stable-diffusion-v1-4&quot;</span>
<span class="hljs-built_in">export</span> OUTPUT_DIR=<span class="hljs-string">&quot;path-to-save-model&quot;</span>
<span class="hljs-built_in">export</span> INSTANCE_DIR=<span class="hljs-string">&quot;path-to-images&quot;</span>
accelerate launch train_custom_diffusion.py \\
--pretrained_model_name_or_path=<span class="hljs-variable">$MODEL_NAME</span> \\
--instance_data_dir=<span class="hljs-variable">$INSTANCE_DIR</span> \\
--output_dir=<span class="hljs-variable">$OUTPUT_DIR</span> \\
--class_data_dir=./real_reg/samples_person/ \\
--with_prior_preservation --real_prior --prior_loss_weight=1.0 \\
--class_prompt=<span class="hljs-string">&quot;person&quot;</span> --num_class_images=200 \\
--instance_prompt=<span class="hljs-string">&quot;photo of a &lt;new1&gt; person&quot;</span> \\
--resolution=512 \\
--train_batch_size=2 \\
--learning_rate=5e-6 \\
--lr_warmup_steps=0 \\
--max_train_steps=1000 \\
--scale_lr --hflip --noaug \\
--freeze_model crossattn \\
--modifier_token <span class="hljs-string">&quot;&lt;new1&gt;&quot;</span> \\
--enable_xformers_memory_efficient_attention \\
--push_to_hub`,wrap:!1}}),ot=new y({props:{title:"추론",local:"추론",headingTag:"h2"}}),rt=new w({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMEElMEFwaXBlJTIwJTNEJTIwRGlmZnVzaW9uUGlwZWxpbmUuZnJvbV9wcmV0cmFpbmVkKCUyMkNvbXBWaXMlMkZzdGFibGUtZGlmZnVzaW9uLXYxLTQlMjIlMkMlMjB0b3JjaF9kdHlwZSUzRHRvcmNoLmZsb2F0MTYpLnRvKCUyMmN1ZGElMjIpJTBBcGlwZS51bmV0LmxvYWRfYXR0bl9wcm9jcyglMjJwYXRoLXRvLXNhdmUtbW9kZWwlMjIlMkMlMjB3ZWlnaHRfbmFtZSUzRCUyMnB5dG9yY2hfY3VzdG9tX2RpZmZ1c2lvbl93ZWlnaHRzLmJpbiUyMiklMEFwaXBlLmxvYWRfdGV4dHVhbF9pbnZlcnNpb24oJTIycGF0aC10by1zYXZlLW1vZGVsJTIyJTJDJTIwd2VpZ2h0X25hbWUlM0QlMjIlM0NuZXcxJTNFLmJpbiUyMiklMEElMEFpbWFnZSUyMCUzRCUyMHBpcGUoJTBBJTIwJTIwJTIwJTIwJTIyJTNDbmV3MSUzRSUyMGNhdCUyMHNpdHRpbmclMjBpbiUyMGElMjBidWNrZXQlMjIlMkMlMEElMjAlMjAlMjAlMjBudW1faW5mZXJlbmNlX3N0ZXBzJTNEMTAwJTJDJTBBJTIwJTIwJTIwJTIwZ3VpZGFuY2Vfc2NhbGUlM0Q2LjAlMkMlMEElMjAlMjAlMjAlMjBldGElM0QxLjAlMkMlMEEpLmltYWdlcyU1QjAlNUQlMEFpbWFnZS5zYXZlKCUyMmNhdC5wbmclMjIp",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline
pipe = DiffusionPipeline.from_pretrained(<span class="hljs-string">&quot;CompVis/stable-diffusion-v1-4&quot;</span>, torch_dtype=torch.float16).to(<span class="hljs-string">&quot;cuda&quot;</span>)
pipe.unet.load_attn_procs(<span class="hljs-string">&quot;path-to-save-model&quot;</span>, weight_name=<span class="hljs-string">&quot;pytorch_custom_diffusion_weights.bin&quot;</span>)
pipe.load_textual_inversion(<span class="hljs-string">&quot;path-to-save-model&quot;</span>, weight_name=<span class="hljs-string">&quot;&lt;new1&gt;.bin&quot;</span>)
image = pipe(
<span class="hljs-string">&quot;&lt;new1&gt; cat sitting in a bucket&quot;</span>,
num_inference_steps=<span class="hljs-number">100</span>,
guidance_scale=<span class="hljs-number">6.0</span>,
eta=<span class="hljs-number">1.0</span>,
).images[<span class="hljs-number">0</span>]
image.save(<span class="hljs-string">&quot;cat.png&quot;</span>)`,wrap:!1}}),ut=new w({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwaHVnZ2luZ2ZhY2VfaHViLnJlcG9jYXJkJTIwaW1wb3J0JTIwUmVwb0NhcmQlMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMEElMEFtb2RlbF9pZCUyMCUzRCUyMCUyMnNheWFrcGF1bCUyRmN1c3RvbS1kaWZmdXNpb24tY2F0JTIyJTBBY2FyZCUyMCUzRCUyMFJlcG9DYXJkLmxvYWQobW9kZWxfaWQpJTBBYmFzZV9tb2RlbF9pZCUyMCUzRCUyMGNhcmQuZGF0YS50b19kaWN0KCklNUIlMjJiYXNlX21vZGVsJTIyJTVEJTBBJTBBcGlwZSUyMCUzRCUyMERpZmZ1c2lvblBpcGVsaW5lLmZyb21fcHJldHJhaW5lZChiYXNlX21vZGVsX2lkJTJDJTIwdG9yY2hfZHR5cGUlM0R0b3JjaC5mbG9hdDE2KS50byglMjJjdWRhJTIyKSUwQXBpcGUudW5ldC5sb2FkX2F0dG5fcHJvY3MobW9kZWxfaWQlMkMlMjB3ZWlnaHRfbmFtZSUzRCUyMnB5dG9yY2hfY3VzdG9tX2RpZmZ1c2lvbl93ZWlnaHRzLmJpbiUyMiklMEFwaXBlLmxvYWRfdGV4dHVhbF9pbnZlcnNpb24obW9kZWxfaWQlMkMlMjB3ZWlnaHRfbmFtZSUzRCUyMiUzQ25ldzElM0UuYmluJTIyKSUwQSUwQWltYWdlJTIwJTNEJTIwcGlwZSglMEElMjAlMjAlMjAlMjAlMjIlM0NuZXcxJTNFJTIwY2F0JTIwc2l0dGluZyUyMGluJTIwYSUyMGJ1Y2tldCUyMiUyQyUwQSUyMCUyMCUyMCUyMG51bV9pbmZlcmVuY2Vfc3RlcHMlM0QxMDAlMkMlMEElMjAlMjAlMjAlMjBndWlkYW5jZV9zY2FsZSUzRDYuMCUyQyUwQSUyMCUyMCUyMCUyMGV0YSUzRDEuMCUyQyUwQSkuaW1hZ2VzJTVCMCU1RCUwQWltYWdlLnNhdmUoJTIyY2F0LnBuZyUyMik=",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> huggingface_hub.repocard <span class="hljs-keyword">import</span> RepoCard
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline
model_id = <span class="hljs-string">&quot;sayakpaul/custom-diffusion-cat&quot;</span>
card = RepoCard.load(model_id)
base_model_id = card.data.to_dict()[<span class="hljs-string">&quot;base_model&quot;</span>]
pipe = DiffusionPipeline.from_pretrained(base_model_id, torch_dtype=torch.float16).to(<span class="hljs-string">&quot;cuda&quot;</span>)
pipe.unet.load_attn_procs(model_id, weight_name=<span class="hljs-string">&quot;pytorch_custom_diffusion_weights.bin&quot;</span>)
pipe.load_textual_inversion(model_id, weight_name=<span class="hljs-string">&quot;&lt;new1&gt;.bin&quot;</span>)
image = pipe(
<span class="hljs-string">&quot;&lt;new1&gt; cat sitting in a bucket&quot;</span>,
num_inference_steps=<span class="hljs-number">100</span>,
guidance_scale=<span class="hljs-number">6.0</span>,
eta=<span class="hljs-number">1.0</span>,
).images[<span class="hljs-number">0</span>]
image.save(<span class="hljs-string">&quot;cat.png&quot;</span>)`,wrap:!1}}),yt=new w({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwaHVnZ2luZ2ZhY2VfaHViLnJlcG9jYXJkJTIwaW1wb3J0JTIwUmVwb0NhcmQlMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMEElMEFtb2RlbF9pZCUyMCUzRCUyMCUyMnNheWFrcGF1bCUyRmN1c3RvbS1kaWZmdXNpb24tY2F0LXdvb2Rlbi1wb3QlMjIlMEFjYXJkJTIwJTNEJTIwUmVwb0NhcmQubG9hZChtb2RlbF9pZCklMEFiYXNlX21vZGVsX2lkJTIwJTNEJTIwY2FyZC5kYXRhLnRvX2RpY3QoKSU1QiUyMmJhc2VfbW9kZWwlMjIlNUQlMEElMEFwaXBlJTIwJTNEJTIwRGlmZnVzaW9uUGlwZWxpbmUuZnJvbV9wcmV0cmFpbmVkKGJhc2VfbW9kZWxfaWQlMkMlMjB0b3JjaF9kdHlwZSUzRHRvcmNoLmZsb2F0MTYpLnRvKCUyMmN1ZGElMjIpJTBBcGlwZS51bmV0LmxvYWRfYXR0bl9wcm9jcyhtb2RlbF9pZCUyQyUyMHdlaWdodF9uYW1lJTNEJTIycHl0b3JjaF9jdXN0b21fZGlmZnVzaW9uX3dlaWdodHMuYmluJTIyKSUwQXBpcGUubG9hZF90ZXh0dWFsX2ludmVyc2lvbihtb2RlbF9pZCUyQyUyMHdlaWdodF9uYW1lJTNEJTIyJTNDbmV3MSUzRS5iaW4lMjIpJTBBcGlwZS5sb2FkX3RleHR1YWxfaW52ZXJzaW9uKG1vZGVsX2lkJTJDJTIwd2VpZ2h0X25hbWUlM0QlMjIlM0NuZXcyJTNFLmJpbiUyMiklMEElMEFpbWFnZSUyMCUzRCUyMHBpcGUoJTBBJTIwJTIwJTIwJTIwJTIydGhlJTIwJTNDbmV3MSUzRSUyMGNhdCUyMHNjdWxwdHVyZSUyMGluJTIwdGhlJTIwc3R5bGUlMjBvZiUyMGElMjAlM0NuZXcyJTNFJTIwd29vZGVuJTIwcG90JTIyJTJDJTBBJTIwJTIwJTIwJTIwbnVtX2luZmVyZW5jZV9zdGVwcyUzRDEwMCUyQyUwQSUyMCUyMCUyMCUyMGd1aWRhbmNlX3NjYWxlJTNENi4wJTJDJTBBJTIwJTIwJTIwJTIwZXRhJTNEMS4wJTJDJTBBKS5pbWFnZXMlNUIwJTVEJTBBaW1hZ2Uuc2F2ZSglMjJtdWx0aS1zdWJqZWN0LnBuZyUyMik=",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> huggingface_hub.repocard <span class="hljs-keyword">import</span> RepoCard
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline
model_id = <span class="hljs-string">&quot;sayakpaul/custom-diffusion-cat-wooden-pot&quot;</span>
card = RepoCard.load(model_id)
base_model_id = card.data.to_dict()[<span class="hljs-string">&quot;base_model&quot;</span>]
pipe = DiffusionPipeline.from_pretrained(base_model_id, torch_dtype=torch.float16).to(<span class="hljs-string">&quot;cuda&quot;</span>)
pipe.unet.load_attn_procs(model_id, weight_name=<span class="hljs-string">&quot;pytorch_custom_diffusion_weights.bin&quot;</span>)
pipe.load_textual_inversion(model_id, weight_name=<span class="hljs-string">&quot;&lt;new1&gt;.bin&quot;</span>)
pipe.load_textual_inversion(model_id, weight_name=<span class="hljs-string">&quot;&lt;new2&gt;.bin&quot;</span>)
image = pipe(
<span class="hljs-string">&quot;the &lt;new1&gt; cat sculpture in the style of a &lt;new2&gt; wooden pot&quot;</span>,
num_inference_steps=<span class="hljs-number">100</span>,
guidance_scale=<span class="hljs-number">6.0</span>,
eta=<span class="hljs-number">1.0</span>,
).images[<span class="hljs-number">0</span>]
image.save(<span class="hljs-string">&quot;multi-subject.png&quot;</span>)`,wrap:!1}}),Ut=new y({props:{title:"학습된 체크포인트에서 추론하기",local:"학습된-체크포인트에서-추론하기",headingTag:"h3"}}),Tt=new y({props:{title:"Grads를 None으로 설정",local:"grads를-none으로-설정",headingTag:"h2"}}),ht=new y({props:{title:"실험 결과",local:"실험-결과",headingTag:"h2"}}),Ct=new ge({props:{source:"https://github.com/huggingface/diffusers/blob/main/docs/source/ko/training/custom_diffusion.md"}}),{c(){d=i("meta"),Nt=a(),Zt=i("p"),$t=a(),m(U.$$.fragment),gt=a(),m(J.$$.fragment),Wt=a(),T=i("p"),T.innerHTML=Bl,Xt=a(),b=i("p"),b.innerHTML=Ll,Rt=a(),m(_.$$.fragment),Vt=a(),m(h.$$.fragment),Et=a(),j=i("p"),j.textContent=zl,Gt=a(),C=i("p"),C.innerHTML=Yl,vt=a(),Z=i("p"),Z.innerHTML=kl,Ft=a(),m(I.$$.fragment),xt=a(),N=i("p"),N.innerHTML=Sl,Bt=a(),m($.$$.fragment),Lt=a(),g=i("p"),g.textContent=Ql,zt=a(),m(W.$$.fragment),Yt=a(),X=i("p"),X.innerHTML=Al,kt=a(),m(R.$$.fragment),St=a(),V=i("p"),V.textContent=Hl,Qt=a(),m(E.$$.fragment),At=a(),G=i("p"),G.textContent=ql,Ht=a(),m(v.$$.fragment),qt=a(),m(F.$$.fragment),Dt=a(),x=i("p"),x.innerHTML=Dl,Pt=a(),B=i("p"),B.innerHTML=Pl,Ot=a(),m(L.$$.fragment),Kt=a(),z=i("p"),z.innerHTML=Ol,tl=a(),Y=i("p"),Y.innerHTML=Kl,ll=a(),m(k.$$.fragment),el=a(),S=i("p"),S.innerHTML=te,sl=a(),Q=i("p"),Q.innerHTML=le,al=a(),A=i("ul"),A.innerHTML=ee,nl=a(),m(H.$$.fragment),il=a(),q=i("p"),q.innerHTML=se,pl=a(),D=i("p"),D.innerHTML=ae,Ml=a(),m(P.$$.fragment),ml=a(),O=i("p"),O.innerHTML=ne,ol=a(),K=i("p"),K.textContent=ie,cl=a(),m(tt.$$.fragment),rl=a(),lt=i("p"),lt.textContent=pe,fl=a(),m(et.$$.fragment),ul=a(),st=i("p"),st.innerHTML=Me,wl=a(),m(at.$$.fragment),yl=a(),nt=i("p"),nt.innerHTML=me,dl=a(),it=i("p"),it.textContent=oe,Ul=a(),m(pt.$$.fragment),Jl=a(),Mt=i("p"),Mt.textContent=ce,Tl=a(),m(mt.$$.fragment),bl=a(),m(ot.$$.fragment),_l=a(),ct=i("p"),ct.textContent=re,hl=a(),m(rt.$$.fragment),jl=a(),ft=i("p"),ft.textContent=fe,Cl=a(),m(ut.$$.fragment),Zl=a(),wt=i("p"),wt.textContent=ue,Il=a(),m(yt.$$.fragment),Nl=a(),dt=i("p"),dt.textContent=we,$l=a(),m(Ut.$$.fragment),gl=a(),Jt=i("p"),Jt.innerHTML=ye,Wl=a(),m(Tt.$$.fragment),Xl=a(),bt=i("p"),bt.innerHTML=de,Rl=a(),_t=i("p"),_t.innerHTML=Ue,Vl=a(),m(ht.$$.fragment),El=a(),jt=i("p"),jt.innerHTML=Je,Gl=a(),m(Ct.$$.fragment),vl=a(),It=i("p"),this.h()},l(t){const l=Ie("svelte-u9bgzb",document.head);d=p(l,"META",{name:!0,content:!0}),l.forEach(e),Nt=n(t),Zt=p(t,"P",{}),Te(Zt).forEach(e),$t=n(t),o(U.$$.fragment,t),gt=n(t),o(J.$$.fragment,t),Wt=n(t),T=p(t,"P",{"data-svelte-h":!0}),M(T)!=="svelte-cml10z"&&(T.innerHTML=Bl),Xt=n(t),b=p(t,"P",{"data-svelte-h":!0}),M(b)!=="svelte-10e22jc"&&(b.innerHTML=Ll),Rt=n(t),o(_.$$.fragment,t),Vt=n(t),o(h.$$.fragment,t),Et=n(t),j=p(t,"P",{"data-svelte-h":!0}),M(j)!=="svelte-xj6xw"&&(j.textContent=zl),Gt=n(t),C=p(t,"P",{"data-svelte-h":!0}),M(C)!=="svelte-kmsm4y"&&(C.innerHTML=Yl),vt=n(t),Z=p(t,"P",{"data-svelte-h":!0}),M(Z)!=="svelte-od6hhu"&&(Z.innerHTML=kl),Ft=n(t),o(I.$$.fragment,t),xt=n(t),N=p(t,"P",{"data-svelte-h":!0}),M(N)!=="svelte-y8wn8i"&&(N.innerHTML=Sl),Bt=n(t),o($.$$.fragment,t),Lt=n(t),g=p(t,"P",{"data-svelte-h":!0}),M(g)!=="svelte-1vcow5r"&&(g.textContent=Ql),zt=n(t),o(W.$$.fragment,t),Yt=n(t),X=p(t,"P",{"data-svelte-h":!0}),M(X)!=="svelte-1m4i3jt"&&(X.innerHTML=Al),kt=n(t),o(R.$$.fragment,t),St=n(t),V=p(t,"P",{"data-svelte-h":!0}),M(V)!=="svelte-12kxokf"&&(V.textContent=Hl),Qt=n(t),o(E.$$.fragment,t),At=n(t),G=p(t,"P",{"data-svelte-h":!0}),M(G)!=="svelte-1094f9w"&&(G.textContent=ql),Ht=n(t),o(v.$$.fragment,t),qt=n(t),o(F.$$.fragment,t),Dt=n(t),x=p(t,"P",{"data-svelte-h":!0}),M(x)!=="svelte-1hl0glm"&&(x.innerHTML=Dl),Pt=n(t),B=p(t,"P",{"data-svelte-h":!0}),M(B)!=="svelte-1em9a98"&&(B.innerHTML=Pl),Ot=n(t),o(L.$$.fragment,t),Kt=n(t),z=p(t,"P",{"data-svelte-h":!0}),M(z)!=="svelte-6lkpti"&&(z.innerHTML=Ol),tl=n(t),Y=p(t,"P",{"data-svelte-h":!0}),M(Y)!=="svelte-sf3x1d"&&(Y.innerHTML=Kl),ll=n(t),o(k.$$.fragment,t),el=n(t),S=p(t,"P",{"data-svelte-h":!0}),M(S)!=="svelte-cfu7qy"&&(S.innerHTML=te),sl=n(t),Q=p(t,"P",{"data-svelte-h":!0}),M(Q)!=="svelte-1cdkqys"&&(Q.innerHTML=le),al=n(t),A=p(t,"UL",{"data-svelte-h":!0}),M(A)!=="svelte-o6a60s"&&(A.innerHTML=ee),nl=n(t),o(H.$$.fragment,t),il=n(t),q=p(t,"P",{"data-svelte-h":!0}),M(q)!=="svelte-1gf7cxo"&&(q.innerHTML=se),pl=n(t),D=p(t,"P",{"data-svelte-h":!0}),M(D)!=="svelte-1jkha3u"&&(D.innerHTML=ae),Ml=n(t),o(P.$$.fragment,t),ml=n(t),O=p(t,"P",{"data-svelte-h":!0}),M(O)!=="svelte-dvorqa"&&(O.innerHTML=ne),ol=n(t),K=p(t,"P",{"data-svelte-h":!0}),M(K)!=="svelte-22dnkf"&&(K.textContent=ie),cl=n(t),o(tt.$$.fragment,t),rl=n(t),lt=p(t,"P",{"data-svelte-h":!0}),M(lt)!=="svelte-1ls580o"&&(lt.textContent=pe),fl=n(t),o(et.$$.fragment,t),ul=n(t),st=p(t,"P",{"data-svelte-h":!0}),M(st)!=="svelte-9w5zdp"&&(st.innerHTML=Me),wl=n(t),o(at.$$.fragment,t),yl=n(t),nt=p(t,"P",{"data-svelte-h":!0}),M(nt)!=="svelte-x3ps4s"&&(nt.innerHTML=me),dl=n(t),it=p(t,"P",{"data-svelte-h":!0}),M(it)!=="svelte-1baklot"&&(it.textContent=oe),Ul=n(t),o(pt.$$.fragment,t),Jl=n(t),Mt=p(t,"P",{"data-svelte-h":!0}),M(Mt)!=="svelte-1hae69m"&&(Mt.textContent=ce),Tl=n(t),o(mt.$$.fragment,t),bl=n(t),o(ot.$$.fragment,t),_l=n(t),ct=p(t,"P",{"data-svelte-h":!0}),M(ct)!=="svelte-1fnfhx0"&&(ct.textContent=re),hl=n(t),o(rt.$$.fragment,t),jl=n(t),ft=p(t,"P",{"data-svelte-h":!0}),M(ft)!=="svelte-1kgsue9"&&(ft.textContent=fe),Cl=n(t),o(ut.$$.fragment,t),Zl=n(t),wt=p(t,"P",{"data-svelte-h":!0}),M(wt)!=="svelte-1rk20g1"&&(wt.textContent=ue),Il=n(t),o(yt.$$.fragment,t),Nl=n(t),dt=p(t,"P",{"data-svelte-h":!0}),M(dt)!=="svelte-d3qkpz"&&(dt.textContent=we),$l=n(t),o(Ut.$$.fragment,t),gl=n(t),Jt=p(t,"P",{"data-svelte-h":!0}),M(Jt)!=="svelte-13dw6tk"&&(Jt.innerHTML=ye),Wl=n(t),o(Tt.$$.fragment,t),Xl=n(t),bt=p(t,"P",{"data-svelte-h":!0}),M(bt)!=="svelte-eyi1ip"&&(bt.innerHTML=de),Rl=n(t),_t=p(t,"P",{"data-svelte-h":!0}),M(_t)!=="svelte-1q8lr5q"&&(_t.innerHTML=Ue),Vl=n(t),o(ht.$$.fragment,t),El=n(t),jt=p(t,"P",{"data-svelte-h":!0}),M(jt)!=="svelte-awcruk"&&(jt.innerHTML=Je),Gl=n(t),o(Ct.$$.fragment,t),vl=n(t),It=p(t,"P",{}),Te(It).forEach(e),this.h()},h(){be(d,"name","hf:doc:metadata"),be(d,"content",Xe)},m(t,l){Ne(document.head,d),s(t,Nt,l),s(t,Zt,l),s(t,$t,l),c(U,t,l),s(t,gt,l),c(J,t,l),s(t,Wt,l),s(t,T,l),s(t,Xt,l),s(t,b,l),s(t,Rt,l),c(_,t,l),s(t,Vt,l),c(h,t,l),s(t,Et,l),s(t,j,l),s(t,Gt,l),s(t,C,l),s(t,vt,l),s(t,Z,l),s(t,Ft,l),c(I,t,l),s(t,xt,l),s(t,N,l),s(t,Bt,l),c($,t,l),s(t,Lt,l),s(t,g,l),s(t,zt,l),c(W,t,l),s(t,Yt,l),s(t,X,l),s(t,kt,l),c(R,t,l),s(t,St,l),s(t,V,l),s(t,Qt,l),c(E,t,l),s(t,At,l),s(t,G,l),s(t,Ht,l),c(v,t,l),s(t,qt,l),c(F,t,l),s(t,Dt,l),s(t,x,l),s(t,Pt,l),s(t,B,l),s(t,Ot,l),c(L,t,l),s(t,Kt,l),s(t,z,l),s(t,tl,l),s(t,Y,l),s(t,ll,l),c(k,t,l),s(t,el,l),s(t,S,l),s(t,sl,l),s(t,Q,l),s(t,al,l),s(t,A,l),s(t,nl,l),c(H,t,l),s(t,il,l),s(t,q,l),s(t,pl,l),s(t,D,l),s(t,Ml,l),c(P,t,l),s(t,ml,l),s(t,O,l),s(t,ol,l),s(t,K,l),s(t,cl,l),c(tt,t,l),s(t,rl,l),s(t,lt,l),s(t,fl,l),c(et,t,l),s(t,ul,l),s(t,st,l),s(t,wl,l),c(at,t,l),s(t,yl,l),s(t,nt,l),s(t,dl,l),s(t,it,l),s(t,Ul,l),c(pt,t,l),s(t,Jl,l),s(t,Mt,l),s(t,Tl,l),c(mt,t,l),s(t,bl,l),c(ot,t,l),s(t,_l,l),s(t,ct,l),s(t,hl,l),c(rt,t,l),s(t,jl,l),s(t,ft,l),s(t,Cl,l),c(ut,t,l),s(t,Zl,l),s(t,wt,l),s(t,Il,l),c(yt,t,l),s(t,Nl,l),s(t,dt,l),s(t,$l,l),c(Ut,t,l),s(t,gl,l),s(t,Jt,l),s(t,Wl,l),c(Tt,t,l),s(t,Xl,l),s(t,bt,l),s(t,Rl,l),s(t,_t,l),s(t,Vl,l),c(ht,t,l),s(t,El,l),s(t,jt,l),s(t,Gl,l),c(Ct,t,l),s(t,vl,l),s(t,It,l),Fl=!0},p:he,i(t){Fl||(r(U.$$.fragment,t),r(J.$$.fragment,t),r(_.$$.fragment,t),r(h.$$.fragment,t),r(I.$$.fragment,t),r($.$$.fragment,t),r(W.$$.fragment,t),r(R.$$.fragment,t),r(E.$$.fragment,t),r(v.$$.fragment,t),r(F.$$.fragment,t),r(L.$$.fragment,t),r(k.$$.fragment,t),r(H.$$.fragment,t),r(P.$$.fragment,t),r(tt.$$.fragment,t),r(et.$$.fragment,t),r(at.$$.fragment,t),r(pt.$$.fragment,t),r(mt.$$.fragment,t),r(ot.$$.fragment,t),r(rt.$$.fragment,t),r(ut.$$.fragment,t),r(yt.$$.fragment,t),r(Ut.$$.fragment,t),r(Tt.$$.fragment,t),r(ht.$$.fragment,t),r(Ct.$$.fragment,t),Fl=!0)},o(t){f(U.$$.fragment,t),f(J.$$.fragment,t),f(_.$$.fragment,t),f(h.$$.fragment,t),f(I.$$.fragment,t),f($.$$.fragment,t),f(W.$$.fragment,t),f(R.$$.fragment,t),f(E.$$.fragment,t),f(v.$$.fragment,t),f(F.$$.fragment,t),f(L.$$.fragment,t),f(k.$$.fragment,t),f(H.$$.fragment,t),f(P.$$.fragment,t),f(tt.$$.fragment,t),f(et.$$.fragment,t),f(at.$$.fragment,t),f(pt.$$.fragment,t),f(mt.$$.fragment,t),f(ot.$$.fragment,t),f(rt.$$.fragment,t),f(ut.$$.fragment,t),f(yt.$$.fragment,t),f(Ut.$$.fragment,t),f(Tt.$$.fragment,t),f(ht.$$.fragment,t),f(Ct.$$.fragment,t),Fl=!1},d(t){t&&(e(Nt),e(Zt),e($t),e(gt),e(Wt),e(T),e(Xt),e(b),e(Rt),e(Vt),e(Et),e(j),e(Gt),e(C),e(vt),e(Z),e(Ft),e(xt),e(N),e(Bt),e(Lt),e(g),e(zt),e(Yt),e(X),e(kt),e(St),e(V),e(Qt),e(At),e(G),e(Ht),e(qt),e(Dt),e(x),e(Pt),e(B),e(Ot),e(Kt),e(z),e(tl),e(Y),e(ll),e(el),e(S),e(sl),e(Q),e(al),e(A),e(nl),e(il),e(q),e(pl),e(D),e(Ml),e(ml),e(O),e(ol),e(K),e(cl),e(rl),e(lt),e(fl),e(ul),e(st),e(wl),e(yl),e(nt),e(dl),e(it),e(Ul),e(Jl),e(Mt),e(Tl),e(bl),e(_l),e(ct),e(hl),e(jl),e(ft),e(Cl),e(Zl),e(wt),e(Il),e(Nl),e(dt),e($l),e(gl),e(Jt),e(Wl),e(Xl),e(bt),e(Rl),e(_t),e(Vl),e(El),e(jt),e(Gl),e(vl),e(It)),e(d),u(U,t),u(J,t),u(_,t),u(h,t),u(I,t),u($,t),u(W,t),u(R,t),u(E,t),u(v,t),u(F,t),u(L,t),u(k,t),u(H,t),u(P,t),u(tt,t),u(et,t),u(at,t),u(pt,t),u(mt,t),u(ot,t),u(rt,t),u(ut,t),u(yt,t),u(Ut,t),u(Tt,t),u(ht,t),u(Ct,t)}}}const Xe='{"title":"커스텀 Diffusion 학습 예제","local":"커스텀-diffusion-학습-예제","sections":[{"title":"로컬에서 PyTorch로 실행하기","local":"로컬에서-pytorch로-실행하기","sections":[{"title":"Dependencies 설치하기","local":"dependencies-설치하기","sections":[],"depth":3},{"title":"고양이 예제 😺","local":"고양이-예제-","sections":[],"depth":3},{"title":"멀티 컨셉에 대한 학습 🐱🪵","local":"멀티-컨셉에-대한-학습-","sections":[],"depth":3},{"title":"사람 얼굴에 대한 학습","local":"사람-얼굴에-대한-학습","sections":[],"depth":3}],"depth":2},{"title":"추론","local":"추론","sections":[{"title":"학습된 체크포인트에서 추론하기","local":"학습된-체크포인트에서-추론하기","sections":[],"depth":3}],"depth":2},{"title":"Grads를 None으로 설정","local":"grads를-none으로-설정","sections":[],"depth":2},{"title":"실험 결과","local":"실험-결과","sections":[],"depth":2}],"depth":1}';function Re(xl){return je(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}class Fe extends Ce{constructor(d){super(),Ze(this,d,Re,We,_e,{})}}export{Fe as component};

Xet Storage Details

Size:
38.4 kB
·
Xet hash:
47e66d1de13d53768a621b266d178d81709fab26c5da1433b9e005ed0cee5694

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.