# Textual Inversion

Textual Inversion is a training method for personalizing models by learning new text embeddings from a few example images. The file produced from training is extremely small (a few KBs), and the new embeddings can be loaded into the text encoder.

`TextualInversionLoaderMixin` provides a function for loading Textual Inversion embeddings from Diffusers and Automatic1111 into the text encoder and loading a special token to activate the embeddings.

<Tip>

To learn more about how to load Textual Inversion embeddings, see the [Textual Inversion](../../using-diffusers/loading_adapters#textual-inversion) loading guide.

</Tip>

## TextualInversionLoaderMixin

`class diffusers.loaders.TextualInversionLoaderMixin`

Load Textual Inversion tokens and embeddings to the tokenizer and text encoder.
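The mixin is typically not used on its own: pipelines such as [StableDiffusionPipeline](/docs/diffusers/main/en/api/pipelines/stable_diffusion/text2img#diffusers.StableDiffusionPipeline) inherit from it, so its methods are called directly on a pipeline instance. A minimal sketch of that relationship, reusing checkpoint and concept ids that appear in the examples below:

```py
from diffusers import StableDiffusionPipeline
from diffusers.loaders import TextualInversionLoaderMixin

pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")

# StableDiffusionPipeline mixes in TextualInversionLoaderMixin, so the loader
# methods documented below are available directly on the pipeline object.
assert isinstance(pipe, TextualInversionLoaderMixin)

pipe.load_textual_inversion("sd-concepts-library/cat-toy")
```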
### load_textual_inversion

`load_textual_inversion(pretrained_model_name_or_path, token=None, tokenizer=None, text_encoder=None, **kwargs)`

Load Textual Inversion embeddings into the text encoder of [StableDiffusionPipeline](/docs/diffusers/main/en/api/pipelines/stable_diffusion/text2img#diffusers.StableDiffusionPipeline) (both 🤗 Diffusers and Automatic1111 formats are supported).

**Parameters:**

- **pretrained_model_name_or_path** (`str` or `os.PathLike` or `List[str or os.PathLike]` or `Dict` or `List[Dict]`) — Can be either one of the following or a list of them:
  - A string, the *model id* (for example `sd-concepts-library/low-poly-hd-logos-icons`) of a pretrained model hosted on the Hub.
  - A path to a *directory* (for example `./my_text_inversion_directory/`) containing the textual inversion weights.
  - A path to a *file* (for example `./my_text_inversions.pt`) containing textual inversion weights.
  - A [torch state dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).
- **token** (`str` or `List[str]`, *optional*) — Override the token to use for the textual inversion weights. If `pretrained_model_name_or_path` is a list, then `token` must also be a list of equal length.
- **text_encoder** ([CLIPTextModel](https://huggingface.co/docs/transformers/main/en/model_doc/clip#transformers.CLIPTextModel), *optional*) — Frozen text encoder ([clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14)). If not specified, the function falls back to `self.text_encoder`.
- **tokenizer** ([CLIPTokenizer](https://huggingface.co/docs/transformers/main/en/model_doc/clip#transformers.CLIPTokenizer), *optional*) — A `CLIPTokenizer` to tokenize text. If not specified, the function falls back to `self.tokenizer`.
- **weight_name** (`str`, *optional*) — Name of a custom weight file. This should be used when:
  - The saved textual inversion file is in 🤗 Diffusers format but was saved under a specific weight name such as `text_inv.bin`.
  - The saved textual inversion file is in the Automatic1111 format.
- **cache_dir** (`Union[str, os.PathLike]`, *optional*) — Path to a directory where a downloaded pretrained model configuration is cached if the standard cache is not used.
- **force_download** (`bool`, *optional*, defaults to `False`) — Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist.
- **proxies** (`Dict[str, str]`, *optional*) — A dictionary of proxy servers to use by protocol or endpoint, for example `{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}`. The proxies are used on each request.
- **local_files_only** (`bool`, *optional*, defaults to `False`) — Whether to only load local model weights and configuration files. If set to `True`, the model won't be downloaded from the Hub.
- **token** (`str` or `bool`, *optional*) — The token to use as HTTP bearer authorization for remote files. If `True`, the token generated from `diffusers-cli login` (stored in `~/.huggingface`) is used.
- **revision** (`str`, *optional*, defaults to `"main"`) — The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier allowed by Git.
- **subfolder** (`str`, *optional*, defaults to `""`) — The subfolder location of a model file within a larger model repository on the Hub or locally.
- **mirror** (`str`, *optional*) — Mirror source to resolve accessibility issues if you're downloading a model in China. We do not guarantee the timeliness or safety of the source, and you should refer to the mirror site for more information.
**Example:**

To load a Textual Inversion embedding vector in 🤗 Diffusers format:

```py
from diffusers import StableDiffusionPipeline
import torch

model_id = "runwayml/stable-diffusion-v1-5"
pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")

pipe.load_textual_inversion("sd-concepts-library/cat-toy")

prompt = "A <cat-toy> backpack"

image = pipe(prompt, num_inference_steps=50).images[0]
image.save("cat-backpack.png")
```

To load a Textual Inversion embedding vector in Automatic1111 format, make sure to download the vector first (for example from [civitAI](https://civitai.com/models/3036?modelVersionId=9857)) and then load the vector locally:

```py
from diffusers import StableDiffusionPipeline
import torch

model_id = "runwayml/stable-diffusion-v1-5"
pipe = StableDiffusionPipeline.from_pretrained(model_id, torch_dtype=torch.float16).to("cuda")

pipe.load_textual_inversion("./charturnerv2.pt", token="charturnerv2")

prompt = "charturnerv2, multiple views of the same character in the same outfit, a character turnaround of a woman wearing a black jacket and red shirt, best quality, intricate details."

image = pipe(prompt, num_inference_steps=50).images[0]
image.save("character.png")
```
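As the parameter list above notes, `pretrained_model_name_or_path` and `token` also accept lists, so several embeddings can be loaded in a single call. A minimal sketch of that usage, reusing concept repositories mentioned on this page; the explicit activation strings passed via `token` are illustrative overrides, not values defined by those repositories:

```py
from diffusers import StableDiffusionPipeline
import torch

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16
).to("cuda")

# Load two embeddings in one call. When the first argument is a list,
# `token` must be a list of the same length (or be omitted entirely).
pipe.load_textual_inversion(
    ["sd-concepts-library/cat-toy", "sd-concepts-library/gta5-artwork"],
    token=["<cat-toy>", "<gta5-artwork>"],
)

prompt = "A <cat-toy> backpack, in the style of <gta5-artwork>"
image = pipe(prompt, num_inference_steps=50).images[0]
```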
### maybe_convert_prompt

`maybe_convert_prompt(prompt, tokenizer)`

Processes prompts that include a special token corresponding to a multi-vector textual inversion embedding. The token is replaced with multiple special tokens, each corresponding to one of the vectors. If the prompt has no textual inversion token, or if the textual inversion token is a single vector, the input prompt is returned unchanged.

**Parameters:**

- **prompt** (`str` or list of `str`) — The prompt or prompts to guide the image generation.
- **tokenizer** (`PreTrainedTokenizer`) — The tokenizer responsible for encoding the prompt into input tokens.

**Returns:** `str` or list of `str` — The converted prompt.
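Pipelines that inherit the mixin call this helper internally while encoding prompts, so it rarely needs to be invoked by hand. A minimal sketch of the conversion; the multi-vector setup and the numbered follow-up token names (`<cat-toy>_1`, `<cat-toy>_2`) are assumptions for illustration, not guaranteed properties of that repository:

```py
from diffusers import StableDiffusionPipeline

pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5")
pipe.load_textual_inversion("sd-concepts-library/cat-toy")

# Hypothetical setup: assume "<cat-toy>" is a three-vector embedding, so the
# tokenizer also contains "<cat-toy>_1" and "<cat-toy>_2". Whether a given
# embedding is single- or multi-vector depends on how it was trained.
prompt = "A <cat-toy> backpack"
converted = pipe.maybe_convert_prompt(prompt, pipe.tokenizer)

# For a multi-vector embedding the activation token is expanded to one token
# per vector, e.g. "A <cat-toy> <cat-toy>_1 <cat-toy>_2 backpack"; for a
# single-vector embedding the prompt comes back unchanged.
print(converted)
```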
### unload_textual_inversion

`unload_textual_inversion(tokens=None, tokenizer=None, text_encoder=None)`

Unload Textual Inversion embeddings from the text encoder of [StableDiffusionPipeline](/docs/diffusers/main/en/api/pipelines/stable_diffusion/text2img#diffusers.StableDiffusionPipeline).

**Example:**

```py
from diffusers import AutoPipelineForText2Image
from huggingface_hub import hf_hub_download
from safetensors.torch import load_file
import torch

pipeline = AutoPipelineForText2Image.from_pretrained("runwayml/stable-diffusion-v1-5")

# Example 1
pipeline.load_textual_inversion("sd-concepts-library/gta5-artwork")
pipeline.load_textual_inversion("sd-concepts-library/moeb-style")

# Remove all token embeddings
pipeline.unload_textual_inversion()

# Example 2
pipeline.load_textual_inversion("sd-concepts-library/moeb-style")
pipeline.load_textual_inversion("sd-concepts-library/gta5-artwork")

# Remove just one token
pipeline.unload_textual_inversion("<moe-bius>")

# Example 3: unload from SDXL
pipeline = AutoPipelineForText2Image.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0")
embedding_path = hf_hub_download(
    repo_id="linoyts/web_y2k", filename="web_y2k_emb.safetensors", repo_type="model"
)

# load embeddings to the text encoders
state_dict = load_file(embedding_path)

# load embeddings of text_encoder 1 (CLIP ViT-L/14)
pipeline.load_textual_inversion(
    state_dict["clip_l"],
    token=["<s0>", "<s1>"],
    text_encoder=pipeline.text_encoder,
    tokenizer=pipeline.tokenizer,
)
# load embeddings of text_encoder 2 (CLIP ViT-G/14)
pipeline.load_textual_inversion(
    state_dict["clip_g"],
    token=["<s0>", "<s1>"],
    text_encoder=pipeline.text_encoder_2,
    tokenizer=pipeline.tokenizer_2,
)

# Unload explicitly from both text encoders and tokenizers
pipeline.unload_textual_inversion(
    tokens=["<s0>", "<s1>"], text_encoder=pipeline.text_encoder, tokenizer=pipeline.tokenizer
)
pipeline.unload_textual_inversion(
    tokens=["<s0>", "<s1>"], text_encoder=pipeline.text_encoder_2, tokenizer=pipeline.tokenizer_2
)
```
