Buckets:

rtrm's picture
download
raw
12.1 kB
/*
 * AUTO-GENERATED FILE — Svelte-compiled, minified client chunk for the
 * Hugging Face `diffusers` documentation page "FlowMatchHeunDiscreteScheduler"
 * (built from docs/source/en/api/schedulers/flow_match_heun_discrete.md,
 * PR 10312 preview build). Do not edit by hand; regenerate from the markdown
 * source instead. The single-letter identifiers are minifier output
 * (e.g. a=element, o=space, g=render-fragment, c/i=claim helpers).
 *
 * NOTE(review): the intro text links arxiv.org/abs/2403.03206 under the label
 * "EDM"; that arXiv id looks like the rectified-flow/SD3 paper rather than
 * EDM — confirm against the markdown source before trusting the label.
 *
 * NOTE(review): the first `L(h,"class","docstring ...")` attribute string
 * appears to contain a raw newline (see the line break after "pt-3.5 ");
 * presumably a scrape/wrap artifact — verify against the deployed chunk.
 */
import{s as be,n as we,o as $e}from"../chunks/scheduler.8c3d61f6.js";import{S as De,i as Se,g as a,s as o,r as g,A as He,h as c,f as n,c as i,j as P,u as _,x as E,k as L,y as t,a as l,v,d as x,t as b,w}from"../chunks/index.da70eac4.js";import{D as U}from"../chunks/Docstring.6b390b9a.js";import{H as xe,E as Fe}from"../chunks/EditOnGithub.1e64e623.js";
/* Me(ctx): fragment factory for the page. Declares the DOM node handles and
 * static HTML strings, instantiates the Heading (xe), Docstring (U) and
 * EditOnGithub (Fe) child components, and returns the standard compiled-Svelte
 * lifecycle object: c (create), l (claim/hydrate), h (attributes), m (mount),
 * p (update — no-op here), i/o (intro/outro transitions), d (destroy). */
function Me(ue){let u,G,V,j,$,R,D,he='<code>FlowMatchHeunDiscreteScheduler</code> is based on the flow-matching sampling introduced in <a href="https://arxiv.org/abs/2403.03206" rel="nofollow">EDM</a>.',W,S,B,s,H,te,I,fe="Heun scheduler.",ne,O,me=`This model inherits from <a href="/docs/diffusers/pr_10312/en/api/schedulers/overview#diffusers.SchedulerMixin">SchedulerMixin</a> and <a href="/docs/diffusers/pr_10312/en/api/configuration#diffusers.ConfigMixin">ConfigMixin</a>. Check the superclass documentation for the generic
methods the library implements for all schedulers such as loading and saving.`,se,h,F,re,k,pe="Forward process in flow-matching",oe,f,M,ie,q,ge="Sets the begin index for the scheduler. This function should be run from pipeline before the inference.",ae,m,T,ce,A,_e="Sets the discrete timesteps used for the diffusion chain (to be run before inference).",de,p,y,le,N,ve=`Predict the sample from the previous timestep by reversing the SDE. This function propagates the diffusion
process from the learned model outputs (most often the predicted noise).`,J,C,K,z,Q;return $=new xe({props:{title:"FlowMatchHeunDiscreteScheduler",local:"flowmatchheundiscretescheduler",headingTag:"h1"}}),S=new xe({props:{title:"FlowMatchHeunDiscreteScheduler",local:"diffusers.FlowMatchHeunDiscreteScheduler",headingTag:"h2"}}),H=new U({props:{name:"class diffusers.FlowMatchHeunDiscreteScheduler",anchor:"diffusers.FlowMatchHeunDiscreteScheduler",parameters:[{name:"num_train_timesteps",val:": int = 1000"},{name:"shift",val:": float = 1.0"}],parametersDescription:[{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.num_train_timesteps",description:`<strong>num_train_timesteps</strong> (<code>int</code>, defaults to 1000) &#x2014;
The number of diffusion steps to train the model.`,name:"num_train_timesteps"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.timestep_spacing",description:`<strong>timestep_spacing</strong> (<code>str</code>, defaults to <code>&quot;linspace&quot;</code>) &#x2014;
The way the timesteps should be scaled. Refer to Table 2 of the <a href="https://huggingface.co/papers/2305.08891" rel="nofollow">Common Diffusion Noise Schedules and
Sample Steps are Flawed</a> for more information.`,name:"timestep_spacing"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.shift",description:`<strong>shift</strong> (<code>float</code>, defaults to 1.0) &#x2014;
The shift value for the timestep schedule.`,name:"shift"}],source:"https://github.com/huggingface/diffusers/blob/vr_10312/src/diffusers/schedulers/scheduling_flow_match_heun_discrete.py#L44"}}),F=new U({props:{name:"scale_noise",anchor:"diffusers.FlowMatchHeunDiscreteScheduler.scale_noise",parameters:[{name:"sample",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"noise",val:": typing.Optional[torch.FloatTensor] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.scale_noise.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
The input sample.`,name:"sample"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.scale_noise.timestep",description:`<strong>timestep</strong> (<code>int</code>, <em>optional</em>) &#x2014;
The current timestep in the diffusion chain.`,name:"timestep"}],source:"https://github.com/huggingface/diffusers/blob/vr_10312/src/diffusers/schedulers/scheduling_flow_match_heun_discrete.py#L110",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>A scaled input sample.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>torch.FloatTensor</code></p>
`}}),M=new U({props:{name:"set_begin_index",anchor:"diffusers.FlowMatchHeunDiscreteScheduler.set_begin_index",parameters:[{name:"begin_index",val:": int = 0"}],parametersDescription:[{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.set_begin_index.begin_index",description:`<strong>begin_index</strong> (<code>int</code>) &#x2014;
The begin index for the scheduler.`,name:"begin_index"}],source:"https://github.com/huggingface/diffusers/blob/vr_10312/src/diffusers/schedulers/scheduling_flow_match_heun_discrete.py#L100"}}),T=new U({props:{name:"set_timesteps",anchor:"diffusers.FlowMatchHeunDiscreteScheduler.set_timesteps",parameters:[{name:"num_inference_steps",val:": int"},{name:"device",val:": typing.Union[str, torch.device] = None"}],parametersDescription:[{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.set_timesteps.num_inference_steps",description:`<strong>num_inference_steps</strong> (<code>int</code>) &#x2014;
The number of diffusion steps used when generating samples with a pre-trained model.`,name:"num_inference_steps"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.set_timesteps.device",description:`<strong>device</strong> (<code>str</code> or <code>torch.device</code>, <em>optional</em>) &#x2014;
The device to which the timesteps should be moved to. If <code>None</code>, the timesteps are not moved.`,name:"device"}],source:"https://github.com/huggingface/diffusers/blob/vr_10312/src/diffusers/schedulers/scheduling_flow_match_heun_discrete.py#L140"}}),y=new U({props:{name:"step",anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step",parameters:[{name:"model_output",val:": FloatTensor"},{name:"timestep",val:": typing.Union[float, torch.FloatTensor]"},{name:"sample",val:": FloatTensor"},{name:"s_churn",val:": float = 0.0"},{name:"s_tmin",val:": float = 0.0"},{name:"s_tmax",val:": float = inf"},{name:"s_noise",val:": float = 1.0"},{name:"generator",val:": typing.Optional[torch._C.Generator] = None"},{name:"return_dict",val:": bool = True"}],parametersDescription:[{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.model_output",description:`<strong>model_output</strong> (<code>torch.FloatTensor</code>) &#x2014;
The direct output from learned diffusion model.`,name:"model_output"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.timestep",description:`<strong>timestep</strong> (<code>float</code>) &#x2014;
The current discrete timestep in the diffusion chain.`,name:"timestep"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.sample",description:`<strong>sample</strong> (<code>torch.FloatTensor</code>) &#x2014;
A current instance of a sample created by the diffusion process.`,name:"sample"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.s_churn",description:"<strong>s_churn</strong> (<code>float</code>) &#x2014;",name:"s_churn"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.s_tmin",description:"<strong>s_tmin</strong> (<code>float</code>) &#x2014;",name:"s_tmin"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.s_tmax",description:"<strong>s_tmax</strong> (<code>float</code>) &#x2014;",name:"s_tmax"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.s_noise",description:`<strong>s_noise</strong> (<code>float</code>, defaults to 1.0) &#x2014;
Scaling factor for noise added to the sample.`,name:"s_noise"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.generator",description:`<strong>generator</strong> (<code>torch.Generator</code>, <em>optional</em>) &#x2014;
A random number generator.`,name:"generator"},{anchor:"diffusers.FlowMatchHeunDiscreteScheduler.step.return_dict",description:`<strong>return_dict</strong> (<code>bool</code>) &#x2014;
Whether or not to return a <code>~schedulers.scheduling_Heun_discrete.HeunDiscreteSchedulerOutput</code> or
tuple.`,name:"return_dict"}],source:"https://github.com/huggingface/diffusers/blob/vr_10312/src/diffusers/schedulers/scheduling_flow_match_heun_discrete.py#L200",returnDescription:`<script context="module">export const metadata = 'undefined';<\/script>
<p>If return_dict is <code>True</code>, <code>~schedulers.scheduling_Heun_discrete.HeunDiscreteSchedulerOutput</code> is
returned, otherwise a tuple is returned where the first element is the sample tensor.</p>
`,returnType:`<script context="module">export const metadata = 'undefined';<\/script>
<p><code>~schedulers.scheduling_Heun_discrete.HeunDiscreteSchedulerOutput</code> or <code>tuple</code></p>
`}}),C=new Fe({props:{source:"https://github.com/huggingface/diffusers/blob/main/docs/source/en/api/schedulers/flow_match_heun_discrete.md"}}),{/* c: create DOM nodes fresh (client-side render path) */c(){u=a("meta"),G=o(),V=a("p"),j=o(),g($.$$.fragment),R=o(),D=a("p"),D.innerHTML=he,W=o(),g(S.$$.fragment),B=o(),s=a("div"),g(H.$$.fragment),te=o(),I=a("p"),I.textContent=fe,ne=o(),O=a("p"),O.innerHTML=me,se=o(),h=a("div"),g(F.$$.fragment),re=o(),k=a("p"),k.textContent=pe,oe=o(),f=a("div"),g(M.$$.fragment),ie=o(),q=a("p"),q.textContent=ge,ae=o(),m=a("div"),g(T.$$.fragment),ce=o(),A=a("p"),A.textContent=_e,de=o(),p=a("div"),g(y.$$.fragment),le=o(),N=a("p"),N.textContent=ve,J=o(),g(C.$$.fragment),K=o(),z=a("p"),this.h()},/* l: claim server-rendered nodes during hydration; the E(..)!=="svelte-…" hash checks re-set content only when the SSR markup is stale */l(e){const r=He("svelte-u9bgzb",document.head);u=c(r,"META",{name:!0,content:!0}),r.forEach(n),G=i(e),V=c(e,"P",{}),P(V).forEach(n),j=i(e),_($.$$.fragment,e),R=i(e),D=c(e,"P",{"data-svelte-h":!0}),E(D)!=="svelte-jk6z0"&&(D.innerHTML=he),W=i(e),_(S.$$.fragment,e),B=i(e),s=c(e,"DIV",{class:!0});var d=P(s);_(H.$$.fragment,d),te=i(d),I=c(d,"P",{"data-svelte-h":!0}),E(I)!=="svelte-t4z76d"&&(I.textContent=fe),ne=i(d),O=c(d,"P",{"data-svelte-h":!0}),E(O)!=="svelte-1oywc2v"&&(O.innerHTML=me),se=i(d),h=c(d,"DIV",{class:!0});var X=P(h);_(F.$$.fragment,X),re=i(X),k=c(X,"P",{"data-svelte-h":!0}),E(k)!=="svelte-1nqwaax"&&(k.textContent=pe),X.forEach(n),oe=i(d),f=c(d,"DIV",{class:!0});var Y=P(f);_(M.$$.fragment,Y),ie=i(Y),q=c(Y,"P",{"data-svelte-h":!0}),E(q)!=="svelte-1k141rk"&&(q.textContent=ge),Y.forEach(n),ae=i(d),m=c(d,"DIV",{class:!0});var Z=P(m);_(T.$$.fragment,Z),ce=i(Z),A=c(Z,"P",{"data-svelte-h":!0}),E(A)!=="svelte-1vzm9q"&&(A.textContent=_e),Z.forEach(n),de=i(d),p=c(d,"DIV",{class:!0});var ee=P(p);_(y.$$.fragment,ee),le=i(ee),N=c(ee,"P",{"data-svelte-h":!0}),E(N)!=="svelte-hi84tp"&&(N.textContent=ve),ee.forEach(n),d.forEach(n),J=i(e),_(C.$$.fragment,e),K=i(e),z=c(e,"P",{}),P(z).forEach(n),this.h()},/* h: set static attributes (doc metadata <meta> + docstring card classes) */h(){L(u,"name","hf:doc:metadata"),L(u,"content",Te),L(h,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 
border-gray-100 rounded-tl-xl mb-6 mt-8"),L(f,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(m,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(p,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8"),L(s,"class","docstring border-l-2 border-t-2 pl-4 pt-3.5 border-gray-100 rounded-tl-xl mb-6 mt-8")},/* m: mount nodes and child components into the target */m(e,r){t(document.head,u),l(e,G,r),l(e,V,r),l(e,j,r),v($,e,r),l(e,R,r),l(e,D,r),l(e,W,r),v(S,e,r),l(e,B,r),l(e,s,r),v(H,s,null),t(s,te),t(s,I),t(s,ne),t(s,O),t(s,se),t(s,h),v(F,h,null),t(h,re),t(h,k),t(s,oe),t(s,f),v(M,f,null),t(f,ie),t(f,q),t(s,ae),t(s,m),v(T,m,null),t(m,ce),t(m,A),t(s,de),t(s,p),v(y,p,null),t(p,le),t(p,N),l(e,J,r),v(C,e,r),l(e,K,r),l(e,z,r),Q=!0},p:we,/* i: run intro transitions on all child fragments */i(e){Q||(x($.$$.fragment,e),x(S.$$.fragment,e),x(H.$$.fragment,e),x(F.$$.fragment,e),x(M.$$.fragment,e),x(T.$$.fragment,e),x(y.$$.fragment,e),x(C.$$.fragment,e),Q=!0)},/* o: run outro transitions */o(e){b($.$$.fragment,e),b(S.$$.fragment,e),b(H.$$.fragment,e),b(F.$$.fragment,e),b(M.$$.fragment,e),b(T.$$.fragment,e),b(y.$$.fragment,e),b(C.$$.fragment,e),Q=!1},/* d: detach nodes (only when removing the whole tree) and destroy children */d(e){e&&(n(G),n(V),n(j),n(R),n(D),n(W),n(B),n(s),n(J),n(K),n(z)),n(u),w($,e),w(S,e),w(H),w(F),w(M),w(T),w(y),w(C,e)}}}
/* Te: serialized page metadata (title/section tree) written into the
 * hf:doc:metadata <meta> tag by h() above. */
const Te='{"title":"FlowMatchHeunDiscreteScheduler","local":"flowmatchheundiscretescheduler","sections":[{"title":"FlowMatchHeunDiscreteScheduler","local":"diffusers.FlowMatchHeunDiscreteScheduler","sections":[],"depth":2}],"depth":1}';
/* ye: instance function — on mount, reads the "fw" (framework) query
 * parameter; the value is unused here (presumably kept for side-effect
 * parity with other doc pages — confirm against the doc-builder template).
 * No reactive state, so it returns an empty context array. */
function ye(ue){return $e(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}
/* Ie: the SvelteKit route component wiring instance (ye) to fragment (Me). */
class Ie extends De{constructor(u){super(),Se(this,u,ye,Me,be,{})}}export{Ie as component};

Xet Storage Details

Size:
12.1 kB
·
Xet hash:
30a44d35a702ae6bab924e8bfaba36820080126e8625d8235fbdfac7d7398a47

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.