Buckets:

rtrm's picture
download
raw
18.5 kB
/*
 * NOTE(review): Machine-generated, minified build artifact — do not hand-edit.
 *
 * This is a compiled SvelteKit page component for the Portuguese ("pt")
 * Hugging Face Diffusers documentation page "Desempenho básico" (the page
 * links its markdown source: docs/source/pt/stable_diffusion.md).
 *
 * Structure (standard compiled Svelte client-runtime output):
 *   - `Le` is the fragment factory; the object it returns carries the
 *     lifecycle hooks: c() create DOM nodes, l()/h() claim + hydrate
 *     server-rendered markup, m() mount, p() update, i()/o() intro/outro
 *     transitions, d() destroy.
 *   - `P` renders headings, `L` renders code blocks, `De`/`Pe` are the
 *     copy/notebook-dropdown widgets, `Fe` is the "view source" link —
 *     all imported from sibling chunk files.
 *   - Each CodeBlock's `code:` prop is a base64 payload of a URL-encoded
 *     Python snippet (the decoded text contains %20/%0A escapes); the
 *     `highlighted:` prop is the matching pre-rendered highlight.js HTML.
 *     The newlines inside those backtick template literals are significant
 *     (they are the rendered line breaks of the code sample).
 *   - `Ae` is the page's table-of-contents metadata, stored as a JSON
 *     string in the <meta name="hf:doc:metadata"> tag via h().
 *   - `Ke` is the instance function; on scheduler callback it appears to
 *     read the `?fw=` query parameter (framework tab selection) — the
 *     value is not used further in this chunk.
 *   - `sl` is the exported component class wiring `Ke` (instance) and
 *     `Le` (fragment) into the Svelte runtime.
 *
 * Content fixes (translations, prompts, links) belong in the markdown
 * source followed by a rebuild — never in this generated file.
 */
import{s as Se,n as Ne,o as Ce}from"../chunks/scheduler.32f3c0d7.js";import{S as Ye,i as ze,e as o,s,c as p,h as Qe,a as n,d as a,b as i,f as F,j as m,g as M,k as Ve,l as f,m as t,n as r,t as d,o as c,p as u}from"../chunks/index.a3055d2a.js";import{C as De,H as P,E as Fe}from"../chunks/MermaidChart.svelte_svelte_type_style_lang.9257a15c.js";import{C as L}from"../chunks/CodeBlock.52ea298a.js";import{D as Pe}from"../chunks/DocNotebookDropdown.03988dd0.js";function Le(We){let b,A,Q,K,h,O,w,ee,T,le,g,Ge="Difusão é um processo aleatório que demanda muito processamento. Você pode precisar executar o <code>DiffusionPipeline</code> várias vezes antes de obter o resultado desejado. Por isso é importante equilibrar cuidadosamente a velocidade de geração e o uso de memória para iterar mais rápido.",ae,j,Ie='Este guia recomenda algumas dicas básicas de desempenho para usar o <code>DiffusionPipeline</code>. Consulte a seção de documentação sobre Otimização de Inferência, como <a href="./optimization/fp16">Acelerar inferência</a> ou <a href="./optimization/memory">Reduzir uso de memória</a> para guias de desempenho mais detalhados.',te,Z,se,B,ve="Reduzir a quantidade de memória usada indiretamente acelera a geração e pode ajudar um modelo a caber no dispositivo.",ie,U,Ee="O método <code>enable_model_cpu_offload()</code> move um modelo para a CPU quando não está em uso para economizar memória da GPU.",oe,W,ne,G,pe,I,$e="O processo de remoção de ruído é o mais exigente computacionalmente durante a difusão. Métodos que otimizam este processo aceleram a velocidade de inferência. Experimente os seguintes métodos para acelerar.",me,v,xe="<li>Adicione <code>device_map=&quot;cuda&quot;</code> para colocar o pipeline em uma GPU. Colocar um modelo em um acelerador, como uma GPU, aumenta a velocidade porque realiza computações em paralelo.</li> <li>Defina <code>torch_dtype=torch.bfloat16</code> para executar o pipeline em meia-precisão. 
Reduzir a precisão do tipo de dado aumenta a velocidade porque leva menos tempo para realizar computações em precisão mais baixa.</li>",re,E,de,$,_e="<li>Use um agendador mais rápido, como <code>DPMSolverMultistepScheduler</code>, que requer apenas ~20-25 passos.</li> <li>Defina <code>num_inference_steps</code> para um valor menor. Reduzir o número de passos de inferência reduz o número total de computações. No entanto, isso pode resultar em menor qualidade de geração.</li>",ce,x,ue,_,Me,X,Xe="Muitos modelos de difusão modernos entregam imagens de alta qualidade imediatamente. No entanto, você ainda pode melhorar a qualidade de geração experimentando o seguinte.",fe,y,J,N,ke="Experimente um prompt mais detalhado e descritivo. Inclua detalhes como o meio da imagem, assunto, estilo e estética. Um prompt negativo também pode ajudar, guiando um modelo para longe de características indesejáveis usando palavras como baixa qualidade ou desfocado.",je,k,Ze,C,qe='Para mais detalhes sobre como criar prompts melhores, consulte a documentação sobre <a href="./using-diffusers/weighted_prompts">Técnicas de prompt</a>.',Be,q,Y,He="Experimente um agendador diferente, como <code>HeunDiscreteScheduler</code> ou <code>LMSDiscreteScheduler</code>, que sacrifica velocidade de geração por qualidade.",Ue,H,be,R,Je,V,Re='Diffusers oferece otimizações mais avançadas e poderosas, como <a href="./optimization/memory#group-offloading">group-offloading</a> e <a href="./optimization/fp16#regional-compilation">compilação regional</a>. 
Para saber mais sobre como maximizar o desempenho, consulte a seção sobre Otimização de Inferência.',ye,S,he,D,we;return h=new De({props:{containerStyle:"float: right; margin-left: 10px; display: inline-flex; position: relative; z-index: 10;"}}),w=new Pe({props:{containerStyle:"float: right; margin-left: 10px; display: inline-flex; position: relative; z-index: 10;",options:[{label:"Mixed",value:"https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers_doc/pt/stable_diffusion.ipynb"},{label:"PyTorch",value:"https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers_doc/pt/pytorch/stable_diffusion.ipynb"},{label:"TensorFlow",value:"https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers_doc/pt/tensorflow/stable_diffusion.ipynb"},{label:"Mixed",value:"https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/diffusers_doc/pt/stable_diffusion.ipynb"},{label:"PyTorch",value:"https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/diffusers_doc/pt/pytorch/stable_diffusion.ipynb"},{label:"TensorFlow",value:"https://studiolab.sagemaker.aws/import/github/huggingface/notebooks/blob/main/diffusers_doc/pt/tensorflow/stable_diffusion.ipynb"}]}}),T=new P({props:{title:"Desempenho básico",local:"desempenho-básico",headingTag:"h1"}}),Z=new P({props:{title:"Uso de memória",local:"uso-de-memória",headingTag:"h2"}}),W=new 
L({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMEElMEFwaXBlbGluZSUyMCUzRCUyMERpZmZ1c2lvblBpcGVsaW5lLmZyb21fcHJldHJhaW5lZCglMEElMjAlMjAlMjJzdGFiaWxpdHlhaSUyRnN0YWJsZS1kaWZmdXNpb24teGwtYmFzZS0xLjAlMjIlMkMlMEElMjAlMjB0b3JjaF9kdHlwZSUzRHRvcmNoLmJmbG9hdDE2JTJDJTBBJTIwJTIwZGV2aWNlX21hcCUzRCUyMmN1ZGElMjIlMEEpJTBBcGlwZWxpbmUuZW5hYmxlX21vZGVsX2NwdV9vZmZsb2FkKCklMEElMEFwcm9tcHQlMjAlM0QlMjAlMjIlMjIlMjIlMEFjaW5lbWF0aWMlMjBmaWxtJTIwc3RpbGwlMjBvZiUyMGElMjBjYXQlMjBzaXBwaW5nJTIwYSUyMG1hcmdhcml0YSUyMGluJTIwYSUyMHBvb2wlMjBpbiUyMFBhbG0lMjBTcHJpbmdzJTJDJTIwQ2FsaWZvcm5pYSUwQWhpZ2hseSUyMGRldGFpbGVkJTJDJTIwaGlnaCUyMGJ1ZGdldCUyMGhvbGx5d29vZCUyMG1vdmllJTJDJTIwY2luZW1hc2NvcGUlMkMlMjBtb29keSUyQyUyMGVwaWMlMkMlMjBnb3JnZW91cyUyQyUyMGZpbG0lMjBncmFpbiUwQSUyMiUyMiUyMiUwQXBpcGVsaW5lKHByb21wdCkuaW1hZ2VzJTVCMCU1RCUwQXByaW50KGYlMjJNZW0lQzMlQjNyaWElMjBtJUMzJUExeGltYSUyMHJlc2VydmFkYSUzQSUyMCU3QnRvcmNoLmN1ZGEubWF4X21lbW9yeV9hbGxvY2F0ZWQoKSUyMCUyRiUyMDEwMjQqKjMlM0EuMmYlN0QlMjBHQiUyMik=",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline
pipeline = DiffusionPipeline.from_pretrained(
<span class="hljs-string">&quot;stabilityai/stable-diffusion-xl-base-1.0&quot;</span>,
torch_dtype=torch.bfloat16,
device_map=<span class="hljs-string">&quot;cuda&quot;</span>
)
pipeline.enable_model_cpu_offload()
prompt = <span class="hljs-string">&quot;&quot;&quot;
cinematic film still of a cat sipping a margarita in a pool in Palm Springs, California
highly detailed, high budget hollywood movie, cinemascope, moody, epic, gorgeous, film grain
&quot;&quot;&quot;</span>
pipeline(prompt).images[<span class="hljs-number">0</span>]
<span class="hljs-built_in">print</span>(<span class="hljs-string">f&quot;Memória máxima reservada: <span class="hljs-subst">{torch.cuda.max_memory_allocated() / <span class="hljs-number">1024</span>**<span class="hljs-number">3</span>:<span class="hljs-number">.2</span>f}</span> GB&quot;</span>)`,wrap:!1}}),G=new P({props:{title:"Velocidade de inferência",local:"velocidade-de-inferência",headingTag:"h2"}}),E=new L({props:{code:"aW1wb3J0JTIwdG9yY2glMEFpbXBvcnQlMjB0aW1lJTBBZnJvbSUyMGRpZmZ1c2VycyUyMGltcG9ydCUyMERpZmZ1c2lvblBpcGVsaW5lJTJDJTIwRFBNU29sdmVyTXVsdGlzdGVwU2NoZWR1bGVyJTBBJTBBcGlwZWxpbmUlMjAlM0QlMjBEaWZmdXNpb25QaXBlbGluZS5mcm9tX3ByZXRyYWluZWQoJTBBJTIwJTIwJTIyc3RhYmlsaXR5YWklMkZzdGFibGUtZGlmZnVzaW9uLXhsLWJhc2UtMS4wJTIyJTJDJTBBJTIwJTIwdG9yY2hfZHR5cGUlM0R0b3JjaC5iZmxvYXQxNiUyQyUwQSUyMCUyMGRldmljZV9tYXAlM0QlMjJjdWRhJTIyJTBBKQ==",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">import</span> time
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline, DPMSolverMultistepScheduler
pipeline = DiffusionPipeline.from_pretrained(
<span class="hljs-string">&quot;stabilityai/stable-diffusion-xl-base-1.0&quot;</span>,
torch_dtype=torch.bfloat16,
device_map=<span class="hljs-string">&quot;cuda&quot;</span>
)`,wrap:!1}}),x=new L({props:{code:"cGlwZWxpbmUuc2NoZWR1bGVyJTIwJTNEJTIwRFBNU29sdmVyTXVsdGlzdGVwU2NoZWR1bGVyLmZyb21fY29uZmlnKHBpcGVsaW5lLnNjaGVkdWxlci5jb25maWcpJTBBJTBBcHJvbXB0JTIwJTNEJTIwJTIyJTIyJTIyJTBBY2luZW1hdGljJTIwZmlsbSUyMHN0aWxsJTIwb2YlMjBhJTIwY2F0JTIwc2lwcGluZyUyMGElMjBtYXJnYXJpdGElMjBpbiUyMGElMjBwb29sJTIwaW4lMjBQYWxtJTIwU3ByaW5ncyUyQyUyMENhbGlmb3JuaWElMEFoaWdobHklMjBkZXRhaWxlZCUyQyUyMGhpZ2glMjBidWRnZXQlMjBob2xseXdvb2QlMjBtb3ZpZSUyQyUyMGNpbmVtYXNjb3BlJTJDJTIwbW9vZHklMkMlMjBlcGljJTJDJTIwZ29yZ2VvdXMlMkMlMjBmaWxtJTIwZ3JhaW4lMEElMjIlMjIlMjIlMEElMEFzdGFydF90aW1lJTIwJTNEJTIwdGltZS5wZXJmX2NvdW50ZXIoKSUwQWltYWdlJTIwJTNEJTIwcGlwZWxpbmUocHJvbXB0KS5pbWFnZXMlNUIwJTVEJTBBZW5kX3RpbWUlMjAlM0QlMjB0aW1lLnBlcmZfY291bnRlcigpJTBBJTBBcHJpbnQoZiUyMkdlcmElQzMlQTclQzMlQTNvJTIwZGUlMjBpbWFnZW0lMjBsZXZvdSUyMCU3QmVuZF90aW1lJTIwLSUyMHN0YXJ0X3RpbWUlM0EuM2YlN0QlMjBzZWd1bmRvcyUyMik=",highlighted:`pipeline.scheduler = DPMSolverMultistepScheduler.from_config(pipeline.scheduler.config)
prompt = <span class="hljs-string">&quot;&quot;&quot;
cinematic film still of a cat sipping a margarita in a pool in Palm Springs, California
highly detailed, high budget hollywood movie, cinemascope, moody, epic, gorgeous, film grain
&quot;&quot;&quot;</span>
start_time = time.perf_counter()
image = pipeline(prompt).images[<span class="hljs-number">0</span>]
end_time = time.perf_counter()
<span class="hljs-built_in">print</span>(<span class="hljs-string">f&quot;Geração de imagem levou <span class="hljs-subst">{end_time - start_time:<span class="hljs-number">.3</span>f}</span> segundos&quot;</span>)`,wrap:!1}}),_=new P({props:{title:"Qualidade de geração",local:"qualidade-de-geração",headingTag:"h2"}}),k=new L({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMEElMEFwaXBlbGluZSUyMCUzRCUyMERpZmZ1c2lvblBpcGVsaW5lLmZyb21fcHJldHJhaW5lZCglMEElMjAlMjAlMjAlMjAlMjJzdGFiaWxpdHlhaSUyRnN0YWJsZS1kaWZmdXNpb24teGwtYmFzZS0xLjAlMjIlMkMlMEElMjAlMjAlMjAlMjB0b3JjaF9kdHlwZSUzRHRvcmNoLmJmbG9hdDE2JTJDJTBBJTIwJTIwJTIwJTIwZGV2aWNlX21hcCUzRCUyMmN1ZGElMjIlMEEpJTBBJTBBcHJvbXB0JTIwJTNEJTIwJTIyJTIyJTIyJTBBY2luZW1hdGljJTIwZmlsbSUyMHN0aWxsJTIwb2YlMjBhJTIwY2F0JTIwc2lwcGluZyUyMGElMjBtYXJnYXJpdGElMjBpbiUyMGElMjBwb29sJTIwaW4lMjBQYWxtJTIwU3ByaW5ncyUyQyUyMENhbGlmb3JuaWElMEFoaWdobHklMjBkZXRhaWxlZCUyQyUyMGhpZ2glMjBidWRnZXQlMjBob2xseXdvb2QlMjBtb3ZpZSUyQyUyMGNpbmVtYXNjb3BlJTJDJTIwbW9vZHklMkMlMjBlcGljJTJDJTIwZ29yZ2VvdXMlMkMlMjBmaWxtJTIwZ3JhaW4lMEElMjIlMjIlMjIlMEFuZWdhdGl2ZV9wcm9tcHQlMjAlM0QlMjAlMjJsb3clMjBxdWFsaXR5JTJDJTIwYmx1cnJ5JTJDJTIwdWdseSUyQyUyMHBvb3IlMjBkZXRhaWxzJTIyJTBBcGlwZWxpbmUocHJvbXB0JTJDJTIwbmVnYXRpdmVfcHJvbXB0JTNEbmVnYXRpdmVfcHJvbXB0KS5pbWFnZXMlNUIwJTVE",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline
pipeline = DiffusionPipeline.from_pretrained(
<span class="hljs-string">&quot;stabilityai/stable-diffusion-xl-base-1.0&quot;</span>,
torch_dtype=torch.bfloat16,
device_map=<span class="hljs-string">&quot;cuda&quot;</span>
)
prompt = <span class="hljs-string">&quot;&quot;&quot;
cinematic film still of a cat sipping a margarita in a pool in Palm Springs, California
highly detailed, high budget hollywood movie, cinemascope, moody, epic, gorgeous, film grain
&quot;&quot;&quot;</span>
negative_prompt = <span class="hljs-string">&quot;low quality, blurry, ugly, poor details&quot;</span>
pipeline(prompt, negative_prompt=negative_prompt).images[<span class="hljs-number">0</span>]`,wrap:!1}}),H=new L({props:{code:"aW1wb3J0JTIwdG9yY2glMEFmcm9tJTIwZGlmZnVzZXJzJTIwaW1wb3J0JTIwRGlmZnVzaW9uUGlwZWxpbmUlMkMlMjBIZXVuRGlzY3JldGVTY2hlZHVsZXIlMEElMEFwaXBlbGluZSUyMCUzRCUyMERpZmZ1c2lvblBpcGVsaW5lLmZyb21fcHJldHJhaW5lZCglMEElMjAlMjAlMjAlMjAlMjJzdGFiaWxpdHlhaSUyRnN0YWJsZS1kaWZmdXNpb24teGwtYmFzZS0xLjAlMjIlMkMlMEElMjAlMjAlMjAlMjB0b3JjaF9kdHlwZSUzRHRvcmNoLmJmbG9hdDE2JTJDJTBBJTIwJTIwJTIwJTIwZGV2aWNlX21hcCUzRCUyMmN1ZGElMjIlMEEpJTBBcGlwZWxpbmUuc2NoZWR1bGVyJTIwJTNEJTIwSGV1bkRpc2NyZXRlU2NoZWR1bGVyLmZyb21fY29uZmlnKHBpcGVsaW5lLnNjaGVkdWxlci5jb25maWcpJTBBJTBBcHJvbXB0JTIwJTNEJTIwJTIyJTIyJTIyJTBBY2luZW1hdGljJTIwZmlsbSUyMHN0aWxsJTIwb2YlMjBhJTIwY2F0JTIwc2lwcGluZyUyMGElMjBtYXJnYXJpdGElMjBpbiUyMGElMjBwb29sJTIwaW4lMjBQYWxtJTIwU3ByaW5ncyUyQyUyMENhbGlmb3JuaWElMEFoaWdobHklMjBkZXRhaWxlZCUyQyUyMGhpZ2glMjBidWRnZXQlMjBob2xseXdvb2QlMjBtb3ZpZSUyQyUyMGNpbmVtYXNjb3BlJTJDJTIwbW9vZHklMkMlMjBlcGljJTJDJTIwZ29yZ2VvdXMlMkMlMjBmaWxtJTIwZ3JhaW4lMEElMjIlMjIlMjIlMEFuZWdhdGl2ZV9wcm9tcHQlMjAlM0QlMjAlMjJsb3clMjBxdWFsaXR5JTJDJTIwYmx1cnJ5JTJDJTIwdWdseSUyQyUyMHBvb3IlMjBkZXRhaWxzJTIyJTBBcGlwZWxpbmUocHJvbXB0JTJDJTIwbmVnYXRpdmVfcHJvbXB0JTNEbmVnYXRpdmVfcHJvbXB0KS5pbWFnZXMlNUIwJTVE",highlighted:`<span class="hljs-keyword">import</span> torch
<span class="hljs-keyword">from</span> diffusers <span class="hljs-keyword">import</span> DiffusionPipeline, HeunDiscreteScheduler
pipeline = DiffusionPipeline.from_pretrained(
<span class="hljs-string">&quot;stabilityai/stable-diffusion-xl-base-1.0&quot;</span>,
torch_dtype=torch.bfloat16,
device_map=<span class="hljs-string">&quot;cuda&quot;</span>
)
pipeline.scheduler = HeunDiscreteScheduler.from_config(pipeline.scheduler.config)
prompt = <span class="hljs-string">&quot;&quot;&quot;
cinematic film still of a cat sipping a margarita in a pool in Palm Springs, California
highly detailed, high budget hollywood movie, cinemascope, moody, epic, gorgeous, film grain
&quot;&quot;&quot;</span>
negative_prompt = <span class="hljs-string">&quot;low quality, blurry, ugly, poor details&quot;</span>
pipeline(prompt, negative_prompt=negative_prompt).images[<span class="hljs-number">0</span>]`,wrap:!1}}),R=new P({props:{title:"Próximos passos",local:"próximos-passos",headingTag:"h2"}}),S=new Fe({props:{source:"https://github.com/huggingface/diffusers/blob/main/docs/source/pt/stable_diffusion.md"}}),{c(){b=o("meta"),A=s(),Q=o("p"),K=s(),p(h.$$.fragment),O=s(),p(w.$$.fragment),ee=s(),p(T.$$.fragment),le=s(),g=o("p"),g.innerHTML=Ge,ae=s(),j=o("p"),j.innerHTML=Ie,te=s(),p(Z.$$.fragment),se=s(),B=o("p"),B.textContent=ve,ie=s(),U=o("p"),U.innerHTML=Ee,oe=s(),p(W.$$.fragment),ne=s(),p(G.$$.fragment),pe=s(),I=o("p"),I.textContent=$e,me=s(),v=o("ul"),v.innerHTML=xe,re=s(),p(E.$$.fragment),de=s(),$=o("ul"),$.innerHTML=_e,ce=s(),p(x.$$.fragment),ue=s(),p(_.$$.fragment),Me=s(),X=o("p"),X.textContent=Xe,fe=s(),y=o("ul"),J=o("li"),N=o("p"),N.textContent=ke,je=s(),p(k.$$.fragment),Ze=s(),C=o("p"),C.innerHTML=qe,Be=s(),q=o("li"),Y=o("p"),Y.innerHTML=He,Ue=s(),p(H.$$.fragment),be=s(),p(R.$$.fragment),Je=s(),V=o("p"),V.innerHTML=Re,ye=s(),p(S.$$.fragment),he=s(),D=o("p"),this.h()},l(e){const 
l=Qe("svelte-u9bgzb",document.head);b=n(l,"META",{name:!0,content:!0}),l.forEach(a),A=i(e),Q=n(e,"P",{}),F(Q).forEach(a),K=i(e),m(h.$$.fragment,e),O=i(e),m(w.$$.fragment,e),ee=i(e),m(T.$$.fragment,e),le=i(e),g=n(e,"P",{"data-svelte-h":!0}),M(g)!=="svelte-r2oodm"&&(g.innerHTML=Ge),ae=i(e),j=n(e,"P",{"data-svelte-h":!0}),M(j)!=="svelte-oj4cbk"&&(j.innerHTML=Ie),te=i(e),m(Z.$$.fragment,e),se=i(e),B=n(e,"P",{"data-svelte-h":!0}),M(B)!=="svelte-1lpflsu"&&(B.textContent=ve),ie=i(e),U=n(e,"P",{"data-svelte-h":!0}),M(U)!=="svelte-c49xk3"&&(U.innerHTML=Ee),oe=i(e),m(W.$$.fragment,e),ne=i(e),m(G.$$.fragment,e),pe=i(e),I=n(e,"P",{"data-svelte-h":!0}),M(I)!=="svelte-si677d"&&(I.textContent=$e),me=i(e),v=n(e,"UL",{"data-svelte-h":!0}),M(v)!=="svelte-kc4x2p"&&(v.innerHTML=xe),re=i(e),m(E.$$.fragment,e),de=i(e),$=n(e,"UL",{"data-svelte-h":!0}),M($)!=="svelte-royvag"&&($.innerHTML=_e),ce=i(e),m(x.$$.fragment,e),ue=i(e),m(_.$$.fragment,e),Me=i(e),X=n(e,"P",{"data-svelte-h":!0}),M(X)!=="svelte-td8e6t"&&(X.textContent=Xe),fe=i(e),y=n(e,"UL",{});var Te=F(y);J=n(Te,"LI",{});var z=F(J);N=n(z,"P",{"data-svelte-h":!0}),M(N)!=="svelte-1p02hmw"&&(N.textContent=ke),je=i(z),m(k.$$.fragment,z),Ze=i(z),C=n(z,"P",{"data-svelte-h":!0}),M(C)!=="svelte-9arwxl"&&(C.innerHTML=qe),z.forEach(a),Be=i(Te),q=n(Te,"LI",{});var 
ge=F(q);Y=n(ge,"P",{"data-svelte-h":!0}),M(Y)!=="svelte-1qkqp8b"&&(Y.innerHTML=He),Ue=i(ge),m(H.$$.fragment,ge),ge.forEach(a),Te.forEach(a),be=i(e),m(R.$$.fragment,e),Je=i(e),V=n(e,"P",{"data-svelte-h":!0}),M(V)!=="svelte-36mrq"&&(V.innerHTML=Re),ye=i(e),m(S.$$.fragment,e),he=i(e),D=n(e,"P",{}),F(D).forEach(a),this.h()},h(){Ve(b,"name","hf:doc:metadata"),Ve(b,"content",Ae)},m(e,l){f(document.head,b),t(e,A,l),t(e,Q,l),t(e,K,l),r(h,e,l),t(e,O,l),r(w,e,l),t(e,ee,l),r(T,e,l),t(e,le,l),t(e,g,l),t(e,ae,l),t(e,j,l),t(e,te,l),r(Z,e,l),t(e,se,l),t(e,B,l),t(e,ie,l),t(e,U,l),t(e,oe,l),r(W,e,l),t(e,ne,l),r(G,e,l),t(e,pe,l),t(e,I,l),t(e,me,l),t(e,v,l),t(e,re,l),r(E,e,l),t(e,de,l),t(e,$,l),t(e,ce,l),r(x,e,l),t(e,ue,l),r(_,e,l),t(e,Me,l),t(e,X,l),t(e,fe,l),t(e,y,l),f(y,J),f(J,N),f(J,je),r(k,J,null),f(J,Ze),f(J,C),f(y,Be),f(y,q),f(q,Y),f(q,Ue),r(H,q,null),t(e,be,l),r(R,e,l),t(e,Je,l),t(e,V,l),t(e,ye,l),r(S,e,l),t(e,he,l),t(e,D,l),we=!0},p:Ne,i(e){we||(d(h.$$.fragment,e),d(w.$$.fragment,e),d(T.$$.fragment,e),d(Z.$$.fragment,e),d(W.$$.fragment,e),d(G.$$.fragment,e),d(E.$$.fragment,e),d(x.$$.fragment,e),d(_.$$.fragment,e),d(k.$$.fragment,e),d(H.$$.fragment,e),d(R.$$.fragment,e),d(S.$$.fragment,e),we=!0)},o(e){c(h.$$.fragment,e),c(w.$$.fragment,e),c(T.$$.fragment,e),c(Z.$$.fragment,e),c(W.$$.fragment,e),c(G.$$.fragment,e),c(E.$$.fragment,e),c(x.$$.fragment,e),c(_.$$.fragment,e),c(k.$$.fragment,e),c(H.$$.fragment,e),c(R.$$.fragment,e),c(S.$$.fragment,e),we=!1},d(e){e&&(a(A),a(Q),a(K),a(O),a(ee),a(le),a(g),a(ae),a(j),a(te),a(se),a(B),a(ie),a(U),a(oe),a(ne),a(pe),a(I),a(me),a(v),a(re),a(de),a($),a(ce),a(ue),a(Me),a(X),a(fe),a(y),a(be),a(Je),a(V),a(ye),a(he),a(D)),a(b),u(h,e),u(w,e),u(T,e),u(Z,e),u(W,e),u(G,e),u(E,e),u(x,e),u(_,e),u(k),u(H),u(R,e),u(S,e)}}}const Ae='{"title":"Desempenho básico","local":"desempenho-básico","sections":[{"title":"Uso de memória","local":"uso-de-memória","sections":[],"depth":2},{"title":"Velocidade de 
inferência","local":"velocidade-de-inferência","sections":[],"depth":2},{"title":"Qualidade de geração","local":"qualidade-de-geração","sections":[],"depth":2},{"title":"Próximos passos","local":"próximos-passos","sections":[],"depth":2}],"depth":1}';function Ke(We){return Ce(()=>{new URLSearchParams(window.location.search).get("fw")}),[]}class sl extends Ye{constructor(b){super(),ze(this,b,Ke,Le,Se,{})}}export{sl as component};

Xet Storage Details

Size:
18.5 kB
·
Xet hash:
2e9248cebfbb516e70686e5fee22dcdb0b83c066aff3d0fe1dd68dd48899da95

Xet efficiently stores files, intelligently splitting them into unique chunks and accelerating uploads and downloads. More info.