Update README.md
# Load model directly
# Minimal chat-inference example for deepseek-ai/DeepSeek-V3.1-Base.
# NOTE(security): trust_remote_code=True executes Python code shipped inside
# the Hub repository — only enable it for checkpoints you trust.
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("deepseek-ai/DeepSeek-V3.1-Base", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained("deepseek-ai/DeepSeek-V3.1-Base", trust_remote_code=True)

# Single-turn conversation rendered through the tokenizer's chat template.
messages = [
    {"role": "user", "content": "Who are you?"},
]
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,  # append the assistant-turn marker so the model replies
    tokenize=True,
    return_dict=True,
    return_tensors="pt",
).to(model.device)

outputs = model.generate(**inputs, max_new_tokens=40)
# Slice off the prompt tokens so only the newly generated reply is printed;
# skip_special_tokens=True drops EOS/control tokens from the decoded text.
print(tokenizer.decode(outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True))
<%@ page contentType="text/html;charset=UTF-8" language="java" %>
<%-- Minimal chat UI: collects a message from the user and displays the bot's
     reply. NOTE(review): presumably a servlet mapped to "chat" sets the
     "resposta" request attribute — confirm against the controller. --%>
<html>
<head>
<title>Bot IA Simples</title>
</head>
<body>
<%-- Submits the "mensagem" field via POST to the "chat" handler. --%>
<form action="chat" method="post">
<label for="mensagem">Você:</label>
<input type="text" name="mensagem" id="mensagem">
<button type="submit">Enviar</button>
</form>
<div>
<%-- EL expression renders the reply; empty on the first (GET-less) render. --%>
<p>Bot: ${resposta}</p>
</div>
</body>
</html>
@@ -1,3 +1,16 @@
----
-license: mit
----
+---
+license: mit
+datasets:
+- fka/awesome-chatgpt-prompts
+language:
+- pt
+metrics:
+- bertscore
+base_model:
+- openai/gpt-oss-120b
+new_version: openai/gpt-oss-120b
+pipeline_tag: translation
+library_name: adapter-transformers
+tags:
+- code
+---