Upload folder using huggingface_hub
README.md CHANGED

@@ -47,7 +47,7 @@ prompt = "Explain the difference between machine learning and deep learning."
 messages = [{"role": "user", "content": prompt}]
 text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
 
-outputs = llm.generate([text], SamplingParams(temperature=0.
+outputs = llm.generate([text], SamplingParams(temperature=0.2, max_tokens=512))
 print(outputs[0]["text"])
 ```
 
@@ -60,7 +60,7 @@ messages = [
 {"role": "user", "content": "Show me a hello world example."}
 ]
 text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
-outputs = llm.generate([text], SamplingParams(temperature=0.
+outputs = llm.generate([text], SamplingParams(temperature=0.2, max_tokens=256))
 ```
 
 ## HuggingFace Transformers
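For context, a minimal self-contained sketch of the vLLM snippet these hunks patch might look like the following. The model id is a placeholder (the diff does not name the model), and the output is read through vLLM's `RequestOutput` objects rather than dictionary indexing.

```python
# Minimal vLLM sketch of the snippet being fixed above.
# NOTE: the model id is a placeholder; the actual model is not named in this diff.
from transformers import AutoTokenizer
from vllm import LLM, SamplingParams

model_id = "your-org/your-model"  # placeholder

tokenizer = AutoTokenizer.from_pretrained(model_id)
llm = LLM(model=model_id)

prompt = "Explain the difference between machine learning and deep learning."
messages = [{"role": "user", "content": prompt}]
text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

# Mirrors the corrected line from the diff: complete SamplingParams call.
outputs = llm.generate([text], SamplingParams(temperature=0.2, max_tokens=512))

# llm.generate returns RequestOutput objects; the generated text is at .outputs[0].text
print(outputs[0].outputs[0].text)
```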