---
license: mit
datasets:
- open-thoughts/OpenThoughts-114k
language:
- en
metrics:
- accuracy
base_model:
- deepseek-ai/DeepSeek-R1
new_version: deepseek-ai/DeepSeek-R1
pipeline_tag: question-answering
library_name: adapter-transformers
tags:
- not-for-all-audiences
---
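
The runnable examples below use `gpt2` as a small stand-in checkpoint. For the base model declared in the metadata (`deepseek-ai/DeepSeek-R1`), a minimal loading sketch might look like the following; the dtype, device-map, and `trust_remote_code` settings are assumptions about a reasonable configuration, not values taken from this card:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hypothetical load of the base model listed in the metadata above.
# DeepSeek-R1 is very large; these loading options are assumptions,
# not settings documented in this card.
model_id = "deepseek-ai/DeepSeek-R1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype="auto",       # use the checkpoint's native dtype
    device_map="auto",        # spread weights across available devices
    trust_remote_code=True,   # may be required for the DeepSeek architecture
)
```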
|
```python
from transformers import pipeline, set_seed

# Build a text-generation pipeline (gpt2 keeps the example small) and fix the seed
generator = pipeline('text-generation', model='gpt2')
set_seed(42)

# Sample five continuations of the prompt, each capped at 30 tokens (prompt included)
generator("Hello, I'm a language model,", max_length=30, num_return_sequences=5)
```
|
The same generation done explicitly with a tokenizer and `model.generate` (again using `gpt2` for illustration; swap in your own checkpoint):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load a tokenizer and model explicitly
tokenizer = AutoTokenizer.from_pretrained("gpt2")
model = AutoModelForCausalLM.from_pretrained("gpt2")

input_text = "The future of AI is"
inputs = tokenizer(input_text, return_tensors="pt")

# Generate up to 100 tokens in total (prompt included) and decode back to text
output = model.generate(**inputs, max_length=100)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```