Dataset Viewer

Column schema, in the order used by the preview rows below (⌀ marks columns that contain null values):

- model_id: string (length 12–50)
- downloads: int64 (167–17M)
- likes: int64 (201–4.86k)
- tags: string (length 116–738)
- created_at: timestamp[ns, tz=UTC]
- architectures: string (31 classes)
- model_type: string (29 classes)
- num_attention_heads: float64 (12–128) ⌀
- vocab_size: float64 (32k–251k) ⌀
- pad_token_id: float64 (0–200k) ⌀
- hidden_size: float64 (896–16.4k) ⌀
- intermediate_size: float64 (4.1k–53.2k) ⌀
- num_hidden_layers: float64 (16–126) ⌀
- hidden_act: string (5 classes)
- layer_norm_eps: float64 (0–0) ⌀
- max_position_embeddings: float64 (1.02k–1.05M) ⌀
- activation_function: string (3 classes)
- rms_norm_eps: float64 (0–0) ⌀
- attention_probs_dropout_prob: float64 (0–0.1) ⌀
- hidden_dropout_prob: float64 (0–0.1) ⌀
- year: int32 (2.02k–2.03k)
- month: int32 (1–12)
- day: int32 (1–31)
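For reference, a minimal sketch of how this column schema could be declared with the Hugging Face `datasets` library. The `Features` mapping below is an assumption built from the preview's dtypes, not a definition taken from the dataset repository itself; the preview rows follow after it.

```python
from datasets import Features, Value

# Hedged sketch: a Features declaration mirroring the preview's column schema.
# Dtype strings follow the viewer; the nullable (⌀) columns simply contain
# nulls, which Arrow-backed float64/string columns already allow.
features = Features({
    "model_id": Value("string"),
    "downloads": Value("int64"),
    "likes": Value("int64"),
    "tags": Value("string"),
    "created_at": Value("timestamp[ns, tz=UTC]"),
    "architectures": Value("string"),
    "model_type": Value("string"),
    "num_attention_heads": Value("float64"),
    "vocab_size": Value("float64"),
    "pad_token_id": Value("float64"),
    "hidden_size": Value("float64"),
    "intermediate_size": Value("float64"),
    "num_hidden_layers": Value("float64"),
    "hidden_act": Value("string"),
    "layer_norm_eps": Value("float64"),
    "max_position_embeddings": Value("float64"),
    "activation_function": Value("string"),
    "rms_norm_eps": Value("float64"),
    "attention_probs_dropout_prob": Value("float64"),
    "hidden_dropout_prob": Value("float64"),
    "year": Value("int32"),
    "month": Value("int32"),
    "day": Value("int32"),
})
```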
bigscience/bloom
| 113,650 | 4,862 |
['transformers', 'pytorch', 'tensorboard', 'safetensors', 'bloom', 'text-generation', 'ak', 'ar', 'as', 'bm', 'bn', 'ca', 'code', 'en', 'es', 'eu', 'fon', 'fr', 'gu', 'hi', 'id', 'ig', 'ki', 'kn', 'lg', 'ln', 'ml', 'mr', 'ne', 'nso', 'ny', 'or', 'pa', 'pt', 'rn', 'rw', 'sn', 'st', 'sw', 'ta', 'te', 'tn', 'ts', 'tum', 'tw', 'ur', 'vi', 'wo', 'xh', 'yo', 'zh', 'zu', 'arxiv:2211.05100', 'arxiv:1909.08053', 'arxiv:2110.02861', 'arxiv:2108.12409', 'doi:10.57967/hf/0003', 'license:bigscience-bloom-rail-1.0', 'model-index', 'co2_eq_emissions', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-05-19T11:53:33 |
['BloomForCausalLM']
|
bloom
| 112 | 250,880 | 3 | null | null | null | null | null | null | null | null | null | null | 2,022 | 5 | 19 |
microsoft/phi-2
| 372,483 | 3,291 |
['transformers', 'safetensors', 'phi', 'text-generation', 'nlp', 'code', 'en', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-12-13T21:19:59 |
['PhiForCausalLM']
|
phi
| 32 | 51,200 | null | 2,560 | 10,240 | 32 |
gelu_new
| 0.00001 | 2,048 | null | null | null | null | 2,023 | 12 | 13 |
openai-community/gpt2
| 16,958,734 | 2,620 |
['transformers', 'pytorch', 'tf', 'jax', 'tflite', 'rust', 'onnx', 'safetensors', 'gpt2', 'text-generation', 'exbert', 'en', 'doi:10.57967/hf/0039', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-03-02T23:29:04 |
['GPT2LMHeadModel']
|
gpt2
| null | 50,257 | null | null | null | null | null | null | null |
gelu_new
| null | null | null | 2,022 | 3 | 2 |
tiiuae/falcon-40b
| 142,754 | 2,422 |
['transformers', 'pytorch', 'safetensors', 'falcon', 'text-generation', 'custom_code', 'en', 'de', 'es', 'fr', 'dataset:tiiuae/falcon-refinedweb', 'arxiv:2205.14135', 'arxiv:1911.02150', 'arxiv:2101.00027', 'arxiv:2005.14165', 'arxiv:2104.09864', 'arxiv:2306.01116', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-24T12:08:30 |
['FalconForCausalLM']
|
falcon
| 128 | 65,024 | null | 8,192 | null | 60 | null | null | null | null | null | null | null | 2,023 | 5 | 24 |
Qwen/QwQ-32B
| 369,581 | 2,265 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2412.15115', 'base_model:Qwen/Qwen2.5-32B', 'base_model:finetune:Qwen/Qwen2.5-32B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-03-05T14:16:59 |
['Qwen2ForCausalLM']
|
qwen2
| 40 | 152,064 | null | 5,120 | 27,648 | 64 |
silu
| null | 40,960 | null | 0.00001 | null | null | 2,025 | 3 | 5 |
nvidia/Llama-3.1-Nemotron-70B-Instruct-HF
| 262,081 | 2,028 |
['transformers', 'safetensors', 'llama', 'text-generation', 'nvidia', 'llama3.1', 'conversational', 'en', 'dataset:nvidia/HelpSteer2', 'arxiv:2410.01257', 'arxiv:2405.01481', 'arxiv:2406.08673', 'base_model:meta-llama/Llama-3.1-70B-Instruct', 'base_model:finetune:meta-llama/Llama-3.1-70B-Instruct', 'license:llama3.1', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2024-10-12T02:37:13 |
['LlamaForCausalLM']
|
llama
| 64 | 128,256 | null | 8,192 | 28,672 | 80 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 10 | 12 |
microsoft/phi-4
| 493,023 | 1,905 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'phi', 'nlp', 'math', 'code', 'chat', 'conversational', 'en', 'arxiv:2412.08905', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-12-11T11:47:29 |
['Phi3ForCausalLM']
|
phi3
| 40 | 100,352 | 100,349 | 5,120 | 17,920 | 40 |
silu
| null | 16,384 | null | 0.00001 | null | null | 2,024 | 12 | 11 |
Qwen/QwQ-32B-Preview
| 225,986 | 1,720 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-32B-Instruct', 'base_model:finetune:Qwen/Qwen2.5-32B-Instruct', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-11-27T15:50:55 |
['Qwen2ForCausalLM']
|
qwen2
| 40 | 152,064 | null | 5,120 | 27,648 | 64 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,024 | 11 | 27 |
mattshumer/Reflection-Llama-3.1-70B
| 691 | 1,715 |
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'base_model:meta-llama/Llama-3.1-70B-Instruct', 'base_model:finetune:meta-llama/Llama-3.1-70B-Instruct', 'license:llama3.1', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-09-05T18:29:50 |
['LlamaForCausalLM']
|
llama
| 64 | 128,262 | null | 8,192 | 28,672 | 80 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 9 | 5 |
Qwen/Qwen2.5-Coder-32B-Instruct
| 259,126 | 1,714 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'code', 'codeqwen', 'chat', 'qwen', 'qwen-coder', 'conversational', 'en', 'arxiv:2409.12186', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-Coder-32B', 'base_model:finetune:Qwen/Qwen2.5-Coder-32B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-11-06T07:49:50 |
['Qwen2ForCausalLM']
|
qwen2
| 40 | 152,064 | null | 5,120 | 27,648 | 64 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 11 | 6 |
microsoft/Phi-3-mini-128k-instruct
| 116,893 | 1,637 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'en', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-22T16:26:23 |
['Phi3ForCausalLM']
|
phi3
| 32 | 32,064 | 32,000 | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 4 | 22 |
EleutherAI/gpt-j-6b
| 275,845 | 1,486 |
['transformers', 'pytorch', 'tf', 'jax', 'gptj', 'text-generation', 'causal-lm', 'en', 'dataset:EleutherAI/pile', 'arxiv:2104.09864', 'arxiv:2101.00027', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
| 2022-03-02T23:29:04 |
['GPTJForCausalLM']
|
gptj
| null | 50,400 | null | null | null | null | null | null | null |
gelu_new
| null | null | null | 2,022 | 3 | 2 |
microsoft/phi-1_5
| 108,020 | 1,329 |
['transformers', 'safetensors', 'phi', 'text-generation', 'nlp', 'code', 'en', 'arxiv:2309.05463', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-09-10T04:03:46 |
['PhiForCausalLM']
|
phi
| 32 | 51,200 | null | 2,048 | 8,192 | 24 |
gelu_new
| 0.00001 | 2,048 | null | null | null | null | 2,023 | 9 | 10 |
deepseek-ai/DeepSeek-R1-Distill-Qwen-32B
| 1,585,576 | 1,264 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:19:00 |
['Qwen2ForCausalLM']
|
qwen2
| 40 | 152,064 | null | 5,120 | 27,648 | 64 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 20 |
cognitivecomputations/dolphin-2.5-mixtral-8x7b
| 6,214 | 1,230 |
['transformers', 'pytorch', 'safetensors', 'mixtral', 'text-generation', 'conversational', 'en', 'dataset:ehartford/dolphin', 'dataset:jondurbin/airoboros-2.2.1', 'dataset:ehartford/dolphin-coder', 'dataset:migtissera/Synthia-v1.3', 'dataset:teknium/openhermes', 'dataset:ise-uiuc/Magicoder-OSS-Instruct-75K', 'dataset:ise-uiuc/Magicoder-Evol-Instruct-110K', 'dataset:LDJnr/Pure-Dove', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-12-14T00:45:47 |
['MixtralForCausalLM']
|
mixtral
| 32 | 32,002 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 12 | 14 |
TinyLlama/TinyLlama-1.1B-Chat-v1.0
| 1,077,704 | 1,187 |
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'en', 'dataset:cerebras/SlimPajama-627B', 'dataset:bigcode/starcoderdata', 'dataset:HuggingFaceH4/ultrachat_200k', 'dataset:HuggingFaceH4/ultrafeedback_binarized', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-12-30T06:27:30 |
['LlamaForCausalLM']
|
llama
| 32 | 32,000 | null | 2,048 | 5,632 | 22 |
silu
| null | 2,048 | null | 0.00001 | null | null | 2,023 | 12 | 30 |
tiiuae/falcon-40b-instruct
| 90,022 | 1,175 |
['transformers', 'pytorch', 'falcon', 'text-generation', 'custom_code', 'en', 'dataset:tiiuae/falcon-refinedweb', 'arxiv:2205.14135', 'arxiv:1911.02150', 'arxiv:2005.14165', 'arxiv:2104.09864', 'arxiv:2306.01116', 'arxiv:2304.01196', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-05-25T10:14:36 |
['FalconForCausalLM']
|
falcon
| 128 | 65,024 | null | 8,192 | null | 60 | null | null | null | null | null | null | null | 2,023 | 5 | 25 |
mosaicml/mpt-7b
| 28,190 | 1,168 |
['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'StreamingDatasets', 'custom_code', 'dataset:mc4', 'dataset:c4', 'dataset:togethercomputer/RedPajama-Data-1T', 'dataset:bigcode/the-stack', 'dataset:allenai/s2orc', 'arxiv:2108.12409', 'arxiv:2302.13971', 'arxiv:2205.14135', 'arxiv:2010.04245', 'arxiv:1909.08053', 'arxiv:2302.06675', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-05T00:48:02 |
['MPTForCausalLM']
|
mpt
| null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 5 | 5 |
Qwen/Qwen2-VL-7B-Instruct
| 1,255,774 | 1,156 |
['transformers', 'safetensors', 'qwen2_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2409.12191', 'arxiv:2308.12966', 'base_model:Qwen/Qwen2-VL-7B', 'base_model:finetune:Qwen/Qwen2-VL-7B', 'license:apache-2.0', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-08-28T09:03:13 |
['Qwen2VLForConditionalGeneration']
|
qwen2_vl
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 8 | 28 |
microsoft/Phi-4-multimodal-instruct
| 586,127 | 1,155 |
['transformers', 'safetensors', 'phi4mm', 'text-generation', 'nlp', 'code', 'audio', 'automatic-speech-recognition', 'speech-summarization', 'speech-translation', 'visual-question-answering', 'phi-4-multimodal', 'phi', 'phi-4-mini', 'custom_code', 'multilingual', 'ar', 'zh', 'cs', 'da', 'nl', 'en', 'fi', 'fr', 'de', 'he', 'hu', 'it', 'ja', 'ko', 'no', 'pl', 'pt', 'ru', 'es', 'sv', 'th', 'tr', 'uk', 'arxiv:2407.13833', 'license:mit', 'autotrain_compatible', 'region:us']
| 2025-02-24T22:33:32 |
['Phi4MMForCausalLM']
|
phi4mm
| 24 | 200,064 | 199,999 | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 2 | 24 |
microsoft/Phi-3-mini-4k-instruct
| 902,600 | 1,153 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'en', 'fr', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-22T16:18:17 |
['Phi3ForCausalLM']
|
phi3
| 32 | 32,064 | 32,000 | 3,072 | 8,192 | 32 |
silu
| null | 4,096 | null | 0.00001 | null | null | 2,024 | 4 | 22 |
tiiuae/falcon-7b
| 64,936 | 1,082 |
['transformers', 'pytorch', 'safetensors', 'falcon', 'text-generation', 'custom_code', 'en', 'dataset:tiiuae/falcon-refinedweb', 'arxiv:2205.14135', 'arxiv:1911.02150', 'arxiv:2101.00027', 'arxiv:2005.14165', 'arxiv:2104.09864', 'arxiv:2306.01116', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-04-24T16:36:24 |
['FalconForCausalLM']
|
falcon
| 71 | 65,024 | null | 4,544 | null | 32 | null | null | null | null | null | null | null | 2,023 | 4 | 24 |
deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B
| 1,624,456 | 1,041 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:04:18 |
['Qwen2ForCausalLM']
|
qwen2
| 12 | 151,936 | null | 1,536 | 8,960 | 28 |
silu
| null | 131,072 | null | 0.000001 | null | null | 2,025 | 1 | 20 |
microsoft/Phi-3-vision-128k-instruct
| 83,521 | 957 |
['transformers', 'safetensors', 'phi3_v', 'text-generation', 'nlp', 'code', 'vision', 'conversational', 'custom_code', 'multilingual', 'license:mit', 'autotrain_compatible', 'region:us']
| 2024-05-19T15:07:39 |
['Phi3VForCausalLM']
|
phi3_v
| 32 | 32,064 | null | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 5 | 19 |
tiiuae/falcon-7b-instruct
| 171,457 | 950 |
['transformers', 'pytorch', 'coreml', 'safetensors', 'falcon', 'text-generation', 'conversational', 'custom_code', 'en', 'dataset:tiiuae/falcon-refinedweb', 'arxiv:2205.14135', 'arxiv:1911.02150', 'arxiv:2005.14165', 'arxiv:2104.09864', 'arxiv:2306.01116', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-04-25T06:21:01 |
['FalconForCausalLM']
|
falcon
| 71 | 65,024 | null | 4,544 | null | 32 | null | null | null | null | null | null | null | 2,023 | 4 | 25 |
TheBloke/Llama-2-7B-Chat-GGML
| 3,307 | 871 |
['transformers', 'llama', 'facebook', 'meta', 'pytorch', 'llama-2', 'text-generation', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-7b-chat-hf', 'base_model:finetune:meta-llama/Llama-2-7b-chat-hf', 'license:other', 'region:us']
| 2023-07-18T17:38:15 | null |
llama
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 7 | 18 |
microsoft/Phi-3.5-mini-instruct
| 260,084 | 841 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'arxiv:2404.14219', 'arxiv:2407.13833', 'arxiv:2403.06412', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-08-16T20:48:26 |
['Phi3ForCausalLM']
|
phi3
| 32 | 32,064 | 32,000 | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 8 | 16 |
Phind/Phind-CodeLlama-34B-v2
| 11,528 | 832 |
['transformers', 'pytorch', 'llama', 'text-generation', 'code llama', 'license:llama2', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-08-28T21:29:09 |
['LlamaForCausalLM']
|
llama
| 64 | 32,000 | null | 8,192 | 22,016 | 48 |
silu
| null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 28 |
mosaicml/mpt-7b-storywriter
| 3,471 | 831 |
['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'custom_code', 'dataset:the_pile_books3', 'arxiv:2108.12409', 'arxiv:2205.14135', 'arxiv:2302.06675', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-04T22:55:02 |
['MPTForCausalLM']
|
mpt
| null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 5 | 4 |
teknium/OpenHermes-2.5-Mistral-7B
| 111,465 | 830 |
['transformers', 'pytorch', 'safetensors', 'mistral', 'text-generation', 'instruct', 'finetune', 'chatml', 'gpt4', 'synthetic data', 'distillation', 'conversational', 'en', 'dataset:teknium/OpenHermes-2.5', 'base_model:mistralai/Mistral-7B-v0.1', 'base_model:finetune:mistralai/Mistral-7B-v0.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-10-29T20:36:39 |
['MistralForCausalLM']
|
mistral
| 32 | 32,002 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 29 |
Qwen/Qwen2.5-72B-Instruct
| 321,847 | 767 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-72B', 'base_model:finetune:Qwen/Qwen2.5-72B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-09-16T11:56:31 |
['Qwen2ForCausalLM']
|
qwen2
| 64 | 152,064 | null | 8,192 | 29,568 | 80 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 16 |
Qwen/Qwen-7B-Chat
| 96,367 | 765 |
['transformers', 'safetensors', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2309.16609', 'arxiv:2305.08322', 'arxiv:2009.03300', 'arxiv:2305.05280', 'arxiv:2210.03629', 'license:other', 'autotrain_compatible', 'region:us']
| 2023-08-03T03:01:31 |
['QWenLMHeadModel']
|
qwen
| 32 | 151,936 | null | 4,096 | 22,016 | 32 | null | null | 32,768 | null | null | null | null | 2,023 | 8 | 3 |
anon8231489123/gpt4-x-alpaca-13b-native-4bit-128g
| 2,422 | 735 |
['transformers', 'pytorch', 'llama', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-04-01T01:04:45 |
['LLaMAForCausalLM']
|
llama
| 40 | 32,001 | null | 5,120 | 13,824 | 40 |
silu
| null | null | null | 0.000001 | null | null | 2,023 | 4 | 1 |
Gustavosta/MagicPrompt-Stable-Diffusion
| 18,878 | 723 |
['transformers', 'pytorch', 'coreml', 'safetensors', 'gpt2', 'text-generation', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-09-17T22:34:07 |
['GPT2LMHeadModel']
|
gpt2
| null | 50,257 | null | null | null | null | null | null | null |
gelu_new
| null | null | null | 2,022 | 9 | 17 |
Qwen/Qwen2-72B-Instruct
| 36,650 | 713 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'base_model:Qwen/Qwen2-72B', 'base_model:finetune:Qwen/Qwen2-72B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-05-28T03:48:49 |
['Qwen2ForCausalLM']
|
qwen2
| 64 | 152,064 | null | 8,192 | 29,568 | 80 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 5 | 28 |
TheBloke/Llama-2-13B-chat-GGML
| 647 | 697 |
['transformers', 'llama', 'facebook', 'meta', 'pytorch', 'llama-2', 'text-generation', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-13b-chat-hf', 'base_model:finetune:meta-llama/Llama-2-13b-chat-hf', 'license:other', 'region:us']
| 2023-07-18T18:03:26 | null |
llama
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 7 | 18 |
togethercomputer/GPT-NeoXT-Chat-Base-20B
| 5,207 | 696 |
['transformers', 'pytorch', 'gpt_neox', 'text-generation', 'en', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-03-03T00:24:29 |
['GPTNeoXForCausalLM']
|
gpt_neox
| 64 | 50,432 | null | 6,144 | 24,576 | 44 |
gelu_fast
| 0.00001 | 2,048 | null | null | 0 | 0 | 2,023 | 3 | 3 |
gradientai/Llama-3-8B-Instruct-Gradient-1048k
| 5,103 | 682 |
['transformers', 'safetensors', 'llama', 'text-generation', 'meta', 'llama-3', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2402.08268', 'arxiv:2305.14233', 'doi:10.57967/hf/3372', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-29T10:36:56 |
['LlamaForCausalLM']
|
llama
| 32 | 128,256 | null | 4,096 | 14,336 | 32 |
silu
| null | 1,048,576 | null | 0.00001 | null | null | 2,024 | 4 | 29 |
Open-Orca/Mistral-7B-OpenOrca
| 22,652 | 681 |
['transformers', 'pytorch', 'mistral', 'text-generation', 'conversational', 'en', 'dataset:Open-Orca/OpenOrca', 'arxiv:2306.02707', 'arxiv:2301.13688', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-09-29T19:18:38 |
['MistralForCausalLM']
|
mistral
| 32 | 32,002 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 9 | 29 |
Qwen/Qwen2.5-VL-7B-Instruct
| 3,334,765 | 678 |
['transformers', 'safetensors', 'qwen2_5_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2409.12191', 'arxiv:2308.12966', 'license:apache-2.0', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-26T09:26:37 |
['Qwen2_5_VLForConditionalGeneration']
|
qwen2_5_vl
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 128,000 | null | 0.000001 | null | null | 2,025 | 1 | 26 |
shenzhi-wang/Llama3-8B-Chinese-Chat
| 40,553 | 677 |
['transformers', 'safetensors', 'llama', 'text-generation', 'llama-factory', 'orpo', 'conversational', 'en', 'zh', 'base_model:meta-llama/Meta-Llama-3-8B-Instruct', 'base_model:finetune:meta-llama/Meta-Llama-3-8B-Instruct', 'doi:10.57967/hf/2316', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-21T15:11:45 |
['LlamaForCausalLM']
|
llama
| 32 | 128,256 | null | 4,096 | 14,336 | 32 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 21 |
mistral-community/Mixtral-8x22B-v0.1
| 3,939 | 674 |
['transformers', 'safetensors', 'mixtral', 'text-generation', 'moe', 'fr', 'it', 'de', 'es', 'en', 'license:apache-2.0', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-10T08:43:36 |
['MixtralForCausalLM']
|
mixtral
| 48 | 32,000 | null | 6,144 | 16,384 | 56 |
silu
| null | 65,536 | null | 0.00001 | null | null | 2,024 | 4 | 10 |
microsoft/Phi-3.5-vision-instruct
| 370,023 | 672 |
['transformers', 'safetensors', 'phi3_v', 'text-generation', 'nlp', 'code', 'vision', 'image-text-to-text', 'conversational', 'custom_code', 'multilingual', 'arxiv:2404.14219', 'license:mit', 'autotrain_compatible', 'region:us']
| 2024-08-16T23:48:22 |
['Phi3VForCausalLM']
|
phi3_v
| 32 | 32,064 | 32,000 | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 8 | 16 |
anon8231489123/vicuna-13b-GPTQ-4bit-128g
| 2,339 | 666 |
['transformers', 'pytorch', 'llama', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-04-03T20:52:35 |
['LlamaForCausalLM']
|
llama
| 40 | 32,001 | 0 | 5,120 | 13,824 | 40 |
silu
| null | 2,048 | null | 0.000001 | null | null | 2,023 | 4 | 3 |
cerebras/Cerebras-GPT-13B
| 2,504 | 647 |
['transformers', 'pytorch', 'gpt2', 'feature-extraction', 'causal-lm', 'text-generation', 'en', 'dataset:the_pile', 'arxiv:2304.03208', 'arxiv:2203.15556', 'arxiv:2101.00027', 'license:apache-2.0', 'text-generation-inference', 'region:us']
| 2023-03-20T20:45:54 |
['GPT2Model']
|
gpt2
| null | 50,257 | null | null | null | null | null | null | null |
gelu
| null | null | null | 2,023 | 3 | 20 |
deepseek-ai/DeepSeek-R1-Distill-Llama-8B
| 1,522,558 | 647 |
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:09:42 |
['LlamaForCausalLM']
|
llama
| 32 | 128,256 | null | 4,096 | 14,336 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 20 |
deepseek-ai/DeepSeek-R1-Distill-Llama-70B
| 346,363 | 631 |
['transformers', 'safetensors', 'llama', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:13:33 |
['LlamaForCausalLM']
|
llama
| 64 | 128,256 | null | 8,192 | 28,672 | 80 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 20 |
Qwen/Qwen2-7B-Instruct
| 262,488 | 621 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'base_model:Qwen/Qwen2-7B', 'base_model:finetune:Qwen/Qwen2-7B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-06-04T10:07:03 |
['Qwen2ForCausalLM']
|
qwen2
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 6 | 4 |
TheBloke/Mixtral-8x7B-Instruct-v0.1-GGUF
| 23,684 | 611 |
['transformers', 'gguf', 'mixtral', 'fr', 'it', 'de', 'es', 'en', 'base_model:mistralai/Mixtral-8x7B-Instruct-v0.1', 'base_model:quantized:mistralai/Mixtral-8x7B-Instruct-v0.1', 'license:apache-2.0', 'region:us', 'conversational']
| 2023-12-11T18:08:33 | null |
mixtral
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 12 | 11 |
bigcode/starcoder2-15b
| 15,845 | 593 |
['transformers', 'safetensors', 'starcoder2', 'text-generation', 'code', 'dataset:bigcode/the-stack-v2-train', 'arxiv:2305.13245', 'arxiv:2205.14135', 'arxiv:2004.05150', 'arxiv:2207.14255', 'arxiv:2402.19173', 'license:bigcode-openrail-m', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-02-20T17:58:19 |
['Starcoder2ForCausalLM']
|
starcoder2
| 48 | 49,152 | null | 6,144 | 24,576 | 40 |
gelu_pytorch_tanh
| null | 16,384 | null | null | null | null | 2,024 | 2 | 20 |
TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ
| 1,037 | 577 |
['transformers', 'safetensors', 'llama', 'text-generation', 'uncensored', 'en', 'dataset:ehartford/wizard_vicuna_70k_unfiltered', 'base_model:cognitivecomputations/Wizard-Vicuna-30B-Uncensored', 'base_model:quantized:cognitivecomputations/Wizard-Vicuna-30B-Uncensored', 'license:other', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us']
| 2023-05-30T03:11:00 |
['LlamaForCausalLM']
|
llama
| 52 | 32,000 | 0 | 6,656 | 17,920 | 60 |
silu
| null | 2,048 | null | 0.000001 | null | null | 2,023 | 5 | 30 |
NousResearch/Yarn-Mistral-7b-128k
| 15,415 | 572 |
['transformers', 'pytorch', 'mistral', 'text-generation', 'custom_code', 'en', 'dataset:emozilla/yarn-train-tokenized-16k-mistral', 'arxiv:2309.00071', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-10-31T13:15:14 |
['MistralForCausalLM']
|
mistral
| 32 | 32,000 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 31 |
Qwen/QVQ-72B-Preview
| 172,407 | 563 |
['transformers', 'safetensors', 'qwen2_vl', 'image-text-to-text', 'chat', 'conversational', 'en', 'arxiv:2409.12191', 'base_model:Qwen/Qwen2-VL-72B', 'base_model:finetune:Qwen/Qwen2-VL-72B', 'license:other', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-12-24T10:13:22 |
['Qwen2VLForConditionalGeneration']
|
qwen2_vl
| 64 | 152,064 | null | 8,192 | 29,568 | 80 |
silu
| null | 128,000 | null | 0.000001 | null | null | 2,024 | 12 | 24 |
Qwen/Qwen2.5-7B-Instruct
| 1,539,863 | 561 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-7B', 'base_model:finetune:Qwen/Qwen2.5-7B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-09-16T11:55:40 |
['Qwen2ForCausalLM']
|
qwen2
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 16 |
microsoft/Phi-3.5-MoE-instruct
| 42,190 | 556 |
['transformers', 'safetensors', 'phimoe', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'arxiv:2404.14219', 'arxiv:2407.13833', 'arxiv:2403.06412', 'license:mit', 'autotrain_compatible', 'region:us']
| 2024-08-17T21:34:19 |
['PhiMoEForCausalLM']
|
phimoe
| 32 | 32,064 | null | 4,096 | 6,400 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 8 | 17 |
EleutherAI/gpt-neox-20b
| 19,438 | 555 |
['transformers', 'pytorch', 'safetensors', 'gpt_neox', 'text-generation', 'causal-lm', 'en', 'dataset:EleutherAI/pile', 'arxiv:2204.06745', 'arxiv:2101.00027', 'arxiv:2201.07311', 'arxiv:2104.09864', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-04-07T20:28:29 |
['GPTNeoXForCausalLM']
|
gpt_neox
| 64 | 50,432 | null | 6,144 | 24,576 | 44 |
gelu_fast
| 0.00001 | 2,048 | null | null | 0 | 0 | 2,022 | 4 | 7 |
nvidia/Llama3-ChatQA-1.5-8B
| 6,334 | 555 |
['transformers', 'safetensors', 'llama', 'text-generation', 'nvidia', 'chatqa-1.5', 'chatqa', 'llama-3', 'pytorch', 'conversational', 'en', 'arxiv:2401.10225', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-28T21:32:50 |
['LlamaForCausalLM']
|
llama
| 32 | 128,256 | null | 4,096 | 14,336 | 32 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 28 |
deepseek-ai/DeepSeek-R1-Distill-Qwen-7B
| 1,253,806 | 550 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:16:14 |
['Qwen2ForCausalLM']
|
qwen2
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 131,072 | null | 0.000001 | null | null | 2,025 | 1 | 20 |
togethercomputer/LLaMA-2-7B-32K
| 21,533 | 538 |
['transformers', 'pytorch', 'llama', 'text-generation', 'en', 'dataset:togethercomputer/RedPajama-Data-1T', 'dataset:togethercomputer/RedPajama-Data-Instruct', 'dataset:EleutherAI/pile', 'dataset:togethercomputer/Long-Data-Collections', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-07-26T02:19:41 |
['LlamaForCausalLM']
|
llama
| 32 | 32,000 | 0 | 4,096 | 11,008 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 7 | 26 |
TheBloke/Mistral-7B-Instruct-v0.1-GGUF
| 193,280 | 536 |
['transformers', 'gguf', 'mistral', 'finetuned', 'text-generation', 'base_model:mistralai/Mistral-7B-Instruct-v0.1', 'base_model:quantized:mistralai/Mistral-7B-Instruct-v0.1', 'license:apache-2.0', 'region:us']
| 2023-09-27T17:49:54 | null |
mistral
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 9 | 27 |
mosaicml/mpt-7b-chat
| 90,975 | 513 |
['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'custom_code', 'dataset:jeffwan/sharegpt_vicuna', 'dataset:Hello-SimpleAI/HC3', 'dataset:tatsu-lab/alpaca', 'dataset:Anthropic/hh-rlhf', 'dataset:victor123/evol_instruct_70k', 'arxiv:2205.14135', 'arxiv:2108.12409', 'arxiv:2010.04245', 'license:cc-by-nc-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-04T23:56:17 |
['MPTForCausalLM']
|
mpt
| null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 5 | 4 |
bigscience/bloomz
| 3,398 | 511 |
['transformers', 'pytorch', 'tensorboard', 'safetensors', 'bloom', 'text-generation', 'ak', 'ar', 'as', 'bm', 'bn', 'ca', 'code', 'en', 'es', 'eu', 'fon', 'fr', 'gu', 'hi', 'id', 'ig', 'ki', 'kn', 'lg', 'ln', 'ml', 'mr', 'ne', 'nso', 'ny', 'or', 'pa', 'pt', 'rn', 'rw', 'sn', 'st', 'sw', 'ta', 'te', 'tn', 'ts', 'tum', 'tw', 'ur', 'vi', 'wo', 'xh', 'yo', 'zh', 'zu', 'dataset:bigscience/xP3', 'arxiv:2211.01786', 'license:bigscience-bloom-rail-1.0', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2022-09-17T10:48:39 |
['BloomForCausalLM']
|
bloom
| 112 | 250,880 | 3 | null | null | null | null | null | null | null | null | null | null | 2,022 | 9 | 17 |
intfloat/e5-mistral-7b-instruct
| 176,181 | 500 |
['sentence-transformers', 'pytorch', 'safetensors', 'mistral', 'feature-extraction', 'mteb', 'transformers', 'en', 'arxiv:2401.00368', 'arxiv:2104.08663', 'arxiv:2210.07316', 'license:mit', 'model-index', 'autotrain_compatible', 'text-generation-inference', 'text-embeddings-inference', 'endpoints_compatible', 'region:us']
| 2023-12-20T10:17:02 |
['MistralModel']
|
mistral
| 32 | 32,000 | 2 | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 12 | 20 |
distilbert/distilgpt2
| 2,362,144 | 499 |
['transformers', 'pytorch', 'tf', 'jax', 'tflite', 'rust', 'coreml', 'safetensors', 'gpt2', 'text-generation', 'exbert', 'en', 'dataset:openwebtext', 'arxiv:1910.01108', 'arxiv:2201.08542', 'arxiv:2203.12574', 'arxiv:1910.09700', 'arxiv:1503.02531', 'license:apache-2.0', 'model-index', 'co2_eq_emissions', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-03-02T23:29:04 |
['GPT2LMHeadModel']
|
gpt2
| null | 50,257 | null | null | null | null | null | null | null |
gelu_new
| null | null | null | 2,022 | 3 | 2 |
NousResearch/Hermes-2-Pro-Mistral-7B
| 12,579 | 490 |
['transformers', 'safetensors', 'mistral', 'text-generation', 'Mistral', 'instruct', 'finetune', 'chatml', 'DPO', 'RLHF', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'conversational', 'en', 'dataset:teknium/OpenHermes-2.5', 'base_model:mistralai/Mistral-7B-v0.1', 'base_model:finetune:mistralai/Mistral-7B-v0.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-03-11T12:55:27 |
['MistralForCausalLM']
|
mistral
| 32 | 32,032 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,024 | 3 | 11 |
chavinlo/gpt4-x-alpaca
| 2,151 | 485 |
['transformers', 'pytorch', 'llama', 'text-generation', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-03-31T13:03:46 |
['LlamaForCausalLM']
|
llama
| 40 | 32,001 | null | 5,120 | 13,824 | 40 |
silu
| null | null | null | 0.000001 | null | null | 2,023 | 3 | 31 |
EleutherAI/gpt-neo-2.7B
| 87,569 | 477 |
['transformers', 'pytorch', 'jax', 'rust', 'safetensors', 'gpt_neo', 'text-generation', 'text generation', 'causal-lm', 'en', 'dataset:EleutherAI/pile', 'arxiv:2101.00027', 'license:mit', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
| 2022-03-02T23:29:04 |
['GPTNeoForCausalLM']
|
gpt_neo
| null | 50,257 | null | 2,560 | null | null | null | null | 2,048 |
gelu_new
| null | null | null | 2,022 | 3 | 2 |
mosaicml/mpt-7b-instruct
| 9,929 | 469 |
['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'custom_code', 'dataset:mosaicml/dolly_hhrlhf', 'arxiv:2205.14135', 'arxiv:2108.12409', 'arxiv:2010.04245', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-05T00:52:12 |
['MPTForCausalLM']
|
mpt
| null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 5 | 5 |
deepseek-ai/DeepSeek-R1-Distill-Qwen-14B
| 677,313 | 469 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'conversational', 'arxiv:2501.12948', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-20T09:18:27 |
['Qwen2ForCausalLM']
|
qwen2
| 40 | 152,064 | null | 5,120 | 13,824 | 48 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 1 | 20 |
TheBloke/Llama-2-7B-Chat-GGUF
| 77,189 | 458 |
['transformers', 'gguf', 'llama', 'facebook', 'meta', 'pytorch', 'llama-2', 'text-generation', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-7b-chat-hf', 'base_model:quantized:meta-llama/Llama-2-7b-chat-hf', 'license:llama2', 'region:us']
| 2023-09-04T16:38:41 | null |
llama
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 9 | 4 |
openlm-research/open_llama_13b
| 3,572 | 455 |
['transformers', 'pytorch', 'llama', 'text-generation', 'dataset:togethercomputer/RedPajama-Data-1T', 'arxiv:2302.13971', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-06-15T10:51:45 |
['LlamaForCausalLM']
|
llama
| 40 | 32,000 | 0 | 5,120 | 13,824 | 40 |
silu
| null | 2,048 | null | 0.000001 | null | null | 2,023 | 6 | 15 |
cognitivecomputations/dolphin-2.9-llama3-8b
| 2,712 | 444 |
['transformers', 'safetensors', 'llama', 'text-generation', 'generated_from_trainer', 'axolotl', 'conversational', 'dataset:cognitivecomputations/Dolphin-2.9', 'dataset:teknium/OpenHermes-2.5', 'dataset:m-a-p/CodeFeedback-Filtered-Instruction', 'dataset:cognitivecomputations/dolphin-coder', 'dataset:cognitivecomputations/samantha-data', 'dataset:HuggingFaceH4/ultrachat_200k', 'dataset:microsoft/orca-math-word-problems-200k', 'dataset:abacusai/SystemChat-1.1', 'dataset:Locutusque/function-calling-chatml', 'dataset:internlm/Agent-FLAN', 'base_model:meta-llama/Meta-Llama-3-8B', 'base_model:finetune:meta-llama/Meta-Llama-3-8B', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-20T23:14:52 |
['LlamaForCausalLM']
|
llama
| 32 | 128,258 | null | 4,096 | 14,336 | 32 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 20 |
Qwen/Qwen2.5-Coder-7B-Instruct
| 174,917 | 441 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'code', 'codeqwen', 'chat', 'qwen', 'qwen-coder', 'conversational', 'en', 'arxiv:2409.12186', 'arxiv:2309.00071', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-Coder-7B', 'base_model:finetune:Qwen/Qwen2.5-Coder-7B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-09-17T13:38:49 |
['Qwen2ForCausalLM']
|
qwen2
| 28 | 152,064 | null | 3,584 | 18,944 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 17 |
AWS/MistralLite
| 2,499 | 430 |
['transformers', 'pytorch', 'mistral', 'text-generation', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-10-16T00:57:56 |
['MistralForCausalLM']
|
mistral
| 32 | 32,003 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,023 | 10 | 16 |
NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO
| 8,323 | 427 |
['transformers', 'safetensors', 'mixtral', 'text-generation', 'Mixtral', 'instruct', 'finetune', 'chatml', 'DPO', 'RLHF', 'gpt4', 'synthetic data', 'distillation', 'conversational', 'en', 'dataset:teknium/OpenHermes-2.5', 'base_model:mistralai/Mixtral-8x7B-v0.1', 'base_model:finetune:mistralai/Mixtral-8x7B-v0.1', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-01-11T06:14:47 |
['MixtralForCausalLM']
|
mixtral
| 32 | 32,002 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,024 | 1 | 11 |
BioMistral/BioMistral-7B
| 11,476 | 426 |
['transformers', 'pytorch', 'tensorboard', 'mistral', 'text-generation', 'medical', 'biology', 'conversational', 'fr', 'en', 'de', 'nl', 'es', 'pt', 'pl', 'ro', 'it', 'arxiv:2402.10373', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-02-14T11:33:32 |
['MistralForCausalLM']
|
mistral
| 32 | 32,000 | null | 4,096 | 14,336 | 32 |
silu
| null | 32,768 | null | 0.00001 | null | null | 2,024 | 2 | 14 |
TheBloke/Mixtral-8x7B-v0.1-GGUF
| 30,791 | 425 |
['transformers', 'gguf', 'mixtral', 'fr', 'it', 'de', 'es', 'en', 'base_model:mistralai/Mixtral-8x7B-v0.1', 'base_model:quantized:mistralai/Mixtral-8x7B-v0.1', 'license:apache-2.0', 'region:us']
| 2023-12-11T13:23:32 | null |
mixtral
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 12 | 11 |
TheBloke/Mistral-7B-Instruct-v0.2-GGUF
| 91,100 | 423 |
['transformers', 'gguf', 'mistral', 'finetuned', 'text-generation', 'arxiv:2310.06825', 'base_model:mistralai/Mistral-7B-Instruct-v0.2', 'base_model:quantized:mistralai/Mistral-7B-Instruct-v0.2', 'license:apache-2.0', 'region:us', 'conversational']
| 2023-12-11T22:18:46 | null |
mistral
| null | null | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 12 | 11 |
NousResearch/Hermes-2-Pro-Llama-3-8B
| 30,559 | 418 |
['transformers', 'safetensors', 'llama', 'text-generation', 'Llama-3', 'instruct', 'finetune', 'chatml', 'DPO', 'RLHF', 'gpt4', 'synthetic data', 'distillation', 'function calling', 'json mode', 'axolotl', 'conversational', 'en', 'dataset:teknium/OpenHermes-2.5', 'base_model:NousResearch/Meta-Llama-3-8B', 'base_model:finetune:NousResearch/Meta-Llama-3-8B', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-30T00:16:18 |
['LlamaForCausalLM']
|
llama
| 32 | 128,288 | null | 4,096 | 14,336 | 32 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 30 |
Qwen/Qwen2-VL-2B-Instruct
| 1,182,625 | 406 |
['transformers', 'safetensors', 'qwen2_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2409.12191', 'arxiv:2308.12966', 'base_model:Qwen/Qwen2-VL-2B', 'base_model:finetune:Qwen/Qwen2-VL-2B', 'license:apache-2.0', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-08-28T09:02:15 |
['Qwen2VLForConditionalGeneration']
|
qwen2_vl
| 12 | 151,936 | null | 1,536 | 8,960 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 8 | 28 |
aaditya/Llama3-OpenBioLLM-70B
| 24,929 | 402 |
['transformers', 'pytorch', 'llama', 'text-generation', 'llama-3', 'Mixtral', 'instruct', 'finetune', 'chatml', 'DPO', 'RLHF', 'gpt4', 'distillation', 'heathcare', 'medical', 'clinical', 'med', 'lifescience', 'Pharmaceutical', 'Pharma', 'conversational', 'en', 'arxiv:2305.18290', 'arxiv:2303.13375', 'arxiv:2212.13138', 'arxiv:2305.09617', 'arxiv:2402.07023', 'base_model:meta-llama/Meta-Llama-3-70B-Instruct', 'base_model:finetune:meta-llama/Meta-Llama-3-70B-Instruct', 'license:llama3', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-04-24T02:21:12 |
['LlamaForCausalLM']
|
llama
| 64 | 128,256 | null | 8,192 | 28,672 | 80 |
silu
| null | 8,192 | null | 0.00001 | null | null | 2,024 | 4 | 24 |
ICTNLP/Llama-3.1-8B-Omni
| 7,016 | 398 |
['llama-omni', 'safetensors', 'omni_speech2s_llama', 'large language models', 'speech-language models', 'speech interaction', 'speech-to-speech', 'en', 'arxiv:2409.06666', 'base_model:meta-llama/Llama-3.1-8B-Instruct', 'base_model:finetune:meta-llama/Llama-3.1-8B-Instruct', 'license:other', 'region:us']
| 2024-09-10T05:23:09 |
['OmniSpeech2SLlamaForCausalLM']
|
omni_speech2s_llama
| 32 | 128,256 | null | 4,096 | 14,336 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 9 | 10 |
georgesung/llama2_7b_chat_uncensored
| 4,007 | 382 |
['transformers', 'pytorch', 'tensorboard', 'safetensors', 'llama', 'text-generation', 'dataset:georgesung/wizard_vicuna_70k_unfiltered', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-07-20T10:45:03 |
['LlamaForCausalLM']
|
llama
| 32 | 32,000 | 0 | 4,096 | 11,008 | 32 |
silu
| null | 2,048 | null | 0.00001 | null | null | 2,023 | 7 | 20 |
microsoft/Phi-3-medium-128k-instruct
| 16,786 | 381 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-05-07T15:27:32 |
['Phi3ForCausalLM']
|
phi3
| 40 | 32,064 | null | 5,120 | 17,920 | 40 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,024 | 5 | 7 |
Qwen/Qwen-7B
| 28,885 | 379 |
['transformers', 'safetensors', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2309.16609', 'license:other', 'autotrain_compatible', 'region:us']
| 2023-08-03T02:51:18 |
['QWenLMHeadModel']
|
qwen
| 32 | 151,936 | null | 4,096 | 22,016 | 32 | null | null | 32,768 | null | null | null | null | 2,023 | 8 | 3 |
Qwen/Qwen2.5-VL-72B-Instruct
| 335,945 | 374 |
['transformers', 'safetensors', 'qwen2_5_vl', 'image-text-to-text', 'multimodal', 'conversational', 'en', 'arxiv:2309.00071', 'arxiv:2409.12191', 'arxiv:2308.12966', 'base_model:Qwen/Qwen2.5-VL-72B-Instruct', 'base_model:finetune:Qwen/Qwen2.5-VL-72B-Instruct', 'license:other', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-01-27T04:12:04 |
['Qwen2_5_VLForConditionalGeneration']
|
qwen2_5_vl
| 64 | 152,064 | null | 8,192 | 29,568 | 80 |
silu
| null | 128,000 | null | 0.000001 | null | null | 2,025 | 1 | 27 |
Qwen/Qwen2-Audio-7B-Instruct
| 150,420 | 370 |
['transformers', 'safetensors', 'qwen2_audio', 'text2text-generation', 'chat', 'audio', 'audio-text-to-text', 'en', 'arxiv:2407.10759', 'arxiv:2311.07919', 'license:apache-2.0', 'autotrain_compatible', 'endpoints_compatible', 'region:us']
| 2024-07-31T09:22:21 |
['Qwen2AudioForConditionalGeneration']
|
qwen2_audio
| null | 156,032 | null | null | null | null | null | null | null | null | null | null | null | 2,024 | 7 | 31 |
Qwen/Qwen2.5-1.5B-Instruct
| 1,083,905 | 367 |
['transformers', 'safetensors', 'qwen2', 'text-generation', 'chat', 'conversational', 'en', 'arxiv:2407.10671', 'base_model:Qwen/Qwen2.5-1.5B', 'base_model:finetune:Qwen/Qwen2.5-1.5B', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2024-09-17T14:10:29 |
['Qwen2ForCausalLM']
|
qwen2
| 12 | 151,936 | null | 1,536 | 8,960 | 28 |
silu
| null | 32,768 | null | 0.000001 | null | null | 2,024 | 9 | 17 |
OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5
| 498,245 | 366 |
['transformers', 'pytorch', 'gpt_neox', 'text-generation', 'sft', 'en', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-04-03T20:06:28 |
['GPTNeoXForCausalLM']
|
gpt_neox
| 40 | 50,288 | null | 5,120 | 20,480 | 36 |
gelu
| 0.00001 | 2,048 | null | null | null | null | 2,023 | 4 | 3 |
sambanovasystems/BLOOMChat-176B-v1
| 2,244 | 365 |
['transformers', 'pytorch', 'bloom', 'text-generation', 'license:other', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-05-10T21:17:39 |
['BloomForCausalLM']
|
bloom
| null | 250,880 | 3 | 14,336 | null | null | null | null | null | null | null | null | null | 2,023 | 5 | 10 |
TheBloke/Llama-2-13B-chat-GPTQ
| 12,448 | 362 |
['transformers', 'safetensors', 'llama', 'text-generation', 'facebook', 'meta', 'pytorch', 'llama-2', 'en', 'arxiv:2307.09288', 'base_model:meta-llama/Llama-2-13b-chat-hf', 'base_model:quantized:meta-llama/Llama-2-13b-chat-hf', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', '4-bit', 'gptq', 'region:us']
| 2023-07-18T18:28:36 |
['LlamaForCausalLM']
|
llama
| 40 | 32,000 | 0 | 5,120 | 13,824 | 40 |
silu
| null | 4,096 | null | 0.00001 | null | null | 2,023 | 7 | 18 |
stabilityai/stablelm-tuned-alpha-7b
| 4,278 | 360 |
['transformers', 'pytorch', 'gpt_neox', 'text-generation', 'causal-lm', 'en', 'dataset:dmayhem93/ChatCombined', 'dataset:tatsu-lab/alpaca', 'dataset:nomic-ai/gpt4all_prompt_generations', 'dataset:Dahoas/full-hh-rlhf', 'dataset:jeffwan/sharegpt_vicuna', 'dataset:HuggingFaceH4/databricks_dolly_15k', 'license:cc-by-nc-sa-4.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-04-19T04:29:58 |
['GPTNeoXForCausalLM']
|
gpt_neox
| 48 | 50,432 | null | 6,144 | 24,576 | 16 |
gelu
| 0.00001 | 4,096 | null | null | null | null | 2,023 | 4 | 19 |
Qwen/Qwen-14B-Chat
| 2,615 | 360 |
['transformers', 'safetensors', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2309.16609', 'arxiv:2305.08322', 'arxiv:2009.03300', 'arxiv:2305.05280', 'arxiv:2210.03629', 'autotrain_compatible', 'region:us']
| 2023-09-24T03:27:58 |
['QWenLMHeadModel']
|
qwen
| 40 | 152,064 | null | 5,120 | 27,392 | 40 | null | null | 8,192 | null | null | null | null | 2,023 | 9 | 24 |
microsoft/Phi-4-mini-instruct
| 197,412 | 360 |
['transformers', 'safetensors', 'phi3', 'text-generation', 'nlp', 'code', 'conversational', 'custom_code', 'multilingual', 'ar', 'zh', 'cs', 'da', 'nl', 'en', 'fi', 'fr', 'de', 'he', 'hu', 'it', 'ja', 'ko', 'no', 'pl', 'pt', 'ru', 'es', 'sv', 'th', 'tr', 'uk', 'arxiv:2503.01743', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2025-02-19T01:00:58 |
['Phi3ForCausalLM']
|
phi3
| 24 | 200,064 | 199,999 | 3,072 | 8,192 | 32 |
silu
| null | 131,072 | null | 0.00001 | null | null | 2,025 | 2 | 19 |
microsoft/DialoGPT-medium
| 320,044 | 359 |
['transformers', 'pytorch', 'tf', 'jax', 'rust', 'gpt2', 'text-generation', 'conversational', 'arxiv:1911.00536', 'license:mit', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-03-02T23:29:05 |
['GPT2LMHeadModel']
|
gpt2
| null | 50,257 | null | null | null | null | null | null | null |
gelu_new
| null | null | null | 2,022 | 3 | 2 |
Qwen/Qwen-VL-Chat
| 39,037 | 359 |
['transformers', 'pytorch', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2308.12966', 'autotrain_compatible', 'region:us']
| 2023-08-20T04:45:22 |
['QWenLMHeadModel']
|
qwen
| 32 | 151,936 | null | 4,096 | 22,016 | 32 | null | null | 8,192 | null | null | null | null | 2,023 | 8 | 20 |
Qwen/Qwen-72B
| 5,072 | 355 |
['transformers', 'safetensors', 'qwen', 'text-generation', 'custom_code', 'zh', 'en', 'arxiv:2309.16609', 'license:other', 'autotrain_compatible', 'region:us']
| 2023-11-26T16:16:31 |
['QWenLMHeadModel']
|
qwen
| 64 | 152,064 | null | 8,192 | 49,152 | 80 | null | null | 32,768 | null | null | null | null | 2,023 | 11 | 26 |
bigscience/bloom-560m
| 184,845 | 351 |
['transformers', 'pytorch', 'jax', 'onnx', 'safetensors', 'bloom', 'text-generation', 'ak', 'ar', 'as', 'bm', 'bn', 'ca', 'code', 'en', 'es', 'eu', 'fon', 'fr', 'gu', 'hi', 'id', 'ig', 'ki', 'kn', 'lg', 'ln', 'ml', 'mr', 'ne', 'nso', 'ny', 'or', 'pa', 'pt', 'rn', 'rw', 'sn', 'st', 'sw', 'ta', 'te', 'tn', 'ts', 'tum', 'tw', 'ur', 'vi', 'wo', 'xh', 'yo', 'zh', 'zhs', 'zht', 'zu', 'arxiv:1909.08053', 'arxiv:2110.02861', 'arxiv:2108.12409', 'license:bigscience-bloom-rail-1.0', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2022-05-19T11:51:24 |
['BloomForCausalLM']
|
bloom
| 16 | 250,880 | 3 | null | null | null | null | null | null | null | null | null | null | 2,022 | 5 | 19 |
codellama/CodeLlama-7b-hf
| 67,904 | 350 |
['transformers', 'pytorch', 'safetensors', 'llama', 'text-generation', 'llama-2', 'code', 'arxiv:2308.12950', 'license:llama2', 'autotrain_compatible', 'text-generation-inference', 'endpoints_compatible', 'region:us']
| 2023-08-24T16:31:11 |
['LlamaForCausalLM']
|
llama
| 32 | 32,016 | null | 4,096 | 11,008 | 32 |
silu
| null | 16,384 | null | 0.00001 | null | null | 2,023 | 8 | 24 |
mosaicml/mpt-30b
| 5,849 | 342 |
['transformers', 'pytorch', 'mpt', 'text-generation', 'Composer', 'MosaicML', 'llm-foundry', 'StreamingDatasets', 'custom_code', 'dataset:allenai/c4', 'dataset:mc4', 'dataset:togethercomputer/RedPajama-Data-1T', 'dataset:bigcode/the-stack-dedup', 'dataset:allenai/s2orc', 'arxiv:2108.12409', 'arxiv:2302.13971', 'arxiv:2205.14135', 'arxiv:2010.04245', 'arxiv:1909.08053', 'arxiv:2302.06675', 'license:apache-2.0', 'autotrain_compatible', 'text-generation-inference', 'region:us']
| 2023-06-20T16:29:39 |
['MPTForCausalLM']
|
mpt
| null | 50,432 | null | null | null | null | null | null | null | null | null | null | null | 2,023 | 6 | 20 |
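To work with rows like those in the preview above, here is a minimal sketch using the `datasets` library. The repository id and split name are placeholders (assumptions), not the dataset's actual identifiers.

```python
from datasets import load_dataset

# Placeholder repo id and split; substitute the real dataset repository id.
ds = load_dataset("your-username/model-configs", split="train")

# Columns mirror the preview: model_id, downloads, likes, tags, created_at,
# architectures, model_type, plus numeric config fields such as hidden_size.
print(ds.column_names)

# Example: the five most-downloaded models that report a hidden_size.
with_hidden = ds.filter(lambda row: row["hidden_size"] is not None)
top = sorted(with_hidden, key=lambda row: row["downloads"], reverse=True)[:5]
for row in top:
    print(row["model_id"], row["downloads"], row["model_type"], row["hidden_size"])
```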