Upload folder using huggingface_hub
parent a3911b57cd
commit 898999bd25
54 README.md Normal file
@@ -0,0 +1,54 @@
---
language:
- en
- de
- fr
- it
- pt
- hi
- es
- th
library_name: transformers
pipeline_tag: text-generation
tags:
- facebook
- meta
- pytorch
- llama
- llama-3
---

This repository contains an early-access checkpoint of Llama 3.2 1B Instruct.

This repo contains two versions of the model, for use with `transformers` and with the original `llama3` codebase (under the `original` directory).

### Use with transformers

Here is an example of simple usage with `transformers`:

```python
from transformers import pipeline
import torch

model_id = "nltpt/Llama-3.2-1B-Instruct"

# Load in bfloat16 and place the model automatically on available devices.
pipe = pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

messages = [
    {"role": "user", "content": "What is the capital of France?"},
]
# max_new_tokens bounds only the generated tokens; max_length would also
# count the chat-templated prompt and could cut the reply short.
pipe(messages, max_new_tokens=50)
```

### Use with `llama3`

Please follow the instructions in that repository. To download the checkpoints from the Hub, see this example command using `huggingface-cli`:

```bash
huggingface-cli download nltpt/Llama-3.2-1B-Instruct --include "original/*" --local-dir Llama-3.2-1B-Instruct
```
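
The same files can also be fetched programmatically with `huggingface_hub` (the library this folder was uploaded with). A minimal sketch mirroring the CLI command above, using `snapshot_download` with the same `original/*` filter:

```python
from huggingface_hub import snapshot_download

# Download only the original/ checkpoint files into a local directory,
# mirroring the --include "original/*" --local-dir flags of the CLI call.
snapshot_download(
    repo_id="nltpt/Llama-3.2-1B-Instruct",
    allow_patterns="original/*",
    local_dir="Llama-3.2-1B-Instruct",
)
```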
39 config.json Normal file
@@ -0,0 +1,39 @@
{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 8.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.45.0.dev0",
  "use_cache": true,
  "vocab_size": 128256
}
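
Several of these values are interdependent: `head_dim` is `hidden_size / num_attention_heads` (2048 / 32 = 64), and `num_key_value_heads: 8` means grouped-query attention with four query heads sharing each KV head. A minimal sketch that loads this config and checks those relations (repo id assumed from the README):

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("nltpt/Llama-3.2-1B-Instruct")

# head_dim is the hidden size split evenly across the attention heads.
assert config.hidden_size // config.num_attention_heads == config.head_dim == 64

# 32 query heads over 8 KV heads -> grouped-query attention, groups of 4.
print(config.num_attention_heads // config.num_key_value_heads)  # 4
```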
12 generation_config.json Normal file
@@ -0,0 +1,12 @@
{
  "bos_token_id": 128000,
  "do_sample": true,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.45.0.dev0"
}
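
These are the default sampling settings `transformers` attaches to the model: nucleus sampling with `top_p=0.9` at `temperature=0.6`, stopping at any of the three EOS ids. A sketch of how they apply and how a call-site keyword overrides a stored default (model loading as in the README example):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "nltpt/Llama-3.2-1B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

inputs = tokenizer("The capital of France is", return_tensors="pt")
# generation_config.json supplies do_sample/temperature/top_p; any keyword
# passed here overrides the stored default for this call only.
outputs = model.generate(**inputs, max_new_tokens=20, temperature=0.8)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```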
BIN model.safetensors (Stored with Git LFS) Normal file
Binary file not shown.
BIN original/consolidated.00.pth (Stored with Git LFS) Normal file
Binary file not shown.
12 original/params.json Normal file
@@ -0,0 +1,12 @@
{
  "dim": 2048,
  "n_layers": 16,
  "n_heads": 32,
  "n_kv_heads": 8,
  "vocab_size": 128256,
  "ffn_dim_multiplier": 1.5,
  "multiple_of": 256,
  "norm_eps": 1e-05,
  "rope_theta": 500000.0,
  "use_scaled_rope": true
}
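
Unlike config.json, the FFN width is not stated here directly; `ffn_dim_multiplier` and `multiple_of` feed the sizing rule from the reference `llama` codebase (a 4x expansion, scaled by 2/3 for SwiGLU's third projection, times the multiplier, rounded up to a multiple of `multiple_of`). A small sketch of that rule, which reproduces the `intermediate_size` of 8192 in config.json:

```python
def ffn_hidden_dim(dim: int, ffn_dim_multiplier: float, multiple_of: int) -> int:
    # Start from a 4x expansion, scaled by 2/3 because SwiGLU uses three
    # projections instead of two, then apply the multiplier and round up.
    hidden = int(2 * (4 * dim) / 3)
    hidden = int(ffn_dim_multiplier * hidden)
    return multiple_of * ((hidden + multiple_of - 1) // multiple_of)

print(ffn_hidden_dim(2048, 1.5, 256))  # 8192
```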
BIN original/tokenizer.model (Stored with Git LFS) Normal file
Binary file not shown.
4 special_tokens_map.json Normal file
@@ -0,0 +1,4 @@
{
  "bos_token": "<|begin_of_text|>",
  "eos_token": "<|eot_id|>"
}
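
These two tokens frame every prompt: the chat template opens the sequence with `<|begin_of_text|>` and ends each turn with `<|eot_id|>`, which is why token id 128009 (`<|eot_id|>`) appears among the `eos_token_id` values in config.json. A minimal sketch rendering the template (repo id as in the README):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("nltpt/Llama-3.2-1B-Instruct")

messages = [{"role": "user", "content": "What is the capital of France?"}]
# apply_chat_template wraps the turn in the model's header/footer tokens,
# starting with <|begin_of_text|> and closing the turn with <|eot_id|>.
prompt = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=True
)
print(prompt)
```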
410504 tokenizer.json Normal file
File diff suppressed because it is too large
2062 tokenizer_config.json Normal file
File diff suppressed because it is too large