Update README.md
parent 24ad69c3c0
commit 34046b03b7
README.md
@@ -35,18 +35,24 @@ where the model generates the text after "Bob:".
 
 #### Code format:
 
 ```python
+\`\`\`python
 def print_prime(n):
     """
     Print all primes between 1 and n
     """
     primes = []
     for num in range(2, n+1):
-        for i in range(2, num):
+        is_prime = True
+        for i in range(2, int(num**0.5)+1):
             if num % i == 0:
+                is_prime = False
                 break
-        else:
+        if is_prime:
             primes.append(num)
     print(primes)
+
+print_prime(20)
+\`\`\`
 ```
 where the model generates the text after the comments. (Note: This is a legitimate and correct use of the else statement in Python loops.)

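The note above refers to Python's `for ... else` construct, which the pre-change version of the example relies on: the `else` block runs only when the inner loop finishes without hitting `break`. A minimal illustrative sketch of that pattern (not part of the README; both the old and the new versions of `print_prime(20)` print the same list):

```python
# Illustrative sketch of the for/else pattern used by the pre-change example.
# The else branch runs only if the loop completes without a break,
# i.e. no divisor of num was found, so num is prime.
def primes_up_to(n):
    primes = []
    for num in range(2, n + 1):
        for i in range(2, num):
            if num % i == 0:
                break                   # divisor found: num is composite
        else:
            primes.append(num)          # loop never broke: num is prime
    return primes

print(primes_up_to(20))  # [2, 3, 5, 7, 11, 13, 17, 19]
```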
@@ -81,6 +87,26 @@ where the model generates the text after the comments. (Note: This is a legitimate and correct use of the else statement in Python loops.)
 ### License
 The model is licensed under the [Research License](https://huggingface.co/microsoft/phi-1_5/resolve/main/Research%20License.docx).
 
+### Sample Code
+```python
+import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+torch.set_default_device('cuda')
+model = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")
+tokenizer = AutoTokenizer.from_pretrained("microsoft/phi-1_5", trust_remote_code=True, torch_dtype="auto")
+inputs = tokenizer('''```python
+def print_prime(n):
+    """
+    Print all primes between 1 and n
+    """''', return_tensors="pt", return_attention_mask=False)
+
+eos_token_id = tokenizer.encode("```")[0]
+outputs = model.generate(**inputs, max_length=500)
+text = tokenizer.batch_decode(outputs)[0]
+print(text)
+```
+
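One detail in the added sample: `eos_token_id` is computed from the string "```" (the closing code fence) but never passed to `generate`, so decoding always runs to `max_length`. A hedged sketch of how that stop condition could be wired in, assuming the `model`, `tokenizer`, and `inputs` objects created above (`eos_token_id` and `pad_token_id` are standard `transformers` generation arguments):

```python
# Sketch only: stop generation at the closing code fence instead of always
# running to max_length. Assumes model, tokenizer and inputs from the sample.
fence_token_id = tokenizer.encode("```")[0]   # first token of the fence marker

outputs = model.generate(
    **inputs,
    max_length=500,                # hard upper bound, as in the sample
    eos_token_id=fence_token_id,   # treat the fence token as end-of-sequence
    pad_token_id=fence_token_id,   # silence the missing-pad-token warning
)
print(tokenizer.batch_decode(outputs)[0])
```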
 ### Citation
 ```bib
 @article{textbooks2,