Update README.md
README.md
@@ -23,8 +23,8 @@ This model can be easily loaded using the `AutoModelForCausalLM` functionality.
 For regular causal sampling, simply generate completions given the context:
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
-tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen2-
-model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen2-
+tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen2-3_7B")
+model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen2-3_7B", trust_remote_code=True, revision="main")
 
 text = "def hello_world():"
 input_ids = tokenizer(text, return_tensors="pt").input_ids
@@ -56,8 +56,8 @@ The final snippet looks as follows:
 
 ```python
 from transformers import AutoTokenizer, AutoModelForCausalLM
-tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen2-
-model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen2-
+tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen2-3_7B")
+model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen2-3_7B", trust_remote_code=True, revision="main")
 
 
 def format(prefix, suffix):
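For reference, the updated causal-sampling snippet can be run end to end roughly as follows. This is a sketch assembled from the new lines in the first hunk; the `max_length` value and the decoding step are illustrative additions, not part of the README diff.

```python
from transformers import AutoTokenizer, AutoModelForCausalLM

# Updated checkpoint name from the diff; CodeGen2 ships custom modeling code,
# hence trust_remote_code=True.
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen2-3_7B")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen2-3_7B", trust_remote_code=True, revision="main")

# Causal sampling: complete the given context.
text = "def hello_world():"
input_ids = tokenizer(text, return_tensors="pt").input_ids

# max_length is an illustrative choice, not taken from the diff above.
sample = model.generate(input_ids, max_length=128)
print(tokenizer.decode(sample[0], skip_special_tokens=True))
```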
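The second hunk is cut off at `def format(prefix, suffix):`, which belongs to the infill example. As a hedged sketch only: CodeGen2-style infilling joins the prefix and suffix with sentinel tokens, roughly as below, reusing the tokenizer and model loaded above. The sentinel strings (`<mask_1>`, `<|endoftext|>`, `<sep>`) and the example prefix/suffix are assumptions, not taken from this diff; check the full model card before relying on them.

```python
# ASSUMPTION: sentinel layout for CodeGen2 infill sampling; verify against the model card.
def format(prefix, suffix):
    return prefix + "<mask_1>" + suffix + "<|endoftext|>" + "<sep>" + "<mask_1>"

# Hypothetical prefix/suffix around the span to be infilled.
prefix = "def hello_world():\n    "
suffix = "    return name"

input_ids = tokenizer(format(prefix, suffix), return_tensors="pt").input_ids
sample = model.generate(input_ids, max_length=128)
print(tokenizer.decode(sample[0]))
```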