Update README.md
Browse files
README.md
CHANGED
@@ -288,7 +288,6 @@ Here are examples of how to run and sample from the model.
|
|
288 |
|
289 |
## Generic generation
|
290 |
```python
|
291 |
-
import json
|
292 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
293 |
|
294 |
example = """
|
@@ -338,18 +337,23 @@ print("### Prediction")
|
|
338 |
print(tokenizer.decode(out[0][input_len:]))
|
339 |
```
|
340 |
|
341 |
-
## Fill in the middle generation
|
342 |
```python
|
343 |
-
|
344 |
-
def
|
345 |
-
|
346 |
-
|
347 |
-
|
348 |
-
|
349 |
-
|
350 |
-
|
|
|
|
|
|
|
|
|
|
|
351 |
|
352 |
-
encoded_input = tokenizer(
|
353 |
out = model.generate(
|
354 |
**encoded_input,
|
355 |
max_new_tokens=100,
|
|
|
288 |
|
289 |
## Generic generation
|
290 |
```python
|
|
|
291 |
from transformers import AutoTokenizer, AutoModelForCausalLM
|
292 |
|
293 |
example = """
|
|
|
337 |
print(tokenizer.decode(out[0][input_len:]))
|
338 |
```
|
339 |
|
340 |
+
## Fill-in-the-middle generation with additional files as context
|
341 |
```python
|
342 |
+
example = """<filename>utils.py
|
343 |
+
def multiply(x, y):
|
344 |
+
return x * y
|
345 |
+
<filename>config.py
|
346 |
+
DEBUG = True
|
347 |
+
MAX_VALUE = 100
|
348 |
+
<filename>example.py
|
349 |
+
<fim_suffix>
|
350 |
+
|
351 |
+
# Test the function
|
352 |
+
result = calculate_sum(5, 10)
|
353 |
+
print(result)<fim_prefix>def calculate_sum(a, b):
|
354 |
+
<fim_middle>"""
|
355 |
|
356 |
+
encoded_input = tokenizer(example, return_tensors='pt', return_token_type_ids=False)
|
357 |
out = model.generate(
|
358 |
**encoded_input,
|
359 |
max_new_tokens=100,
|