Update functions.py
functions.py (2 additions, 5 deletions)
@@ -507,15 +507,12 @@ def generate_eval(raw_text, N, chunk):
     n = len(raw_text)
     starting_indices = [random.randint(0, n-chunk) for _ in range(N)]
     sub_sequences = [raw_text[i:i+chunk] for i in starting_indices]
-    chain = QAGenerationChain.from_llm(ChatOpenAI(temperature=0
+    chain = QAGenerationChain.from_llm(ChatOpenAI(temperature=0))
     eval_set = []
-
-    print(f'subs_sequence: {sub_sequences}')
 
     for i, b in enumerate(sub_sequences):
         try:
-
-            qa = chain.run(str(b))
+            qa = chain.run(b)
             eval_set.append(qa)
             ques_update.info(f"Creating Question: {i+1}")
 
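The retained "+" lines close the unbalanced parenthesis in the ChatOpenAI(...) call, drop the debug print of sub_sequences, and pass each sampled chunk to chain.run() without the redundant str() cast. For context, a minimal standalone sketch of that flow follows; the imports, the input file name, and the printed status line are illustrative assumptions (the app's ques_update widget and the except clause sit outside this hunk), and it assumes an OPENAI_API_KEY is available in the environment.

import random

from langchain.chains import QAGenerationChain
from langchain.chat_models import ChatOpenAI

# Illustrative inputs; in functions.py these arrive as the arguments of generate_eval().
raw_text = open("example.txt").read()
N, chunk = 3, 512

# Sample N random chunks of `chunk` characters from the raw text.
n = len(raw_text)
starting_indices = [random.randint(0, n - chunk) for _ in range(N)]
sub_sequences = [raw_text[i:i + chunk] for i in starting_indices]

# temperature=0 keeps question generation as deterministic as possible; the
# closing parentheses here are what the removed line was missing.
chain = QAGenerationChain.from_llm(ChatOpenAI(temperature=0))

eval_set = []
for i, b in enumerate(sub_sequences):
    qa = chain.run(b)  # run() takes the text chunk directly; the str() wrapper was unnecessary
    eval_set.append(qa)
    print(f"Creating Question: {i + 1}")  # stand-in for the app's ques_update.info(...)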