-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathInference_ChatGPT.py
62 lines (51 loc) · 1.79 KB
/
Inference_ChatGPT.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
import os
import pandas as pd
import torch
import torch.nn as nn
from datasets import load_dataset
from tqdm import tqdm
import time
from timeout_decorator import timeout
import openai
openai.api_key = "Your OpenAI API"
@timeout(15)
def chatGPT(prompt):
    """Send *prompt* as a single system message to gpt-3.5-turbo and return the raw response.

    The @timeout(15) decorator aborts the call if the API takes longer than
    15 seconds. temperature=0 makes the output deterministic; max_tokens=5
    keeps the reply to the expected one-word label.
    """
    request_messages = [{"role": "system", "content": prompt}]
    return openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=request_messages,
        temperature=0,
        max_tokens=5,
    )
###########################################################
# Batch-annotate every row of a spreadsheet with ChatGPT and write the
# predicted labels back into the same file as a new "cgpt" column.
f = 'Enter file path from respective */ChatGPT/ folder'
df = pd.read_excel(f)
"""The below code should be commented only when reproducing the results
This was written to take the dataset (prompt injected) from respective files of open-source models
So this drops the aditional outputs of open-source models and keep only the text, gold labels & ChatGPT's output"""
# df = df[df.columns[:8]] # choose this based on the dataset
# df = df.sample(n=500, random_state=42).reset_index(drop=True)
dft = df.text
anno = []
for i in tqdm(range(df.shape[0])):
    # if file is in */ChatGPT/ folder (prompt in English language), use instead:
    # prompt = dft.iloc[i] + " Answer in one word only."
    # if file is in ML/ChatGPT_SL/ folder (prompt in same language):
    prompt = dft.iloc[i]
    # Up to three attempts per row: two guarded attempts with a 30 s pause
    # after an API error/timeout, then a final unguarded call so a
    # persistent failure surfaces instead of being silently swallowed.
    response = None
    for _attempt in range(2):
        try:
            response = chatGPT(prompt)
            break
        except Exception:  # narrow from bare except: don't trap KeyboardInterrupt
            print('Sleep')
            time.sleep(30)
            print('& here we go...')
    if response is None:
        response = chatGPT(prompt)
    # Keep only the model's text, lower-cased, as the predicted label.
    anno += [response["choices"][0]["message"]["content"].lower()]
df["cgpt"] = anno
df.to_excel(f, index=False)