# mp_api_scorer.py (forked from menik1126/DQ-LoRe)
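"""Score in-context example candidates with the OpenAI Completions API.

Each dataloader entry carries a candidate prompt ("ice_prompts_list") and a
question. Worker threads query the API in parallel, record the summed
exponentiated token log-probabilities as the candidate's score, and the
results are then grouped per example, ranked, and summarized with MRR.
"""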
import glob
import json
import logging
import os
import threading
import time

import hydra
import numpy as np
import openai
import torch
import tqdm
from accelerate import Accelerator
from transformers import set_seed

from inferencer import APInferencer
from src.utils.misc import save_json

# Shared result list, appended to by worker threads and guarded by `lock`.
res = []
lock = threading.Lock()
def add_to_list(data):
    # Thread-safe append of a scored entry to the shared result list.
    global res
    with lock:
        res.append(data)


logger = logging.getLogger(__name__)
def process_data(entry):
    metadata = entry.pop("metadata")
    # Build the scoring prompt: candidate in-context examples plus the question.
    prompts = metadata['ice_prompts_list'][0] + '\n' + metadata['question']
    result = None
    # Retry indefinitely on API errors (rate limits, transient failures).
    while result is None:
        try:
            key = os.environ.get('OPENAI_API_KEY', 'your-api-key-here')
            result = openai.Completion.create(
                engine='text-davinci-003',
                prompt=prompts,
                api_key=key,
                max_tokens=2000,
                temperature=1,
                n=1,
                logprobs=1,
            )
        except Exception as e:
            logger.info(f"{str(e)}, retrying.")
            time.sleep(5)
    # Score each completion by the sum of exponentiated token log-probs,
    # i.e. the sum of per-token probabilities.
    logprobs = [np.sum(np.exp(r['logprobs']['token_logprobs'])) for r in result['choices']]
    metadata['score'] = logprobs
    add_to_list(metadata)
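# The call above uses the legacy pre-1.0 `openai` SDK and the retired
# text-davinci-003 engine. Below is a minimal sketch (not used by this
# script) of the same request with the openai>=1.0 client; the helper name
# and the model are assumptions, not part of the original code.
def score_prompt_v1(prompt: str) -> float:
    from openai import OpenAI  # requires openai>=1.0

    client = OpenAI()  # reads OPENAI_API_KEY from the environment
    resp = client.completions.create(
        model="gpt-3.5-turbo-instruct",  # assumed stand-in for davinci-003
        prompt=prompt,
        max_tokens=2000,
        temperature=1,
        logprobs=1,
    )
    token_lps = resp.choices[0].logprobs.token_logprobs
    return float(np.sum(np.exp(token_lps)))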
def process_data_thread(bdata):
    # Score one batch: one thread per entry, all joined before returning.
    ts = []
    for data in bdata:
        t = threading.Thread(target=process_data, args=(data,))
        t.start()
        ts.append(t)
    for t in ts:
        t.join()
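# Alternative (a sketch, not used above): cap concurrent API calls with a
# bounded pool instead of one thread per entry; `max_workers` is an
# illustrative choice.
def process_data_pool(bdata, max_workers=32):
    from concurrent.futures import ThreadPoolExecutor

    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        # process_data appends to the lock-protected global `res`.
        list(pool.map(process_data, bdata))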
class Scorer(APInferencer):
    def forward(self):
        if self.accelerator.is_main_process:
            dataloader = tqdm.tqdm(self.dataloader)
        else:
            dataloader = self.dataloader
        # Accumulate entries and score them in batches of `bs` threads.
        bs = 512
        n = 0
        bdata = []
        for entry in dataloader:
            n += 1
            bdata.append(entry)
            if n == bs:
                process_data_thread(bdata)
                n = 0
                bdata = []
        # Flush any final partial batch.
        if bdata:
            process_data_thread(bdata)
        # Each process writes its own shard; write_results() merges them.
        with open(f"{self.output_file}tmp_{self.accelerator.device}.bin", "w") as f:
            json.dump(res, f)
    def write_results(self):
        data = []
        for i, path in enumerate(glob.glob(f"{self.output_file}tmp_*.bin")):
            with open(path) as f:
                one_device = json.load(f)
            logger.info(f"device: {i}, idx {[e['idx'] for e in one_device][:200]}...")
            data.extend(one_device)
        # Group scored candidates by example uid.
        example_dict = {}
        uid_field = 'idx'
        for entry in data:
            ctxs = {"ctxs": entry.pop('ctxs'), "score": entry.pop("score")}
            if entry[uid_field] not in example_dict:
                entry['ctxs_candidates'] = [ctxs]
                example_dict[entry[uid_field]] = entry
            else:
                example_dict[entry[uid_field]]['ctxs_candidates'].append(ctxs)
        example_list = list(example_dict.values())
        mrr = 0
        num_candidates = len(example_list[0]['ctxs_candidates'])
        for entry in example_list:
            assert len(entry['ctxs_candidates']) == num_candidates, \
                f"{len(entry['ctxs_candidates'])}!={num_candidates}"
            # Rank candidates by ascending score.
            sorted_tuple = sorted(enumerate(entry['ctxs_candidates']), key=lambda x: x[1]['score'])
            entry['ctxs_candidates'] = [i[1]['ctxs'] for i in sorted_tuple]
            entry['ctxs'] = entry['ctxs_candidates'][0]  # set top-ranked candidate as ctxs
            # Reciprocal rank of the candidate that was originally first.
            mrr += 1 / ([i[0] for i in sorted_tuple].index(0) + 1)
        logger.info(f"MRR: {mrr / len(example_list)}")
        save_json(self.output_file, example_list)
        # Clean up per-process shards.
        for path in glob.glob(f"{self.output_file}tmp_*.bin"):
            os.remove(path)
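# Worked example of the MRR bookkeeping above: with three candidates whose
# scores sort into the order [cand2, cand0, cand1], the originally-first
# candidate (index 0) lands at rank 2 and contributes 1/2 to the MRR sum.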
@hydra.main(config_path="configs", config_name="api-scorer")
def main(cfg):
    logger.info(cfg)
    set_seed(43)
    accelerator = Accelerator()
    scorer = Scorer(cfg, accelerator)
    scorer.forward()
    # Merge shards on the main process only, after all processes finish.
    accelerator.wait_for_everyone()
    if accelerator.is_main_process:
        scorer.write_results()


if __name__ == "__main__":
    torch.multiprocessing.set_start_method('spawn')
    main()
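# Example launch (a sketch; the override key and path are illustrative and
# depend on what configs/api-scorer.yaml actually defines):
#
#   accelerate launch mp_api_scorer.py output_file=output/scored.json
#
# The script expects OPENAI_API_KEY to be set in the environment.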