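"""Generate model responses for EMMA benchmark problems with a local Qwen
model served through vLLM.

Results are written to a JSON file keyed by problem id, checkpointed every
`save_every` samples, and runs are resumable: problems with verified
responses are skipped unless --rerun is given.

Example usage (script name and paths are illustrative):

    python generate_response.py --subject Math Physics --strategy CoT

    # Data-parallel across 8 GPUs via torchrun, one model replica per rank:
    torchrun --nproc_per_node=8 generate_response.py --ngpu 1
"""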
import argparse
import json
import logging
import os

from tqdm import tqdm
from datasets import load_dataset, concatenate_datasets

from models.qwen import Qwen_vllm_Model
from data_utils import load_yaml, verify_response, build_query


def do_generate(dataset_name, model_path, output_path,
                subject=['Math', 'Physics', 'Chemistry', 'Coding'],
                split='test',
                config_path='/user/konglingyu/VLMEvalKit/EMMA/configs/gpt.yaml',
                strategy='TrainCoT', save_every=20, rerun=False,
                greedy=0, max_tokens=4096, ngpu=1,
                logger=logging.getLogger(__name__), seed=42):
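    # Each subject is a separate dataset config; load them individually and
    # concatenate into a single evaluation set.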
logger.info(f"Loading dataset {dataset_name}, subject: {subject}") |
|
|
sub_dataset_list = [] |
|
|
for subj in subject: |
|
|
sub_dataset = load_dataset(dataset_name, subj, split=split) |
|
|
sub_dataset_list.append(sub_dataset) |
|
|
dataset = concatenate_datasets(sub_dataset_list) |
|
|
|
|
|
|
|
|
logger.info(f"Loading config") |
|
|
config = load_yaml(config_path) |
|
|
|
|
|
|
|
|
|
|
|
logger.info(f"Loading local model {model_path}") |
|
|
device = 0 |
|
|
world_size = 1 |
|
|
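    # When launched via torchrun, LOCAL_RANK/WORLD_SIZE identify this process.
    # The torchelastic variables are removed after being read so that libraries
    # initialized later (e.g. vLLM) do not try to join the torch.distributed
    # process group themselves.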
    try:
        device = int(os.environ["LOCAL_RANK"])
        world_size = int(os.environ["WORLD_SIZE"])
        dist_keys = [
            "RANK",
            "LOCAL_RANK",
            "WORLD_SIZE",
            "LOCAL_WORLD_SIZE",
            "GROUP_RANK",
            "ROLE_RANK",
            "ROLE_NAME",
            "OMP_NUM_THREADS",
            "MASTER_ADDR",
            "MASTER_PORT",
            "TORCHELASTIC_USE_AGENT_STORE",
            "TORCHELASTIC_MAX_RESTARTS",
            "TORCHELASTIC_RUN_ID",
            "TORCH_NCCL_ASYNC_ERROR_HANDLING",
            "TORCHELASTIC_ERROR_FILE",
        ]
        for dist_key in dist_keys:
            # pop() tolerates keys the launcher did not set.
            os.environ.pop(dist_key, None)
    except KeyError:
        # Not launched via torchrun: keep the single-process defaults.
        pass

    if world_size > 1:
        # Data parallelism only: each rank must drive exactly one GPU.
        assert ngpu == 1

    model = Qwen_vllm_Model(model_path, greedy=greedy, max_tokens=max_tokens,
                            parallel=ngpu, seed=seed, device=device)
    logger.info("Model loaded!")

    if world_size > 1:
        logger.info(f"Using distributed mode with {world_size} GPUs, device {device}")
        # Each rank writes its own shard, e.g. test-full_0.json, test-full_1.json, ...
        output_path = output_path.replace('.json', f'_{device}.json')
    else:
        logger.info("Using single GPU mode")
    logger.info(f"Output path: {output_path}")

    if os.path.exists(output_path):
        logger.info("Results file already exists.")
        logger.info(f"Reading {output_path}")
        with open(output_path, 'r') as f:
            results = json.load(f)
    else:
        results = {}

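    # Resume support: skip problems whose saved response already passes
    # verify_response(), unless rerun forces regeneration.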
    skip_pids = []
    if not rerun and results:
        for pid, data in results.items():
            if 'response' in data and verify_response(data['response']):
                skip_pids.append(pid)

    if len(skip_pids) > 0:
        logger.info(
            f"Found existing results file with {len(skip_pids)} problems with valid responses. "
            f"Skipping these problems...")

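    # Main generation loop. Work is sharded round-robin across ranks:
    # rank `device` handles the samples with idx % world_size == device.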
logger.info(f"Starting to generate.....") |
|
|
for idx, sample in enumerate(tqdm(dataset)): |
|
|
pid = sample['pid'] |
|
|
if skip_pids and pid in skip_pids: |
|
|
continue |
|
|
if idx % world_size != device: |
|
|
continue |
|
|
sample = build_query(sample, config, strategy) |
|
|
problem: dict = sample.copy() |
|
|
for i in range(1, 6): |
|
|
problem.pop('image_' + str(i)) |
|
|
|
|
|
        try:
            response = model.get_response(sample)
            results[pid] = problem
            results[pid]['response'] = response
        except Exception as e:
            logger.error(f"Error in generating answer for {pid}")
            logger.error(e)
            results[pid] = problem
            results[pid]['error'] = str(e)

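        # Checkpoint partial results: once early (idx == 2) so write failures
        # surface quickly, then every `save_every` samples, and on the last sample.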
        if idx == 2 or (idx % save_every == 0 and idx > 0) or idx == len(dataset) - 1:
            try:
                with open(output_path, 'w') as f:
                    f.write(json.dumps(results, indent=2))
                logger.info(f"Saved results to {output_path}")
            except Exception as e:
                logger.info(f"Error saving {output_path}")
                logger.info(e)

    # Final flush after the loop completes.
    with open(output_path, 'w') as f:
        f.write(json.dumps(results, indent=2))
    logger.info(f"Saved results to {output_path}")

    logger.info("End generation.")


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--dataset_name', type=str, default='/root/LMUData/EMMA-mini')
    parser.add_argument('--subject', nargs='+', type=str, default=['Math', 'Physics', 'Chemistry', 'Coding'])
    parser.add_argument('--split', type=str, default='test')
    parser.add_argument('--strategy', type=str, default='CoT', choices=['CoT', 'Direct', 'TrainCoT'])
    parser.add_argument('--config_path', type=str, default="configs/gpt.yaml")
    parser.add_argument('--output_path', type=str, default='results/test-full.json')
    parser.add_argument('--save_every', type=int, default=20, help='save every n problems')
    parser.add_argument('--rerun', action='store_true', help='rerun the answer generation')

    parser.add_argument('--model_path', type=str, default='/user/konglingyu/ckpts/Qwen2-VL-7B', help="local model path or huggingface model name")
    parser.add_argument('--max_tokens', type=int, default=4096)
    parser.add_argument('--greedy', type=int, default=0)
    parser.add_argument('--ngpu', type=int, default=1)

    args = parser.parse_args()
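    # seed and logger are not exposed on the CLI; do_generate's defaults apply.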
    do_generate(
        dataset_name=args.dataset_name,
        model_path=args.model_path,
        output_path=args.output_path,
        subject=args.subject,
        split=args.split,
        config_path=args.config_path,
        strategy=args.strategy,
        save_every=args.save_every,
        rerun=args.rerun,
        greedy=args.greedy,
        max_tokens=args.max_tokens,
        ngpu=args.ngpu,
    )


if __name__ == "__main__":
    logging.basicConfig(
        level=os.environ.get("LOGLEVEL", "INFO").upper(),
        format="[%(name)s] %(message)s",
        datefmt="[%X]"
    )
    # Silence chatty third-party loggers so the run log stays readable.
    logger_blocklist = [
        "asyncio",
        "azure",
        "azureml",
        "datasets",
        "httpx",
        "httpcore",
        "filelock",
        "fsspec",
        "msal",
        "msrest",
        "openai",
        "PIL",
        "urllib3",
    ]
    for module in logger_blocklist:
        logging.getLogger(module).setLevel(logging.WARNING)
    # The default dataset path lives under /root/LMUData; create a symlink to
    # the shared storage location if it is missing.
    if not os.path.exists("/root/LMUData"):
        os.symlink("/user/konglingyu/LMUData", "/root/LMUData")
    main()