Update monkey.py
@@ -38,7 +38,7 @@ def _get_args():
     return args
 
 OCRBench_score = {"Regular Text Recognition":0,"Irregular Text Recognition":0,"Artistic Text Recognition":0,"Handwriting Recognition":0,
-"Digit String Recognition":0,"Non-Semantic Text Recognition":0,"Scene Text-centric VQA":0,"Doc-oriented VQA":0,"Doc-oriented VQA":0,
+"Digit String Recognition":0,"Non-Semantic Text Recognition":0,"Scene Text-centric VQA":0,"Doc-oriented VQA":0,
 "Key Information Extraction":0,"Handwritten Mathematical Expression Recognition":0}
 AllDataset_score = {"IIIT5K":0,"svt":0,"IC13_857":0,"IC15_1811":0,"svtp":0,"ct80":0,"cocotext":0,"ctw":0,"totaltext":0,"HOST":0,"WOST":0,"WordArt":0,"IAM":0,"ReCTS":0,"ORAND":0,"NonSemanticText":0,"SemanticText":0,
 "STVQA":0,"textVQA":0,"ocrVQA":0,"ESTVQA":0,"ESTVQA_cn":0,"docVQA":0,"infographicVQA":0,"ChartQA":0,"ChartQA_Human":0,"FUNSD":0,"SROIE":0,"POIE":0,"HME100k":0}
@@ -48,7 +48,7 @@ num_all = {"IIIT5K":0,"svt":0,"IC13_857":0,"IC15_1811":0,"svtp":0,"ct80":0,"coco
 def eval_worker(args, data, eval_id, output_queue):
     print(f"Process {eval_id} start.")
     checkpoint = args.model_path
-    model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map='cuda', trust_remote_code=True).eval()
+    model = AutoModelForCausalLM.from_pretrained(checkpoint, device_map=f'cuda:{eval_id}', trust_remote_code=True).eval()
     tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)
     tokenizer.padding_side = 'left'
     tokenizer.pad_token_id = tokenizer.eod_id
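
For context, a minimal sketch of how eval_worker might be driven, assuming the script shards the dataset across one process per GPU; the launcher itself is not part of this hunk, and run_eval, num_gpus, and the sharding scheme are illustrative assumptions rather than code from the commit.

import multiprocessing as mp

def run_eval(args, data, num_gpus):
    # Hypothetical launcher: shard the data and start one eval_worker per GPU,
    # so the device_map=f'cuda:{eval_id}' change above pins each model copy
    # to its own device instead of placing every worker on the default GPU.
    output_queue = mp.Queue()
    chunk = (len(data) + num_gpus - 1) // num_gpus
    workers = []
    for eval_id in range(num_gpus):
        shard = data[eval_id * chunk:(eval_id + 1) * chunk]
        p = mp.Process(target=eval_worker, args=(args, shard, eval_id, output_queue))
        p.start()
        workers.append(p)
    # Drain one result per worker before joining, so a full queue cannot block the children.
    results = [output_queue.get() for _ in workers]
    for p in workers:
        p.join()
    return results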