commit 114cf1366c
parent f5dddf4e45
Author: Myhs-phz
Date:   2025-02-26 06:42:28 +00:00


@@ -1,37 +1,59 @@
import argparse
import json
import os

import yaml
from dotenv import load_dotenv

load_dotenv()

RESULTS_STATION_PATH = os.getenv('RESULTS_STATION_PATH')

data_file_map = {
    'ifeval': 'IFEval',
}

data_prefix_map = {}

with open('dataset-index.yml', 'r') as f1:
    data_list = yaml.load(f1, Loader=yaml.FullLoader)

data_searchable_list = [next(iter(i.keys())) for i in data_list]


def parse_args():
    parser = argparse.ArgumentParser(description='connect to results station')
    parser.add_argument('-sp',
                        '--station-path',
                        type=str,
                        default=None,
                        help='if no env path, use this.')
    parser.add_argument('-p',
                        '--my-path',
                        type=str,
                        default=None,
                        help='your operation path.')
    parser.add_argument(
        '-op',
        '--operation',
        type=str,
        default='d',
        help='u: update, d: download, ls: show dataset and model options')
    parser.add_argument('-d',
                        '--dataset',
                        type=str,
                        default='mmlu_pro',
                        help='target dataset name')
    parser.add_argument('-m',
                        '--model',
                        type=str,
                        default='deepseek-v2_5-turbomind',
                        help='target model name')
    # parser.add_argument('-all',
    #                     '--all-transfer',
    #                     action='store_true',
    #                     default=False,
    #                     help='transfer all files under the path')
    args = parser.parse_args()
    return args
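For reference, a typical download invocation of this script might look as follows. The file's name is not shown in this diff, so results_station.py is a placeholder; the resulting args object is equivalent to this Namespace (a minimal sketch):

    # python results_station.py -op d -d ifeval -m deepseek-v2_5-turbomind
    from argparse import Namespace

    args = Namespace(station_path=None,
                     my_path=None,
                     operation='d',
                     dataset='ifeval',
                     model='deepseek-v2_5-turbomind')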
@@ -51,9 +73,9 @@ def load_json_files_by_prefix(prefix, target_path):
    prefix = data_file_map[prefix]
    result_dict = {}
    for filename in os.listdir(target_path):
        if filename.startswith(prefix) and filename.endswith('.json'):
            file_path = os.path.join(target_path, filename)
            with open(file_path, 'r', encoding='utf-8') as file:
                json_data = json.load(file)
                result_dict[os.path.splitext(filename)[0]] = json_data
    return result_dict
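To make the return shape concrete, here is a hedged usage sketch. It assumes the target directory holds files named <prefix>_<n>.json (as the loop in main below implies), that 'ifeval' maps to the IFEval filename prefix via data_file_map, and that load_json_files_by_prefix from this file is in scope:

    import json
    import os
    import tempfile

    with tempfile.TemporaryDirectory() as target_path:
        for j in range(2):
            path = os.path.join(target_path, 'IFEval_' + str(j) + '.json')
            with open(path, 'w', encoding='utf-8') as f:
                json.dump({'example_id': j}, f)
        # -> {'IFEval_0': {'example_id': 0}, 'IFEval_1': {'example_id': 1}}
        print(load_json_files_by_prefix('ifeval', target_path))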
@@ -64,29 +86,37 @@ def main(path, mypath, args):
    if args.dataset not in data_searchable_list:
        raise ValueError('invalid dataset input!')
    update_path = path + args.dataset if path[
        -1] == '/' else path + '/' + args.dataset
    update_filename = args.dataset + '_' + args.model + '.json'
    update_goal = update_path + '/' + update_filename

    # update from your path to result station
    if args.operation == 'u':
        mypath_prediction = (mypath + 'predictions/' +
                             args.model) if mypath[-1] == '/' else (
                                 mypath + '/predictions/' + args.model)
        mypath_result = (mypath + 'results/' +
                         args.model) if mypath[-1] == '/' else (mypath +
                                                                '/results/' +
                                                                args.model)
        if os.path.exists(mypath_prediction) and os.path.exists(mypath_result):
            result_dict = load_json_files_by_prefix(args.dataset,
                                                    mypath_result)
            prediction_list = []
            for i in result_dict.keys():
                prediction_dict = load_json_files_by_prefix(
                    i, mypath_prediction)
                for j in range(len(prediction_dict)):
                    for k in prediction_dict[i + '_' + str(j)].keys():
                        prediction_list.append({
                            'prediction':
                            prediction_dict[i + '_' + str(j)][k],
                            'sub_category':
                            i
                        })
            update_dict = {
                'predictions': prediction_list,
                'results': result_dict,
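The update branch thus assembles one JSON-serializable payload per dataset/model pair; the dict literal is truncated by the hunk boundary, so further keys may exist. A minimal sketch of its shape, with placeholder values assumed:

    update_dict = {
        'predictions': [
            # one entry per key of each <sub_category>_<j>.json prediction file
            {'prediction': '<model output>', 'sub_category': 'IFEval'},
        ],
        'results': {
            # raw result files, keyed by filename stem
            'IFEval': {'<metric>': '<score>'},
        },
    }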
@@ -95,29 +125,33 @@ def main(path, mypath, args):
            if not os.path.exists(update_path):
                os.makedirs(update_path)
            if os.path.exists(update_goal):
                input('This result exists! Press Enter to continue...')
            with open(update_goal, 'w', encoding='utf-8') as f:
                json.dump(update_dict, f, ensure_ascii=False, indent=4)

    # read from result station to your path
    if args.operation == 'd':
        if not os.path.exists(update_goal):
            raise ValueError('This result does not exist!')
        with open(update_goal, 'r', encoding='utf-8') as f:
            results = json.load(f)
        legal_key_set = {'predictions', 'results'}
        if set(results.keys()) == legal_key_set and isinstance(
                results['predictions'], list) and isinstance(
                    results['results'], dict):
            print('Successfully downloaded result from station! '
                  "You've got a dict with format as follows:"
                  "\n content['predictions', 'results']")
        else:
            raise ValueError('illegal format of the result!')
        save_path = args.my_path if args.my_path[
            -1] == '/' else args.my_path + '/'
        save_path += args.dataset + '/'
        if not os.path.exists(save_path):
            os.makedirs(save_path)
        with open(save_path + update_filename, 'w', encoding='utf-8') as f:
            json.dump(results, f, ensure_ascii=False, indent=4)
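With assumed argument values, the download branch validates the payload and then writes it under <my_path>/<dataset>/; a short sketch of the resulting location:

    import os

    my_path = '/home/user/outputs'  # assumed value of -p/--my-path
    dataset = 'ifeval'
    model = 'deepseek-v2_5-turbomind'
    save_file = os.path.join(my_path, dataset, dataset + '_' + model + '.json')
    print(save_file)  # /home/user/outputs/ifeval/ifeval_deepseek-v2_5-turbomind.json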
@@ -126,9 +160,9 @@ if __name__ == '__main__':
    args = parse_args()
    if args.operation == 'ls':
        print('----DATASET LIST----')
        print(data_searchable_list)
        print('----MODEL LIST----')
    else:
        if RESULTS_STATION_PATH is not None:
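The remainder of __main__ is cut off by this hunk; per the -sp help text, the environment variable takes precedence and --station-path is the fallback. A hedged sketch of the .env entry that load_dotenv() would pick up (the path value is assumed):

    import os

    from dotenv import load_dotenv

    # Write an illustrative .env entry (value assumed), then load it the same
    # way the script does at import time.
    with open('.env', 'w', encoding='utf-8') as f:
        f.write('RESULTS_STATION_PATH=/shared/results_station/\n')
    load_dotenv()
    print(os.getenv('RESULTS_STATION_PATH'))  # -> /shared/results_station/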