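"""Gradio app for renaming a model on the Open LLM Leaderboard.

Given an old and a new model id, it builds pull requests against the
open-llm-leaderboard/requests and open-llm-leaderboard/results datasets that
move every file of the old model to the new name, then cross-links the two
pull requests in their descriptions.
"""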
from huggingface_hub import (HfApi, HfFileSystem, CommitOperationAdd, CommitOperationDelete,
                             create_commit, get_discussion_details, edit_discussion_comment, login)
import os
import json
import gradio as gr
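
# Hub clients: fs is used to list and read leaderboard files (api is created but unused below)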
fs = HfFileSystem()
api = HfApi()
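
# Replace the old model id with the new one in a path or other string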
def remove_from(text, from_model, to_model):
    text = text.replace(from_model, to_model)
    return text
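
# Build commit operations for the requests dataset: update the "model" field of every
# request file of the old model, re-add it under the new path and delete the old file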
def return_operation_requests(from_model, to_model):
    ls = [i['name'] for i in fs.ls(path=f'datasets/open-llm-leaderboard/requests/{from_model.split("/")[0]}') if from_model in i['name']]
    liste = []
    for path_for in ls:
        will_write = json.loads(fs.read_text(path_for))
        will_write['model'] = to_model
        will_write = json.dumps(will_write, indent=2)
        liste.extend([CommitOperationAdd(path_in_repo="/".join(remove_from(path_for, from_model, to_model).split("/")[3:]), path_or_fileobj=will_write.encode()),
                      CommitOperationDelete(path_in_repo="/".join(path_for.split("/")[3:]))])
    return liste
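
# Same idea for the results dataset: update the model name fields inside each results
# JSON and move the file under the path of the new model id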
def return_operation_results(from_model, to_model):
    ls = [i['name'] for i in fs.ls(path=f'datasets/open-llm-leaderboard/results/{from_model}') if from_model in i['name']]
    liste = []
    for path_for in ls:
        will_write = json.loads(fs.read_text(path_for))
        will_write['model_name'] = to_model
        will_write['config']['model_args'] = will_write['config']['model_args'].replace(from_model, to_model)
        will_write['model_name_sanitized'] = to_model.replace("/", "__", 1)
        will_write = json.dumps(will_write, indent=2, ensure_ascii=False)
        liste.extend([CommitOperationAdd(path_in_repo="/".join(remove_from(path_for, from_model, to_model).split("/")[3:]), path_or_fileobj=will_write.encode()),
                      CommitOperationDelete(path_in_repo="/".join(path_for.split("/")[3:]))])
    return liste
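
# Map a model id like "org/name" to its leaderboard details dataset path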
def model_name_to_details(model_name):
return f"datasets/open-llm-leaderboard/{model_name.split('/')[0]}__{model_name.split('/')[1]}-details"
def return_operation_details(from_model, to_model):
    ls = [i['name'] for i in fs.ls(path=model_name_to_details(from_model)) if ("results" in i['name'] and ".json" in i['name'])]
    liste = []
    for path_for in ls:
        will_write = json.loads(fs.read_text(path_for))
        will_write['config_general']['model_name'] = to_model
        will_write = json.dumps(will_write, indent=2)
        readme_file = fs.read_text("/".join(path_for.split("/")[:3]) + "/README.md").replace(from_model, to_model).replace(model_name_to_details(from_model).split('/')[2], model_name_to_details(to_model).split('/')[2])
        liste.extend([CommitOperationAdd(path_in_repo="/".join(path_for.split("/")[3:]), path_or_fileobj=will_write.encode()),
                      CommitOperationAdd(path_in_repo="README.md", path_or_fileobj=readme_file.encode())])
    return liste
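
# Open one pull request per leaderboard dataset with the rename operations and
# cross-link the two pull requests in their first comment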
def commit(liste_requests, liste_results, from_model, to_model):
    common_for_commits = {"commit_message": f"Renaming Model {from_model} to {to_model}", "repo_type": "dataset", "create_pr": True}
    request_commit = create_commit(repo_id="open-llm-leaderboard/requests", operations=liste_requests, **common_for_commits)
    result_commit = create_commit(repo_id="open-llm-leaderboard/results", operations=liste_results, **common_for_commits)
    all_commits = [request_commit, result_commit]
    all_repo_ids = ["open-llm-leaderboard/requests", "open-llm-leaderboard/results"]
    # Edit each PR description so it links to the other related pull request
    content = f"""This is a pull request aiming to rename the model {from_model} to {to_model}. All related pull requests to rename this model can be found below.
# Requests
{request_commit.pr_url}
# Results
{result_commit.pr_url}
"""
    for repo_id, pr in zip(all_repo_ids, all_commits):
        common_for_edits = {"repo_id": repo_id, "discussion_num": pr.pr_num, "repo_type": "dataset"}
        comment_id = get_discussion_details(**common_for_edits).events[0].id
        edit_discussion_comment(**common_for_edits, comment_id=comment_id, new_content=content)
    return f"{request_commit.pr_url}\n{result_commit.pr_url}"
def commit_gradio(from_model, to_model, hf_token):
    try:
        login(hf_token)
        return commit(return_operation_requests(from_model, to_model), return_operation_results(from_model, to_model), from_model, to_model)
    except Exception as e:
        return str(e)
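
# Inputs: old model id, new model id, Hugging Face write token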
demo = gr.Interface(fn=commit_gradio, inputs=["text", "text", "text"], outputs="text")
demo.launch(debug=True)