import os
import re
import json

import requests


class Skill:
    def __init__(self) -> None:
        pass

    def set_tokenizer(self, tokenizer):
        self.tokenizer = tokenizer

    def predict(self, question: str, seed=1234, out_seq_length=256, min_gen_length=1,
                sampling_strategy="BaseStrategy", num_beams=2, length_penalty=1,
                no_repeat_ngram_size=3, temperature=0.7, topk=8, topp=0.7):
        url = 'https://pretrain.aminer.cn/api/v2/completions'
        # Wrap the question in a GLM-style prompt; [gMASK] marks where the model
        # should generate the answer.
        prompt = "{}\n答:[gMASK]".format(question)
        payload = json.dumps({
            "apikey": os.environ.get("WINNIE_APIKEY"),
            "apisecret": os.environ.get("WINNIE_APISECRET"),
            "model": "glm",
            "language": "zh-CN",
            "prompt": prompt,
            "temperature": temperature,
            "top_k": topk,
            "top_p": topp,
            "max_tokens": out_seq_length,
            "stop": ["\n"],
            "presence_penalty": 2,
            "frequency_penalty": 2
        })
        headers = {'Content-Type': 'application/json'}
        response = requests.post(url, headers=headers, data=payload).json()
        if "output" in response["result"]:
            # The generated completion follows the last <|startofpiece|> token.
            answer = response["result"]["output"]["raw"].split("<|startofpiece|>")[-1]
            return answer
        return None

    def process(self, input_txt: str, history_list: list, role_card: dict):
        output_text = None
        # Only handle inputs of the form "请问…" that end with punctuation.
        if re.match(r'^请问.+[??.。!!]$', input_txt):
            history_list.append(
                self.tokenizer.encode(input_txt, add_special_tokens=False)
            )
            # Strip the leading "请问" before sending the question to the API.
            output_text = self.predict(input_txt[2:])
            if output_text is not None:
                history_list.append(
                    self.tokenizer.encode(output_text, add_special_tokens=False)
                )
        return output_text, history_list, role_card
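

# Minimal usage sketch, not part of the original module. It assumes a
# HuggingFace-style tokenizer exposing encode(text, add_special_tokens=False)
# and that the WINNIE_APIKEY / WINNIE_APISECRET environment variables are set;
# the tokenizer name and the example question are illustrative only.
if __name__ == "__main__":
    from transformers import AutoTokenizer  # assumed dependency for the demo

    skill = Skill()
    skill.set_tokenizer(AutoTokenizer.from_pretrained("bert-base-chinese"))

    # The input must start with "请问" and end with punctuation to be handled.
    answer, history, card = skill.process("请问北京的天气怎么样?", [], {})
    print(answer)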