base.py

from transformers import AutoModelForCausalLM, AutoTokenizer
from transformers.generation import GenerationConfig


class HFModel:
    def __init__(self, model_path):
        # Load the tokenizer and model from a local path or the Hugging Face Hub.
        self.tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
        self.model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True, device_map='auto', low_cpu_mem_usage=True).eval()
        # Load the model's generation config and disable sampling for deterministic (greedy) decoding.
        self.model.generation_config = GenerationConfig.from_pretrained(model_path, trust_remote_code=True)
        self.model.generation_config.do_sample = False
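
A minimal usage sketch (not part of base.py): it instantiates HFModel and runs greedy generation through the wrapped tokenizer and model. The checkpoint path and prompt are placeholders, not values from the repo.

# Hypothetical usage sketch; 'path/to/your/model' is a placeholder checkpoint path.
hf = HFModel('path/to/your/model')
inputs = hf.tokenizer('Hello, world!', return_tensors='pt').to(hf.model.device)
# do_sample=False in __init__ makes this a deterministic (greedy) decode.
output_ids = hf.model.generate(**inputs)
print(hf.tokenizer.decode(output_ids[0], skip_special_tokens=True))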