Open mjmjmmmjjj opened 10 months ago
# Example: load CodeGeeX2 and generate a bubble-sort function with greedy decoding.
from transformers import AutoTokenizer, AutoModel

# NOTE(review): "改成模型路径" means "change to model path" — replace with your
# local model directory or the HF repo id (e.g. "THUDM/codegeex2-6b").
# Deduplicated into one constant so it only needs to be edited in one place.
MODEL_PATH = "改成模型路径"

# trust_remote_code=True is required because CodeGeeX2 ships custom model code.
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, trust_remote_code=True)
model = AutoModel.from_pretrained(MODEL_PATH, trust_remote_code=True, device='cuda')
model = model.eval()  # inference mode: disables dropout etc.

# CodeGeeX2 expects a "# language: ..." tag followed by the task as a comment.
prompt = "# language: Python\n# write a bubble sort function\n"
inputs = tokenizer.encode(prompt, return_tensors="pt").to(model.device)
# top_k=1 makes sampling pick the single most likely token — i.e. deterministic output.
outputs = model.generate(inputs, max_length=256, top_k=1)
response = tokenizer.decode(outputs[0])
print(response)

可以参考官方模型页面的说明:https://huggingface.co/THUDM/codegeex2-6b
我相信 LangChain 是可以调用这个模型的,能给一个例子吗?