LLukas22 / llm-rs-python

Unofficial python bindings for the rust llm library. 🐍❤️🦀
MIT License
71 stars 4 forks source link

Need help converting this code to Rust #35

Closed andri-jpg closed 10 months ago

andri-jpg commented 10 months ago

I understand that this might be off-topic, but could you please assist in converting this code to Rust using the llm library? I'm currently in the process of learning Rust, and I find the llm library somewhat confusing. The ultimate goal is to turn this code into a shared object for running on Android natively with flutter.

Python code:

from llm_rs import SessionConfig, GenerationConfig, Gpt2

class Chainer:
    """Singleton chat wrapper around a GPT-2 model.

    Keeps exactly one (question, answer) pair of prior context and threads
    it into every new prompt.
    """

    _instance = None

    def __new__(cls):
        # Build the shared instance lazily on first construction; every
        # later Chainer() call returns the same object.
        if cls._instance is None:
            instance = super().__new__(cls)
            instance.init_chainer()
            cls._instance = instance
        return cls._instance

    def init_chainer(self):
        """One-time setup: generation parameters, history, and model load."""
        # Phrases that should cut generation off early.
        self.stop_words = ['<EOL>', '<eol>', '<Eol>', 'pertanyaan :', 'Human', 'human', 'Pertanyaan', '\n']
        # Rolling history; holds at most the single most recent Q/A pair.
        self.previous_qa = []

        self.generation_config = GenerationConfig(
            top_p=0.44,
            top_k=1,
            temperature=0.22,
            max_new_tokens=120,
            repetition_penalty=1.13,
            stop_words=self.stop_words
        )

        # NOTE(review): model path is a bare filename — resolved relative to
        # the process working directory.
        self.model = Gpt2(
            "2midguifSfFt5SbHJsxP.bin",
            session_config=SessionConfig(
                threads=4,
                context_length=1300,
                prefer_mmap=False
            )
        )

    def chain(self, user_input):
        """Generate a reply to *user_input*, prefixing the last exchange.

        Returns the stripped model output and records (user_input, reply)
        as the new one-entry history.
        """
        previous_question, previous_answer = (
            self.previous_qa[-1] if self.previous_qa else ("", "")
        )

        template = f"I can answer health-related questions.\nHello! I am a chatbot that will answer health-related questions. I am a chatbot, not a human.\nYou can ask me questions about health using the text field below.\n\nMy question:\n{previous_question}\n\nYour answer:\n{previous_answer}\n\nMy question: {user_input}.\nYour answer:"

        response = self.model.generate(
            template, generation_config=self.generation_config
        ).text.strip()

        # Remember only the newest exchange (in place, so the list object
        # identity is preserved).
        self.previous_qa.append((user_input, response))
        del self.previous_qa[:-1]

        return response

# Module-level shared instance (Chainer is a singleton anyway).
generator = Chainer()


def generate(text_input):
    """Forward a single user message through the shared Chainer and return the reply."""
    return generator.chain(text_input)

Thank you in advance.