Undertone0809 / promptulate

🚀Lightweight Large language model automation and Autonomous Language Agents development framework. Build your LLM Agent Application in a pythonic way!
https://promptulate.cn/
Apache License 2.0

Optimize Config #30

Open Undertone0809 opened 1 year ago

Undertone0809 commented 1 year ago

Many instance parameters of prompt.config.Config can be changed to class attributes or class methods. Using class methods gives a cleaner code experience.

class Config(metaclass=Singleton):
    def __init__(self):
        logger.info(f"[pne config] Config initialization")
        self.enable_cache: bool = True
        self._proxy_mode: str = PROXY_MODE[0]
        self._proxies: Optional[dict] = None
        self.openai_chat_api_url = "https://api.openai.com/v1/chat/completions"
        self.openai_completion_api_url = "https://api.openai.com/v1/completions"
        self.openai_proxy_url = "https://chatgpt-api.shn.hk/v1/"  # FREE API
        self.ernie_bot_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
        self.ernie_bot_turbo_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
        self.ernie_bot_token = "https://aip.baidubce.com/oauth/2.0/token"
        self.key_default_retry_times = 5
        """If llm(like OpenAI) unable to obtain data, retry request until the data is obtained."""
        self.enable_stdout_hook = True
        ernie_token_pool: ErnieTokenPool = ErnieTokenPool()
        ernie_token_pool.start(self.get_ernie_api_key, self.get_ernie_api_secret)

        if self.enable_stdout_hook:
            StdOutHook.registry_stdout_hooks()

    def turn_off_stdout_hook(self):
        if self.enable_stdout_hook:
            self.enable_stdout_hook = False
            StdOutHook.unregister_stdout_hooks()

    @property
    def openai_api_key(self):
        """This attribution has deprecated to use. Using `get_openai_api_key`"""
        if "OPENAI_API_KEY" in os.environ.keys():
            if self.enable_cache:
                utils.get_cache()["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
            return os.getenv("OPENAI_API_KEY")
        if self.enable_cache and "OPENAI_API_KEY" in utils.get_cache():
            return utils.get_cache()["OPENAI_API_KEY"]
        raise ValueError("OPENAI API key is not provided. Please set your key.")

    def get_openai_api_key(self, model: str) -> str:
        """Get openai key from KeyPool and environ"""
        if self.enable_cache:
            openai_key_pool: OpenAIKeyPool = OpenAIKeyPool()
            key = openai_key_pool.get(model)
            if key:
                return key
        return self.openai_api_key

    @property
    def get_ernie_api_key(self) -> str:
        if "ERNIE_API_KEY" in os.environ.keys():
            if self.enable_cache:
                utils.get_cache()["ERNIE_API_KEY"] = os.getenv("ERNIE_API_KEY")
            return os.getenv("ERNIE_API_KEY")
        if self.enable_cache and "ERNIE_API_KEY" in utils.get_cache():
            return utils.get_cache()["ERNIE_API_KEY"]
        raise ValueError("ERNIE_API_KEY is not provided. Please set your key.")

    @property
    def get_ernie_api_secret(self) -> str:
        if "ERNIE_API_SECRET" in os.environ.keys():
            if self.enable_cache:
                utils.get_cache()["ERNIE_API_SECRET"] = os.getenv("ERNIE_API_SECRET")
            return os.getenv("ERNIE_API_SECRET")
        if self.enable_cache and "ERNIE_API_SECRET" in utils.get_cache():
            return utils.get_cache()["ERNIE_API_SECRET"]
        raise ValueError("ERNIE_API_SECRET is not provided. Please set your secret.")

    def get_ernie_token(self) -> str:
        ernie_token_pool: ErnieTokenPool = ErnieTokenPool()
        if self.enable_cache:
            if "ERNIE_TOKEN" in utils.get_cache():
                return ernie_token_pool.get_token()
        return ernie_token_pool.nocache_get_token(
            self.get_ernie_api_key, self.get_ernie_api_secret
        )

    def get_key_retry_times(self, model: str) -> int:
        if self.enable_cache:
            openai_key_pool: OpenAIKeyPool = OpenAIKeyPool()
            return openai_key_pool.get_num(model)
        return self.key_default_retry_times

    @property
    def proxy_mode(self) -> str:
        if self.enable_cache and "PROXY_MODE" in utils.get_cache():
            return utils.get_cache()["PROXY_MODE"]
        return self._proxy_mode

    @proxy_mode.setter
    def proxy_mode(self, value):
        self._proxy_mode = value
        if self.enable_cache:
            utils.get_cache()["PROXY_MODE"] = value

    @property
    def proxies(self) -> Optional[dict]:
        if self.enable_cache and "PROXIES" in utils.get_cache():
            return utils.get_cache()["PROXIES"] if self.proxy_mode == "custom" else None
        return self._proxies

    @proxies.setter
    def proxies(self, value):
        self._proxies = value
        if self.enable_cache:
            utils.get_cache()["PROXIES"] = value

    @property
    def openai_chat_request_url(self) -> str:
        if self.proxy_mode == PROXY_MODE[2]:
            self.proxies = None
            return self.openai_proxy_url
        return self.openai_chat_api_url

    @property
    def openai_completion_request_url(self) -> str:
        if self.proxy_mode == PROXY_MODE[2]:
            self.proxies = None
            return f"{self.openai_proxy_url}completions"
        return self.openai_completion_api_url

    def set_proxy_mode(self, mode: str, proxies: Optional[dict] = None):
        self.proxy_mode = mode
        self.proxies = proxies
        logger.info(f"[pne] proxy mode: {mode}, proxies: {proxies}")
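
For context, Config relies on a Singleton metaclass so that every caller shares one instance. The project's own Singleton implementation is not included in the paste above; a typical sketch of that pattern looks like this, which also hints at why class attributes could serve the same purpose:

class Singleton(type):
    """Common Singleton metaclass pattern: one shared instance per class."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Create the instance once, then return the cached one on every later call.
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]
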
ruanrongman commented 1 year ago

Maybe we can use YAML for the config.
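
For illustration, a minimal sketch of what a YAML-based configuration could look like (the file name config.yaml, its keys, and the load_config helper are hypothetical; PyYAML is assumed as a dependency):

import yaml  # assumes the PyYAML package is installed

# Hypothetical config.yaml mirroring a few of the current defaults:
#   enable_cache: true
#   key_default_retry_times: 5
#   openai_chat_api_url: https://api.openai.com/v1/chat/completions

def load_config(path: str = "config.yaml") -> dict:
    """Hypothetical helper: read configuration values from a YAML file."""
    with open(path, "r", encoding="utf-8") as f:
        return yaml.safe_load(f) or {}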

Undertone0809 commented 1 year ago

YAML is a better choice in a project, but not in a script. I think we can change to the following style.

class PneConfig:
    param1 = "value1"
    param2 = "value3"
    param3 = "value3"

    @classmethod
    def get_param4(cls):
        ...

Class attributes and class methods are better than a singleton instance in some cases. What do you think? @ruanrongman
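
For illustration, one of the existing lookups could move to this style roughly as follows. This is only a sketch that assumes OpenAIKeyPool keeps its current interface; the class name PneConfig comes from the proposal above:

import os

class PneConfig:
    # Shared state lives on the class itself, so no Singleton metaclass is needed.
    enable_cache: bool = True
    key_default_retry_times: int = 5

    @classmethod
    def get_openai_api_key(cls, model: str) -> str:
        """Class-level variant of the current Config.get_openai_api_key."""
        if cls.enable_cache:
            key = OpenAIKeyPool().get(model)
            if key:
                return key
        if "OPENAI_API_KEY" in os.environ:
            return os.environ["OPENAI_API_KEY"]
        raise ValueError("OPENAI API key is not provided. Please set your key.")

Callers would then use PneConfig.get_openai_api_key(model) directly, without constructing a Config instance first.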