yeagerai / yeagerai-agent

MIT License
584 stars 58 forks source link

Error while creating tool #7

Open MuncleUscles opened 1 year ago

MuncleUscles commented 1 year ago

Prompt

> create a tool that 1. pulls the top 1000 coins from coingecko 2. divides the 24h price change by the circulating market cap 3. sorts them by the adjusted price change 4. lists top 10 gainers and losers

> Entering new AgentExecutor chain...
Thought: I need to create a solution sketch for this tool.
Action: Design Tool Solution Sketch
Action Input: create a tool that 1. pulls the top 1000 coins from coingecko 2. divides the 24h price change by the circulating market cap 3. sorts them by the adjusted price change 4. lists top 10 gainers and losers

Last observation and error:

Observation:The file test_CryptoGainersAndLosersRun.py has been written in the /home/edib0/.yeagerai-sessions/9fe2b5b-edib0 successfully!
Here is the source code of the CryptoGainersAndLosersRun LangChain tool based on given requirements:

import pytest
from unittest.mock import MagicMock
from yeagerai.toolkit.yeagerai_tool import YeagerAITool
from your_tool_module import CryptoGainersAndLosersAPIWrapper, CryptoGainersAndLosersRun

# Mock the CoinGecko API response.
# Canned /coins/markets-style payload: four coins with a mix of positive and
# negative 24h changes and differing market caps, so that the adjusted-change
# ordering (change / market cap) differs from the raw-percentage ordering.
coin_gecko_response = [
    {"id": "coin1", "symbol": "c1", "name": "Coin 1", "price_change_percentage_24h": 5, "market_cap": 1000000},
    {"id": "coin2", "symbol": "c2", "name": "Coin 2", "price_change_percentage_24h": -3, "market_cap": 2000000},
    {"id": "coin3", "symbol": "c3", "name": "Coin 3", "price_change_percentage_24h": 8, "market_cap": 1500000},
    {"id": "coin4", "symbol": "c4", "name": "Coin 4", "price_change_percentage_24h": -10, "market_cap": 3000000}
]

@pytest.fixture
def api_wrapper():
    """Wrapper instance whose network fetch is stubbed with the canned payload."""
    stubbed = CryptoGainersAndLosersAPIWrapper()
    # Replace the real CoinGecko call so tests never touch the network.
    stubbed._get_top_1000_coins = MagicMock(return_value=coin_gecko_response)
    return stubbed

def test_get_top_1000_coins(api_wrapper):
    """The stubbed fetch must hand back the canned payload unchanged."""
    fetched = api_wrapper._get_top_1000_coins()
    assert fetched == coin_gecko_response

def test_calculate_adjusted_price_change(api_wrapper):
    """Adjusted change is the 24h percentage change divided by market cap."""
    sample = coin_gecko_response[0]
    assert api_wrapper._calculate_adjusted_price_change(sample) == 5 / 1000000

def test_sort_by_adjusted_price_change(api_wrapper):
    """Coins come back ordered ascending by adjusted price change,
    each annotated with its computed 'adjusted_price_change' value."""
    ordered = api_wrapper._sort_by_adjusted_price_change(coin_gecko_response)
    # Expected ascending order: coin4, coin2, coin1, coin3 — derive each
    # expected record from the input instead of duplicating the literals.
    expected = [
        dict(coin, adjusted_price_change=coin["price_change_percentage_24h"] / coin["market_cap"])
        for coin in (coin_gecko_response[3], coin_gecko_response[1],
                     coin_gecko_response[0], coin_gecko_response[2])
    ]
    assert ordered == expected

def test_run(api_wrapper):
    """End-to-end run over the stubbed data yields the formatted report."""
    report = api_wrapper.run("")
    assert report == (
        "Top 10 gainers and losers (based on adjusted price change):\n"
        "Gainers:\n"
        "1. Coin 3 (c3): 8.00% (Adjusted price change: 5.33E-06)\n"
        "2. Coin 1 (c1): 5.00% (Adjusted price change: 5.00E-06)\n"
        "Losers:\n"
        "1. Coin 4 (c4): -10.00% (Adjusted price change: -3.33E-06)\n"
        "2. Coin 2 (c2): -3.00% (Adjusted price change: -1.50E-06)\n"
    )

def test_yeager_ai_tool_run(api_wrapper):
    """The LangChain tool wrapper delegates to the API wrapper and emits
    the same formatted report as calling run() directly."""
    tool = CryptoGainersAndLosersRun(api_wrapper=api_wrapper)
    tool_output = tool._run("")
    assert tool_output == (
        "Top 10 gainers and losers (based on adjusted price change):\n"
        "Gainers:\n"
        "1. Coin 3 (c3): 8.00% (Adjusted price change: 5.33E-06)\n"
        "2. Coin 1 (c1): 5.00% (Adjusted price change: 5.00E-06)\n"
        "Losers:\n"
        "1. Coin 4 (c4): -10.00% (Adjusted price change: -3.33E-06)\n"
        "2. Coin 2 (c2): -3.00% (Adjusted price change: -1.50E-06)\n"
    )

Traceback (most recent call last):
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 761, in _update_chunk_length
    self.chunk_left = int(line, 16)
ValueError: invalid literal for int() with base 16: b''

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 444, in _error_catcher
    yield
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 828, in read_chunked
    self._update_chunk_length()
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 765, in _update_chunk_length
    raise InvalidChunkLength(self, line)
urllib3.exceptions.InvalidChunkLength: InvalidChunkLength(got length b'', 0 bytes read)

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/requests/models.py", line 816, in generate
    yield from self.raw.stream(chunk_size, decode_content=True)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 624, in stream
    for line in self.read_chunked(amt, decode_content=decode_content):
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 816, in read_chunked
    with self._error_catcher():
  File "/home/edib0/miniconda3/lib/python3.10/contextlib.py", line 153, in __exit__
    self.gen.throw(typ, value, traceback)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/urllib3/response.py", line 461, in _error_catcher
    raise ProtocolError("Connection broken: %r" % e, e)
urllib3.exceptions.ProtocolError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "/home/edib0/miniconda3/bin/yeagerai-agent", line 8, in <module>
    sys.exit(main())
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/click/core.py", line 1130, in __call__
    return self.main(*args, **kwargs)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/click/core.py", line 1055, in main
    rv = self.invoke(ctx)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/click/core.py", line 1404, in invoke
    return ctx.invoke(self.callback, **ctx.params)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/click/core.py", line 760, in invoke
    return __callback(*args, **kwargs)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/yeagerai/interfaces/cli.py", line 181, in main
    chat_interface(
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/yeagerai/interfaces/cli.py", line 105, in chat_interface
    agent.run(prompt_text)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/yeagerai/agent/yeagerai_agent.py", line 82, in run
    return self.agent_executor.run(input)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 213, in run
    return self(args[0])[self.output_keys[0]]
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 116, in __call__
    raise e
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 113, in __call__
    outputs = self._call(inputs)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 812, in _call
    next_step_output = self._take_next_step(
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 692, in _take_next_step
    output = self.agent.plan(intermediate_steps, **inputs)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 290, in plan
    output = self.llm_chain.run(
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 216, in run
    return self(kwargs)[self.output_keys[0]]
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 116, in __call__
    raise e
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 113, in __call__
    outputs = self._call(inputs)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 57, in _call
    return self.apply([inputs])[0]
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 118, in apply
    response = self.generate(input_list)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 62, in generate
    return self.llm.generate_prompt(prompts, stop)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 82, in generate_prompt
    raise e
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 79, in generate_prompt
    output = self.generate(prompt_messages, stop=stop)
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 54, in generate
    results = [self._generate(m, stop=stop) for m in messages]
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 54, in <listcomp>
    results = [self._generate(m, stop=stop) for m in messages]
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/langchain/chat_models/openai.py", line 252, in _generate
    for stream_resp in self.completion_with_retry(
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/openai/api_resources/abstract/engine_api_resource.py", line 166, in <genexpr>
    return (
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/openai/api_requestor.py", line 612, in <genexpr>
    return (
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/openai/api_requestor.py", line 107, in parse_stream
    for line in rbody:
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/requests/models.py", line 865, in iter_lines
    for chunk in self.iter_content(
  File "/home/edib0/miniconda3/lib/python3.10/site-packages/requests/models.py", line 818, in generate
    raise ChunkedEncodingError(e)
requests.exceptions.ChunkedEncodingError: ("Connection broken: InvalidChunkLength(got length b'', 0 bytes read)", InvalidChunkLength(got length b'', 0 bytes read))

System: Ubuntu 22.04.2 LTS, WSL on Windows 11

seshubonam commented 1 year ago

Traceback (most recent call last): File "/Users/seshubonam/anaconda3/bin/yeagerai-agent", line 8, in sys.exit(main()) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/click/core.py", line 1128, in call return self.main(args, kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/click/core.py", line 1053, in main rv = self.invoke(ctx) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/click/core.py", line 1395, in invoke return ctx.invoke(self.callback, ctx.params) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/click/core.py", line 754, in invoke return __callback(args, kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/yeagerai/interfaces/cli.py", line 181, in main chat_interface( File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/yeagerai/interfaces/cli.py", line 105, in chat_interface agent.run(prompt_text) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/yeagerai/agent/yeagerai_agent.py", line 82, in run return self.agent_executor.run(input) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 213, in run return self(args[0])[self.output_keys[0]] File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 116, in call raise e File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 113, in call outputs = self._call(inputs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 822, in _call next_step_output = self._take_next_step( File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 702, in _take_next_step output = self.agent.plan(intermediate_steps, inputs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/agents/agent.py", line 300, in plan output = self.llm_chain.run( File 
"/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 216, in run return self(kwargs)[self.output_keys[0]] File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 116, in call raise e File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/base.py", line 113, in call outputs = self._call(inputs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 57, in _call return self.apply([inputs])[0] File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 118, in apply response = self.generate(input_list) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chains/llm.py", line 62, in generate return self.llm.generate_prompt(prompts, stop) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 82, in generate_prompt raise e File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 79, in generate_prompt output = self.generate(prompt_messages, stop=stop) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 54, in generate results = [self._generate(m, stop=stop) for m in messages] File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/base.py", line 54, in results = [self._generate(m, stop=stop) for m in messages] File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/openai.py", line 252, in _generate for stream_resp in self.completion_with_retry( File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/openai.py", line 228, in completion_with_retry return _completion_with_retry(kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/tenacity/init.py", line 289, in wrapped_f return self(f, *args, *kw) File 
"/Users/seshubonam/anaconda3/lib/python3.10/site-packages/tenacity/init.py", line 379, in call do = self.iter(retry_state=retry_state) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/tenacity/init.py", line 314, in iter return fut.result() File "/Users/seshubonam/anaconda3/lib/python3.10/concurrent/futures/_base.py", line 451, in result return self.get_result() File "/Users/seshubonam/anaconda3/lib/python3.10/concurrent/futures/_base.py", line 403, in get_result raise self._exception File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/tenacity/init.py", line 382, in call result = fn(args, kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/langchain/chat_models/openai.py", line 226, in _completion_with_retry return self.client.create(*kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/openai/api_resources/chat_completion.py", line 25, in create return super().create(args, **kwargs) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/openai/api_resources/abstract/engine_apiresource.py", line 153, in create response, , api_key = requestor.request( File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/openai/api_requestor.py", line 226, in request resp, got_stream = self._interpret_response(result, stream) File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/openai/api_requestor.py", line 620, in _interpret_response self._interpret_response_line( File "/Users/seshubonam/anaconda3/lib/python3.10/site-packages/openai/api_requestor.py", line 683, in _interpret_response_line raise self.handle_error_response( openai.error.AuthenticationError: Incorrect API key provided: 1234. You can find your API key at https://platform.openai.com/account/api-keys.

jmlago commented 1 year ago

that is because when it starts creating, it generates a .env file in the .yeagerai-sessions folder and sets OPENAI_API_KEY=1234. You have to change that value in the .env file before you start using the agent.

Any thoughts for a better UX? I don't want to ask for it in the terminal, because if you are sharing the screen somehow... it can be dangerous.

MuncleUscles commented 1 year ago

That's not my error, not sure why it was posted here. I have my API key set, and it generated multiple stages before crashing