googleapis / python-bigquery-storage


tests.system.reader.test_reader_dataframe: test_read_rows_to_dataframe[v1beta2-AVRO-avro_schema] failed #660

Closed: flaky-bot[bot] closed this issue 11 months ago

flaky-bot[bot] commented 11 months ago

Note: #562 was also for this test, but it was closed more than 10 days ago. So, I didn't mark it flaky.


commit: cbde9a6d475fa8c74b3cacee8ded5ad8a76b9492
buildURL: Build Status, Sponge
status: failed
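
To reproduce locally (assuming the credentials and project configuration these system tests expect are in place), the failing parametrization can be selected by its full pytest node id:

    pytest "tests/system/reader/test_reader_dataframe.py::test_read_rows_to_dataframe[v1beta2-AVRO-avro_schema]"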

Test output
client_and_types = (<client>, <types module>)
project_id = 'precise-truck-742', data_format = 'AVRO'
expected_schema_type = 'avro_schema'

    @pytest.mark.parametrize(
        "data_format,expected_schema_type",
        (("AVRO", "avro_schema"), ("ARROW", "arrow_schema")),
    )
    def test_read_rows_to_dataframe(
        client_and_types, project_id, data_format, expected_schema_type
    ):
        client, types = client_and_types
        read_session = types.ReadSession()
        read_session.table = "projects/{}/datasets/{}/tables/{}".format(
            "bigquery-public-data", "new_york_citibike", "citibike_stations"
        )
        read_session.data_format = data_format

        session = client.create_read_session(
            request={
                "parent": "projects/{}".format(project_id),
                "read_session": read_session,
                "max_stream_count": 1,
            }
        )
        schema_type = session._pb.WhichOneof("schema")
        assert schema_type == expected_schema_type

        stream = session.streams[0].name

>       frame = client.read_rows(stream).to_dataframe(
            session, dtypes={"latitude": numpy.float16}
        )

tests/system/reader/test_reader_dataframe.py:86: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
google/cloud/bigquery_storage_v1/reader.py:299: in to_dataframe
    return self.rows(read_session=read_session).to_dataframe(dtypes=dtypes)
google/cloud/bigquery_storage_v1/reader.py:415: in to_dataframe
    frames = [page.to_dataframe(dtypes=dtypes) for page in self.pages]
google/cloud/bigquery_storage_v1/reader.py:415: in <listcomp>
    frames = [page.to_dataframe(dtypes=dtypes) for page in self.pages]
google/cloud/bigquery_storage_v1/reader.py:556: in to_dataframe
    return self._stream_parser.to_dataframe(self._message, dtypes=dtypes)
google/cloud/bigquery_storage_v1/reader.py:670: in to_dataframe
    for row in self.to_rows(message):
google/cloud/bigquery_storage_v1/reader.py:714: in to_rows
    yield fastavro.schemaless_reader(messageio, self._fastavro_schema)
fastavro/_read.pyx:1126: in fastavro._read.schemaless_reader
    ???
fastavro/_read.pyx:1153: in fastavro._read.schemaless_reader
    ???
fastavro/_read.pyx:743: in fastavro._read._read_data
    ???
fastavro/_read.pyx:616: in fastavro._read.read_record
    ???
fastavro/_read.pyx:703: in fastavro._read._read_data
    ???
fastavro/_read.pyx:301: in fastavro._read.read_utf8
    ???
fastavro/_read.pyx:284: in fastavro._read.read_bytes
    ???
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

>   ???
E   EOFError

fastavro/_read.pyx:176: EOFError
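
For context on the failure: fastavro.schemaless_reader decodes exactly one record from a buffer and raises EOFError when the buffer ends in the middle of a field; here the traceback shows read_utf8 ran out of bytes while pulling a string column, meaning the Avro message for one page of the read stream arrived truncated. A minimal sketch of that failure mode, using an illustrative one-field schema rather than the real citibike_stations schema:

    import io

    import fastavro

    # Illustrative schema -- not the actual table schema from this issue.
    schema = fastavro.parse_schema(
        {
            "type": "record",
            "name": "Station",
            "fields": [{"name": "name", "type": "string"}],
        }
    )

    # Serialize one record, then chop bytes off the end to simulate a
    # truncated Avro message from the read stream.
    buf = io.BytesIO()
    fastavro.schemaless_writer(buf, schema, {"name": "W 52 St & 11 Ave"})
    truncated = io.BytesIO(buf.getvalue()[:-4])

    try:
        fastavro.schemaless_reader(truncated, schema)
    except EOFError:
        # Same error path as the traceback: read_utf8 -> read_bytes -> EOF.
        print("EOFError: buffer ended mid-field")

Since a clean rerun of the build passed (see the next comment), the truncation was most likely a transient stream interruption rather than a client-side decoding bug.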
flaky-bot[bot] commented 11 months ago

Test passed for commit 0b796f9d0f062b39fc6ec035d53a674cbcb122a3 (Build Status, Sponge)! Closing this issue.