googleapis / python-bigquery-storage

Apache License 2.0
112 stars 44 forks source link

System test `test_append_rows_with_proto3` fails with `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python` #797

Closed parthea closed 2 months ago

parthea commented 2 months ago

See the presubmit failure Kokoro Prerelease Dependencies in PR https://github.com/googleapis/python-bigquery-storage/pull/796 where the system test test_append_rows_with_proto3 fails when using PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python.

To reproduce the issue, install this library using `pip install .` and run `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python pytest tests/system/test_writer.py::test_append_rows_with_proto3`

=================================== FAILURES ===================================
_________________________ test_append_rows_with_proto3 _________________________

bqstorage_write_client = 
table = Table(TableReference(DatasetReference('precise-truck-742', 'python_bigquery_storage_tests_system_20240704194623_d71d4d'), 'users_4cf692cf_9770_4eb4_b06d_c3762bf7acf4'))

    def test_append_rows_with_proto3(bqstorage_write_client, table):
        from google.protobuf import descriptor_pb2
        import proto

        # Using Proto Plus to build proto3
        # Declare proto3 field `optional` for presence
        class PersonProto(proto.Message):
            first_name = proto.Field(
                proto.STRING,
                number=1,
                optional=True,
            )
            last_name = proto.Field(
                proto.STRING,
                number=2,
                optional=True,
            )
            age = proto.Field(
                proto.INT64,
                number=3,
                optional=True,
            )

        person_pb = PersonProto.pb()

        stream_name = f"projects/{table.project}/datasets/{table.dataset_id}/tables/{table.table_id}/_default"
        request_template = gapic_types.AppendRowsRequest()
        request_template.write_stream = stream_name

        proto_schema = gapic_types.ProtoSchema()
        proto_descriptor = descriptor_pb2.DescriptorProto()
>       person_pb.DESCRIPTOR.CopyToProto(
            proto_descriptor,
        )

tests/system/test_writer.py:91: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
.nox/prerelease_deps-protobuf_implementation-python/lib/python3.12/site-packages/google/protobuf/descriptor.py:471: in CopyToProto
    super(Descriptor, self).CopyToProto(proto)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
proto = 

    def CopyToProto(self, proto):
      """Copies this to the matching proto in descriptor_pb2.

      Args:
        proto: An empty proto instance from descriptor_pb2.

      Raises:
        Error: If self couldn't be serialized, due to too few constructor
          arguments.
      """
      if (self.file is not None and
          self._serialized_start is not None and
          self._serialized_end is not None):
        proto.ParseFromString(self.file.serialized_pb[
            self._serialized_start:self._serialized_end])
      else:
>       raise Error('Descriptor does not contain serialization.')
E       google.protobuf.descriptor.Error: Descriptor does not contain serialization.