I encountered the following error while building the image with sm-docker. As the error message suggests, downgrading protobuf to 3.20.3 resolves the issue; however, with protobuf 4.20 or later the build no longer works (the pin I used is sketched after the traceback).
Building container image and pushing to ECR
Traceback (most recent call last):
File "/opt/conda/envs/studio/bin/sm-docker", line 8, in
sys.exit(main())
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker_studio_image_build/cli.py", line 133, in main
args.func(args, unknown)
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker_studio_image_build/cli.py", line 74, in build_image
args.repository, get_role(args), args.bucket, args.compute_type,
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker_studio_image_build/cli.py", line 46, in get_role
import sagemaker
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/init.py", line 18, in
from sagemaker import estimator, parameter, tuner # noqa: F401
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/estimator.py", line 27, in
from sagemaker import git_utils, image_uris, vpc_utils
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/image_uris.py", line 24, in
from sagemaker.spark import defaults
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/spark/init.py", line 16, in
from sagemaker.spark.processing import PySparkProcessor, SparkJarProcessor # noqa: F401
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/spark/processing.py", line 35, in
from sagemaker.local.image import _ecr_login_if_needed, _pull_image
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/local/init.py", line 16, in
from .local_session import ( # noqa: F401
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/local/local_session.py", line 23, in
from sagemaker.local.image import _SageMakerContainer
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/local/image.py", line 38, in
import sagemaker.local.data
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/local/data.py", line 26, in
import sagemaker.amazon.common
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/amazon/common.py", line 23, in
from sagemaker.amazon.record_pb2 import Record
File "/opt/conda/envs/studio/lib/python3.9/site-packages/sagemaker/amazon/record_pb2.py", line 36, in
_descriptor.FieldDescriptor(
File "/opt/conda/envs/studio/lib/python3.9/site-packages/google/protobuf/descriptor.py", line 561, in new
_message.Message._CheckCalledFromGeneratedFile()
TypeError: Descriptors cannot not be created directly.
If this call came from a _pb2.py file, your generated code is out of date and must be regenerated with protoc >= 3.19.0.
If you cannot immediately regenerate your protos, some other possible workarounds are:
Downgrade the protobuf package to 3.20.x or lower.
Set PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python (but this will use pure-Python parsing and will be much slower).
More information: https://developers.google.com/protocol-buffers/docs/news/2022-05-06#python-updates
make: *** [container] Error 1
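
For reference, this is roughly how I applied the downgrade workaround in the Studio kernel environment before rerunning the build. It is only a sketch of my setup; the repository name is an example, and the other sm-docker flags are whatever your project needs.

# pin protobuf below the 4.x line, as the error message suggests
pip install "protobuf==3.20.3"
# rerun the build (repository name is illustrative)
sm-docker build . --repository my-image:latest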
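
The error message also lists an alternative that I have not tried end to end: forcing the pure-Python protobuf implementation instead of downgrading. Presumably it would look something like this, at the cost of much slower protobuf parsing.

# use the slower pure-Python protobuf runtime instead of downgrading
export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
sm-docker build . --repository my-image:latest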