Open manish-rocks opened 2 years ago
import datetime
import json
import logging
import os
import time
import traceback

from azure.storage.blob import ContainerClient
def file_upload_blob(sitename, property_list, location="", containername=None):
    """Upload ``property_list`` as a timestamped JSON blob to Azure Blob Storage.

    Args:
        sitename: Site identifier; used as the blob's virtual-folder prefix and
            the filename stem.
        property_list: List (presumably of dicts — confirm against caller) to
            serialize. An empty/falsy value uploads a placeholder blob instead.
        location: Optional location tag embedded in the blob name.
        containername: Target container. Defaults to the ``containerName``
            environment variable, resolved at call time (a default argument of
            ``os.getenv(...)`` would be frozen at import time).

    All errors are logged rather than raised (best-effort upload, matching the
    original behavior).
    """
    logger = logging.getLogger(__name__)
    # Resolve the container name at call time so environment changes are seen.
    if containername is None:
        containername = os.getenv('containerName')
    try:
        # Configure the transport so large payloads are uploaded as staged
        # blocks instead of one monolithic PUT. A single huge PUT is what
        # triggers "ServiceResponseError: ('Connection aborted.',
        # timeout('The write operation timed out'))" on big lists / slow links.
        container_client = ContainerClient.from_connection_string(
            conn_str=os.getenv("BLOB_STORAGE_CONNECTION_STRING"),
            container_name=containername,
            max_single_put_size=4 * 1024 * 1024,  # chunk anything above 4 MiB
            max_block_size=4 * 1024 * 1024,       # 4 MiB staged blocks
            connection_timeout=600,               # per-socket read/write timeout (s)
        )
        logger.info(f"connected to the container {containername}")
        # Build "<site>/<site>_<location><timestamp>.json". Use "/" explicitly:
        # os.path.join would emit "\" on Windows, which Blob Storage treats as
        # part of the blob name rather than a virtual-directory separator.
        timestamp = datetime.datetime.now().strftime(r'%Y-%b-%d_%H-%M-%S')
        filename = f"{sitename}/{sitename}_{location}{timestamp}.json"
        # An empty input still produces a marker blob so downstream consumers
        # can tell the run happened (original behavior, kept intentionally).
        payload = property_list if property_list else ['got empty file']
        try:
            container_client.upload_blob(
                name=filename,
                data=json.dumps(obj=payload, indent=4),
                blob_type='BlockBlob',
                timeout=3600,  # server-side operation timeout (s), not the socket timeout
            )
            if property_list:
                logger.info(f'file "{filename}" has been uploaded in azure blob storage')
            else:
                logger.info('found empty file! .. uploading empty file')
        except Exception as e:
            logger.error(f"getting error while uploading file to blob storage:{e}")
            traceback.print_exc()
    except Exception as e1:
        logger.error(f" blob storage error: {e1}")
# Call the function with appropriate parameters
# file_upload_blob(sitename, property_list, location, containername)
# NOTE: the "timeout" parameter of upload_blob is set to 3600 seconds (1 hour)
# to give long-running upload operations enough time to complete.
Which service(blob, file, queue) does this issue concern?
Which version of the SDK was used? Please provide the output of
pip freeze
azure-storage-blob==12.11.0
What problem was encountered?
Error: azure.core.exceptions.ServiceResponseError: ('Connection aborted.', timeout('The write operation timed out'))
Have you found a mitigation/solution?
No. For small lists the function above works well, but when the list grows to around 3000 dictionaries the code fails with the error mentioned above. Please advise how I can solve this problem.