# todays_cloud_data_file_exist = check_today_cloud_data_file_exist()
# if todays_cloud_data_file_exist is True:
# last_cloud_file_name = get_todays_cloud_data_file_name()
# TODO: implement code below
# log.info("Cloud scraper module data file: %s found; "
#          "will be used in data_formater_module", last_cloud_file_name)
# pass
# if todays_cloud_data_file_exist is False:
# # check if the local scraper job ran today and saved
# # its output file in the "data" folder
# What's needed:
async def run_long_task(city: str, background_tasks: BackgroundTasks):
    """Endpoint to trigger scrape, format and insert data into the DB.

    Args:
        city: City name for the scrape job.
            NOTE(review): currently unused in this body — confirm whether it
            should be forwarded to the background task.
        background_tasks: FastAPI ``BackgroundTasks`` instance (injected by
            FastAPI) used to schedule work after the response is sent.
    """
    # Schedule the download to run after the response returns, so the
    # request itself does not block on the (potentially slow) fetch.
    background_tasks.add_task(download_latest_lambda_file)