Closed vdusek closed 8 months ago
UPDATE: I abandoned trying to get multiple spiders working; instead, I'm investing my time in implementing the monorepo approach, registering each spider as an individual actor:
async def main() -> None:
    """Run this actor's Scrapy spider, resolved from the Docker build context path.

    The spider module is located via the ACTOR_PATH_IN_DOCKER_CONTEXT env var
    (raises KeyError if unset) and is expected to expose a `Spider` class.
    """
    actor_path = os.environ['ACTOR_PATH_IN_DOCKER_CONTEXT']  # e.g. juniorguru_plucker/jobs_startupjobs
    # Map the filesystem path to a dotted module path, e.g. 'juniorguru_plucker.jobs_startupjobs.spider'
    module_name = f"{actor_path.replace('/', '.')}.spider"
    async with Actor:
        Actor.log.info(f'Actor {actor_path} is being executed…')
        # Layer Apify-specific overrides on top of the Scrapy project settings
        settings = apply_apify_settings(get_project_settings())
        # install_root_handler=False keeps Scrapy from hijacking the root logger
        process = CrawlerProcess(settings, install_root_handler=False)
        Actor.log.info(f"Actor's spider: {module_name}")
        process.crawl(importlib.import_module(module_name).Spider)
        process.start()  # NOTE(review): blocks until the crawl finishes — matches the Apify Scrapy template
https://github.com/apify/actor-templates/blob/087b2dc4315e029e38b6282f7d312fc80c0c4e0d/templates/python-scrapy/src/main.py#L42:L45