import asyncio

from crawlee import ConcurrencySettings, service_locator
from crawlee.configuration import Configuration
from crawlee.crawlers import BeautifulSoupCrawler, BeautifulSoupCrawlingContext


async def main() -> None:
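    # Purge data left over from previous runs, and register the configuration
    # with the service locator so the crawler below picks it up as the default.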
    configuration = Configuration(purge_on_start=True)
    service_locator.set_configuration(configuration)

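    # Pin concurrency to a single request at a time and cap the whole run
    # at ten requests in total.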
    crawler = BeautifulSoupCrawler(
        concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
        max_requests_per_crawl=10,
    )

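    # The default handler processes every request that has no label-specific
    # handler registered on the router.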
    @crawler.router.default_handler
    async def default_handler(ctx: BeautifulSoupCrawlingContext) -> None:
        ctx.log.info("Handling %s and sleeping", ctx.request.url)
        await asyncio.sleep(1)

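    # Start the crawl; the two URLs differ only in their query strings, so
    # they are enqueued as distinct requests.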
    await crawler.run([
        "https://api.ipify.org?q=1",
        "https://api.ipify.org?q=2",
    ])


if __name__ == "__main__":
    asyncio.run(main())