# URL fetching with concurrency limit

import paco
import aiohttp
import asyncio

async def fetch(url):
    """Fetch *url* with a throwaway session and return the response object.

    The body is read while the session is still open, so the connection is
    released cleanly and callers can inspect ``resp.status`` (and the cached
    payload) after the session has closed.
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            # Consume the payload before the context managers exit —
            # accessing the body after the session closes would fail on a
            # released connection.
            await resp.read()
            return resp

async def fetch_urls():
    """Fetch a batch of URLs concurrently (at most 3 in flight) and print
    each response's HTTP status."""
    # NOTE(review): the original list literal was left unterminated (syntax
    # error); reconstructed with example targets — replace with real URLs.
    urls = [
        'http://www.google.com',
        'http://www.yahoo.com',
        'http://www.bing.com',
    ]

    # Map concurrent executor with 3 concurrent limit
    responses = await paco.map(fetch, urls, limit=3)

    for res in responses:
        print('Response:', res.status)

if __name__ == '__main__':
    # Drive the coroutine to completion — the original only created the
    # event loop and never actually scheduled fetch_urls().
    loop = asyncio.get_event_loop()
    loop.run_until_complete(fetch_urls())