import requests
import asyncio
import aiohttp


def get_all_titles(project):
    ''' Return the title of every page in a Scrapbox project, following the
    X-Following-Id pagination header until it runs out.
    cf: https://scrapbox.io/takker/ScrapboxのAPI取得支援関数を作る#5fd11f561280f00000e5693e '''
    titles = []
    following_id = None
    while True:
        # Pass the id from the previous response to continue where it left off.
        params = f'?followingId={following_id}' if following_id else ''
        response = requests.get(
            f'https://scrapbox.io/api/pages/{project}/search/titles{params}')
        response.raise_for_status()
        titles.extend([page['title'] for page in response.json()])
        # .get() avoids a KeyError on the last batch, where the header may be
        # absent; a missing or empty id ends the loop.
        following_id = response.headers.get('X-Following-Id')
        if not following_id:
            break
    return titles
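

# A small bridge to the async helpers below (an illustrative sketch, not part
# of the original snippet): build the per-page read API URL for each title.
# Titles may contain characters that are not URL-safe, hence quote().
from urllib.parse import quote


def build_page_urls(project, titles):
    return [
        f'https://scrapbox.io/api/pages/{project}/{quote(title, safe="")}'
        for title in titles
    ]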


async def get(session, url):
    ''' Fetch a single URL and decode the response body as JSON. '''
    async with session.get(url) as response:
        return await response.json()


async def get_all(urls, **session_kwargs):
    ''' Fetch all URLs concurrently, reusing a single aiohttp session. '''
    async with aiohttp.ClientSession(**session_kwargs) as session:
        tasks = [get(session, url) for url in urls]
        # gather() runs the requests concurrently and returns the results
        # in the same order as the input URLs.
        return await asyncio.gather(*tasks)
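

# Putting it together (a usage sketch; 'PROJECT_NAME' is a placeholder, and
# build_page_urls() is the illustrative helper defined above): collect every
# title synchronously, then fetch each page's JSON concurrently over one
# shared session.
if __name__ == '__main__':
    project = 'PROJECT_NAME'
    urls = build_page_urls(project, get_all_titles(project))
    pages = asyncio.run(get_all(urls))
    print(f'fetched {len(pages)} pages from {project}')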