update endpoints

This commit is contained in:
Trevor Hobenshield
2023-04-07 16:59:18 -07:00
parent 1d05427da7
commit c5b13cd367
4 changed files with 17 additions and 1037 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -104,7 +104,7 @@ def get_headers(filename: str = 'headers.txt') -> dict:
'(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'}
-async def process(fn: callable, headers: dict, urls: any, **kwargs) -> tuple:
+async def process(fn: callable, headers: dict, urls: any, **kwargs) -> list:
conn = aiohttp.TCPConnector(ssl=False, limit=420, ttl_dns_cache=69)
async with aiohttp.ClientSession(headers=headers, connector=conn) as s:
return await asyncio.gather(*(fn(s, u, **kwargs) for u in urls))

View File

@@ -3,20 +3,19 @@ from textwrap import dedent
from setuptools import find_packages, setup
install_requires = [
-    'aiodns',
-    'aiohttp',
+    "nest_asyncio",
+    "aiohttp",
+    "requests",
+    "tqdm",
+    "orjson",
-    'uvloop; platform_system != "Windows"',
-    'nest_asyncio',
-    'requests',
-    'tqdm',
-    'orjson',
]
setup(
-    name='twitter-api-client',
-    version='0.5.6',
-    python_requires='>=3.11.0',
-    description='Twitter API',
+    name="twitter-api-client",
+    version="0.5.8",
+    python_requires=">=3.11.0",
+    description="Twitter API",
long_description=dedent('''
Complete implementation of the undocumented Twitter API
@@ -205,11 +204,11 @@ setup(
```
'''),
long_description_content_type='text/markdown',
-    author='Trevor Hobenshield',
-    author_email='trevorhobenshield@gmail.com',
-    url='https://github.com/trevorhobenshield/twitter-api',
+    author="Trevor Hobenshield",
+    author_email="trevorhobenshield@gmail.com",
+    url="https://github.com/trevorhobenshield/twitter-api",
install_requires=install_requires,
-    keywords='twitter api client async search automation bot scrape',
+    keywords="twitter api client async search automation bot scrape",
packages=find_packages(),
include_package_data=True,
)

View File

@@ -9,7 +9,7 @@ from pathlib import Path
from urllib.parse import urlsplit
import orjson
-from aiohttp import ClientSession, TCPConnector, AsyncResolver
+from aiohttp import ClientSession, TCPConnector
from tqdm import tqdm
from .config.log import log_config
@@ -120,7 +120,7 @@ class Scraper:
return res
async def process(self, urls: list, headers: dict) -> tuple:
-        conn = TCPConnector(limit=100, ssl=False, ttl_dns_cache=69, resolver=AsyncResolver())
+        conn = TCPConnector(limit=100, ssl=False, ttl_dns_cache=69)
async with ClientSession(headers=headers, connector=conn) as s:
# add cookies from logged-in session
s.cookie_jar.update_cookies(self.session.cookies)
@@ -142,7 +142,7 @@ class Scraper:
logger.debug(f'failed to download {url}: {e}')
async def pagination(self, res: list, operation: tuple, limit: int) -> tuple:
-        conn = TCPConnector(limit=100, ssl=False, ttl_dns_cache=69, resolver=AsyncResolver())
+        conn = TCPConnector(limit=100, ssl=False, ttl_dns_cache=69)
headers = get_headers(self.session)
headers['content-type'] = "application/json"
async with ClientSession(headers=headers, connector=conn) as s: