Merge pull request #16 from NekoAria/main
kitUIN authored Mar 14, 2022
2 parents 43bf73d + 49c9c0b commit b96b528
Showing 40 changed files with 842 additions and 659 deletions.
8 changes: 4 additions & 4 deletions PicImageSearch/Async/__init__.py
@@ -1,7 +1,7 @@
-from .network import NetWork
-from .tracemoe import AsyncTraceMoe
-from .saucenao import AsyncSauceNAO
 from .ascii2d import AsyncAscii2D
-from .iqdb import AsyncIqdb
 from .baidu import AsyncBaiDu
 from .google import AsyncGoogle
+from .iqdb import AsyncIqdb
+from .network import NetWork
+from .saucenao import AsyncSauceNAO
+from .tracemoe import AsyncTraceMoe
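The change above only sorts the re-exports alphabetically; the public surface of the subpackage is unchanged. A minimal sketch of the imports this module serves, assuming PicImageSearch is installed:

# All async clients remain importable from the subpackage after the re-sort.
from PicImageSearch.Async import (
    AsyncAscii2D,
    AsyncBaiDu,
    AsyncGoogle,
    AsyncIqdb,
    AsyncSauceNAO,
    AsyncTraceMoe,
    NetWork,
)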
16 changes: 8 additions & 8 deletions PicImageSearch/Async/ascii2d.py
@@ -1,8 +1,8 @@
 from bs4 import BeautifulSoup
 from loguru import logger

-from .network import HandOver
 from ..Utils import Ascii2DResponse, get_error_message
+from .network import HandOver


 class AsyncAscii2D(HandOver):
@@ -25,8 +25,8 @@ def __init__(self, bovw=False, **requests_kwargs):

     @staticmethod
     def _slice(res) -> Ascii2DResponse:
-        soup = BeautifulSoup(res, 'html.parser')
-        resp = soup.find_all(class_='row item-box')
+        soup = BeautifulSoup(res, "html.parser")
+        resp = soup.find_all(class_="row item-box")
         return Ascii2DResponse(resp)

     async def search(self, url) -> Ascii2DResponse:
@@ -48,17 +48,17 @@ async def search(self, url) -> Ascii2DResponse:
         .raw[0].detail = First index of details image that was found
         """
         try:
-            if url[:4] == 'http':  # network URL
-                ascii2d_url = 'https://ascii2d.net/search/uri'
+            if url[:4] == "http":  # network URL
+                ascii2d_url = "https://ascii2d.net/search/uri"
                 res = await self.post(ascii2d_url, _data={"uri": url})
             else:  # local file
-                ascii2d_url = 'https://ascii2d.net/search/file'
-                res = await self.post(ascii2d_url, _files={"file": open(url, 'rb')})
+                ascii2d_url = "https://ascii2d.net/search/file"
+                res = await self.post(ascii2d_url, _files={"file": open(url, "rb")})

             if res.status_code == 200:
                 if self.bovw:
                     # with bovw enabled, the first request submits the image to the server
-                    res = await self.get(str(res.url).replace('/color/', '/bovw/'))
+                    res = await self.get(str(res.url).replace("/color/", "/bovw/"))
             else:
                 logger.error(res.status_code)
                 logger.error(get_error_message(res.status_code))
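For context, a minimal usage sketch of the class touched above (the image URL is hypothetical; assumes PicImageSearch is installed):

import asyncio

from PicImageSearch.Async import AsyncAscii2D


async def main():
    # bovw=True makes search() re-request the /bovw/ (feature search)
    # variant of the result URL instead of the default /color/ search.
    ascii2d = AsyncAscii2D(bovw=True)
    res = await ascii2d.search("https://example.com/image.jpg")  # hypothetical URL
    print(res.raw[0].detail)  # detail of the first match, per the docstring above


asyncio.run(main())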
50 changes: 26 additions & 24 deletions PicImageSearch/Async/baidu.py
@@ -1,40 +1,42 @@
 import time

-from .network import HandOver
 from PicImageSearch.Utils import BaiDuResponse

+from .network import HandOver

-class AsyncBaiDu(HandOver):

+class AsyncBaiDu(HandOver):
     def __init__(self, **requests_kwargs):
         super().__init__(**requests_kwargs)
-        self.url = 'https://graph.baidu.com/upload'
+        self.url = "https://graph.baidu.com/upload"
         self.requests_kwargs = requests_kwargs
         self.headers = {
-            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36 Edg/89.0.774.45'
+            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.72 Safari/537.36 Edg/89.0.774.45"
         }

     async def search(self, url: str) -> BaiDuResponse:
-        params = {
-            'uptime': int(time.time())
-        }
-        if url[:4] == 'http':  # network URL
-            m = {'image': url,
-                 'range': '{"page_from": "searchIndex"}',
-                 'from': "pc",
-                 'tn': 'pc',
-                 'image_source': 'PC_UPLOAD_MOVE',
-                 'sdkParams': '{"data":"a4388c3ef696d354e7f05402e1d38daf48bfb4f3d5bd941e2d0c920dc3b387065b7c85440986897b1f56ef6d352e3b94b3ea435ba5e1bb5a86c5feb88e2e9e1179abd5b8699370b6be8e7cfb96e6e605","key_id":"23","sign":"f22953e8"}'
-                 }
+        params = {"uptime": int(time.time())}
+        if url[:4] == "http":  # network URL
+            m = {
+                "image": url,
+                "range": '{"page_from": "searchIndex"}',
+                "from": "pc",
+                "tn": "pc",
+                "image_source": "PC_UPLOAD_MOVE",
+                "sdkParams": '{"data":"a4388c3ef696d354e7f05402e1d38daf48bfb4f3d5bd941e2d0c920dc3b387065b7c85440986897b1f56ef6d352e3b94b3ea435ba5e1bb5a86c5feb88e2e9e1179abd5b8699370b6be8e7cfb96e6e605","key_id":"23","sign":"f22953e8"}',
+            }
         else:  # file
-            m = {'image': ('filename', open(url, 'rb')),
-                 'range': '{"page_from": "searchIndex"}',
-                 'from': "pc",
-                 'tn': 'pc',
-                 'image_source': 'PC_UPLOAD_SEARCH_FILE',
-                 'sdkParams': '{"data":"a4388c3ef696d354e7f05402e1d38daf48bfb4f3d5bd941e2d0c920dc3b387065b7c85440986897b1f56ef6d352e3b94b3ea435ba5e1bb5a86c5feb88e2e9e1179abd5b8699370b6be8e7cfb96e6e605","key_id":"23","sign":"f22953e8"}'
-                 }
-        res = await self.post(self.url, _headers=self.headers, _params=params, _data=m)  # upload the file
-        url = res.json()['data']['url']
+            m = {
+                "image": ("filename", open(url, "rb")),
+                "range": '{"page_from": "searchIndex"}',
+                "from": "pc",
+                "tn": "pc",
+                "image_source": "PC_UPLOAD_SEARCH_FILE",
+                "sdkParams": '{"data":"a4388c3ef696d354e7f05402e1d38daf48bfb4f3d5bd941e2d0c920dc3b387065b7c85440986897b1f56ef6d352e3b94b3ea435ba5e1bb5a86c5feb88e2e9e1179abd5b8699370b6be8e7cfb96e6e605","key_id":"23","sign":"f22953e8"}',
+            }
+        res = await self.post(
+            self.url, _headers=self.headers, _params=params, _data=m
+        )  # upload the file
+        url = res.json()["data"]["url"]
         resp = await self.get(url, _headers=self.headers)
         return BaiDuResponse(resp)
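For context, a hedged sketch of how the reformatted search() is driven (file name hypothetical; assumes PicImageSearch is installed):

import asyncio

from PicImageSearch.Async import AsyncBaiDu


async def main():
    baidu = AsyncBaiDu()
    # search() branches on url[:4] == "http": URLs are sent as-is,
    # anything else is treated as a local path and uploaded as a file.
    res = await baidu.search("image.jpg")  # hypothetical local file
    print(res)


asyncio.run(main())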
33 changes: 17 additions & 16 deletions PicImageSearch/Async/google.py
@@ -1,10 +1,11 @@
+from urllib.parse import quote
+
 from bs4 import BeautifulSoup
 from loguru import logger

-from .network import HandOver
 from PicImageSearch.Utils import GoogleResponse
-from urllib.parse import quote

+from ..Utils import get_error_message
+from .network import HandOver


 class AsyncGoogle(HandOver):
@@ -22,23 +23,22 @@ class AsyncGoogle(HandOver):
     def __init__(self, **request_kwargs):
         super().__init__(**request_kwargs)
         params = dict()
-        self.url = 'https://www.google.com/searchbyimage'
+        self.url = "https://www.google.com/searchbyimage"
         self.params = params
         self.header = {
-            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0',
+            "User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0",
         }
         self.requests_kwargs = request_kwargs

     @staticmethod
     def _slice(res, index) -> GoogleResponse:
-        soup = BeautifulSoup(res, 'html.parser')
-        resp = soup.find_all(class_='g')
+        soup = BeautifulSoup(res, "html.parser")
+        resp = soup.find_all(class_="g")
         pages = soup.find_all("td")
         return GoogleResponse(resp, pages[1:], index)

     async def goto_page(self, url, index):
-        response = await self.get(
-            url, _headers=self.header)
+        response = await self.get(url, _headers=self.header)
         if response.status_code == 200:
             return self._slice(response.text, index)

@@ -60,16 +60,17 @@ async def search(self, url) -> GoogleResponse:
         """
         try:
             params = self.params
-            if url[:4] == 'http':
-                encoded_image_url = quote(url, safe='')
-                params['image_url'] = encoded_image_url
+            if url[:4] == "http":
+                encoded_image_url = quote(url, safe="")
+                params["image_url"] = encoded_image_url
                 response = await self.get(
-                    self.url, _params=params, _headers=self.header)
+                    self.url, _params=params, _headers=self.header
+                )
             else:
-                multipart = {'encoded_image': (
-                    url, open(url, 'rb'))}
+                multipart = {"encoded_image": (url, open(url, "rb"))}
                 response = await self.post(
-                    f"{self.url}/upload", _files=multipart, _headers=self.header)
+                    f"{self.url}/upload", _files=multipart, _headers=self.header
+                )
             if response.status_code == 200:
                 return self._slice(response.text, 1)
             else:
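For context, a minimal sketch of the Google client's flow (URL hypothetical; assumes PicImageSearch is installed):

import asyncio

from PicImageSearch.Async import AsyncGoogle


async def main():
    google = AsyncGoogle()
    # search() returns page 1; _slice() also collects the "td" pagination
    # cells, which goto_page(url, index) uses to fetch later result pages.
    res = await google.search("https://example.com/image.jpg")  # hypothetical URL
    print(res)


asyncio.run(main())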
25 changes: 10 additions & 15 deletions PicImageSearch/Async/iqdb.py
@@ -1,7 +1,8 @@
 from loguru import logger
-from .network import HandOver
 from PicImageSearch.Utils.iqdb import IqdbResponse

+from ..Utils import get_error_message
+from .network import HandOver


 class AsyncIqdb(HandOver):
@@ -19,8 +20,8 @@ class AsyncIqdb(HandOver):
     def __init__(self, **requests_kwargs):
         super().__init__(**requests_kwargs)
         self.requests_kwargs = requests_kwargs
-        self.url = 'https://www.iqdb.org/'
-        self.url_3d = 'https://3d.iqdb.org/'
+        self.url = "https://www.iqdb.org/"
+        self.url_3d = "https://3d.iqdb.org/"

     async def search(self, url) -> IqdbResponse:
         """
@@ -49,14 +50,11 @@ async def search(self, url) -> IqdbResponse:
         """
         try:

-            if url[:4] == 'http':  # network URL
-                datas = {
-                    "url": url
-                }
+            if url[:4] == "http":  # network URL
+                datas = {"url": url}
                 res = await self.post(self.url, _data=datas)
             else:  # local file
-                res = await self.post(self.url,
-                                      _files={'file': open(url, 'rb')})
+                res = await self.post(self.url, _files={"file": open(url, "rb")})
             if res.status_code == 200:
                 # logger.info(res.text)
                 return IqdbResponse(res.content)
@@ -84,14 +82,11 @@ async def search_3d(self, url) -> IqdbResponse:
         .raw[0].size = First index detail of image size that was found
         """
         try:
-            if url[:4] == 'http':  # network URL
-                datas = {
-                    "url": url
-                }
+            if url[:4] == "http":  # network URL
+                datas = {"url": url}
                 res = await self.post(self.url_3d, _data=datas)
             else:  # local file
-                res = await self.post(self.url_3d,
-                                      _files={'file': open(url, 'rb')})
+                res = await self.post(self.url_3d, _files={"file": open(url, "rb")})
             if res.status_code == 200:
                 return IqdbResponse(res.content)
             else:
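For context, a minimal usage sketch (URL hypothetical; assumes PicImageSearch is installed):

import asyncio

from PicImageSearch.Async import AsyncIqdb


async def main():
    iqdb = AsyncIqdb()
    # search() posts to www.iqdb.org, search_3d() to 3d.iqdb.org;
    # apart from the target URL the two methods are identical.
    res = await iqdb.search("https://example.com/image.jpg")  # hypothetical URL
    print(res.raw[0].size)  # size of the first match, per the docstring above


asyncio.run(main())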
41 changes: 29 additions & 12 deletions PicImageSearch/Async/network.py
@@ -6,7 +6,15 @@


 class NetWork:
-    def __init__(self, limit=30, max_connections=100, timeout=20, env=False, internal=False, proxy=None):
+    def __init__(
+        self,
+        limit=30,
+        max_connections=100,
+        timeout=20,
+        env=False,
+        internal=False,
+        proxy=None,
+    ):
         """

         :param limit:
@@ -22,8 +30,10 @@ def __init__(self, limit=30, max_connections=100, timeout=20, env=False, internal=False, proxy=None):
             verify=False,
             timeout=httpx.Timeout(timeout, connect=60),
             proxies=self.proxy,
-            limits=httpx.Limits(max_keepalive_connections=limit, max_connections=max_connections),
-            trust_env=env
+            limits=httpx.Limits(
+                max_keepalive_connections=limit, max_connections=max_connections
+            ),
+            trust_env=env,
         )

     def start(self):
@@ -55,9 +65,8 @@ async def __aenter__(self):
         return self.session

     async def __aexit__(self, exception_type, exception_value, traceback):
-        if isinstance(self.session, NetWork):
-            if self.session.internal:
-                await self.session.close()
+        if isinstance(self.session, NetWork) and self.session.internal:
+            await self.session.close()


 class HandOver(object):
@@ -73,25 +82,33 @@ async def get(self, _url, _headers=None, _params=None):
             await asyncio.sleep(0)
             return res

-    async def post(self, _url, _headers=None, _params=None, _data=None, _json=None, _files=None):
+    async def post(
+        self, _url, _headers=None, _params=None, _data=None, _json=None, _files=None
+    ):
         async with ClientManager(self.session, self.env, self.proxy) as session:
             if _json:
-                res = await session.post(_url, headers=_headers, params=_params, json=_json)
+                res = await session.post(
+                    _url, headers=_headers, params=_params, json=_json
+                )
             elif _files:
-                res = await session.post(_url, headers=_headers, params=_params, files=_files)
+                res = await session.post(
+                    _url, headers=_headers, params=_params, files=_files
+                )
             else:
-                res = await session.post(_url, headers=_headers, params=_params, data=_data)
+                res = await session.post(
+                    _url, headers=_headers, params=_params, data=_data
+                )
             await asyncio.sleep(0)
             return res

-    async def downloader(self, url='', path=None, filename=''):  # downloader
+    async def downloader(self, url="", path=None, filename=""):  # downloader
         async with ClientManager(self.session, self.env, self.proxy) as session:
             async with session.stream("GET", url=url) as r:
                 if path:
                     file = Path(path).joinpath(filename)
                 else:
                     file = Path().cwd().joinpath(filename)
-                async with aiofiles.open(file, 'wb') as out_file:
+                async with aiofiles.open(file, "wb") as out_file:
                     async for chunk in r.aiter_bytes():
                         await out_file.write(chunk)
                 return file
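For context, a hedged sketch of the inherited downloader() shown above (URL and file name hypothetical; assumes PicImageSearch is installed):

import asyncio

from PicImageSearch.Async import AsyncAscii2D


async def main():
    # Any HandOver subclass inherits downloader(), which streams the
    # response body to disk chunk by chunk via aiofiles instead of
    # buffering it all in memory.
    client = AsyncAscii2D()
    saved = await client.downloader(
        "https://example.com/image.jpg", filename="image.jpg"  # hypothetical URL
    )
    print(saved)  # Path to the written file


asyncio.run(main())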