[e621] update to the new e621 interface / API endpoints (closes #635)
parent commit: d1cf7ccdb3
this commit:  ebc70e87ce
@ -1,71 +1,193 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
# Copyright 2014-2019 Mike Fährmann
|
# Copyright 2014-2020 Mike Fährmann
|
||||||
#
|
#
|
||||||
# This program is free software; you can redistribute it and/or modify
|
# This program is free software; you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU General Public License version 2 as
|
# it under the terms of the GNU General Public License version 2 as
|
||||||
# published by the Free Software Foundation.
|
# published by the Free Software Foundation.
|
||||||
|
|
||||||
"""Extract images from https://e621.net/"""
|
"""Extractors for https://e621.net/"""
|
||||||
|
|
||||||
from .common import Extractor, Message, SharedConfigMixin
from . import booru
from .. import text
import datetime
import time
|
|
||||||
|
|
||||||
BASE_PATTERN = r"(?:https?://)?e(621|926)\.net"


class E621Extractor(SharedConfigMixin, Extractor):
    """Base class for e621 extractors"""
    basecategory = "booru"
    category = "e621"
    filename_fmt = "{category}_{id}_{file[md5]}.{extension}"
    page_limit = 750
    page_start = None
    per_page = 200
    # class-level timestamp: shared by all instances so the 1 request/second
    # rate limit below is enforced globally
    _last_request = 0

    def __init__(self, match):
        Extractor.__init__(self, match)
        # group(1) of BASE_PATTERN is "621" or "926"
        self.root = "https://e{}.net".format(match.group(1))
        self.params = {}

        username, api_key = self._get_auth_info()
        if username:
            self.log.debug("Using HTTP Basic Auth for user '%s'", username)
            self.session.auth = (username, api_key)

    def request(self, url, **kwargs):
        """Send an HTTP request, throttled to at most one per second.

        e621 requires a descriptive User-Agent and limits request rates;
        both are handled here for all subclasses.
        """
        diff = time.time() - E621Extractor._last_request
        if diff < 1.0:
            # Sleep for the *remaining* part of the one-second window.
            # (Sleeping for 'diff' itself would invert the throttle:
            # the sooner the next request, the shorter the wait.)
            delay = 1.0 - diff
            self.log.debug("Sleeping for %s seconds", delay)
            time.sleep(delay)
        kwargs["headers"] = {"User-Agent": "gallery-dl/1.13.0 (by mikf)"}
        response = Extractor.request(self, url, **kwargs)
        E621Extractor._last_request = time.time()
        return response

    def items(self):
        """Yield Directory/Url messages for every post from posts()."""
        data = self.metadata()
        for post in self.posts():
            finfo = post["file"]

            if not finfo["url"]:
                # Some posts come without a direct URL; reconstruct it
                # from the md5 hash (e621's static file layout:
                # /data/<md5[0:2]>/<md5[2:4]>/<md5>.<ext>).
                ihash = finfo["md5"]
                finfo["url"] = "https://static1.{}/data/{}/{}/{}.{}".format(
                    self.root[8:], ihash[0:2], ihash[2:4], ihash,
                    finfo["ext"])

            post["filename"] = finfo["md5"]
            post["extension"] = finfo["ext"]
            post.update(data)
            yield Message.Directory, post
            yield Message.Url, finfo["url"], post

    def metadata(self):
        """Return extra metadata merged into every post; overridden by
        subclasses."""
        return {}

    def posts(self):
        """Return an iterable of post objects; overridden by subclasses."""
        return self._pagination(self.root + "/posts.json")

    def _pagination(self, url):
        """Yield posts from 'url', page by page.

        Pagination uses an 'id:<N' tag filter instead of page numbers,
        which avoids e621's page limit on deep results.
        """
        params = self.params.copy()
        params["limit"] = self.per_page
        tags = params.get("tags", "")

        while True:
            posts = self.request(url, params=params).json()["posts"]
            yield from posts

            if len(posts) < self.per_page:
                return
            # continue below the smallest id seen so far
            params["tags"] = "id:<{} {}".format(posts[-1]["id"], tags)
|
class E621TagExtractor(E621Extractor):
    """Extractor for e621 posts from tag searches"""
    subcategory = "tag"
    directory_fmt = ("{category}", "{search_tags}")
    archive_fmt = "t_{search_tags}_{id}"
    # matches both the new '/posts?tags=' and the legacy
    # '/post/index/<page>/<tags>' URL formats
    pattern = BASE_PATTERN + r"/posts?(?:\?.*?tags=|/index/\d+/)([^&#]+)"
    test = (
        ("https://e621.net/posts?tags=anry", {
            "url": "8021e5ea28d47c474c1ffc9bd44863c4d45700ba",
            "content": "501d1e5d922da20ee8ff9806f5ed3ce3a684fd58",
        }),
        ("https://e926.net/posts?tags=anry"),
        ("https://e621.net/post/index/1/anry"),
        ("https://e621.net/post?tags=anry"),
    )

    def __init__(self, match):
        E621Extractor.__init__(self, match)
        # group(2) is the URL-encoded tag string; '+' separates tags
        self.params["tags"] = text.unquote(match.group(2).replace("+", " "))

    def metadata(self):
        return {"search_tags": self.params["tags"]}
||||||
class E621PoolExtractor(E621Extractor):
    """Extractor for e621 pools"""
    subcategory = "pool"
    directory_fmt = ("{category}", "pool", "{pool[id]} {pool[name]}")
    archive_fmt = "p_{pool[id]}_{id}"
    # matches both '/pools/<id>' and the legacy '/pool/show/<id>'
    pattern = BASE_PATTERN + r"/pool(?:s|/show)/(\d+)"
    test = (
        ("https://e621.net/pools/73", {
            "url": "842f2fb065c7c339486a9b1d689020b8569888ed",
            "content": "c2c87b7a9150509496cddc75ccab08109922876a",
        }),
        ("https://e621.net/pool/show/73"),
    )

    def __init__(self, match):
        E621Extractor.__init__(self, match)
        self.pool_id = match.group(2)
        # pool members are fetched through the regular post search
        self.params["tags"] = "pool:" + self.pool_id

    def metadata(self):
        """Fetch pool info and expose it as the 'pool' metadata field."""
        url = "{}/pools/{}.json".format(self.root, self.pool_id)
        pool = self.request(url).json()
        pool["name"] = pool["name"].replace("_", " ")
        # drop the (potentially huge) list of post IDs from the metadata
        del pool["post_ids"]
        return {"pool": pool}
|
class E621PostExtractor(E621Extractor):
    """Extractor for single e621 posts"""
    subcategory = "post"
    archive_fmt = "{id}"
    # matches both '/posts/<id>' and the legacy '/post/show/<id>'
    pattern = BASE_PATTERN + r"/post(?:s|/show)/(\d+)"
    test = (
        ("https://e621.net/posts/535", {
            "url": "f7f78b44c9b88f8f09caac080adc8d6d9fdaa529",
            "content": "66f46e96a893fba8e694c4e049b23c2acc9af462",
        }),
        ("https://e621.net/post/show/535"),
    )

    def __init__(self, match):
        E621Extractor.__init__(self, match)
        self.post_id = match.group(2)

    def posts(self):
        """Return a 1-tuple with the single requested post."""
        url = "{}/posts/{}.json".format(self.root, self.post_id)
        return (self.request(url).json()["post"],)
|
class E621PopularExtractor(E621Extractor):
    """Extractor for popular images from e621"""
    subcategory = "popular"
    directory_fmt = ("{category}", "popular", "{scale}", "{date}")
    archive_fmt = "P_{scale[0]}_{date}_{id}"
    pattern = BASE_PATTERN + r"/explore/posts/popular(?:\?([^#]*))?"
    test = (
        ("https://e621.net/explore/posts/popular"),
        (("https://e621.net/explore/posts/popular"
          "?date=2019-06-01&scale=month"), {
            "pattern": r"https://static\d.e621.net/data/../../[0-9a-f]+",
            "count": ">= 70",
        })
    )

    def __init__(self, match):
        E621Extractor.__init__(self, match)
        # group(2) is the optional query string, e.g. 'date=...&scale=...'
        self.params.update(text.parse_query(match.group(2)))

    def metadata(self):
        """Normalize 'date' to the start of the selected time window."""
        scale = self.params.get("scale", "day")
        date = self.params.get("date") or datetime.date.today().isoformat()
        date = date[:10]  # keep only the YYYY-MM-DD part

        if scale == "week":
            # snap to the Monday of that week
            date = datetime.date.fromisoformat(date)
            date = (date - datetime.timedelta(days=date.weekday())).isoformat()
        elif scale == "month":
            date = date[:-3]  # YYYY-MM

        return {"date": date, "scale": scale}

    def posts(self):
        url = self.root + "/explore/posts/popular.json"
        return self._pagination(url)
|
||||||
|
Loading…
Reference in new issue