diff --git a/JianshuResearchTools/__init__.py b/JianshuResearchTools/__init__.py index c016e79..f431ded 100644 --- a/JianshuResearchTools/__init__.py +++ b/JianshuResearchTools/__init__.py @@ -1,4 +1,4 @@ -__version__ = "2.10.0" +__version__ = "2.10.1" from . import (article, beikeisland, collection, island, notebook, objects, rank, user) diff --git a/JianshuResearchTools/article.py b/JianshuResearchTools/article.py index ab5fe93..9475c53 100644 --- a/JianshuResearchTools/article.py +++ b/JianshuResearchTools/article.py @@ -192,6 +192,7 @@ def GetArticlePublishTime(article_url: str, disable_check: bool = False) -> date Args: article_url (str): 文章 URL + disable_check (bool): 禁用参数有效性检查. Defaults to False. Returns: datetime: 文章发布时间 @@ -530,7 +531,6 @@ def GetArticleAllCommentsData(article_id: int, count: int = 10, author_only: boo author_only (bool, optional): 为 True 时只获取作者发布的评论,包含作者发布的子评论及其父评论. Defaults to False. sorting_method (str, optional): 排序方式,为”positive“时按时间正序排列,为”reverse“时按时间倒序排列. Defaults to "positive". max_count (int, optional): 获取的文章评论信息数量上限,Defaults to None. - disable_check (bool): 禁用参数有效性检查. Defaults to False. Yields: Iterator[Dict], None, None]: 文章信息 diff --git a/JianshuResearchTools/assert_funcs.py b/JianshuResearchTools/assert_funcs.py index 22c65be..fa71a26 100644 --- a/JianshuResearchTools/assert_funcs.py +++ b/JianshuResearchTools/assert_funcs.py @@ -1,6 +1,6 @@ from functools import lru_cache -from typing import Any from re import compile as re_compile +from typing import Any from .basic_apis import (GetArticleJsonDataApi, GetCollectionJsonDataApi, GetIslandJsonDataApi, GetNotebookJsonDataApi, @@ -28,18 +28,18 @@ JIANSHU_ISLAND_POST_URL_REGEX = re_compile(r"^https://www\.jianshu\.com/gp/\w{16}/?$") -def AssertType(object: Any, type_obj: Any) -> None: +def AssertType(obj: Any, type_obj: Any) -> None: """判断对象是否是指定类型 Args: - object (Any): 需要进行判断的对象 + obj (Any): 需要进行判断的对象 type_obj (object): 目标类型 Raises: TypeError: 对象类型错误时抛出此错误 """ - if not isinstance(object, type_obj): - raise TypeError(f"{object} 不是 {type_obj.__name__} 类型,而是 { type(object).__name__ } 类型") + if not isinstance(obj, type_obj): - raise TypeError(f"{obj} 不是 {type_obj.__name__} 类型,而是 { type(obj).__name__ } 类型") + raise TypeError(f"{obj} 不是 {type_obj.__name__} 类型,而是 { type(obj).__name__ } 类型") def AssertJianshuUrl(string: str) -> None: diff --git a/JianshuResearchTools/basic_apis.py b/JianshuResearchTools/basic_apis.py index 53d4454..f952f99 100644 --- a/JianshuResearchTools/basic_apis.py +++ b/JianshuResearchTools/basic_apis.py @@ -1,8 +1,9 @@ -from typing import Dict, List, Union +from typing import Dict, List, Union, Optional from httpx import get as httpx_get from httpx import post as httpx_post from lxml import etree +from lxml.etree import _Element from .headers import (BeikeIsland_request_header, PC_header, api_request_header, mobile_header) @@ -37,7 +38,7 @@ def GetArticleJsonDataApi(article_url: str) -> Dict: return json_obj -def GetArticleHtmlJsonDataApi(article_url: str) -> Dict: +def GetArticleHtmlJsonDataApi(article_url: str) -> _Element: source = httpx_get(article_url, headers=PC_header).content html_obj = etree.HTML(source) json_obj = json_loads(html_obj.xpath("//script[@id='__NEXT_DATA__']/text()")[0]) @@ -190,7 +191,7 @@ def GetDailyArticleRankListJsonDataApi() -> Dict: return json_obj -def GetArticlesFPRankListJsonDataApi(date: str, type_: str) -> Dict: # 避免覆盖内置函数 +def GetArticlesFPRankListJsonDataApi(date: str, type_: Optional[str]) -> Dict: # 避免覆盖内置函数 params = { "date": date, "type": type_ } @@ -207,7 +208,7 @@ def GetUserJsonDataApi(user_url: str) -> Dict: return json_obj -def 
GetUserPCHtmlDataApi(user_url: str) -> Dict: +def GetUserPCHtmlDataApi(user_url: str) -> _Element: source = httpx_get(user_url, headers=PC_header).content html_obj = etree.HTML(source) return html_obj @@ -236,7 +237,7 @@ def GetUserArticlesListJsonDataApi(user_url: str, page: int, return json_obj -def GetUserFollowingListHtmlDataApi(user_url: str, page: int): +def GetUserFollowingListHtmlDataApi(user_url: str, page: int) -> _Element: request_url = user_url.replace("/u/", "/users/") + "/following" params = { "page": page @@ -246,7 +247,7 @@ def GetUserFollowingListHtmlDataApi(user_url: str, page: int): return html_obj -def GetUserFollowersListHtmlDataApi(user_url: str, page: int): +def GetUserFollowersListHtmlDataApi(user_url: str, page: int) -> _Element: request_url = user_url.replace("/u/", "/users/") + "/followers" params = { "page": page @@ -256,7 +257,7 @@ def GetUserFollowersListHtmlDataApi(user_url: str, page: int): return html_obj -def GetUserNextAnniversaryDayHtmlDataApi(user_slug: str): +def GetUserNextAnniversaryDayHtmlDataApi(user_slug: str) -> _Element: request_url = f"https://www.jianshu.com/mobile/u/{user_slug}/anniversary" source = httpx_get(request_url, headers=mobile_header).content html_obj = etree.HTML(source) @@ -270,7 +271,7 @@ def GetIslandPostJsonDataApi(post_slug: str) -> List[Dict]: return json_obj -def GetUserTimelineHtmlDataApi(uslug: str, max_id: int) -> Dict: +def GetUserTimelineHtmlDataApi(uslug: str, max_id: int) -> _Element: request_url = f"https://www.jianshu.com/users/{uslug}/timeline" params = { "max_id": max_id diff --git a/JianshuResearchTools/beikeisland.py b/JianshuResearchTools/beikeisland.py index 43460b8..25931e3 100644 --- a/JianshuResearchTools/beikeisland.py +++ b/JianshuResearchTools/beikeisland.py @@ -126,30 +126,45 @@ def GetBeikeIslandTradeOrderInfo(trade_type: str, page: int = 1) -> List[Dict]: "buy": 2, "sell": 1 }[trade_type] - json_obj = GetBeikeIslandTradeListJsonDataApi(pageIndex=page, retype=retype) + json_obj = GetBeikeIslandTradeListJsonDataApi(pageIndex=page, + retype=retype) result = [] for item in json_obj["data"]["tradelist"]: item_data = { - "tradeid": item["id"], - "tradeslug": item["tradeno"], # ? 我也不确定这个 no 什么意思,回来去问问 - "user": { - "jianshuname": item["jianshuname"], - "bkname": item["reusername"], # ? 
还有个 nickname,不知道哪个对 - "avatar_url": item["avatarurl"], - "userlevelcode": item["levelnum"], - "userlevel": item["userlevel"], - "user_trade_count": item["tradecount"] + "trade_id": item["id"], + "trade_slug": item["tradeno"], + "publish_time": datetime.fromisoformat(item["releasetime"]), + "status": { + "code": item["statuscode"], + "text": item["statustext"] }, - "total": item["recount"], - "traded": item["recount"] - item["cantradenum"], - "remaining": item["cantradenum"], - "price": item["reprice"], - "minimum_limit": item["minlimit"], - "percentage": item["compeletper"], - "statuscode": item["statuscode"], - "status": item["statustext"], - "publish_time": datetime.fromisoformat(item["releasetime"]) + "trade": { + "total": item["recount"], + "traded": item["recount"] - item["cantradenum"], + "remaining": item["cantradenum"], + "minimum_trade_limit": item["minlimit"], + "traded_percentage": round( + float(item["compeletper"]) / 100, 3 + ), + "price": item["reprice"], + } } + + if item["anonymity"]: + item_data["user"] = { + "is_anonymity": True + } + else: + item_data["user"] = { + "is_anonymity": False, + "name": item["reusername"], + "avatar_url": item["avatarurl"], + "level": { + "code": item["levelnum"], + "text": item["userlevel"] + } + } + result.append(item_data) return result diff --git a/JianshuResearchTools/convert.py b/JianshuResearchTools/convert.py index 38dfb54..b33e5f2 100644 --- a/JianshuResearchTools/convert.py +++ b/JianshuResearchTools/convert.py @@ -38,7 +38,7 @@ def UserSlugToUserId(user_slug: str) -> int: """用户 Slug 转用户 ID Args: - user_url (str): 用户 Slug + user_slug (str): 用户 Slug Returns: int: 用户 ID @@ -318,7 +318,7 @@ def IslandPostUrlToIslandPostSlug(post_url: str) -> str: """小岛文章 URL 转小岛帖子 Slug Args: - island_url (str): 小岛帖子 URL + post_url (str): 小岛帖子 URL Returns: str: 小岛帖子 Slug @@ -333,7 +333,7 @@ def IslandPostSlugToIslandPostUrl(post_slug: str) -> str: """小岛帖子 Slug 转小岛帖子 URL Args: - island_url (str): 小岛帖子 Slug + post_slug (str): 小岛帖子 Slug Returns: str: 小岛帖子 URL diff --git a/JianshuResearchTools/island.py b/JianshuResearchTools/island.py index b429753..207c581 100644 --- a/JianshuResearchTools/island.py +++ b/JianshuResearchTools/island.py @@ -11,7 +11,7 @@ __all__ = [ "GetIslandName", "GetIslandAvatarUrl", "GetIslandIntroduction", "GetIslandMembersCount", "GetIslandPostsCount", "GetIslandCategory", - "GetIslandPostFullConetnt", "GetIslandPosts", "GetIslandAllBasicData", + "GetIslandPostFullContent", "GetIslandPosts", "GetIslandAllBasicData", "GetIslandAllPostsData" ] @@ -124,11 +124,11 @@ def GetIslandCategory(island_url: str, disable_check: bool = False) -> str: return result -def GetIslandPostFullConetnt(post_url: str, disable_check: bool = False) -> str: +def GetIslandPostFullContent(post_url: str, disable_check: bool = False) -> str: """获取小岛帖子完整内容 Args: - island_url (str): 小岛 URL + post_url (str): 小岛帖子 URL disable_check (bool): 禁用参数有效性检查. Defaults to False. Returns: @@ -153,9 +153,9 @@ def GetIslandPosts(island_url: str, start_sort_id: int = None, count: int = 10, count (int, optional): 每次返回的数据数量. Defaults to 10. topic_id (int, optional): 话题 ID. Defaults to None. sorting_method (str, optional): 排序方法,"time" 为按照发布时间排序, - "comment_time" 为按照最近评论时间排序,"hot" 为按照热度排序. Defaults to "time". + "comment_time" 为按照最近评论时间排序,"hot" 为按照热度排序. Defaults to "time". get_full_content (bool, optional): 为 True 时,当检测到获取的帖子内容不全时, - 自动调用 GetIslandPostFullConetnt 函数获取完整内容并替换. Defaults to False. + 自动调用 GetIslandPostFullContent 函数获取完整内容并替换. Defaults to False. 
disable_check (bool): 禁用参数有效性检查. Defaults to False. Returns: @@ -170,7 +170,8 @@ def GetIslandPosts(island_url: str, start_sort_id: int = None, count: int = 10, "most_valuable": "best" }[sorting_method], json_obj = GetIslandPostsJsonDataApi(group_slug=IslandUrlToIslandSlug(island_url), - max_id=start_sort_id, count=count, topic_id=topic_id, order_by=order_by) + max_id=start_sort_id, count=count, topic_id=topic_id, + order_by=order_by) result = [] for item in json_obj: @@ -228,7 +229,7 @@ def GetIslandPosts(island_url: str, start_sort_id: int = None, count: int = 10, except KeyError: pass # 没有话题则跳过 if get_full_content and "..." in item_data["content"]: # 获取到的帖子内容不全 - item_data["content"] = GetIslandPostFullConetnt(IslandPostSlugToIslandPostUrl(item_data["pslug"]), + item_data["content"] = GetIslandPostFullContent(IslandPostSlugToIslandPostUrl(item_data["pslug"]), disable_check=True) result.append(item_data) return result @@ -272,7 +273,7 @@ def GetIslandAllPostsData(island_url: str, count: int = 10, sorting_method (str, optional): 排序方法,time 为按照发布时间排序, comment_time 为按照最近评论时间排序,hot 为按照热度排序. Defaults to "time". get_full_content (bool, optional): 为 True 时,当检测到获取的帖子内容不全时, - 自动调用 GetIslandPostFullConetnt 函数获取完整内容并替换. Defaults to False. + 自动调用 GetIslandPostFullContent 函数获取完整内容并替换. Defaults to False. max_count (int, optional): 获取的小岛帖子信息数量上限,Defaults to None. disable_check (bool): 禁用参数有效性检查. Defaults to False. diff --git a/JianshuResearchTools/objects.py b/JianshuResearchTools/objects.py index 3758a18..74e6cd3 100644 --- a/JianshuResearchTools/objects.py +++ b/JianshuResearchTools/objects.py @@ -10,7 +10,8 @@ from .convert import (ArticleSlugToArticleUrl, CollectionSlugToCollectionUrl, IslandSlugToIslandUrl, IslandUrlToIslandSlug, NotebookSlugToNotebookUrl, UserSlugToUserUrl, - UserUrlToUserSlug) + UserUrlToUserSlug, ArticleUrlToArticleSlug, + NotebookUrlToNotebookId, NotebookUrlToNotebookSlug, CollectionUrlToCollectionSlug) from .exceptions import InputError from .utils import CallWithoutCheck, NameValueMappingToString, OnlyOne @@ -85,7 +86,7 @@ def clear_cache(): _cache_dict.clear() -class User(): +class User: """用户类 """ def __init__(self, user_url: str = None, *, user_slug: str = None): @@ -416,7 +417,7 @@ def __str__(self) -> str: }, title="用户信息摘要") -class Article(): +class Article: """文章类 """ def __init__(self, article_url: str = None, article_slug: str = None): @@ -479,7 +480,7 @@ def slug(self) -> str: Returns: str: 文章 Slug """ - return article.GetArticleSlug(self._url) + return ArticleUrlToArticleSlug(self._url) @property @cache_result_wrapper @@ -709,7 +710,7 @@ def __str__(self) -> str: }, title="文章信息摘要") -class Notebook(): +class Notebook: """文集类 """ def __init__(self, notebook_url: str = None, notebook_slug: str = None): @@ -772,7 +773,7 @@ def id(self) -> int: Returns: int: 文集 ID """ - return notebook.GetNotebookId(self._url) + return NotebookUrlToNotebookId(self._url) @property @cache_result_wrapper @@ -782,7 +783,7 @@ def slug(self) -> str: Returns: str: 文集 Slug """ - return notebook.GetNotebookSlug(self._url) + return NotebookUrlToNotebookSlug(self._url) @property @cache_result_wrapper @@ -910,7 +911,7 @@ def __str__(self) -> str: }, title="文集信息摘要") -class Collection(): +class Collection: """专题类 """ def __init__(self, collection_url: str = None, collection_slug: str = None, @@ -979,7 +980,7 @@ def slug(self) -> str: Returns: str: 专题 Slug """ - return collection.GetCollectionSlug(self._url) + return CollectionUrlToCollectionSlug(self._url) @property @cache_result_wrapper @@ -1076,7 
+1077,7 @@ def editors_info(self, page: int = 1) -> List[Dict]: """获取专题编辑信息 Args: - page (int, optional): 页码. Defause to 1. + page (int, optional): 页码. Defaults to 1. Raises: InputError: 因缺少 ID 参数而无法获取结果时抛出此异常 @@ -1181,7 +1182,7 @@ def __str__(self) -> str: }, title="专题信息摘要") -class Island(): +class Island: """小岛类 """ def __init__(self, island_url: str = None, island_slug: str = None): diff --git a/JianshuResearchTools/rank.py b/JianshuResearchTools/rank.py index 290ded8..7c91130 100644 --- a/JianshuResearchTools/rank.py +++ b/JianshuResearchTools/rank.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta, date from typing import Dict, List from .basic_apis import (GetArticlesFPRankListJsonDataApi, @@ -67,13 +67,13 @@ def GetDailyArticleRankData() -> List[Dict]: return result -def GetArticleFPRankData(date: str = "latest") -> List[Dict]: +def GetArticleFPRankData(target_date: str = "latest") -> List[Dict]: """获取文章收益排行榜信息 目前只能获取 2020 年 6 月 20 日之后的数据。 Args: - date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". + target_date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". Raises: ResourceError: 对应日期的排行榜数据为空时抛出此异常 @@ -81,9 +81,9 @@ def GetArticleFPRankData(date: str = "latest") -> List[Dict]: Returns: List[Dict]: 文章收益排行榜信息 """ - if date == "latest": - date = (datetime.today() + timedelta(days=-1)).strftime(r"%Y%m%d") - json_obj = GetArticlesFPRankListJsonDataApi(date=date, type_=None) + if target_date == "latest": + target_date = (datetime.today() + timedelta(days=-1)).strftime(r"%Y%m%d") + json_obj = GetArticlesFPRankListJsonDataApi(date=target_date, type_=None) if json_obj["notes"] == []: - raise ResourceError(f"对应日期 {date} 的排行榜数据为空") + raise ResourceError(f"对应日期 {target_date} 的排行榜数据为空") result = [] @@ -102,13 +102,13 @@ def GetArticleFPRankData(date: str = "latest") -> List[Dict]: return result -def GetArticleFPRankBasicInfo(date: str = "latest") -> Dict: +def GetArticleFPRankBasicInfo(target_date: str = "latest") -> Dict: """获取文章收益排行榜信息 目前只能获取 2020 年 6 月 20 日之后的数据。 Args: - date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". + target_date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". Raises: ResourceError: 对应日期的排行榜数据为空时抛出此异常 @@ -116,11 +116,11 @@ def GetArticleFPRankBasicInfo(date: str = "latest") -> Dict: Returns: Dict: 文章收益排行榜基础信息 """ - if date == "latest": - date = (datetime.date.today() + datetime.timedelta(days=-1)).strftime("%Y%m%d") - json_obj = GetArticlesFPRankListJsonDataApi(date=date, type_=None) + if target_date == "latest": + target_date = (date.today() + timedelta(days=-1)).strftime("%Y%m%d") + json_obj = GetArticlesFPRankListJsonDataApi(date=target_date, type_=None) if json_obj["notes"] == []: - raise ResourceError(f"对应日期 {date} 的排行榜数据为空") + raise ResourceError(f"对应日期 {target_date} 的排行榜数据为空") result = { "total_fp": json_obj["fp"], "fp_to_author": json_obj["author_fp"], @@ -129,13 +129,14 @@ def GetArticleFPRankBasicInfo(date: str = "latest") -> Dict: return result -def GetUserFPRankData(date: str = "latest", rank_type: str = "all") -> List[Dict]: +def GetUserFPRankData(target_date: str = "latest", rank_type: str = "all") -> List[Dict]: """获取用户收益排行榜信息 目前只能获取 2020 年 6 月 20 日之后的数据。 Args: - date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". + target_date (str, optional): 日期,格式“YYYYMMDD”. Defaults to "latest". 
+ rank_type (str, optional): 排行榜分类,"all" 为总收益榜,"write" 为内容收益榜,"vote" 为投票收益榜 Raises: ResourceError: 对应日期的排行榜数据为空时抛出此异常 @@ -148,9 +149,9 @@ def GetUserFPRankData(date: str = "latest", rank_type: str = "all") -> List[Dict "write": "note", "vote": "like" }[rank_type] - json_obj = GetArticlesFPRankListJsonDataApi(date=date, type_=type_) + json_obj = GetArticlesFPRankListJsonDataApi(date=target_date, type_=type_) if json_obj["users"] == []: - raise ResourceError(f"对应日期 {date} 的排行榜数据为空") + raise ResourceError(f"对应日期 {target_date} 的排行榜数据为空") result = [] for ranking, item in enumerate(json_obj["users"]): item_data = { diff --git a/JianshuResearchTools/user.py b/JianshuResearchTools/user.py index fc08110..76bb72b 100644 --- a/JianshuResearchTools/user.py +++ b/JianshuResearchTools/user.py @@ -662,10 +662,11 @@ def GetUserTimelineInfo(user_url: str, max_id: int = 1000000000, disable_check: result = [] for block in blocks: - item_data = {} - item_data["operation_id"] = int(block.xpath("//li/@id")[0][5:]) - item_data["operation_type"] = block.xpath("//span[starts-with(@data-datetime, '20')]/@data-type")[0] - item_data["operation_time"] = datetime.fromisoformat(block.xpath("//span[starts-with(@data-datetime, '20')]/@data-datetime")[0]) + item_data = { + "operation_id": int(block.xpath("//li/@id")[0][5:]), + "operation_type": block.xpath("//span[starts-with(@data-datetime, '20')]/@data-type")[0], + "operation_time": datetime.fromisoformat(block.xpath("//span[starts-with(@data-datetime, '20')]/@data-datetime")[0]) + } if item_data["operation_type"] == "like_note": # 对文章点赞 item_data["operation_type"] = "like_article" # 鬼知道谁把对文章点赞写成 like_note 的 @@ -754,13 +755,13 @@ def GetUserTimelineInfo(user_url: str, max_id: int = 1000000000, disable_check: item_data["operator_name"] = block.xpath("//a[@class='nickname']/text()")[0] item_data["operator_url"] = UserSlugToUserUrl(block.xpath("//a[@class='nickname']/@href")[0][4:]) item_data["operator_avatar_url"] = block.xpath("//a[@class='avatar']/img/@src")[0] - item_data["target_collecton_title"] = block.xpath("//a[@class='title']/text()")[0] - item_data["target_collecton_url"] = CollectionSlugToCollectionUrl(block.xpath("//a[@class='title']/@href")[0][3:]) - item_data["target_collecton_avatar_url"] = block.xpath("//div[@class='follow-detail']/div/a/img/@src")[0] + item_data["target_collection_title"] = block.xpath("//a[@class='title']/text()")[0] + item_data["target_collection_url"] = CollectionSlugToCollectionUrl(block.xpath("//a[@class='title']/@href")[0][3:]) + item_data["target_collection_avatar_url"] = block.xpath("//div[@class='follow-detail']/div/a/img/@src")[0] item_data["target_user_name"] = block.xpath("//a[@class='creater']/text()")[0] item_data["target_user_url"] = UserSlugToUserUrl(block.xpath("//a[@class='creater']/@href")[0][3:]) - item_data["target_collecton_articles_count"] = int(findall(r"\d+", block.xpath("//div[@class='info'][1]/p/text()")[1])[0]) - item_data["target_collecton_subscribers_count"] = int(findall(r"\d+", block.xpath("//div[@class='info'][1]/p/text()")[1])[1]) + item_data["target_collection_articles_count"] = int(findall(r"\d+", block.xpath("//div[@class='info'][1]/p/text()")[1])[0]) + item_data["target_collection_subscribers_count"] = int(findall(r"\d+", block.xpath("//div[@class='info'][1]/p/text()")[1])[1]) elif item_data["operation_type"] == "like_user": # 关注用户 item_data["operation_type"] = "follow_user" # 鬼知道谁把关注用户写成 like_user 的 diff --git a/Pipfile b/Pipfile index dbf704c..0df05e9 100644 --- a/Pipfile +++ b/Pipfile @@ -10,12 
+10,13 @@ tomd = "==0.1.3" ujson = "==5.3.0" [dev-packages] -pytest = "==7.0.1" -pytest-xdist = "==2.5.0" -pytest-cov = "==3.0.0" -flake8 = "==4.0.1" -mypy = "==0.960" -pyyaml = "==6.0.0" +pytest = "*" +pytest-xdist = "*" +pytest-cov = "*" +flake8 = "*" +mypy = "*" +pyyaml = "*" +yapf = "*" [requires] python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock index 248201d..0598248 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "814865d21536859e1d55e9d06c88469b59554582d19fab854a8777de6d045129" + "sha256": "101db41b6c1888faa2942f0072d5871901a5c42b39df05c669cd26bbff67a9d2" }, "pipfile-spec": 6, "requires": { @@ -37,7 +37,7 @@ "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597", "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df" ], - "markers": "python_full_version >= '3.5.0'", + "markers": "python_version >= '3.5'", "version": "==2.0.12" }, "h11": { @@ -69,7 +69,7 @@ "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff", "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d" ], - "markers": "python_full_version >= '3.5.0'", + "markers": "python_version >= '3.5'", "version": "==3.3" }, "lxml": { @@ -154,7 +154,7 @@ "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663", "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de" ], - "markers": "python_full_version >= '3.5.0'", + "markers": "python_version >= '3.5'", "version": "==1.2.0" }, "tomd": { @@ -251,50 +251,50 @@ "toml" ], "hashes": [ - "sha256:00c8544510f3c98476bbd58201ac2b150ffbcce46a8c3e4fb89ebf01998f806a", - "sha256:016d7f5cf1c8c84f533a3c1f8f36126fbe00b2ec0ccca47cc5731c3723d327c6", - "sha256:03014a74023abaf5a591eeeaf1ac66a73d54eba178ff4cb1fa0c0a44aae70383", - "sha256:033ebec282793bd9eb988d0271c211e58442c31077976c19c442e24d827d356f", - "sha256:21e6686a95025927775ac501e74f5940cdf6fe052292f3a3f7349b0abae6d00f", - "sha256:26f8f92699756cb7af2b30720de0c5bb8d028e923a95b6d0c891088025a1ac8f", - "sha256:2e76bd16f0e31bc2b07e0fb1379551fcd40daf8cdf7e24f31a29e442878a827c", - "sha256:341e9c2008c481c5c72d0e0dbf64980a4b2238631a7f9780b0fe2e95755fb018", - "sha256:3cfd07c5889ddb96a401449109a8b97a165be9d67077df6802f59708bfb07720", - "sha256:4002f9e8c1f286e986fe96ec58742b93484195defc01d5cc7809b8f7acb5ece3", - "sha256:50ed480b798febce113709846b11f5d5ed1e529c88d8ae92f707806c50297abf", - "sha256:543e172ce4c0de533fa892034cce260467b213c0ea8e39da2f65f9a477425211", - "sha256:5a78cf2c43b13aa6b56003707c5203f28585944c277c1f3f109c7b041b16bd39", - "sha256:5cd698341626f3c77784858427bad0cdd54a713115b423d22ac83a28303d1d95", - "sha256:60c2147921da7f4d2d04f570e1838db32b95c5509d248f3fe6417e91437eaf41", - "sha256:62d382f7d77eeeaff14b30516b17bcbe80f645f5cf02bb755baac376591c653c", - "sha256:69432946f154c6add0e9ede03cc43b96e2ef2733110a77444823c053b1ff5166", - "sha256:727dafd7f67a6e1cad808dc884bd9c5a2f6ef1f8f6d2f22b37b96cb0080d4f49", - "sha256:742fb8b43835078dd7496c3c25a1ec8d15351df49fb0037bffb4754291ef30ce", - "sha256:750e13834b597eeb8ae6e72aa58d1d831b96beec5ad1d04479ae3772373a8088", - "sha256:7b546cf2b1974ddc2cb222a109b37c6ed1778b9be7e6b0c0bc0cf0438d9e45a6", - "sha256:83bd142cdec5e4a5c4ca1d4ff6fa807d28460f9db919f9f6a31babaaa8b88426", - "sha256:8d2e80dd3438e93b19e1223a9850fa65425e77f2607a364b6fd134fcd52dc9df", - "sha256:9229d074e097f21dfe0643d9d0140ee7433814b3f0fc3706b4abffd1e3038632", - "sha256:968ed5407f9460bd5a591cefd1388cc00a8f5099de9e76234655ae48cfdbe2c3", - 
"sha256:9c82f2cd69c71698152e943f4a5a6b83a3ab1db73b88f6e769fabc86074c3b08", - "sha256:a00441f5ea4504f5abbc047589d09e0dc33eb447dc45a1a527c8b74bfdd32c65", - "sha256:a022394996419142b33a0cf7274cb444c01d2bb123727c4bb0b9acabcb515dea", - "sha256:af5b9ee0fc146e907aa0f5fb858c3b3da9199d78b7bb2c9973d95550bd40f701", - "sha256:b5578efe4038be02d76c344007b13119b2b20acd009a88dde8adec2de4f630b5", - "sha256:b84ab65444dcc68d761e95d4d70f3cfd347ceca5a029f2ffec37d4f124f61311", - "sha256:c53ad261dfc8695062fc8811ac7c162bd6096a05a19f26097f411bdf5747aee7", - "sha256:cc173f1ce9ffb16b299f51c9ce53f66a62f4d975abe5640e976904066f3c835d", - "sha256:d548edacbf16a8276af13063a2b0669d58bbcfca7c55a255f84aac2870786a61", - "sha256:d55fae115ef9f67934e9f1103c9ba826b4c690e4c5bcf94482b8b2398311bf9c", - "sha256:d8099ea680201c2221f8468c372198ceba9338a5fec0e940111962b03b3f716a", - "sha256:e35217031e4b534b09f9b9a5841b9344a30a6357627761d4218818b865d45055", - "sha256:e4f52c272fdc82e7c65ff3f17a7179bc5f710ebc8ce8a5cadac81215e8326740", - "sha256:e637ae0b7b481905358624ef2e81d7fb0b1af55f5ff99f9ba05442a444b11e45", - "sha256:eef5292b60b6de753d6e7f2d128d5841c7915fb1e3321c3a1fe6acfe76c38052", - "sha256:fb45fe08e1abc64eb836d187b20a59172053999823f7f6ef4f18a819c44ba16f" + "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749", + "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982", + "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3", + "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9", + "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428", + "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e", + "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c", + "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9", + "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264", + "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605", + "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397", + "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d", + "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c", + "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815", + "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068", + "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b", + "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4", + "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4", + "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3", + "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84", + "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83", + "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4", + "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8", + "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb", + "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d", + "sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df", + "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6", + "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b", + "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72", + "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13", + 
"sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df", + "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc", + "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6", + "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28", + "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b", + "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4", + "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad", + "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46", + "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3", + "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9", + "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54" ], "markers": "python_version >= '3.7'", - "version": "==6.4" + "version": "==6.4.1" }, "execnet": { "hashes": [ @@ -328,32 +328,32 @@ }, "mypy": { "hashes": [ - "sha256:0ebfb3f414204b98c06791af37a3a96772203da60636e2897408517fcfeee7a8", - "sha256:239d6b2242d6c7f5822163ee082ef7a28ee02e7ac86c35593ef923796826a385", - "sha256:29dc94d9215c3eb80ac3c2ad29d0c22628accfb060348fd23d73abe3ace6c10d", - "sha256:2c7f8bb9619290836a4e167e2ef1f2cf14d70e0bc36c04441e41487456561409", - "sha256:33d53a232bb79057f33332dbbb6393e68acbcb776d2f571ba4b1d50a2c8ba873", - "sha256:3a3e525cd76c2c4f90f1449fd034ba21fcca68050ff7c8397bb7dd25dd8b8248", - "sha256:3eabcbd2525f295da322dff8175258f3fc4c3eb53f6d1929644ef4d99b92e72d", - "sha256:481f98c6b24383188c928f33dd2f0776690807e12e9989dd0419edd5c74aa53b", - "sha256:7a76dc4f91e92db119b1be293892df8379b08fd31795bb44e0ff84256d34c251", - "sha256:7d390248ec07fa344b9f365e6ed9d205bd0205e485c555bed37c4235c868e9d5", - "sha256:826a2917c275e2ee05b7c7b736c1e6549a35b7ea5a198ca457f8c2ebea2cbecf", - "sha256:85cf2b14d32b61db24ade8ac9ae7691bdfc572a403e3cb8537da936e74713275", - "sha256:8d645e9e7f7a5da3ec3bbcc314ebb9bb22c7ce39e70367830eb3c08d0140b9ce", - "sha256:925aa84369a07846b7f3b8556ccade1f371aa554f2bd4fb31cb97a24b73b036e", - "sha256:a85a20b43fa69efc0b955eba1db435e2ffecb1ca695fe359768e0503b91ea89f", - "sha256:bfd4f6536bd384c27c392a8b8f790fd0ed5c0cf2f63fc2fed7bce56751d53026", - "sha256:cb7752b24528c118a7403ee955b6a578bfcf5879d5ee91790667c8ea511d2085", - "sha256:cc537885891382e08129d9862553b3d00d4be3eb15b8cae9e2466452f52b0117", - "sha256:d4fccf04c1acf750babd74252e0f2db6bd2ac3aa8fe960797d9f3ef41cf2bfd4", - "sha256:f1ba54d440d4feee49d8768ea952137316d454b15301c44403db3f2cb51af024", - "sha256:f47322796c412271f5aea48381a528a613f33e0a115452d03ae35d673e6064f8", - "sha256:fbfb873cf2b8d8c3c513367febde932e061a5f73f762896826ba06391d932b2a", - "sha256:ffdad80a92c100d1b0fe3d3cf1a4724136029a29afe8566404c0146747114382" + "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5", + "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66", + "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e", + "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56", + "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e", + "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d", + "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813", + "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932", + "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569", + "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b", + 
"sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0", + "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648", + "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6", + "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950", + "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15", + "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723", + "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a", + "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3", + "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6", + "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24", + "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b", + "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d", + "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492" ], "index": "pypi", - "version": "==0.960" + "version": "==0.961" }, "mypy-extensions": { "hashes": [ @@ -412,11 +412,11 @@ }, "pytest": { "hashes": [ - "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db", - "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171" + "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c", + "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45" ], "index": "pypi", - "version": "==7.0.1" + "version": "==7.1.2" }, "pytest-cov": { "hashes": [ @@ -479,7 +479,7 @@ "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" ], "index": "pypi", - "version": "==6.0.0" + "version": "==6.0" }, "tomli": { "hashes": [ @@ -496,6 +496,14 @@ ], "markers": "python_version >= '3.7'", "version": "==4.2.0" + }, + "yapf": { + "hashes": [ + "sha256:8fea849025584e486fd06d6ba2bed717f396080fd3cc236ba10cb97c4c51cf32", + "sha256:a3f5085d37ef7e3e004c4ba9f9b3e40c54ff1901cd111f05145ae313a7c67d1b" + ], + "index": "pypi", + "version": "==0.32.0" } } } diff --git a/setup.py b/setup.py index c1d1de3..7d92fca 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ }, classifiers=[ "Programming Language :: Python :: 3", - "License :: OSI Approved :: MIT License", + "License :: OSI Approved :: MIT Licerense", "Operating System :: OS Independent", ], python_requires=">=3.6" diff --git a/test_all.py b/test_all.py index 023c996..bf87fed 100644 --- a/test_all.py +++ b/test_all.py @@ -55,12 +55,12 @@ def AssertListCase(value: List[Any], case: List[Any]): test_cases = yaml_load(f, Loader=FullLoader) -class TestEggs(): # 测试彩蛋内容 +class TestEggs: # 测试彩蛋内容 def TestFuture(self): jrt.future() -class TestConvertModule(): +class TestConvertModule: def test_UserUrlToUserId(self): for case in test_cases["convert_cases"]["user_convert_cases"]: AssertNormalCase(UserUrlToUserId(case["url"]), case["uid"]) @@ -122,7 +122,7 @@ def test_IslandSlugToIslandUrl(self): AssertNormalCase(IslandSlugToIslandUrl(case["islug"]), case["url"]) -class TestArticleModule(): +class TestArticleModule: def test_GetArticleTitle(self): for case in test_cases["article_cases"]["success_cases"]: AssertNormalCase(jrt.article.GetArticleTitle(case["url"]), case["title"]) @@ -236,7 +236,7 @@ def test_GetArticleCommentStatus(self): jrt.article.GetArticleCommentStatus(case["url"]) -class TestUserModule(): +class TestUserModule: def test_GetUserName(self): for case in test_cases["user_cases"]["success_cases"]: AssertNormalCase(jrt.user.GetUserName(case["url"]), case["name"]) @@ 
-342,7 +342,7 @@ def test_GetUserNextAnniversaryDay(self): jrt.user.GetUserNextAnniversaryDay(case["url"]) -class TestCollectionModule(): +class TestCollectionModule: def test_GetCollectionAvatarUrl(self): for case in test_cases["collection_cases"]["success_cases"]: AssertNormalCase(jrt.collection.GetCollectionAvatarUrl(case["url"]), case["avatar_url"]) @@ -384,7 +384,7 @@ def test_GetCollectionInformationUpdateTime(self): jrt.collection.GetCollectionInformationUpdateTime(case["url"]) -class TestIslandModule(): +class TestIslandModule: def test_GetArticleName(self): for case in test_cases["island_cases"]["success_cases"]: AssertNormalCase(jrt.island.GetIslandName(case["url"]), case["name"]) @@ -426,7 +426,7 @@ def test_GetIslandCategory(self): jrt.island.GetIslandCategory(case["url"]) -class TestNotebookModule(): +class TestNotebookModule: def test_GetNotebookName(self): for case in test_cases["notebook_cases"]["success_cases"]: AssertNormalCase(jrt.notebook.GetNotebookName(case["url"]), case["name"]) @@ -449,7 +449,7 @@ def test_GetNotebookAuthorName(self): for case in test_cases["notebook_cases"]["fail_cases"]: with pytest.raises(error_text_to_obj[case["exception_name"]]): - jrt.notebook.GetNotebookAuthorInfo(case["url"])["name"] + _ = jrt.notebook.GetNotebookAuthorInfo(case["url"])["name"] def test_GetNotebookAuthorAvatarUrl(self): for case in test_cases["notebook_cases"]["success_cases"]: @@ -457,7 +457,7 @@ def test_GetNotebookAuthorAvatarUrl(self): for case in test_cases["notebook_cases"]["fail_cases"]: with pytest.raises(error_text_to_obj[case["exception_name"]]): - jrt.notebook.GetNotebookAuthorInfo(case["url"])["author_avatar_url"] + _ = jrt.notebook.GetNotebookAuthorInfo(case["url"])["author_avatar_url"] def test_GetNotebookWordage(self): for case in test_cases["notebook_cases"]["success_cases"]:
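
Usage sketch (illustrative, not part of the patch): the snippet below exercises the interfaces this diff renames or restructures — the `target_date` parameter in rank.py, the nested order dict now built by `GetBeikeIslandTradeOrderInfo`, and the corrected `GetIslandPostFullContent` name. Dictionary keys are taken from the hunks above; the island post URL is a placeholder and every call performs a live HTTP request.

```python
import JianshuResearchTools as jrt

# rank.py: the argument is now target_date, so it no longer shadows
# datetime.date; "latest" still falls back to yesterday's ranking and a
# ResourceError is raised when the requested day has no data.
basic_info = jrt.rank.GetArticleFPRankBasicInfo(target_date="latest")
print(basic_info["total_fp"], basic_info["fp_to_author"])

# beikeisland.py: the flat trade dict is now grouped into "status",
# "trade" and "user" blocks; anonymous traders only carry is_anonymity.
for order in jrt.beikeisland.GetBeikeIslandTradeOrderInfo("buy", page=1):
    trade, user = order["trade"], order["user"]
    counterparty = "anonymous" if user["is_anonymity"] else user["name"]
    print(order["trade_id"], trade["price"], trade["traded_percentage"], counterparty)

# island.py: GetIslandPostFullConetnt was renamed to GetIslandPostFullContent.
post_url = "https://www.jianshu.com/gp/xxxxxxxxxxxxxxxx"  # placeholder post URL
full_content = jrt.island.GetIslandPostFullContent(post_url)
```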