class UserArticleAuthorInfo(DataObject, **DATA_OBJECT_CONFIG):
    """Author details attached to an article in a user's public listing."""

    id: PositiveInt  # noqa: A003
    slug: UserSlugStr
    name: UserNameStr
    avatar_url: UserUploadedUrlStr

    def to_user_obj(self) -> "User":
        """Build a ``User`` object for this author from its slug.

        The result is marked as coming from a trusted source, so it skips
        re-validation of the slug.
        """
        # Local import avoids an import cycle at module load time.
        from jkit.user import User

        user = User.from_slug(self.slug)
        return user._from_trusted_source()


class UserArticleInfo(DataObject, **DATA_OBJECT_CONFIG):
    """One article entry from a user's public article listing."""

    id: PositiveInt  # noqa: A003
    slug: ArticleSlugStr
    title: NonEmptyStr
    description: NonEmptyStr
    # None when the article has no cover image.
    image_url: Optional[UserUploadedUrlStr]
    published_at: NormalizedDatetime
    is_top: bool
    is_paid: bool
    can_comment: bool
    author_info: UserArticleAuthorInfo

    views_count: NonNegativeInt
    likes_count: NonNegativeInt
    comments_count: NonNegativeInt
    tips_count: NonNegativeInt
    earned_fp_amount: NonNegativeFloat

    def to_article_obj(self) -> "Article":
        """Build an ``Article`` object for this entry from its slug.

        The result is marked as coming from a trusted source, so it skips
        re-validation of the slug.
        """
        # Local import avoids an import cycle at module load time.
        from jkit.article import Article

        article = Article.from_slug(self.slug)
        return article._from_trusted_source()
__init__( self, *, url: Optional[str] = None, slug: Optional[str] = None @@ -345,3 +393,81 @@ async def notebooks( )._validate() for item in data["notebooks"] ) + + async def get_articles( + self, + page: int = 1, + order_by: Literal[ + "published_at", "last_comment_time", "popularity" + ] = "published_at", + page_size: int = 10, + ) -> Tuple[UserArticleInfo, ...]: + data: List[Dict[str, Any]] = await get_json( + endpoint=ENDPOINT_CONFIG.jianshu, + path=f"/asimov/users/slug/{self.slug}/public_notes", + params={ + "page": page, + "count": page_size, + "order_by": { + "published_at": "shared_at", + "last_comment_time": "commented_at", + "popularity": "top", + }[order_by], + }, + ) # type: ignore + + return tuple( + UserArticleInfo( + id=item["object"]["data"]["id"], + slug=item["object"]["data"]["slug"], + title=item["object"]["data"]["title"], + description=item["object"]["data"]["public_abbr"], + image_url=item["object"]["data"]["list_image_url"] + if item["object"]["data"]["list_image_url"] + else None, + published_at=normalize_datetime( + item["object"]["data"]["first_shared_at"] + ), + is_top=item["object"]["data"]["is_top"], + is_paid=item["object"]["data"]["paid"], + can_comment=item["object"]["data"]["commentable"], + author_info=UserArticleAuthorInfo( + id=item["object"]["data"]["user"]["id"], + slug=item["object"]["data"]["user"]["slug"], + name=item["object"]["data"]["user"]["nickname"], + avatar_url=item["object"]["data"]["user"]["avatar"], + ), + views_count=item["object"]["data"]["views_count"], + likes_count=item["object"]["data"]["likes_count"], + comments_count=item["object"]["data"]["public_comments_count"], + tips_count=item["object"]["data"]["total_rewards_count"], + earned_fp_amount=normalize_assets_amount( + item["object"]["data"]["total_fp_amount"] + ), + )._validate() + for item in data + ) + + async def iter_articles( + self, + *, + start_page: int = 1, + order_by: Literal[ + "published_at", "last_comment_time", "popularity" + ] = 
"published_at", + page_size: int = 10, + ) -> AsyncGenerator[UserArticleInfo, None]: + now_page = start_page + while True: + data = await self.get_articles( + page=now_page, + order_by=order_by, + page_size=page_size, + ) + if not data: + return + + for item in data: + yield item + + now_page += 1