twitter2telegram/defs/feed.py
2023-05-24 23:20:21 +08:00

65 lines
1.8 KiB
Python

import traceback
from datetime import datetime
from typing import List, Optional
from bs4 import BeautifulSoup
from init import request
from defs.glover import rss_hub_host
from defs.models import Tweet, User
from feedparser import parse, FeedParserDict
class UsernameNotFound(Exception):
    """Raised when RSSHub answers HTTP 404 for the requested Twitter username."""
async def get(username: str):
    """Fetch the RSSHub Twitter feed for *username* and parse it.

    Returns the parsed feed on HTTP 200, raises :class:`UsernameNotFound`
    on 404, and returns ``None`` for any other status code.
    """
    response = await request.get(f"{rss_hub_host}/twitter/user/{username}")
    if response.status_code == 404:
        raise UsernameNotFound
    if response.status_code != 200:
        return None
    return parse(response.text)
async def parse_tweets(data: List[FeedParserDict]) -> List[Tweet]:
    """Convert raw feed entries into ``Tweet`` models.

    Each entry's HTML description is stripped to plain text and its ``<img>``
    sources collected. An entry that fails to parse (e.g. a malformed
    ``published`` date) is skipped — the traceback is printed so the failure
    stays visible without aborting the whole batch.
    """
    parsed: List[Tweet] = []
    for entry in data:
        try:
            soup = BeautifulSoup(entry.get("description", ""), "lxml")
            # feedparser emits RFC 822 style dates, e.g. "Wed, 24 May 2023 15:20:21 GMT"
            published_at = datetime.strptime(
                entry.get("published", ""), "%a, %d %b %Y %H:%M:%S %Z"
            )
            parsed.append(
                Tweet(
                    content=soup.get_text(),
                    url=entry.get("link", ""),
                    time=published_at,
                    images=[
                        src
                        for src in (img.get("src") for img in soup.find_all("img"))
                        if src
                    ],
                )
            )
        except Exception:
            # Best-effort: log and continue with the remaining entries.
            traceback.print_exc()
    return parsed
async def parse_user(username: str, data: FeedParserDict) -> User:
    """Build a ``User`` model from a parsed RSSHub feed.

    The display name is derived from the feed title by stripping the
    "Twitter @" marker; the feed entries become the user's tweets.
    """
    feed_title = data.get("feed", {}).get("title", "")
    display_name = feed_title.replace("Twitter @", "")
    return User(
        username=username,
        name=display_name,
        tweets=await parse_tweets(data.get("entries", [])),
    )
async def get_user(username: str) -> Optional[User]:
    """Fetch and parse *username*'s feed into a ``User``.

    Returns ``None`` when the feed could not be retrieved (non-200/404
    responses propagate as ``None`` from :func:`get`); a missing user
    raises :class:`UsernameNotFound` from :func:`get`.
    """
    data = await get(username)
    if not data:
        return None
    return await parse_user(username, data)