Change to CLI; introduce tests

parent b797746229
commit d8f0beec43
@@ -10,7 +10,6 @@ class Config(object):
     DISPLAY_SQL = False
     ERRORS_FROM = ['noreply@midnighthax.com']
     ERRORS_TO = ['kae@midnighthax.com']
-    FOLLOWED_COLOUR = '#8ae234'
     LOG_LEVEL_STDERR = logging.ERROR
     LOG_LEVEL_SYSLOG = logging.DEBUG
     LOG_NAME = "urma"

@@ -19,6 +18,6 @@ class Config(object):
     MAIL_SERVER = os.environ.get('MAIL_SERVER') or "woodlands.midnighthax.com"
     MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
     MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS') is not None
-    MAX_CONTENT_LENGTH = 4096
-    MAX_POSTS_TO_FETCH = 2000
-    NORMAL_COLOUR = "#f6f5f4"
+    MAX_DAYS_TO_FETCH = 10
+    POINTS_BOOSTED = 1
+    POINTS_FAVOURITED = 1
@@ -10,31 +10,6 @@ from log import log
 
 from typing import Any, List
-
-from PyQt5.QtWidgets import QMessageBox
-
-
-def ask_yes_no(title: str, question: str) -> bool:
-    """Ask question; return True for yes, False for no"""
-
-    button_reply = QMessageBox.question(None, title, question)
-
-    return button_reply == QMessageBox.Yes
-
-
-def format_display_name(account) -> str:
-    """
-    Format account display name according to whether we follow that account
-    or not.
-    """
-
-    username = account.display_name
-    if account.followed:
-        colour = Config.FOLLOWED_COLOUR
-    else:
-        colour = Config.NORMAL_COLOUR
-
-    return '<span style="color:' + colour + '">' + username + '</span>'
 
 
 def index_ojects_by_parameter(object_list: List, param: Any):
     """

@@ -74,15 +49,3 @@ def send_mail(to_addr, from_addr, subj, body):
         print(e)
     finally:
         s.quit()
-
-
-def show_OK(title: str, msg: str) -> None:
-    """Display a message to user"""
-
-    QMessageBox.information(None, title, msg, buttons=QMessageBox.Ok)
-
-
-def show_warning(title: str, msg: str) -> None:
-    """Display a warning to user"""
-
-    QMessageBox.warning(None, title, msg, buttons=QMessageBox.Cancel)

app/models.py  (164 changed lines)
@@ -44,7 +44,6 @@ class Accounts(Base):
     bot = Column(Boolean, index=False, nullable=False, default=False)
     url = Column(String(256), index=False)
     followed = Column(Boolean, index=False, nullable=False, default=False)
-    posts = relationship("Posts", back_populates="account")
 
     def __repr__(self) -> str:
         return (

@@ -57,7 +56,7 @@ class Accounts(Base):
         self.account_id = account_id
 
         session.add(self)
-        session.commit()
+        session.flush()
 
     @classmethod
     def get_followed(cls, session: Session) -> List["Accounts"]:
@@ -93,54 +92,6 @@ class Accounts(Base):
         return rec
 
 
-class Attachments(Base):
-    __tablename__ = 'attachments'
-
-    id = Column(Integer, primary_key=True, autoincrement=True)
-    media_id = Column(String(32), index=True, nullable=False)
-    url = Column(String(256), index=False)
-    preview_url = Column(String(256), index=False)
-    description = Column(String(2048), index=False)
-    post_id = Column(Integer, ForeignKey("posts.id"))
-    type = Column(String(256), index=False)
-
-    def __repr__(self) -> str:
-        return (
-            f"<Attachments(id={self.id}, url={self.url}, "
-            f"description={self.description}>"
-        )
-
-    def __init__(self, session: Session, media_id: str, post_id: int) -> None:
-
-        self.media_id = media_id
-        self.post_id = post_id
-
-        session.add(self)
-        session.commit()
-
-    @classmethod
-    def get_or_create(cls, session: Session, media_id: str,
-                      post_id: int) -> "Attachments":
-        """
-        Return any existing Attachment with this id or create a new one
-        """
-
-        try:
-            rec = (
-                session.execute(
-                    select(cls)
-                    .where(
-                        cls.media_id == media_id,
-                        cls.post_id == post_id
-                    )
-                ).scalar_one()
-            )
-        except NoResultFound:
-            rec = Attachments(session, media_id, post_id)
-
-        return rec
-
-
 class Hashtags(Base):
     __tablename__ = 'hashtags'
 
@@ -164,7 +115,21 @@ class Hashtags(Base):
         self.url = url
 
         session.add(self)
-        session.commit()
+        session.flush()
 
+    @classmethod
+    def get_all(cls, session: Session) -> List["Hashtags"]:
+        """
+        Return a list of all hashtags
+        """
+
+        records = (
+            session.execute(
+                select(cls)
+            ).scalars().all()
+        )
+
+        return records
+
     @classmethod
     def get_followed(cls, session: Session) -> List["Hashtags"]:
@@ -206,105 +171,32 @@ class Posts(Base):
 
     id = Column(Integer, primary_key=True, autoincrement=True)
     post_id = Column(String(32), index=True, nullable=False)
+
+    account_id = Column(Integer, ForeignKey('accounts.id'), nullable=True)
+    account = relationship("Accounts", foreign_keys=[account_id])
+
+    boosted_by_id = Column(Integer, ForeignKey('accounts.id'), nullable=True)
+    boosted_by = relationship("Accounts", foreign_keys=[boosted_by_id])
+
     created_at = Column(DateTime, index=True, default=None)
     uri = Column(String(256), index=False)
-    url = Column(String(256), index=False)
-    content = Column(String(Config.MAX_CONTENT_LENGTH), index=False,
-                     default="")
-    account_id = Column(Integer, ForeignKey('accounts.id'), nullable=True)
-    account = relationship("Accounts", back_populates="posts")
-
-    reblogged_by_post = relationship("Posts")
-    boosted_post_id = Column(Integer, ForeignKey("posts.id"))
-
-    media_attachments = relationship("Attachments")
 
     posts_to_tags = relationship("PostTags", back_populates="post")
     hashtags = association_proxy("posts_to_tags", "hashtag")
 
-    rating = Column(Integer, index=True, default=None)
+    favourited = Column(Boolean, index=True, nullable=False, default=False)
+    boosted = Column(Boolean, index=True, nullable=False, default=False)
+    bookmarked = Column(Boolean, index=True, nullable=False, default=False)
 
     def __repr__(self) -> str:
-        return f"<Posts(id={self.id}, content={self.content[:60]}>"
+        return f"<Posts(id={self.id}>"
 
     def __init__(self, session: Session, post_id) -> None:
 
         self.post_id = post_id
 
         session.add(self)
-        session.commit()
+        session.flush()
 
-    @classmethod
-    def get_unrated_after(cls, session: Session,
-                          post_id: int) -> Optional["Posts"]:
-        """
-        Return earliest unrated Posts object after passed post_id, or None
-        if there isn't one.
-        """
-
-        return (
-            session.scalars(
-                select(cls)
-                .where(
-                    (cls.rating.is_(None)),
-                    (cls.post_id > post_id)
-                )
-                .order_by(cls.post_id.asc())
-                .limit(1)
-            ).first()
-        )
-
-    @classmethod
-    def get_unrated_before(cls, session: Session,
-                           post_id: int) -> Optional["Posts"]:
-        """
-        Return latest unrated Posts object before passed post_id, or None
-        if there isn't one.
-        """
-
-        return (
-            session.scalars(
-                select(cls)
-                .where(
-                    (cls.rating.is_(None)),
-                    (cls.post_id < post_id)
-                )
-                .order_by(cls.post_id.desc())
-                .limit(1)
-            ).first()
-        )
-
-    @classmethod
-    def get_unrated_newest(cls, session: Session) -> Optional["Posts"]:
-        """
-        Return most recent Posts object that has not been rated and which
-        is not a boosted post, or None if there isn't one.
-        """
-
-        return (
-            session.scalars(
-                select(cls)
-                .where(cls.rating.is_(None))
-                .order_by(cls.post_id.desc())
-                .limit(1)
-            ).first()
-        )
-
-    @classmethod
-    def get_unrated_oldest(cls, session: Session) -> Optional["Posts"]:
-        """
-        Return oldest Posts object that has not been rated and which
-        is not a boosted post, or None if there isn't one.
-        """
-
-        return (
-            session.scalars(
-                select(cls)
-                .where(cls.rating.is_(None))
-                .order_by(cls.post_id.asc())
-                .limit(1)
-            ).first()
-        )
-
     @classmethod
     def get_by_post_id(cls, session: Session, post_id: str) -> "Posts":
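
Note: the session.commit() to session.flush() change that recurs in app/models.py above is what makes rollback-based testing workable: flush() sends the pending INSERTs to the database inside the still-open transaction, and the caller (or a test fixture) then decides whether to commit or roll back. A minimal sketch of that pattern, using a hypothetical table and SQLAlchemy 1.4-style APIs rather than the project's own models:

    from sqlalchemy import Column, Integer, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Thing(Base):
        __tablename__ = "things"
        id = Column(Integer, primary_key=True)


    engine = create_engine("sqlite://", future=True)
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add(Thing(id=1))
        session.flush()                      # row visible inside this transaction
        assert session.query(Thing).count() == 1
        session.rollback()                   # nothing was committed
        assert session.query(Thing).count() == 0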

app/urma.py  (606 changed lines)
@@ -10,42 +10,29 @@ import stackprinter
 import sys
 
 from config import Config
-from dbconfig import engine, Session, scoped_session
+from dbconfig import (
+    engine,
+    Session,
+    scoped_session,
+)
 from helpers import (
-    format_display_name,
     index_ojects_by_parameter,
     send_mail,
 )
-from helpers import show_OK
 from log import log
 from mastodon import Mastodon
 from models import (
     Accounts,
-    Attachments,
     Base,
     Hashtags,
     Posts,
     PostTags,
 )
 
-from typing import List, Optional
+from typing import List, Optional, Union
 
-from PyQt5.QtCore import Qt
-from PyQt5.QtGui import (
-    QImage,
-    QPixmap,
-)
-from PyQt5.QtWidgets import (
-    QApplication,
-    QLabel,
-    QMainWindow,
-    QPushButton,
-)
-
-from ui.main_window_ui import Ui_MainWindow  # type: ignore
-
-TESTDATA = "/home/kae/git/urma/hometl.pickle"
-
+# TESTDATA = "/home/kae/git/urma/hometl.pickle"
+#
 # Mastodon.create_app(
 # 'urma',
 # api_base_url='mastodon.org.uk',
@@ -77,6 +64,63 @@ class MastodonAPI:
 
         return self.mastodon.fetch_remaining(page1)
 
+    def get_bookmarked(self, since: int) -> List[dict]:
+        """
+        Return posts bookmarked since id 'since'
+        """
+
+        results = []
+        data = self.mastodon.bookmarks()
+        while data:
+            # Add in new data
+            results.extend(data)
+            # Have we reached minimum id?
+            if min([a.id for a in data]) < since:
+                break
+            # Get more data
+            data = self.mastodon.fetch_next(data)
+
+        return results
+
+    def get_boosted(self, since: int) -> List[dict]:
+        """
+        Return posts boosted since id 'since'
+        """
+
+        results = []
+        data = self.mastodon.account_statuses(self.me.id)
+        while data:
+            for datum in data:
+                # Have we reached minimum id?
+                if datum.id < since:
+                    break
+                # Is this our post that we boosted?
+                if datum.account.id == self.me.id and datum.reblog:
+                    # Add in new data
+                    results.append(datum)
+            # Get more data
+            data = self.mastodon.fetch_next(data)
+
+        return results
+
+    def get_favourited(self, since: Union[int, List[dict]]) -> List[dict]:
+        """
+        Return posts favourite since id 'since'
+        """
+
+        results = []
+        data = self.mastodon.favourites()
+        while data:
+            # Add in new data
+            results.extend(data)
+            # Have we reached minimum id?
+            if min([a.id for a in data]) < since:
+                break
+            # Get more data
+            data = self.mastodon.fetch_next(data)
+
+        return results
+
     def get_hashtag_following(self):
         """
         Return a list of hashtag_dicts that we are following
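
Note: the `since` value these fetchers stop at is a status ID, not a date; get_since_id() further down in this file builds a pseudo-ID from a timestamp (the shift-and-multiply comes from this diff, which credits mastodon.py internals, not from any documented API guarantee). A rough illustration of the cutoff, assuming snowflake-style IDs that sort by creation time:

    import datetime

    # Pseudo-ID for "MAX_DAYS_TO_FETCH (10) days ago", as get_since_id() computes it.
    cutoff_dt = datetime.datetime.now() - datetime.timedelta(days=10)
    cutoff_id = (int(cutoff_dt.timestamp()) << 16) * 1000

    # A status whose ID is below cutoff_id is (approximately) older than the
    # cutoff, so pagination loops like get_bookmarked() can stop there.
    print(cutoff_id)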
@ -86,391 +130,204 @@ class MastodonAPI:
|
|||||||
|
|
||||||
return self.mastodon.fetch_remaining(page1)
|
return self.mastodon.fetch_remaining(page1)
|
||||||
|
|
||||||
|
def unbookmark(self, post_id: int) -> None:
|
||||||
class Window(QMainWindow, Ui_MainWindow):
|
|
||||||
def __init__(self, parent=None) -> None:
|
|
||||||
super().__init__(parent)
|
|
||||||
self.setupUi(self)
|
|
||||||
|
|
||||||
self.mastapi = MastodonAPI(Config.ACCESS_TOKEN)
|
|
||||||
self.update_db()
|
|
||||||
|
|
||||||
self.current_post_id = None
|
|
||||||
self.next_post = self.next
|
|
||||||
|
|
||||||
self.btnDislike.clicked.connect(self.dislike)
|
|
||||||
self.btnFirst.clicked.connect(self.first)
|
|
||||||
self.btnLast.clicked.connect(self.last)
|
|
||||||
self.btnLike.clicked.connect(self.like)
|
|
||||||
self.btnNext.clicked.connect(self.next)
|
|
||||||
self.btnPrev.clicked.connect(self.prev)
|
|
||||||
self.btnUnsure.clicked.connect(self.unsure)
|
|
||||||
|
|
||||||
# Show first record
|
|
||||||
self.next()
|
|
||||||
|
|
||||||
def display(self, session: Session, post: Posts) -> None:
|
|
||||||
"""
|
"""
|
||||||
Prepare to display post
|
Remove bookmark on passed post ID
|
||||||
"""
|
"""
|
||||||
|
|
||||||
boosted_by = None
|
log.debug(f"unbookmark({post_id=})")
|
||||||
if post.boosted_post_id:
|
|
||||||
boosted_by = post.account
|
|
||||||
while post.boosted_post_id:
|
|
||||||
post = session.get(Posts, post.boosted_post_id)
|
|
||||||
self._display(session, post, boosted_by)
|
|
||||||
|
|
||||||
def _display(self, session: Session, post: int,
|
_ = self.mastodon.status_unbookmark(post_id)
|
||||||
boosted_by: Optional[Accounts] = None) -> None:
|
|
||||||
"""
|
|
||||||
Display passed post
|
|
||||||
"""
|
|
||||||
|
|
||||||
if post is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Boosted
|
def main() -> None:
|
||||||
if boosted_by:
|
"""
|
||||||
self.txtBoosted.setText(
|
Main loop
|
||||||
"Boosted by: " + format_display_name(boosted_by))
|
"""
|
||||||
self.txtBoosted.show()
|
|
||||||
else:
|
|
||||||
self.txtBoosted.hide()
|
|
||||||
|
|
||||||
# Username
|
mastapi = MastodonAPI(Config.ACCESS_TOKEN)
|
||||||
self.txtUsername.setText(format_display_name(post.account))
|
|
||||||
|
|
||||||
# Debug
|
with Session() as session:
|
||||||
self.lblDebug.setText(str(post.id))
|
since = get_since_id(session)
|
||||||
|
|
||||||
# Account
|
update_followed_accounts(session, mastapi)
|
||||||
self.lblAcct.setText(post.account.acct)
|
update_followed_hashtags(session, mastapi)
|
||||||
|
|
||||||
# Hashtags
|
favourited = mastapi.get_favourited(since)
|
||||||
unfollowed_hashtags = [
|
process_favourited_posts(session, favourited)
|
||||||
'#' + a.name for a in post.hashtags if not a.followed]
|
|
||||||
followed_hashtags = [
|
|
||||||
'#' + a.name for a in post.hashtags if a.followed]
|
|
||||||
hashtag_text = (
|
|
||||||
'<span style="color:' + Config.FOLLOWED_COLOUR + '">' +
|
|
||||||
'<br />'.join(followed_hashtags) +
|
|
||||||
'</span><br />' +
|
|
||||||
'<span style="color:' + Config.NORMAL_COLOUR + '">' +
|
|
||||||
'<br />'.join(unfollowed_hashtags) +
|
|
||||||
'</span>'
|
|
||||||
)
|
|
||||||
self.txtHashtags.setText(hashtag_text)
|
|
||||||
|
|
||||||
# Post
|
boosted = mastapi.get_boosted(since)
|
||||||
self.txtPost.setHtml(post.content)
|
process_boosted_posts(session, boosted)
|
||||||
|
|
||||||
# Image
|
bookmarked = mastapi.get_bookmarked(since)
|
||||||
if post.media_attachments:
|
process_bookmarked_posts(session, mastapi, bookmarked)
|
||||||
# TODO: handle multiple images, not just [0]
|
|
||||||
url_image = post.media_attachments[0].preview_url
|
|
||||||
pixmap = QPixmap()
|
|
||||||
pixmap.loadFromData(requests.get(url_image).content)
|
|
||||||
s_pixmap = pixmap.scaled(self.lblPicture.size(),
|
|
||||||
Qt.KeepAspectRatio)
|
|
||||||
self.lblPicture.show()
|
|
||||||
self.lblPicture.setPixmap(s_pixmap)
|
|
||||||
else:
|
|
||||||
self.lblPicture.hide()
|
|
||||||
|
|
||||||
def dislike(self):
|
|
||||||
"""
|
|
||||||
Mark a post as rated negatively
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.rate_post(rating=-1)
|
def get_since_id(session: Session) -> int:
|
||||||
|
"""
|
||||||
|
Return id to use as 'min_id' when fetching posts.
|
||||||
|
|
||||||
def first(self):
|
We don't want to fetch anything older than MAX_DAYS_TO_FETCH.
|
||||||
"""
|
"""
|
||||||
actions
|
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
# Build psuedo id for MAX_DAYS_TO_FETCH time ago
|
||||||
|
now = datetime.datetime.now()
|
||||||
|
max_days_ago_dt = now - datetime.timedelta(days=Config.MAX_DAYS_TO_FETCH)
|
||||||
|
# From mastodon.py package, use code from internals.py:__unpack_id
|
||||||
|
max_days_ago_id = (int(max_days_ago_dt.timestamp()) << 16) * 1000
|
||||||
|
return max_days_ago_id
|
||||||
|
|
||||||
def last(self):
|
# Get newest ID from database
|
||||||
"""
|
newest_db_id = Posts.max_post_id(session)
|
||||||
actions
|
|
||||||
"""
|
|
||||||
|
|
||||||
pass
|
if not newest_db_id:
|
||||||
|
return max_days_ago_id
|
||||||
|
else:
|
||||||
|
return max(max_days_ago_id, newest_db_id)
|
||||||
|
|
||||||
def like(self):
|
|
||||||
"""
|
|
||||||
Mark a post as rated positively
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.rate_post(rating=1)
|
def process_bookmarked_posts(session, mastapi, posts) -> None:
|
||||||
|
"""
|
||||||
|
Process bookmarked posts
|
||||||
|
"""
|
||||||
|
|
||||||
def next(self) -> None:
|
for post in posts:
|
||||||
"""
|
record = _process_post(session, post)
|
||||||
Display next post. We work BACKWARDS through posts, starting with the
|
# Posts that are favourited and bookmarked are genuine bookmark
|
||||||
most recent, so "next" is actually one older.
|
# posts: ignore.
|
||||||
|
if record.favourited:
|
||||||
|
continue
|
||||||
|
record.bookmarked = True
|
||||||
|
return
|
||||||
|
# TODO: mastapi.unbookmark(int(post.id))
|
||||||
|
|
||||||
If we are called with self.current_post_id set to None, retrieve and
|
|
||||||
display newest unrated post.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Remember whether we're going forward or backwards through
|
def process_boosted_posts(session, posts) -> None:
|
||||||
# posts
|
"""
|
||||||
self.next_post = self.next
|
Process boosted posts
|
||||||
|
"""
|
||||||
|
|
||||||
# Get post to display
|
for post in posts:
|
||||||
with Session() as session:
|
record = _process_post(session, post)
|
||||||
if self.current_post_id is None:
|
record.boosted = True
|
||||||
post = Posts.get_unrated_newest(session)
|
|
||||||
else:
|
|
||||||
post = Posts.get_unrated_before(session, self.current_post_id)
|
|
||||||
# Don't process posts that are boosted as they will be
|
|
||||||
# processed by the boosting post
|
|
||||||
while post and post.reblogged_by_post:
|
|
||||||
post = Posts.get_unrated_before(session, post.post_id)
|
|
||||||
if not post:
|
|
||||||
self.current_post_id = None
|
|
||||||
show_OK("All done", "No more posts to process")
|
|
||||||
return
|
|
||||||
|
|
||||||
self.current_post_id = post.post_id
|
|
||||||
self.display(session, post)
|
|
||||||
|
|
||||||
def prev(self):
|
def process_favourited_posts(session, posts) -> None:
|
||||||
"""
|
"""
|
||||||
Display previous post. We work BACKWARDS through posts so
|
Process favourited posts
|
||||||
"previous" is actually one newer.
|
"""
|
||||||
|
|
||||||
If we are called with self.current_post_id set to None, retrieve and
|
for post in posts:
|
||||||
display oldest unrated post.
|
record = _process_post(session, post)
|
||||||
"""
|
record.favourited = True
|
||||||
|
|
||||||
# Remember whether we're going forward or backwards through
|
|
||||||
# posts
|
|
||||||
self.next_post = self.prev
|
|
||||||
|
|
||||||
# Get post to display, but don't process posts that are boosted
|
def _process_post(session: Session, post) -> Posts:
|
||||||
# as they will be processed by the boosting post
|
"""
|
||||||
with Session() as session:
|
Add passsed post to database
|
||||||
if self.current_post_id is None:
|
"""
|
||||||
post = Posts.get_unrated_oldest(session)
|
|
||||||
else:
|
|
||||||
post = Posts.get_unrated_after(session, self.current_post_id)
|
|
||||||
# Don't process posts that are boosted as they will be
|
|
||||||
# processed by the boosting post
|
|
||||||
while post and post.reblogged_by_post:
|
|
||||||
post = Posts.get_unrated_after(session, post.post_id)
|
|
||||||
if not post:
|
|
||||||
self.current_post_id = None
|
|
||||||
show_OK("All done", "No more posts to process")
|
|
||||||
return
|
|
||||||
|
|
||||||
self.current_post_id = post.post_id
|
|
||||||
self.display(session, post)
|
|
||||||
|
|
||||||
def rate_post(self, rating: int) -> None:
|
|
||||||
"""
|
|
||||||
Add rating to current post
|
|
||||||
"""
|
|
||||||
|
|
||||||
with Session() as session:
|
|
||||||
post = Posts.get_by_post_id(session, self.current_post_id)
|
|
||||||
post.rating = rating
|
|
||||||
self.next_post()
|
|
||||||
|
|
||||||
def unsure(self):
|
|
||||||
"""
|
|
||||||
Mark a post as rated neutrally
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.rate_post(rating=0)
|
|
||||||
|
|
||||||
def update_db(self) -> None:
|
|
||||||
"""
|
|
||||||
Update database from Mastodon
|
|
||||||
|
|
||||||
Save a copy of downloaded data for debugging
|
|
||||||
"""
|
|
||||||
|
|
||||||
with Session() as session:
|
|
||||||
minimum_post_id = Posts.max_post_id(session)
|
|
||||||
if not minimum_post_id:
|
|
||||||
minimum_post_id = "1"
|
|
||||||
posts_to_get = Config.MAX_POSTS_TO_FETCH
|
|
||||||
reached_minimum = False
|
|
||||||
hometl = []
|
|
||||||
|
|
||||||
while True:
|
|
||||||
|
|
||||||
# Create a filename to save data
|
|
||||||
now = datetime.datetime.now()
|
|
||||||
seq = 0
|
|
||||||
while True:
|
|
||||||
fname = (
|
|
||||||
"testdata/" +
|
|
||||||
now.strftime("%Y-%m-%d_%H:%M:%S_") +
|
|
||||||
f"{seq:02d}.pickle"
|
|
||||||
)
|
|
||||||
if not os.path.isfile(fname):
|
|
||||||
print(f"{fname=}")
|
|
||||||
break
|
|
||||||
seq += 1
|
|
||||||
print(f"{seq=}")
|
|
||||||
|
|
||||||
# Fetch data
|
|
||||||
if not hometl:
|
|
||||||
print("Fetching first data...")
|
|
||||||
hometl = self.mastapi.mastodon.timeline()
|
|
||||||
else:
|
|
||||||
print("Fetching next data...")
|
|
||||||
hometl = self.mastapi.mastodon.fetch_next(hometl)
|
|
||||||
print(f"Fetched additional {len(hometl)} posts")
|
|
||||||
with open(fname, "wb") as f:
|
|
||||||
pickle.dump(hometl, f)
|
|
||||||
|
|
||||||
for post in hometl:
|
|
||||||
if str(post.id) <= minimum_post_id:
|
|
||||||
reached_minimum = True
|
|
||||||
break
|
|
||||||
print(f"Processing {post.id=}")
|
|
||||||
self._process_post(session, post)
|
|
||||||
|
|
||||||
posts_to_get -= len(hometl)
|
|
||||||
print(f"{posts_to_get=}")
|
|
||||||
if posts_to_get <= 0 or reached_minimum or not hometl:
|
|
||||||
break
|
|
||||||
|
|
||||||
def _process_post(self, session: Session, post) -> Posts:
|
|
||||||
"""
|
|
||||||
Add passsed post to database
|
|
||||||
"""
|
|
||||||
|
|
||||||
log.debug(f"{post.id=} processing")
|
|
||||||
rec = Posts.get_or_create(session, str(post.id))
|
|
||||||
if rec.account_id is not None:
|
|
||||||
# We already have this post
|
|
||||||
log.debug(f"{post.id=} already in db")
|
|
||||||
return rec
|
|
||||||
|
|
||||||
# Create account record if needed
|
|
||||||
log.debug(f"{post.id=} processing {post.account.id=}")
|
|
||||||
account_rec = Accounts.get_or_create(session, str(post.account.id))
|
|
||||||
if account_rec.username is None:
|
|
||||||
log.debug(f"{post.id=} populating new account {post.account.id=}")
|
|
||||||
account_rec.username = post.account.username
|
|
||||||
account_rec.acct = post.account.acct
|
|
||||||
account_rec.display_name = post.account.display_name
|
|
||||||
account_rec.bot = post.account.bot
|
|
||||||
account_rec.url = post.account.url
|
|
||||||
rec.account_id = account_rec.id
|
|
||||||
|
|
||||||
# Create hashtag records as needed
|
|
||||||
for tag in post.tags:
|
|
||||||
log.debug(f"{post.id=} processing {tag.name=}")
|
|
||||||
hashtag = Hashtags.get_or_create(session, tag.name, tag.url)
|
|
||||||
rec.hashtags.append(hashtag)
|
|
||||||
|
|
||||||
# Handle media
|
|
||||||
if post.media_attachments:
|
|
||||||
for media in post.media_attachments:
|
|
||||||
log.debug(f"{post.id=} processing {media.id=}")
|
|
||||||
media_rec = Attachments.get_or_create(
|
|
||||||
session, str(media.id), rec.id)
|
|
||||||
if not media_rec.type:
|
|
||||||
log.debug(f"{post.id=} {media.id=} new record")
|
|
||||||
media_rec.type = media.type
|
|
||||||
media_rec.url = media.url
|
|
||||||
media_rec.preview_url = media.preview_url
|
|
||||||
media_rec.description = media.description
|
|
||||||
else:
|
|
||||||
log.debug(f"{post.id=} {media.id=} already exists")
|
|
||||||
else:
|
|
||||||
log.debug(f"{post.id=} No media attachments")
|
|
||||||
|
|
||||||
rec.account_id = account_rec.id
|
|
||||||
rec.created_at = post.created_at
|
|
||||||
rec.uri = post.uri
|
|
||||||
rec.url = post.url
|
|
||||||
rec.content = post.content[:Config.MAX_CONTENT_LENGTH]
|
|
||||||
log.debug(f"{post.id=} {post.content=}")
|
|
||||||
|
|
||||||
if post.reblog:
|
|
||||||
log.debug(f"{post.id=} {post.reblog.id=}")
|
|
||||||
rec.boosted_post_id = self._process_post(
|
|
||||||
session, post.reblog).id
|
|
||||||
log.debug(f"{post.id=} {rec.boosted_post_id=}")
|
|
||||||
|
|
||||||
|
log.debug(f"{post.id=} processing")
|
||||||
|
rec = Posts.get_or_create(session, str(post.id))
|
||||||
|
if rec.account_id is not None:
|
||||||
|
# We already have this post
|
||||||
|
log.debug(f"{post.id=} already in db")
|
||||||
return rec
|
return rec
|
||||||
|
|
||||||
def update_followed_accounts(self, session: Session) -> None:
|
# Create account record if needed
|
||||||
"""
|
log.debug(f"{post.id=} processing {post.account.id=}")
|
||||||
Retrieve list of followed accounts and update accounts
|
account_rec = Accounts.get_or_create(session, str(post.account.id))
|
||||||
in database to match
|
if account_rec.username is None:
|
||||||
"""
|
log.debug(f"{post.id=} populating new account {post.account.id=}")
|
||||||
|
account_rec.username = post.account.username
|
||||||
|
account_rec.acct = post.account.acct
|
||||||
|
account_rec.display_name = post.account.display_name
|
||||||
|
account_rec.bot = post.account.bot
|
||||||
|
account_rec.url = post.account.url
|
||||||
|
rec.account_id = account_rec.id
|
||||||
|
|
||||||
mast_followed_accounts = self.mastapi.get_account_following()
|
# Create hashtag records as needed
|
||||||
mast_followed_accounts_d = index_ojects_by_parameter(
|
for tag in post.tags:
|
||||||
mast_followed_accounts, "username")
|
log.debug(f"{post.id=} processing {tag.name=}")
|
||||||
|
hashtag = Hashtags.get_or_create(session, tag.name, tag.url)
|
||||||
|
rec.hashtags.append(hashtag)
|
||||||
|
|
||||||
our_followed_accounts = Accounts.get_followed(session)
|
rec.created_at = post.created_at
|
||||||
our_followed_accounts_d = index_ojects_by_parameter(
|
rec.uri = post.uri
|
||||||
our_followed_accounts, "username")
|
|
||||||
|
|
||||||
# Add those we are missing
|
if post.reblog:
|
||||||
for username in (
|
log.debug(f"{post.id=} {post.reblog.id=}")
|
||||||
set(mast_followed_accounts_d.keys()) -
|
boosted_rec = _process_post(session, post.reblog)
|
||||||
set(our_followed_accounts_d.keys())
|
rec.boosted_by_id = boosted_rec.account_id
|
||||||
):
|
|
||||||
account = Accounts.get_or_create(
|
|
||||||
session, str(mast_followed_accounts_d[username].id)
|
|
||||||
)
|
|
||||||
account.followed = True
|
|
||||||
|
|
||||||
# Remove any we no longer follow
|
return rec
|
||||||
for username in (
|
|
||||||
set(our_followed_accounts_d.keys()) -
|
|
||||||
set(mast_followed_accounts_d.keys())
|
|
||||||
):
|
|
||||||
account = Accounts.get_or_create(
|
|
||||||
session, str(our_followed_accounts_d[username].account_id)
|
|
||||||
)
|
|
||||||
account.followed = False
|
|
||||||
|
|
||||||
def update_followed_hashtags(self, session: Session) -> None:
|
|
||||||
"""
|
|
||||||
Retrieve list of followed hashtags and update hashtags
|
|
||||||
"""
|
|
||||||
|
|
||||||
mast_followed_hashtags = self.mastapi.get_hashtag_following()
|
|
||||||
mast_followed_hashtags_d = index_ojects_by_parameter(
|
|
||||||
mast_followed_hashtags, "name")
|
|
||||||
|
|
||||||
our_followed_hashtags = Hashtags.get_followed(session)
|
|
||||||
our_followed_hashtags_d = index_ojects_by_parameter(
|
|
||||||
our_followed_hashtags, "name")
|
|
||||||
|
|
||||||
# Add those we are missing
|
|
||||||
for name in (
|
|
||||||
set(mast_followed_hashtags_d.keys()) -
|
|
||||||
set(our_followed_hashtags_d.keys())
|
|
||||||
):
|
|
||||||
hashtag = Hashtags.get_or_create(
|
|
||||||
session, name, mast_followed_hashtags_d[name].url)
|
|
||||||
hashtag.followed = True
|
|
||||||
|
|
||||||
# Remove any we no longer follow
|
|
||||||
for name in (
|
|
||||||
set(our_followed_hashtags_d.keys()) -
|
|
||||||
set(mast_followed_hashtags_d.keys())
|
|
||||||
):
|
|
||||||
hashtag = hashtags.get_or_create(
|
|
||||||
session, name, our_followed_hashtags_d[username].name)
|
|
||||||
hashtag.followed = False
|
|
||||||
|
|
||||||
|
|
||||||
# class HoldingPot:
|
def update_followed_accounts(session: Session, mastapi: MastodonAPI) -> None:
|
||||||
# def process_post(post):
|
"""
|
||||||
|
Retrieve list of followed accounts and update accounts
|
||||||
|
in database to match
|
||||||
|
"""
|
||||||
|
|
||||||
|
mast_followed_accounts = mastapi.get_account_following()
|
||||||
|
mast_followed_accounts_d = index_ojects_by_parameter(
|
||||||
|
mast_followed_accounts, "username")
|
||||||
|
|
||||||
|
our_followed_accounts = Accounts.get_followed(session)
|
||||||
|
our_followed_accounts_d = index_ojects_by_parameter(
|
||||||
|
our_followed_accounts, "username")
|
||||||
|
|
||||||
|
# Add those we are missing
|
||||||
|
for username in (
|
||||||
|
set(mast_followed_accounts_d.keys()) -
|
||||||
|
set(our_followed_accounts_d.keys())
|
||||||
|
):
|
||||||
|
account = Accounts.get_or_create(
|
||||||
|
session, str(mast_followed_accounts_d[username].id)
|
||||||
|
)
|
||||||
|
account.followed = True
|
||||||
|
|
||||||
|
# Remove any we no longer follow
|
||||||
|
for username in (
|
||||||
|
set(our_followed_accounts_d.keys()) -
|
||||||
|
set(mast_followed_accounts_d.keys())
|
||||||
|
):
|
||||||
|
account = Accounts.get_or_create(
|
||||||
|
session, str(our_followed_accounts_d[username].account_id)
|
||||||
|
)
|
||||||
|
account.followed = False
|
||||||
|
|
||||||
|
|
||||||
|
def update_followed_hashtags(session: Session, mastapi: MastodonAPI) -> None:
|
||||||
|
"""
|
||||||
|
Retrieve list of followed hashtags and update hashtags
|
||||||
|
"""
|
||||||
|
|
||||||
|
mast_followed_hashtags = mastapi.get_hashtag_following()
|
||||||
|
mast_followed_hashtags_d = index_ojects_by_parameter(
|
||||||
|
mast_followed_hashtags, "name")
|
||||||
|
|
||||||
|
our_followed_hashtags = Hashtags.get_followed(session)
|
||||||
|
our_followed_hashtags_d = index_ojects_by_parameter(
|
||||||
|
our_followed_hashtags, "name")
|
||||||
|
|
||||||
|
# Add those we are missing
|
||||||
|
for name in (
|
||||||
|
set(mast_followed_hashtags_d.keys()) -
|
||||||
|
set(our_followed_hashtags_d.keys())
|
||||||
|
):
|
||||||
|
hashtag = Hashtags.get_or_create(
|
||||||
|
session, name, mast_followed_hashtags_d[name].url)
|
||||||
|
hashtag.followed = True
|
||||||
|
|
||||||
|
# Remove any we no longer follow
|
||||||
|
for name in (
|
||||||
|
set(our_followed_hashtags_d.keys()) -
|
||||||
|
set(mast_followed_hashtags_d.keys())
|
||||||
|
):
|
||||||
|
hashtag = hashtags.get_or_create(
|
||||||
|
session, name, our_followed_hashtags_d[username].name)
|
||||||
|
hashtag.followed = False
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
@@ -481,10 +338,7 @@ if __name__ == "__main__":
 
     try:
         Base.metadata.create_all(engine)
-        app = QApplication(sys.argv)
-        win = Window()
-        win.show()
-        sys.exit(app.exec())
+        sys.exit(main())
     except Exception as exc:
 
         if os.environ["URMA_ENV"] != "DEVELOPMENT":
@@ -495,11 +349,3 @@ if __name__ == "__main__":
             print("\033[1;31;47mUnhandled exception starts")
             stackprinter.show(style="darkbg")
             print("Unhandled exception ends\033[1;37;40m")
-
-        # # Data for development
-        # with open(TESTDATA, "rb") as inp:
-        # hometl = pickle.load(inp)
-        #
-        # with Session() as session:
-        # for post in hometl:
-        # process_post(post)

conftest.py  (new file, 46 lines)
@@ -0,0 +1,46 @@
+# https://itnext.io/setting-up-transactional-tests-with-pytest-and-sqlalchemy-b2d726347629
+
+import pytest
+import sys
+sys.path.append("app")
+import models  # noqa E402 (import not at top of file)
+
+from sqlalchemy import create_engine  # noqa E402
+from sqlalchemy.orm import (  # noqa E402
+    scoped_session,
+    Session,
+    sessionmaker,
+)
+
+
+@pytest.fixture(scope="session")
+def engine():
+    return create_engine(
+        "mysql+mysqldb://dev_urma_testing:dev_urma_testing@"
+        "localhost/dev_urma_testing",
+        encoding='utf-8',
+        pool_pre_ping=True,
+        future=True
+    )
+
+
+@pytest.fixture(scope="session")
+def setup_database(engine):
+    """
+    Made scope=function (the default) to ensure any committed objects
+    are removed
+    """
+
+    from app.models import Base  # noqa E402
+    Base.metadata.create_all(engine)
+    # seed_database()
+    yield
+    Base.metadata.drop_all(engine)
+
+
+@pytest.fixture
+def session(setup_database, engine):
+    session = scoped_session(sessionmaker(autoflush=False, bind=engine))
+    session.begin()
+    yield session
+    session.rollback()
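
Note: no test module appears in this diff, so here is a sketch of how the new fixtures might be exercised; the file name and test are hypothetical, and it assumes the app directory is on sys.path as conftest.py arranges:

    # tests/test_models.py (hypothetical, not part of this commit)
    import sys

    sys.path.append("app")

    from models import Posts  # noqa: E402


    def test_get_by_post_id(session):
        # Posts.__init__ adds the record and flushes it; the `session` fixture
        # rolls the transaction back afterwards, so nothing persists.
        Posts(session, post_id="12345")
        assert Posts.get_by_post_id(session, "12345").post_id == "12345"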

hometl.pickle  (binary file, not shown)

poetry.lock  (generated, 223 changed lines)
@ -1,6 +1,6 @@
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "alembic"
|
name = "alembic"
|
||||||
version = "1.9.1"
|
version = "1.9.2"
|
||||||
description = "A database migration tool for SQLAlchemy."
|
description = "A database migration tool for SQLAlchemy."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
@ -35,14 +35,6 @@ six = "*"
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
test = ["astroid", "pytest"]
|
test = ["astroid", "pytest"]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "atomicwrites"
|
|
||||||
version = "1.4.1"
|
|
||||||
description = "Atomic file writes."
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "attrs"
|
name = "attrs"
|
||||||
version = "22.2.0"
|
version = "22.2.0"
|
||||||
@ -88,14 +80,11 @@ python-versions = ">=3.6"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "charset-normalizer"
|
name = "charset-normalizer"
|
||||||
version = "2.1.1"
|
version = "3.0.1"
|
||||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.6.0"
|
python-versions = "*"
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
unicode_backport = ["unicodedata2"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "colorama"
|
name = "colorama"
|
||||||
@ -113,6 +102,17 @@ category = "main"
|
|||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.5"
|
python-versions = ">=3.5"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "exceptiongroup"
|
||||||
|
version = "1.1.0"
|
||||||
|
description = "Backport of PEP 654 (exception groups)"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
test = ["pytest (>=6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "executing"
|
name = "executing"
|
||||||
version = "1.2.0"
|
version = "1.2.0"
|
||||||
@ -124,6 +124,18 @@ python-versions = "*"
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
tests = ["asttokens", "pytest", "littleutils", "rich"]
|
tests = ["asttokens", "pytest", "littleutils", "rich"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "fancycompleter"
|
||||||
|
version = "0.9.1"
|
||||||
|
description = "colorful TAB completion for Python prompt"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pyreadline = {version = "*", markers = "platform_system == \"Windows\""}
|
||||||
|
pyrepl = ">=0.8.2"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "greenlet"
|
name = "greenlet"
|
||||||
version = "2.0.1"
|
version = "2.0.1"
|
||||||
@ -144,6 +156,14 @@ category = "main"
|
|||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.5"
|
python-versions = ">=3.5"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iniconfig"
|
||||||
|
version = "2.0.0"
|
||||||
|
description = "brain-dead simple config-ini parsing"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ipdb"
|
name = "ipdb"
|
||||||
version = "0.13.11"
|
version = "0.13.11"
|
||||||
@ -159,7 +179,7 @@ tomli = {version = "*", markers = "python_version > \"3.6\" and python_version <
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ipython"
|
name = "ipython"
|
||||||
version = "8.7.0"
|
version = "8.8.0"
|
||||||
description = "IPython: Productive Interactive Computing"
|
description = "IPython: Productive Interactive Computing"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
@ -264,14 +284,6 @@ python-versions = ">=3.5"
|
|||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
traitlets = "*"
|
traitlets = "*"
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "more-itertools"
|
|
||||||
version = "9.0.0"
|
|
||||||
description = "More routines for operating on iterables, beyond itertools"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mysqlclient"
|
name = "mysqlclient"
|
||||||
version = "2.1.1"
|
version = "2.1.1"
|
||||||
@ -282,7 +294,7 @@ python-versions = ">=3.5"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "packaging"
|
name = "packaging"
|
||||||
version = "22.0"
|
version = "23.0"
|
||||||
description = "Core utilities for Python packages"
|
description = "Core utilities for Python packages"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
@ -300,6 +312,23 @@ python-versions = ">=3.6"
|
|||||||
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
|
qa = ["flake8 (==3.8.3)", "mypy (==0.782)"]
|
||||||
testing = ["docopt", "pytest (<6.0.0)"]
|
testing = ["docopt", "pytest (<6.0.0)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pdbpp"
|
||||||
|
version = "0.10.3"
|
||||||
|
description = "pdb++, a drop-in replacement for pdb"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
fancycompleter = ">=0.8"
|
||||||
|
pygments = "*"
|
||||||
|
wmctrl = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
funcsigs = ["funcsigs"]
|
||||||
|
testing = ["funcsigs", "pytest"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pexpect"
|
name = "pexpect"
|
||||||
version = "4.8.0"
|
version = "4.8.0"
|
||||||
@ -321,14 +350,15 @@ python-versions = "*"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pluggy"
|
name = "pluggy"
|
||||||
version = "0.13.1"
|
version = "1.0.0"
|
||||||
description = "plugin and hook calling mechanisms for python"
|
description = "plugin and hook calling mechanisms for python"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
|
python-versions = ">=3.6"
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
dev = ["pre-commit", "tox"]
|
dev = ["pre-commit", "tox"]
|
||||||
|
testing = ["pytest", "pytest-benchmark"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "prompt-toolkit"
|
name = "prompt-toolkit"
|
||||||
@ -360,14 +390,6 @@ python-versions = "*"
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
tests = ["pytest"]
|
tests = ["pytest"]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "py"
|
|
||||||
version = "1.11.0"
|
|
||||||
description = "library with cross-python path, ini-parsing, io, code, log facilities"
|
|
||||||
category = "dev"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pygments"
|
name = "pygments"
|
||||||
version = "2.14.0"
|
version = "2.14.0"
|
||||||
@ -380,65 +402,54 @@ python-versions = ">=3.6"
|
|||||||
plugins = ["importlib-metadata"]
|
plugins = ["importlib-metadata"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyqt5"
|
name = "pyreadline"
|
||||||
version = "5.15.7"
|
version = "2.1"
|
||||||
description = "Python bindings for the Qt cross platform application toolkit"
|
description = "A python implmementation of GNU readline."
|
||||||
category = "main"
|
category = "dev"
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
|
|
||||||
[package.dependencies]
|
|
||||||
PyQt5-Qt5 = ">=5.15.0"
|
|
||||||
PyQt5-sip = ">=12.11,<13"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyqt5-qt5"
|
|
||||||
version = "5.15.2"
|
|
||||||
description = "The subset of a Qt installation needed by PyQt5."
|
|
||||||
category = "main"
|
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pyqt5-sip"
|
name = "pyrepl"
|
||||||
version = "12.11.0"
|
version = "0.9.0"
|
||||||
description = "The sip module support for PyQt5"
|
description = "A library for building flexible command line interfaces"
|
||||||
category = "main"
|
|
||||||
optional = false
|
|
||||||
python-versions = ">=3.7"
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "pyqt5-stubs"
|
|
||||||
version = "5.15.6.0"
|
|
||||||
description = "PEP561 stub files for the PyQt5 framework"
|
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">= 3.5"
|
python-versions = "*"
|
||||||
|
|
||||||
[package.extras]
|
|
||||||
dev = ["mypy (==0.930)", "pytest", "pytest-xvfb"]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "pytest"
|
name = "pytest"
|
||||||
version = "5.4.3"
|
version = "7.2.1"
|
||||||
description = "pytest: simple powerful testing with Python"
|
description = "pytest: simple powerful testing with Python"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.5"
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
|
attrs = ">=19.2.0"
|
||||||
attrs = ">=17.4.0"
|
|
||||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||||
more-itertools = ">=4.0.0"
|
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
||||||
|
iniconfig = "*"
|
||||||
packaging = "*"
|
packaging = "*"
|
||||||
pluggy = ">=0.12,<1.0"
|
pluggy = ">=0.12,<2.0"
|
||||||
py = ">=1.5.0"
|
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
|
||||||
wcwidth = "*"
|
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
checkqa-mypy = ["mypy (==v0.761)"]
|
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
|
||||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest-env"
|
||||||
|
version = "0.8.1"
|
||||||
|
description = "py.test plugin that allows you to add environment variables."
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
pytest = ">=7.1.3"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
test = ["coverage (>=6.5)", "pytest-mock (>=3.10)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "python-dateutil"
|
name = "python-dateutil"
|
||||||
@ -461,7 +472,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "requests"
|
name = "requests"
|
||||||
version = "2.28.1"
|
version = "2.28.2"
|
||||||
description = "Python HTTP for Humans."
|
description = "Python HTTP for Humans."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
@ -469,7 +480,7 @@ python-versions = ">=3.7, <4"
|
|||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
certifi = ">=2017.4.17"
|
certifi = ">=2017.4.17"
|
||||||
charset-normalizer = ">=2,<3"
|
charset-normalizer = ">=2,<4"
|
||||||
idna = ">=2.5,<4"
|
idna = ">=2.5,<4"
|
||||||
urllib3 = ">=1.21.1,<1.27"
|
urllib3 = ">=1.21.1,<1.27"
|
||||||
|
|
||||||
@ -487,7 +498,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "sqlalchemy"
|
name = "sqlalchemy"
|
||||||
version = "1.4.45"
|
version = "1.4.46"
|
||||||
description = "Database Abstraction Library"
|
description = "Database Abstraction Library"
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
@ -551,7 +562,7 @@ python-versions = ">=3.7"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "traitlets"
|
name = "traitlets"
|
||||||
version = "5.8.0"
|
version = "5.8.1"
|
||||||
description = "Traitlets Python configuration system"
|
description = "Traitlets Python configuration system"
|
||||||
category = "dev"
|
category = "dev"
|
||||||
optional = false
|
optional = false
|
||||||
@ -563,7 +574,7 @@ test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"]
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "urllib3"
|
name = "urllib3"
|
||||||
version = "1.26.13"
|
version = "1.26.14"
|
||||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||||
category = "main"
|
category = "main"
|
||||||
optional = false
|
optional = false
|
||||||
@ -582,16 +593,23 @@ category = "dev"
|
|||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = "*"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "wmctrl"
|
||||||
|
version = "0.4"
|
||||||
|
description = "A tool to programmatically control windows inside X"
|
||||||
|
category = "dev"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "1.1"
|
lock-version = "1.1"
|
||||||
python-versions = "^3.9"
|
python-versions = "^3.9"
|
||||||
content-hash = "d3bb7fb1fedd37ef4df5b2b2c8097a57f268e54041c9aa28fe230824db299a5a"
|
content-hash = "6595ea3da23f353d916879141650fbb0b213e1393466925fa7be5e2e5bb6d5a3"
|
||||||
|
|
||||||
[metadata.files]
|
[metadata.files]
|
||||||
alembic = []
|
alembic = []
|
||||||
appnope = []
|
appnope = []
|
||||||
asttokens = []
|
asttokens = []
|
||||||
atomicwrites = []
|
|
||||||
attrs = []
|
attrs = []
|
||||||
backcall = [
|
backcall = [
|
||||||
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
|
{file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"},
|
||||||
@ -605,9 +623,15 @@ decorator = [
|
|||||||
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
|
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
|
||||||
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
|
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
|
||||||
]
|
]
|
||||||
|
exceptiongroup = []
|
||||||
executing = []
|
executing = []
|
||||||
|
fancycompleter = [
|
||||||
|
{file = "fancycompleter-0.9.1-py3-none-any.whl", hash = "sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"},
|
||||||
|
{file = "fancycompleter-0.9.1.tar.gz", hash = "sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272"},
|
||||||
|
]
|
||||||
greenlet = []
|
greenlet = []
|
||||||
idna = []
|
idna = []
|
||||||
|
iniconfig = []
|
||||||
ipdb = []
|
ipdb = []
|
||||||
 ipython = []
 jedi = []
@ -615,13 +639,16 @@ mako = []
 markupsafe = []
 "mastodon.py" = []
 matplotlib-inline = []
-more-itertools = []
 mysqlclient = []
 packaging = []
 parso = [
     {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"},
     {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"},
 ]
+pdbpp = [
+    {file = "pdbpp-0.10.3-py2.py3-none-any.whl", hash = "sha256:79580568e33eb3d6f6b462b1187f53e10cd8e4538f7d31495c9181e2cf9665d1"},
+    {file = "pdbpp-0.10.3.tar.gz", hash = "sha256:d9e43f4fda388eeb365f2887f4e7b66ac09dce9b6236b76f63616530e2f669f5"},
+]
 pexpect = [
     {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"},
     {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"},
@ -631,8 +658,8 @@ pickleshare = [
     {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"},
 ]
 pluggy = [
-    {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
-    {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
+    {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
+    {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
 ]
 prompt-toolkit = []
 ptyprocess = [
@ -643,24 +670,17 @@ pure-eval = [
     {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"},
     {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"},
 ]
-py = [
-    {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
-    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
-]
 pygments = []
-pyqt5 = []
-pyqt5-qt5 = [
-    {file = "PyQt5_Qt5-5.15.2-py3-none-macosx_10_13_intel.whl", hash = "sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154"},
-    {file = "PyQt5_Qt5-5.15.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a"},
-    {file = "PyQt5_Qt5-5.15.2-py3-none-win32.whl", hash = "sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327"},
-    {file = "PyQt5_Qt5-5.15.2-py3-none-win_amd64.whl", hash = "sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962"},
+pyreadline = [
+    {file = "pyreadline-2.1.win-amd64.exe", hash = "sha256:9ce5fa65b8992dfa373bddc5b6e0864ead8f291c94fbfec05fbd5c836162e67b"},
+    {file = "pyreadline-2.1.win32.exe", hash = "sha256:65540c21bfe14405a3a77e4c085ecfce88724743a4ead47c66b84defcf82c32e"},
+    {file = "pyreadline-2.1.zip", hash = "sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1"},
 ]
-pyqt5-sip = []
-pyqt5-stubs = []
-pytest = [
-    {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"},
-    {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"},
+pyrepl = [
+    {file = "pyrepl-0.9.0.tar.gz", hash = "sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"},
 ]
+pytest = []
+pytest-env = []
 python-dateutil = [
     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
@ -684,3 +704,6 @@ wcwidth = [
     {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"},
     {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"},
 ]
+wmctrl = [
+    {file = "wmctrl-0.4.tar.gz", hash = "sha256:66cbff72b0ca06a22ec3883ac3a4d7c41078bdae4fb7310f52951769b10e14e0"},
+]
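
Taken together, these lock-file hunks drop the PyQt5 wheels along with py and more-itertools, pick up pdbpp, pyreadline, pyrepl, pytest-env and wmctrl, and move pluggy from 0.13.1 to 1.0.0. A quick way to sanity-check which versions actually landed after a poetry install is the snippet below; it is a hypothetical local helper, not part of this commit.

# check_versions.py (hypothetical helper, not part of this commit):
# print the installed versions of the packages touched by this lock-file update.
from importlib.metadata import PackageNotFoundError, version

for name in ("pluggy", "pytest", "pytest-env", "pdbpp", "wmctrl"):
    try:
        print(f"{name}: {version(name)}")
    except PackageNotFoundError:
        print(f"{name}: not installed")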

pyproject.toml
@ -11,13 +11,11 @@ stackprinter = "^0.2.10"
 SQLAlchemy = "^1.4.45"
 mysqlclient = "^2.1.1"
 alembic = "^1.9.1"
-PyQt5 = "^5.15.7"
-PyQt5-sip = "^12.11.0"
 
 [tool.poetry.dev-dependencies]
-pytest = "^5.2"
 ipdb = "^0.13.11"
-PyQt5-stubs = "^5.15.6"
+pytest-env = "^0.8.1"
+pdbpp = "^0.10.3"
 
 [build-system]
 requires = ["poetry-core>=1.0.0"]
@ -27,6 +25,9 @@ build-backend = "poetry.core.masonry.api"
 mypy_path = "/home/kae/.cache/pypoetry/virtualenvs/urma-e3I_sS5U-py3.9:/home/kae/git/urma/app"
 plugins = "sqlalchemy.ext.mypy.plugin"
 
+[tool.pytest.ini_options]
+addopts = "-xls --pdb"
+
 [tool.vulture]
 exclude = ["migrations"]
 paths = ["app"]
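
The new [tool.pytest.ini_options] table makes every pytest run stop at the first failure (-x), show local variables in tracebacks (-l), skip output capturing (-s) and drop into the debugger on failure (--pdb), which pairs naturally with the pdbpp dev dependency added above. As a rough illustration (a hypothetical helper, not part of the commit), the same flags can be passed to pytest programmatically:

# run_tests.py (hypothetical, not part of this commit): invoke pytest with the
# same options that the addopts line above applies automatically.
import sys

import pytest

if __name__ == "__main__":
    # -x: stop after first failure, -l: show locals, -s: no capture, --pdb: debugger
    sys.exit(pytest.main(["-x", "-l", "-s", "--pdb", "tests"]))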

@ -1,5 +0,0 @@
-from urma import __version__
-
-
-def test_version():
-    assert __version__ == '0.1.0'

117
tests/test_models.py
Normal file
@ -0,0 +1,117 @@
+from app.models import (
+    Accounts,
+    Hashtags,
+    Posts,
+)
+
+
+def test_accounts_creation(session):
+    """Account creation"""
+
+    account_id = "109568725613662482"
+
+    acct = Accounts(session, account_id)
+    assert acct
+    assert acct.account_id == account_id
+    assert acct.username is None
+
+
+def test_create_or_add_account(session):
+    """Check we can retrieve existing account"""
+
+    account_id = "109568725613662482"
+
+    acct = Accounts.get_or_create(session, account_id)
+    acct2 = Accounts.get_or_create(session, account_id)
+    assert acct is acct2
+
+
+def test_get_followed_accounts(session):
+    """Test retrieval of followed accounts"""
+
+    account1_id = "109568725613662482"
+    account2_id = "109568725613662483"
+
+    acct1 = Accounts.get_or_create(session, account1_id)
+    acct2 = Accounts.get_or_create(session, account2_id)
+
+    acct2.followed = True
+    session.flush()
+
+    accts_followed = Accounts.get_followed(session)
+    assert acct1 not in accts_followed
+    assert acct2 in accts_followed
+
+
+def test_hashtags_access(session):
+    """Test we can access hashtags table"""
+
+    result = Hashtags.get_all(session)
+    assert result == []
+
+
+def test_create_hashtag(session):
+    """Create a hashtag"""
+
+    h_name = "MyHashtag"
+    h_url = "https://example.com"
+
+    ht = Hashtags.get_or_create(session, h_name, h_url)
+    assert ht
+    assert ht.name == h_name
+    assert ht.url == h_url
+
+
+def test_create_or_add_hashtag(session):
+    """Check we can retrieve existing hashtag"""
+
+    h_name = "MyHashtag"
+    h_url = "https://example.com"
+
+    ht = Hashtags.get_or_create(session, h_name, h_url)
+    ht2 = Hashtags.get_or_create(session, h_name, h_url)
+    assert ht is ht2
+
+
+def test_get_followed_hashtags(session):
+    """Test retrieval of followed hashtags"""
+
+    ht1 = "HashTagOne"
+    ht1_url = "https://one.example.com"
+    ht2 = "HashTagTwo"
+    ht2_url = "https://two.example.com"
+
+    hashtag1 = Hashtags.get_or_create(session, ht1, ht1_url)
+    hashtag2 = Hashtags.get_or_create(session, ht2, ht2_url)
+
+    hashtag2.followed = True
+    session.flush()
+
+    hashtags_followed = Hashtags.get_followed(session)
+    assert hashtag1 not in hashtags_followed
+    assert hashtag2 in hashtags_followed
+
+
+def test_create_posts(session):
+    """Test we can create posts"""
+
+    post_id = "109666763623624320"
+
+    post = Posts(session, post_id)
+    assert post.post_id == post_id
+    assert post.account_id is None
+
+
+def test_get_by_post_id(session):
+    """Retrieve by post ID"""
+
+    post1_id = "109666763623624320"
+    post2_id = "109666763623624321"
+
+    post1 = Posts(session, post1_id)
+    post2 = Posts(session, post2_id)
+
+    post = Posts.get_by_post_id(session, post1_id)
+
+    assert post is post1
+    assert post is not post2
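
These tests all take a session fixture that is not part of this diff; it would normally be defined in tests/conftest.py. Below is a minimal sketch of what such a fixture could look like, assuming app.models exposes the SQLAlchemy declarative Base and that an in-memory SQLite database is acceptable for the test run; the project's real fixture may well differ (for example by pointing at a MySQL test database or reading connection details injected via pytest-env).

# tests/conftest.py -- a minimal sketch under the assumptions stated above,
# not the project's actual fixture.
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from app.models import Base  # assumption: the declarative Base lives here


@pytest.fixture
def session():
    """Yield a throwaway SQLAlchemy session backed by an in-memory SQLite DB."""
    engine = create_engine("sqlite://")   # in-memory database
    Base.metadata.create_all(engine)      # create the Accounts/Hashtags/Posts tables
    TestingSession = sessionmaker(bind=engine)
    sess = TestingSession()
    try:
        yield sess
    finally:
        sess.rollback()                   # discard anything the test wrote
        sess.close()
        engine.dispose()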