Merge branch 'unittests' into mastodon

This commit is contained in:
Manuel Cortez 2022-02-24 15:07:11 -06:00
commit e3137f4c3d
No known key found for this signature in database
GPG Key ID: 9E0735CA15EFE790
9 changed files with 272 additions and 203 deletions

View File

@@ -1,4 +1,6 @@
wxpython
pytest
coverage
wheel
six
configobj

View File

@@ -21,7 +21,7 @@ def parse(s):
lst.remove(item)
#end if
if len(lst) > 1: #more than one key, parse error
raise ValueError, 'unknown modifier %s' % lst[0]
raise ValueError('unknown modifier %s' % lst[0])
return (m, lst[0].lower())
class AtspiThread(threading.Thread):
def run(self):
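The one-line change above is a Python 3 compatibility fix: the comma form of the raise statement was removed in Python 3, where it is a SyntaxError. A minimal illustration, not part of the commit:

# Python 2 only; a SyntaxError under Python 3:
#   raise ValueError, 'unknown modifier %s' % lst[0]
# Works under both Python 2 and 3:
raise ValueError('unknown modifier %s' % lst[0])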

View File

@@ -136,8 +136,7 @@ class Session(base.baseSession):
if self.settings["twitter"]["user_key"] != None and self.settings["twitter"]["user_secret"] != None:
try:
log.debug("Logging in to twitter...")
self.auth = tweepy.OAuth1UserHandler(appkeys.twitter_api_key, appkeys.twitter_api_secret)
self.auth.set_access_token(self.settings["twitter"]["user_key"], self.settings["twitter"]["user_secret"])
self.auth = tweepy.OAuth1UserHandler(consumer_key=appkeys.twitter_api_key, consumer_secret=appkeys.twitter_api_secret, access_token=self.settings["twitter"]["user_key"], access_token_secret=self.settings["twitter"]["user_secret"])
self.twitter = tweepy.API(self.auth)
self.twitter_v2 = tweepy.Client(consumer_key=appkeys.twitter_api_key, consumer_secret=appkeys.twitter_api_secret, access_token=self.settings["twitter"]["user_key"], access_token_secret=self.settings["twitter"]["user_secret"])
if verify_credentials == True:
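For reference, the change above folds the separate set_access_token() call into the OAuth1UserHandler constructor, which accepts all four credentials directly in tweepy 4.x, and reuses the same credentials for the v2 Client. A minimal sketch with placeholder values (the literal strings are stand-ins, not the app's real keys):

import tweepy

auth = tweepy.OAuth1UserHandler(
    consumer_key="API_KEY", consumer_secret="API_SECRET",
    access_token="USER_KEY", access_token_secret="USER_SECRET")
api = tweepy.API(auth)  # Twitter API v1.1 endpoints
client = tweepy.Client(  # Twitter API v2 endpoints, same credentials
    consumer_key="API_KEY", consumer_secret="API_SECRET",
    access_token="USER_KEY", access_token_secret="USER_SECRET")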

View File

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@@ -0,0 +1,201 @@
# -*- coding: utf-8 -*-
import sys
import types
import pytest
import os
import sqlitedict
import shutil
from unittest import mock
# Mock the sound module so libVLC won't complain.
sound_module = types.ModuleType("sound")
sys.modules["sound"] = sound_module
sound_module.soundManager = mock.MagicMock(name="sound.soundManager")
from sessions import base
# Path where we will save our test config, as we can't rely on the paths module (pytest runs from a different location).
session_path = os.path.join(os.getcwd(), "config", "testing")
@pytest.fixture
def session():
""" Configures a fake base session from where we can test things. """
global session_path
s = base.baseSession("testing")
if not os.path.exists(session_path):
os.mkdir(session_path)
# Patches paths.app_path and paths.config_path, so we will not have issues during session configuration.
with mock.patch("paths.app_path", return_value=os.getcwd()) as app_path:
with mock.patch("paths.config_path", return_value=os.path.join(os.getcwd(), "config")) as config_path:
s.get_configuration()
yield s
# Session's cleanup code.
if os.path.exists(session_path):
shutil.rmtree(session_path)
del s
@pytest.fixture
def dataset():
""" Generates a sample dataset"""
dataset = dict(home_timeline=["message" for i in range(10000)], mentions_timeline=["mention" for i in range(20000)])
yield dataset
### Testing database being read from disk.
def test_cache_in_disk_unlimited_size(session, dataset):
""" Tests cache database being read from disk, storing the whole datasets. """
session.settings["general"]["load_cache_in_memory"] = False
session.settings["general"]["persist_size"] = -1
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
session.save_persistent_data()
assert isinstance(session.db, sqlitedict.SqliteDict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert len(session.db.get("home_timeline")) == 10000
assert len(session.db.get("mentions_timeline")) == 20000
session.db.close()
def test_cache_in_disk_limited_dataset(session, dataset):
""" Tests wether the cache stores only the amount of items we ask it to store. """
session.settings["general"]["load_cache_in_memory"] = False
session.settings["general"]["persist_size"] = 100
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and reload the db because we cannot change a buffer's size while the database is open.
# As TWBlue reads directly from the db when reading from disk, changing buffer sizes while the db is being read
# might cause the GUI lists and the database to go out of sync.
# So, when the db is read from disk, buffer size changes are applied while loading data during app startup.
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, sqlitedict.SqliteDict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert len(session.db.get("home_timeline")) == 100
assert len(session.db.get("mentions_timeline")) == 100
session.db.close()
def test_cache_in_disk_limited_dataset_unreversed(session):
"""Test if the cache is saved properly in unreversed buffers, when newest items are at the end of the list. """
dataset = dict(home_timeline=[i for i in range(20)], mentions_timeline=[i for i in range(20)])
session.settings["general"]["load_cache_in_memory"] = False
session.settings["general"]["persist_size"] = 10
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and reload the db because we cannot change a buffer's size while the database is open.
# As TWBlue reads directly from the db when reading from disk, changing buffer sizes while the db is being read
# might cause the GUI lists and the database to go out of sync.
# So, when the db is read from disk, buffer size changes are applied while loading data during app startup.
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, sqlitedict.SqliteDict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert session.db.get("home_timeline")[0] == 10
assert session.db.get("mentions_timeline")[0] == 10
assert session.db.get("home_timeline")[-1] == 19
assert session.db.get("mentions_timeline")[-1] == 19
session.db.close()
def test_cache_in_disk_limited_dataset_reversed(session):
"""Test if the cache is saved properly in reversed buffers, when newest items are at the start of the list. """
dataset = dict(home_timeline=[i for i in range(19, -1, -1)], mentions_timeline=[i for i in range(19, -1, -1)])
session.settings["general"]["load_cache_in_memory"] = False
session.settings["general"]["persist_size"] = 10
session.settings["general"]["reverse_timelines"] = True
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and reload the db because we cannot change a buffer's size while the database is open.
# As TWBlue reads directly from the db when reading from disk, changing buffer sizes while the db is being read
# might cause the GUI lists and the database to go out of sync.
# So, when the db is read from disk, buffer size changes are applied while loading data during app startup.
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, sqlitedict.SqliteDict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert session.db.get("home_timeline")[0] == 19
assert session.db.get("mentions_timeline")[0] == 19
assert session.db.get("home_timeline")[-1] == 10
assert session.db.get("mentions_timeline")[-1] == 10
session.db.close()
### Testing database being loaded into memory. These tests should give the same results as before,
### but as we have different code paths depending on whether we load the db into memory or read it from disk,
### we need to test both cases anyway.
def test_cache_in_memory_unlimited_size(session, dataset):
""" Tests cache database being loaded in memory, storing the whole datasets. """
session.settings["general"]["load_cache_in_memory"] = True
session.settings["general"]["persist_size"] = -1
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, dict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert len(session.db.get("home_timeline")) == 10000
assert len(session.db.get("mentions_timeline")) == 20000
def test_cache_in_memory_limited_dataset(session, dataset):
""" Tests wether the cache stores only the amount of items we ask it to store, when loaded in memory. """
session.settings["general"]["load_cache_in_memory"] = True
session.settings["general"]["persist_size"] = 100
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, dict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert len(session.db.get("home_timeline")) == 100
assert len(session.db.get("mentions_timeline")) == 100
def test_cache_in_memory_limited_dataset_unreversed(session):
"""Test if the cache is saved properly when loaded in memory in unreversed buffers, when newest items are at the end of the list. """
dataset = dict(home_timeline=[i for i in range(20)], mentions_timeline=[i for i in range(20)])
session.settings["general"]["load_cache_in_memory"] = True
session.settings["general"]["persist_size"] = 10
session.load_persistent_data()
assert len(session.db) == 1
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, dict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert session.db.get("home_timeline")[0] == 10
assert session.db.get("mentions_timeline")[0] == 10
assert session.db.get("home_timeline")[-1] == 19
assert session.db.get("mentions_timeline")[-1] == 19
def test_cache_in_memory_limited_dataset_reversed(session):
"""Test if the cache is saved properly in reversed buffers, when newest items are at the start of the list. This test if for db read into memory. """
dataset = dict(home_timeline=[i for i in range(19, -1, -1)], mentions_timeline=[i for i in range(19, -1, -1)])
session.settings["general"]["load_cache_in_memory"] = True
session.settings["general"]["persist_size"] = 10
session.settings["general"]["reverse_timelines"] = True
session.load_persistent_data()
session.db["home_timeline"] = dataset["home_timeline"]
session.db["mentions_timeline"] = dataset["mentions_timeline"]
session.save_persistent_data()
session.db = dict()
session.load_persistent_data()
assert isinstance(session.db, dict)
assert session.db.get("home_timeline") != None
assert session.db.get("mentions_timeline") != None
assert session.db.get("home_timeline")[0] == 19
assert session.db.get("mentions_timeline")[0] == 19
assert session.db.get("home_timeline")[-1] == 10
assert session.db.get("mentions_timeline")[-1] == 10

View File

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@@ -0,0 +1,13 @@
# -*- coding: utf-8 -*-
import pytest
from tweepy.models import Status
@pytest.fixture
def basic_tweet():
data = {'created_at': 'Mon Jan 03 15:03:36 +0000 2022', 'id': 1478019218884857856, 'id_str': '1478019218884857856', 'full_text': 'Changes in projects for next year https://t.co/nW3GS9RmHd', 'truncated': False, 'display_text_range': [0, 57], 'entities': {'hashtags': [], 'symbols': [], 'user_mentions': [], 'urls': [{'url': 'https://t.co/nW3GS9RmHd', 'expanded_url': 'https://manuelcortez.net/blog/changes-in-projects-for-next-year/#.YdMQQU6t1FI.twitter', 'display_url': 'manuelcortez.net/blog/changes-i…', 'indices': [34, 57]}]}, 'source': '<a href="https://mobile.twitter.com" rel="nofollow">Twitter Web App</a>', 'in_reply_to_status_id': None, 'in_reply_to_status_id_str': None, 'in_reply_to_user_id': None, 'in_reply_to_user_id_str': None, 'in_reply_to_screen_name': None, 'user': {'id': 258677951, 'id_str': '258677951', 'name': 'Manuel Cortez', 'screen_name': 'manuelcortez00', 'location': 'Nuevo León, México', 'description': 'Python developer, , interested in reading, accessibility, astronomy, physics and science. Я учу русский.', 'url': 'https://t.co/JFRKRA73ZV', 'entities': {'url': {'urls': [{'url': 'https://t.co/JFRKRA73ZV', 'expanded_url': 'https://manuelcortez.net', 'display_url': 'manuelcortez.net', 'indices': [0, 23]}]}, 'description': {'urls': []}}, 'protected': False, 'followers_count': 1453, 'friends_count': 568, 'listed_count': 45, 'created_at': 'Mon Feb 28 06:52:48 +0000 2011', 'favourites_count': 283, 'utc_offset': None, 'time_zone': None, 'geo_enabled': True, 'verified': False, 'statuses_count': 43371, 'lang': None, 'contributors_enabled': False, 'is_translator': False, 'is_translation_enabled': False, 'profile_background_color': 'C0DEED', 'profile_background_image_url': 'http://abs.twimg.com/images/themes/theme1/bg.png', 'profile_background_image_url_https': 'https://abs.twimg.com/images/themes/theme1/bg.png', 'profile_background_tile': False, 'profile_image_url': 'http://pbs.twimg.com/profile_images/442466677645508608/3EBBC-OX_normal.jpeg', 'profile_image_url_https': 'https://pbs.twimg.com/profile_images/442466677645508608/3EBBC-OX_normal.jpeg', 'profile_image_extensions_alt_text': None, 'profile_link_color': '1DA1F2', 'profile_sidebar_border_color': 'C0DEED', 'profile_sidebar_fill_color': 'DDEEF6', 'profile_text_color': '333333', 'profile_use_background_image': True, 'has_extended_profile': False, 'default_profile': True, 'default_profile_image': False, 'following': False, 'follow_request_sent': False, 'notifications': False, 'translator_type': 'regular', 'withheld_in_countries': []}, 'geo': None, 'coordinates': None, 'place': None, 'contributors': None, 'is_quote_status': False, 'retweet_count': 6, 'favorite_count': 2, 'favorited': False, 'retweeted': False, 'possibly_sensitive': False, 'possibly_sensitive_appealable': False, 'lang': 'en'}
yield Status().parse(api=None, json=data)
@pytest.fixture
def basic_tweet_multiple_mentions():
data = {'created_at': 'Mon Dec 27 21:21:25 +0000 2021', 'id': 1475577584947707909, 'id_str': '1475577584947707909', 'full_text': '@tamaranatalia9 @Darkstrings @Chris88171572 @manuelcortez00 Well done, thanks Tamara', 'truncated': False, 'display_text_range': [60, 84], 'entities': {'hashtags': [], 'symbols': [], 'user_mentions': [{'screen_name': 'tamaranatalia9', 'name': 'Tamara', 'id': 914114584591597568, 'id_str': '914114584591597568', 'indices': [0, 15]}, {'screen_name': 'Darkstrings', 'name': 'Luc', 'id': 1374154151115042823, 'id_str': '1374154151115042823', 'indices': [16, 28]}, {'screen_name': 'Chris88171572', 'name': 'Chris', 'id': 1323980014799495168, 'id_str': '1323980014799495168', 'indices': [29, 43]}, {'screen_name': 'manuelcortez00', 'name': 'Manuel Cortez', 'id': 258677951, 'id_str': '258677951', 'indices': [44, 59]}], 'urls': []}, 'source': '<a href="http://twitter.com/download/android" rel="nofollow">Twitter for Android</a>', 'in_reply_to_status_id': 1475550502083563526, 'in_reply_to_status_id_str': '1475550502083563526', 'in_reply_to_user_id': 914114584591597568, 'in_reply_to_user_id_str': '914114584591597568', 'in_reply_to_screen_name': 'tamaranatalia9', 'user': {'id': 784837522157436929, 'id_str': '784837522157436929', 'name': 'Paulus', 'screen_name': 'PauloPer01', 'location': '', 'description': '', 'url': None, 'entities': {'description': {'urls': []}}, 'protected': False, 'followers_count': 1082, 'friends_count': 3029, 'listed_count': 2, 'created_at': 'Sat Oct 08 19:27:01 +0000 2016', 'favourites_count': 78862, 'utc_offset': None, 'time_zone': None, 'geo_enabled': False, 'verified': False, 'statuses_count': 4976, 'lang': None, 'contributors_enabled': False, 'is_translator': False, 'is_translation_enabled': False, 'profile_background_color': 'F5F8FA', 'profile_background_image_url': None, 'profile_background_image_url_https': None, 'profile_background_tile': False, 'profile_image_url': 'http://pbs.twimg.com/profile_images/1464572633014587395/246oPPLa_normal.jpg', 'profile_image_url_https': 'https://pbs.twimg.com/profile_images/1464572633014587395/246oPPLa_normal.jpg', 'profile_image_extensions_alt_text': None, 'profile_link_color': '1DA1F2', 'profile_sidebar_border_color': 'C0DEED', 'profile_sidebar_fill_color': 'DDEEF6', 'profile_text_color': '333333', 'profile_use_background_image': True, 'has_extended_profile': True, 'default_profile': True, 'default_profile_image': False, 'following': False, 'follow_request_sent': False, 'notifications': False, 'translator_type': 'none', 'withheld_in_countries': []}, 'geo': None, 'coordinates': None, 'place': None, 'contributors': None, 'is_quote_status': False, 'retweet_count': 1, 'favorite_count': 2, 'favorited': False, 'retweeted': False, 'lang': 'en'}
yield Status().parse(api=None, json=data)
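pytest injects these Status fixtures into any test that names them as a parameter, assuming this file is discovered as a conftest.py (which its contents suggest; the file name itself is not shown in the diff). A minimal usage sketch:

def test_full_text(basic_tweet):
    # pytest resolves the basic_tweet argument to the fixture defined above.
    assert basic_tweet.full_text.startswith("Changes in projects")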

View File

@@ -0,0 +1,52 @@
# -*- coding: utf-8 -*-
import pytest
import gettext
import datetime
gettext.install("test")
from unittest import mock
from sessions.twitter import templates
def test_default_values():
""" Tests wheter default values are the expected ones.
This might be useful so we will have this failing when we update anything from those values.
As TWBlue might be using those from other dialogs.
"""
assert templates.tweet_variables == ["date", "display_name", "screen_name", "source", "lang", "text", "image_descriptions"]
assert templates.dm_variables == ["date", "sender_display_name", "sender_screen_name", "recipient_display_name", "recipient_display_name", "text"]
assert templates.person_variables == ["display_name", "screen_name", "location", "description", "followers", "following", "listed", "likes", "tweets", "created_at"]
@pytest.mark.parametrize("offset, language, expected_result", [
(0, "en_US", "Wednesday, October 10, 2018 20:19:24"),
(-21600, "en_US", "Wednesday, October 10, 2018 14:19:24"),
(7200, "en_US", "Wednesday, October 10, 2018 22:19:24"),
(0, "es_ES", "miércoles, octubre 10, 2018 20:19:24"),
(-21600, "es_ES", "miércoles, octubre 10, 2018 14:19:24"),
(7200, "es_ES", "miércoles, octubre 10, 2018 22:19:24"),
(18000, "es_ES", "jueves, octubre 11, 2018 1:19:24"),
])
def test_process_date_absolute_time(offset, language, expected_result):
""" Tests date processing function for tweets, when relative_times is set to False. """
# Date representation used by twitter, converted to datetime object, as tweepy already does this.
# Original date was Wed Oct 10 20:19:24 +0000 2018
date_field = datetime.datetime(2018, 10, 10, 20, 19, 24)
with mock.patch("languageHandler.curLang", new=language):
processed_date = templates.process_date(date_field, relative_times=False, offset_seconds=offset)
assert processed_date == expected_result
def test_process_date_relative_time():
date_field = datetime.datetime(2018, 10, 10, 20, 19, 24)
with mock.patch("languageHandler.curLang", new="es_ES"):
processed_date = templates.process_date(date_field, relative_times=True, offset_seconds=7200)
# As this depends on relative times, which are subject to change, do some light checks here and assume the string stays valid.
assert isinstance(processed_date, str)
assert "hace" in processed_date and "años" in processed_date
def test_process_text_basic_tweet(basic_tweet):
expected_result = "Changes in projects for next year https://manuelcortez.net/blog/changes-in-projects-for-next-year/#.YdMQQU6t1FI.twitter"
text = templates.process_text(basic_tweet)
assert text == expected_result
def test_process_text_basic_tweet_multiple_mentions(basic_tweet_multiple_mentions):
expected_result = "@tamaranatalia9, @Darkstrings and 2 more: Well done, thanks Tamara"
text = templates.process_text(basic_tweet_multiple_mentions)
assert text == expected_result
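One detail worth noting in this file: gettext.install("test") runs before sessions.twitter.templates is imported, presumably because that module calls the translation function _() at import time. In isolation:

import gettext
gettext.install("test")  # binds _() into builtins
# With no compiled catalog for the "test" domain, _() falls back to the identity.
assert _("hello") == "hello"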

View File

@@ -1,200 +0,0 @@
# -*- coding: utf-8 -*-
""" Test case to check some of the scenarios we might face when storing tweets in cache, both loading into memory or rreading from disk. """
import unittest
import os
import paths
import sqlitedict
import shutil
# The base session module requires sound as a dependency, and this needs libVLC to be locatable.
os.environ['PYTHON_VLC_MODULE_PATH']=os.path.abspath(os.path.join(paths.app_path(), "..", "windows-dependencies", "x86"))
os.environ['PYTHON_VLC_LIB_PATH']=os.path.abspath(os.path.join(paths.app_path(), "..", "windows-dependencies", "x86", "libvlc.dll"))
from sessions import base
class cacheTestCase(unittest.TestCase):
def setUp(self):
""" Configures a fake session to check caching objects here. """
self.session = base.baseSession("testing")
if os.path.exists(os.path.join(paths.config_path(), "testing")) == False:
os.mkdir(os.path.join(paths.config_path(), "testing"))
self.session.get_configuration()
def tearDown(self):
""" Removes the previously configured session. """
session_folder = os.path.join(paths.config_path(), "testing")
if os.path.exists(session_folder):
shutil.rmtree(session_folder)
def generate_dataset(self):
""" Generates a sample dataset"""
dataset = dict(home_timeline=["message" for i in range(10000)], mentions_timeline=["mention" for i in range(20000)])
return dataset
### Testing database being read from disk.
def test_cache_in_disk_unlimited_size(self):
""" Tests cache database being read from disk, storing the whole datasets. """
dataset = self.generate_dataset()
self.session.settings["general"]["load_cache_in_memory"] = False
self.session.settings["general"]["persist_size"] = -1
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
self.session.save_persistent_data()
self.assertIsInstance(self.session.db, sqlitedict.SqliteDict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(len(self.session.db.get("home_timeline")), 10000)
self.assertEquals(len(self.session.db.get("mentions_timeline")), 20000)
self.session.db.close()
def test_cache_in_disk_limited_dataset(self):
""" Tests wether the cache stores only the amount of items we ask it to store. """
dataset = self.generate_dataset()
self.session.settings["general"]["load_cache_in_memory"] = False
self.session.settings["general"]["persist_size"] = 100
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and load the db again because we cannot modify buffers' size while the database is opened.
# As TWBlue reads directly from db when reading from disk, an attempt to modify buffers size while Blue is reading the db
# Might cause an out of sync error between the GUI lists and the database.
# So we perform the changes to buffer size when loading data during app startup if the DB is read from disk.
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, sqlitedict.SqliteDict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(len(self.session.db.get("home_timeline")), 100)
self.assertEquals(len(self.session.db.get("mentions_timeline")), 100)
self.session.db.close()
def test_cache_in_disk_limited_dataset_unreversed(self):
"""Test if the cache is saved properly in unreversed buffers, when newest items are at the end of the list. """
dataset = dict(home_timeline=[i for i in range(20)], mentions_timeline=[i for i in range(20)])
self.session.settings["general"]["load_cache_in_memory"] = False
self.session.settings["general"]["persist_size"] = 10
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and load the db again because we cannot modify buffers' size while the database is opened.
# As TWBlue reads directly from db when reading from disk, an attempt to modify buffers size while Blue is reading the db
# Might cause an out of sync error between the GUI lists and the database.
# So we perform the changes to buffer size when loading data during app startup if the DB is read from disk.
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, sqlitedict.SqliteDict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(self.session.db.get("home_timeline")[0], 10)
self.assertEquals(self.session.db.get("mentions_timeline")[0], 10)
self.assertEquals(self.session.db.get("home_timeline")[-1], 19)
self.assertEquals(self.session.db.get("mentions_timeline")[-1], 19)
self.session.db.close()
def test_cache_in_disk_limited_dataset_reversed(self):
"""Test if the cache is saved properly in reversed buffers, when newest items are at the start of the list. """
dataset = dict(home_timeline=[i for i in range(19, -1, -1)], mentions_timeline=[i for i in range(19, -1, -1)])
self.session.settings["general"]["load_cache_in_memory"] = False
self.session.settings["general"]["persist_size"] = 10
self.session.settings["general"]["reverse_timelines"] = True
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
# We need to save and load the db again because we cannot modify buffers' size while the database is opened.
# As TWBlue reads directly from db when reading from disk, an attempt to modify buffers size while Blue is reading the db
# Might cause an out of sync error between the GUI lists and the database.
# So we perform the changes to buffer size when loading data during app startup if the DB is read from disk.
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, sqlitedict.SqliteDict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(self.session.db.get("home_timeline")[0], 19)
self.assertEquals(self.session.db.get("mentions_timeline")[0], 19)
self.assertEquals(self.session.db.get("home_timeline")[-1], 10)
self.assertEquals(self.session.db.get("mentions_timeline")[-1], 10)
self.session.db.close()
### Testing database being loaded into memory. Those tests should give the same results than before
### but as we have different code depending whether we load db into memory or read it from disk,
### We need to test this anyways.
def test_cache_in_memory_unlimited_size(self):
""" Tests cache database being loaded in memory, storing the whole datasets. """
dataset = self.generate_dataset()
self.session.settings["general"]["load_cache_in_memory"] = True
self.session.settings["general"]["persist_size"] = -1
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, dict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(len(self.session.db.get("home_timeline")), 10000)
self.assertEquals(len(self.session.db.get("mentions_timeline")), 20000)
def test_cache_in_memory_limited_dataset(self):
""" Tests wether the cache stores only the amount of items we ask it to store, when loaded in memory. """
dataset = self.generate_dataset()
self.session.settings["general"]["load_cache_in_memory"] = True
self.session.settings["general"]["persist_size"] = 100
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, dict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(len(self.session.db.get("home_timeline")), 100)
self.assertEquals(len(self.session.db.get("mentions_timeline")), 100)
def test_cache_in_memory_limited_dataset_unreversed(self):
"""Test if the cache is saved properly when loaded in memory in unreversed buffers, when newest items are at the end of the list. """
dataset = dict(home_timeline=[i for i in range(20)], mentions_timeline=[i for i in range(20)])
self.session.settings["general"]["load_cache_in_memory"] = True
self.session.settings["general"]["persist_size"] = 10
self.session.load_persistent_data()
self.assertTrue(len(self.session.db)==1)
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, dict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(self.session.db.get("home_timeline")[0], 10)
self.assertEquals(self.session.db.get("mentions_timeline")[0], 10)
self.assertEquals(self.session.db.get("home_timeline")[-1], 19)
self.assertEquals(self.session.db.get("mentions_timeline")[-1], 19)
def test_cache_in_memory_limited_dataset_reversed(self):
"""Test if the cache is saved properly in reversed buffers, when newest items are at the start of the list. This test if for db read into memory. """
dataset = dict(home_timeline=[i for i in range(19, -1, -1)], mentions_timeline=[i for i in range(19, -1, -1)])
self.session.settings["general"]["load_cache_in_memory"] = True
self.session.settings["general"]["persist_size"] = 10
self.session.settings["general"]["reverse_timelines"] = True
self.session.load_persistent_data()
self.session.db["home_timeline"] = dataset["home_timeline"]
self.session.db["mentions_timeline"] = dataset["mentions_timeline"]
self.session.save_persistent_data()
self.session.db = dict()
self.session.load_persistent_data()
self.assertIsInstance(self.session.db, dict)
self.assertTrue(self.session.db.get("home_timeline") != None)
self.assertTrue(self.session.db.get("mentions_timeline") != None)
self.assertEquals(self.session.db.get("home_timeline")[0], 19)
self.assertEquals(self.session.db.get("mentions_timeline")[0], 19)
self.assertEquals(self.session.db.get("home_timeline")[-1], 10)
self.assertEquals(self.session.db.get("mentions_timeline")[-1], 10)
if __name__ == "__main__":
unittest.main()
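Taken together, the deletion above and the new pytest module earlier in the diff amount to a direct unittest-to-pytest migration: setUp/tearDown pairs become a yielding fixture, and assertion methods become plain asserts. In miniature, with hypothetical helper names:

# Before: unittest style.
import unittest

class CacheTestCase(unittest.TestCase):
    def setUp(self):
        self.session = make_session()   # make_session() is a hypothetical helper
    def tearDown(self):
        self.session.cleanup()
    def test_size(self):
        self.assertEqual(len(self.session.db), 1)

# After: pytest style.
import pytest

@pytest.fixture
def session():
    s = make_session()                  # same hypothetical helper
    yield s                             # everything after the yield runs as teardown
    s.cleanup()

def test_size(session):
    assert len(session.db) == 1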