From 02ccdfd8637606ae05118bcdfdbc154298fcd4af Mon Sep 17 00:00:00 2001
From: "Casper V. Kristensen"
Date: Fri, 15 Mar 2019 01:24:26 +0100
Subject: [PATCH] Rename 'PredbRelease' to 'Pre' and label parsed releases
 simply as releases for the sake of clarity.

---
 dailyreleases/generation.py  |  31 +++++------
 dailyreleases/parsing.py     |  69 +++++++++++------------
 dailyreleases/predbs.py      |  34 ++++++------
 tests/test_parse_dirname.py  | 104 ++++++++++++++++++++++++-----------
 tests/test_parse_releases.py |  58 -------------------
 5 files changed, 136 insertions(+), 160 deletions(-)
 delete mode 100644 tests/test_parse_releases.py

diff --git a/dailyreleases/generation.py b/dailyreleases/generation.py
index c474e11..e0a4dbf 100644
--- a/dailyreleases/generation.py
+++ b/dailyreleases/generation.py
@@ -9,7 +9,7 @@ from typing import Set
 
 from . import util, reddit, predbs, parsing
 from .config import CONFIG, DATA_DIR
-from .parsing import ParsedReleases, Release, ReleaseType
+from .parsing import Releases, Release, ReleaseType
 
 logger = logging.getLogger(__name__)
 
@@ -46,7 +46,7 @@ def row(release: Release):
     return name, release.group, stores, reviews
 
 
-def generate_post(releases: ParsedReleases) -> str:
+def generate_post(releases: Releases) -> str:
     post = []
     for platform, platform_releases in releases.items():
         # Skip if platform doesn't contain any releases in any of the three release type categories
@@ -109,18 +109,16 @@ def generate(post=False, pm_recipients=None) -> None:
     logger.info("-------------------------------------------------------------------------------------------------")
     start_time = time.time()
 
-    already_posted = load_already_posted()
-
-    releases = predbs.get_releases()
-    # Remove old releases from already_posted to save space
-    already_posted.intersection_update(r.dirname for r in releases)
-
-    parsed_releases = parsing.parse_releases(releases, already_posted)
+    processed = load_processed()
+    pres = predbs.get_pres()
+    releases = parsing.parse_pres(pre
+                                  for pre in pres
+                                  if pre.dirname not in processed)
 
     # The date of the post changes at midday instead of midnight to allow calling script after 00:00
     title = f"Daily Releases ({(datetime.today() - timedelta(hours=12)).strftime('%B %-d, %Y')})"
 
-    generated_post = generate_post(parsed_releases)
+    generated_post = generate_post(releases)
     generated_post_src = textwrap.indent(generated_post, "    ")
 
     if post:
@@ -133,7 +131,8 @@ def generate(post=False, pm_recipients=None) -> None:
         reddit_src_post.mod.approve()
         reddit_post.mod.approve()
 
-        save_already_posted(already_posted)
+        # We only need to save the latest pres since older ones will never show up again
+        save_processed(p.dirname for p in pres)
 
     if pm_recipients is not None:
         msg = inspect.cleandoc(
@@ -149,15 +148,15 @@ def generate(post=False, pm_recipients=None) -> None:
     logger.info("-------------------------------------------------------------------------------------------------")
 
 
-def load_already_posted() -> Set[str]:
+def load_processed() -> Set[str]:
     try:
-        with DATA_DIR.joinpath("already_posted").open() as file:
+        with DATA_DIR.joinpath("processed").open() as file:
             return {line.rstrip() for line in file}
     except FileNotFoundError:
         return set()
 
 
-def save_already_posted(already_posted) -> None:
+def save_processed(processed) -> None:
     logger.info("Saving already posted to file")
-    with DATA_DIR.joinpath("already_posted").open("w") as file:
-        file.writelines(f"{dirname}\n" for dirname in already_posted)
+    with DATA_DIR.joinpath("processed").open("w") as file:
+        file.writelines(f"{dirname}\n" for dirname in processed)
diff --git a/dailyreleases/parsing.py b/dailyreleases/parsing.py
index 6a4635d..b7fca13 100644
--- a/dailyreleases/parsing.py
+++ b/dailyreleases/parsing.py
@@ -4,10 +4,10 @@ import string
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
 from enum import Enum
-from typing import Optional, List, Set, Dict, Iterable
+from typing import List, Dict, Iterable
 
 from . import stores
-from .predbs import PredbRelease
+from .predbs import Pre
 from .stores import steam
 
 logger = logging.getLogger(__name__)
@@ -34,12 +34,13 @@ class Platform(str, Enum):
 @dataclass
 class Release:
     dirname: str
+    nfo_link: str
+    timestamp: datetime
     rls_name: str  # dirname without group
     group: str
     game_name: str
     type: ReleaseType
     platform: Platform
-    nfo_link: str
     store_links: Dict[str, str] = field(default_factory=dict)
     tags: List[str] = field(default_factory=list)
     highlights: List[str] = field(default_factory=list)
@@ -47,6 +48,10 @@ class Release:
     score: float = -1
     num_reviews: int = -1
 
 
+class ParseError(Exception):
+    pass
+
+
 STOPWORDS = (
     "update",
     "v[0-9]+",
@@ -119,12 +124,18 @@ BLACKLISTED = (
 )
 
 
-def parse_dirname(dirname: str, nfo_link: str) -> Optional[Release]:
+def parse_pre(pre: Pre) -> Release:
+    if re.search("|".join(BLACKLISTED), pre.dirname, flags=re.IGNORECASE):
+        raise ParseError("Contains blacklisted word")
+
+    if pre.timestamp < datetime.now() - timedelta(hours=48):
+        raise ParseError("Older than 48 hours")
+
     logger.info("---")
-    logger.info("Parsing: %s", dirname)
+    logger.info("Parsing: %s", pre.dirname)
 
     # Extract group name
-    rls_name, group = dirname.rsplit("-", maxsplit=1)
+    rls_name, group = pre.dirname.rsplit("-", maxsplit=1)
 
     # Find game name by matching until one of the stopwords
     game_name, *stopwords = re.split("[._-]({})".format("|".join(STOPWORDS + TAGS + HIGHLIGHTS)),
@@ -170,17 +181,17 @@ def parse_dirname(dirname: str, nfo_link: str) -> Optional[Release]:
 
     # No store link? Probably software and not a game
     if not store_links:
-        logger.info("Skipping %s: no store link (probably software)", dirname)
-        return None
+        raise ParseError("No store link: probably software")
 
     release = Release(
-        dirname=dirname,
+        dirname=pre.dirname,
+        nfo_link=pre.nfo_link,
+        timestamp=pre.timestamp,
         rls_name=rls_name,
         group=group,
         game_name=game_name,
         type=rls_type,
         platform=platform,
-        nfo_link=nfo_link,
         store_links=store_links,
         tags=tags,
         highlights=highlights
@@ -199,33 +210,17 @@ def parse_dirname(dirname: str, nfo_link: str) -> Optional[Release]:
     return release
 
 
-ParsedReleases = Dict[Platform, Dict[ReleaseType, List[Release]]]
+Releases = Dict[Platform, Dict[ReleaseType, List[Release]]]  # {Windows: {Game: [..], DLC: [..], ..}, Linux: ...}
 
 
-def parse_releases(releases: Iterable[PredbRelease], already_posted: Set[str]) -> ParsedReleases:
-    parsed_releases = {platform: {release_type: [] for release_type in ReleaseType}
-                       for platform in Platform}  # {Windows: {Game: [..], DLC: [..], ..}, Linux: ...}
+def parse_pres(pres: Iterable[Pre]) -> Releases:
+    releases = {platform: {release_type: [] for release_type in ReleaseType} for platform in Platform}
+    for pre in pres:
+        try:
+            release = parse_pre(pre)
+            releases[release.platform][release.type].append(release)
+        except ParseError as e:
+            logger.info("Skipping %s: %s", pre.dirname, e)
 
-    for release in releases:
-        if release.dirname in already_posted:
-            logger.info("Skipping %s: dirname in already posted", release.dirname)
-            continue
-
-        if re.search("|".join(BLACKLISTED), release.dirname, flags=re.IGNORECASE):
-            logger.info("Skipping %s: contains blacklisted word", release.dirname)
-            continue
-
-        if release.timestamp < datetime.now() - timedelta(hours=48):
-            logger.info("Skipping %s: older than 48 hours (but not in already_posted!?)", release.dirname)
-            continue
-
-        release = parse_dirname(release.dirname, release.nfo_link)
-        if not release:
-            continue  # skip if there is no data about the release (e.g. if it is deemed a non-game by parse_dirname)
-
-        # Add release to dict of parsed releases by platform and type
-        parsed_releases[release.platform][release.type].append(release)
-        already_posted.add(release.dirname)
-
-    logger.debug("Parsed releases: %s", parsed_releases)
-    return parsed_releases
+    logger.debug("Parsed releases: %s", releases)
+    return releases
diff --git a/dailyreleases/predbs.py b/dailyreleases/predbs.py
index d0e5b31..4dfe3b2 100644
--- a/dailyreleases/predbs.py
+++ b/dailyreleases/predbs.py
@@ -10,28 +10,28 @@ from . import cache
 logger = logging.getLogger(__name__)
 
 
-class PredbRelease(NamedTuple):
+class Pre(NamedTuple):
     dirname: str
     nfo_link: str
     timestamp: datetime
 
 
-def get_releases() -> List[PredbRelease]:
-    logger.info("Getting releases from predbs")
+def get_pres() -> List[Pre]:
+    logger.info("Getting pres from predbs")
 
-    releases = {}
-    for db_releases in (get_predbme, get_xrel):  # in reverse order of preference
+    pres = {}
+    for get in (get_predbme, get_xrel):  # in reverse order of preference
         try:
-            releases.update((r.dirname, r) for r in db_releases())  # override duplicate dirnames in later iterations
+            pres.update((p.dirname, p) for p in get())  # override duplicate dirnames in later iterations
         except HTTPError as e:
             logger.error(e)
             logger.warning("Connection to predb failed, skipping..")
 
-    return list(releases.values())
+    return list(pres.values())
 
 
-def get_xrel(categories=("CRACKED", "UPDATE"), num_pages=2) -> List[PredbRelease]:
-    logger.debug("Getting releases from xrel.to")
+def get_xrel(categories=("CRACKED", "UPDATE"), num_pages=2) -> List[Pre]:
+    logger.debug("Getting pres from xrel.to")
 
     def get_releases_in_category(category, page):
         r = cache.get("https://api.xrel.to/v2/release/browse_category.json", params={
@@ -42,23 +42,23 @@ def get_xrel(categories=("CRACKED", "UPDATE"), num_pages=2) -> List[PredbRelease
         })
         return r.json["list"]
 
-    return [PredbRelease(rls["dirname"],
-                         rls["link_href"],
-                         datetime.fromtimestamp(rls["time"]))
+    return [Pre(rls["dirname"],
+                rls["link_href"],
+                datetime.fromtimestamp(rls["time"]))
             for category in categories
             for page in range(1, num_pages)
             for rls in get_releases_in_category(category, page)]
 
 
-def get_predbme() -> List[PredbRelease]:
-    logger.debug("Getting releases from predb.me")
+def get_predbme() -> List[Pre]:
+    logger.debug("Getting pres from predb.me")
 
     rss = cache.get("https://predb.me/?cats=games-pc&rss=1")
     soup = BeautifulSoup(rss.text, "html.parser").find_all("item")
 
     # Predb.me doesn't show timestamps in the RSS-feed, but the feed is so short it only shows ~72 hours worth of
     # releases anyway, so we just set timestamp to now.
-    return [PredbRelease(item.find("title").text,
-                         item.find("guid").text,
-                         datetime.utcnow())
+    return [Pre(item.find("title").text,
+                item.find("guid").text,
+                datetime.utcnow())
             for item in soup]
diff --git a/tests/test_parse_dirname.py b/tests/test_parse_dirname.py
index 3c4a801..1935b45 100644
--- a/tests/test_parse_dirname.py
+++ b/tests/test_parse_dirname.py
@@ -1,12 +1,15 @@
 import unittest
+from datetime import datetime, timedelta
 
 from dailyreleases import parsing
-from dailyreleases.parsing import ReleaseType, Platform
+from dailyreleases.parsing import ReleaseType, Platform, ParseError
+from dailyreleases.predbs import Pre
 
 
 class ParseDirnameTestCase(unittest.TestCase):
     def test_single_word_release(self):
-        p = parsing.parse_dirname("Aztez-DARKSiDERS", "nfo_link")
+        pre = Pre("Aztez-DARKSiDERS", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
 
         self.assertEqual("Aztez-DARKSiDERS", p.dirname)
         self.assertEqual("Aztez", p.rls_name)
@@ -18,111 +21,142 @@ class ParseDirnameTestCase(unittest.TestCase):
         self.assertEqual([], p.tags)
         self.assertEqual([], p.highlights)
 
+    def test_error_on_blacklisted_word(self):
+        pre = Pre("Anthemion.Software.DialogBlocks.v5.15.LINUX.Incl.Keygen-AMPED", "nfo_link", datetime.now())
+        with self.assertRaisesRegex(ParseError, "Contains blacklisted word"):
+            parsing.parse_pre(pre)
+
+    def test_error_on_old(self):
+        pre = Pre("Aztez-DARKSiDERS", "nfo_link", datetime.now() - timedelta(hours=50))
+        with self.assertRaisesRegex(ParseError, "Older than 48 hours"):
+            parsing.parse_pre(pre)
+
+    def test_error_on_software(self):
+        pre = Pre("Tecplot.RS.2017.R1.v1.2.85254.X64-AMPED", "nfo_link", datetime.now())
+        with self.assertRaisesRegex(ParseError, "No store link: probably software"):
+            parsing.parse_pre(pre)
+
     def test_nuked_release(self):
         # TODO: Actual nuke handling?
-        p = parsing.parse_dirname("Battlefield.1-CPY", "nfo_link")
+        pre = Pre("Battlefield.1-CPY", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("Battlefield.1-CPY", p.dirname)
 
     def test_update(self):
-        p = parsing.parse_dirname("Car.Mechanic.Simulator.2018.Plymouth.Update.v1.5.1.Hotfix-PLAZA", "nfo_link")
+        pre = Pre("Car.Mechanic.Simulator.2018.Plymouth.Update.v1.5.1.Hotfix-PLAZA", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(ReleaseType.UPDATE, p.type)
         self.assertIn("store.steampowered.com/app/754920", p.store_links["Steam"])
 
     def test_proper_highlight(self):
-        p = parsing.parse_dirname("Death.Coming.PROPER-SiMPLEX", "nfo_link")
+        pre = Pre("Death.Coming.PROPER-SiMPLEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(["PROPER"], p.highlights)
         self.assertIn("store.steampowered.com/app/705120", p.store_links["Steam"])
 
     def test_macos_release(self):
-        p = parsing.parse_dirname("The_Fall_Part_2_Unbound_MacOS-Razor1911", "nfo_link")
+        pre = Pre("The_Fall_Part_2_Unbound_MacOS-Razor1911", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(Platform.OSX, p.platform)
         self.assertEqual(ReleaseType.GAME, p.type)
         self.assertIn("store.steampowered.com/app/510490", p.store_links["Steam"])
         self.assertIn("gog.com/game/the_fall_part_2_unbound", p.store_links["GOG"])
 
     def test_macosx_update(self):
-        p = parsing.parse_dirname("Man_O_War_Corsair_Warhammer_Naval_Battles_v1.3.2_MacOSX-Razor1911", "nfo_link")
+        pre = Pre("Man_O_War_Corsair_Warhammer_Naval_Battles_v1.3.2_MacOSX-Razor1911", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(Platform.OSX, p.platform)
         self.assertEqual(ReleaseType.UPDATE, p.type)
         self.assertIn("store.steampowered.com/app/344240", p.store_links["Steam"])
         self.assertIn("gog.com/game/man_o_war_corsair", p.store_links["GOG"])
 
     def test_linux_release(self):
-        p = parsing.parse_dirname("Sphinx_And_The_Cursed_Mummy_Linux-Razor1911", "nfo_link")
+        pre = Pre("Sphinx_And_The_Cursed_Mummy_Linux-Razor1911", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(Platform.LINUX, p.platform)
         self.assertEqual(ReleaseType.GAME, p.type)
         self.assertIn("store.steampowered.com/app/606710", p.store_links["Steam"])
         self.assertIn("gog.com/game/sphinx_and_the_cursed_mummy", p.store_links["GOG"])
 
     def test_dlc_explicit(self):
-        p = parsing.parse_dirname("Fallout.4.Far.Harbor.DLC-CODEX", "nfo_link")
+        pre = Pre("Fallout.4.Far.Harbor.DLC-CODEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("store.steampowered.com/app/435881", p.store_links["Steam"])
         self.assertEqual(ReleaseType.DLC, p.type)
 
     def test_dlc_implicit(self):
-        p = parsing.parse_dirname("Euro.Truck.Simulator.2.Italia-CODEX", "nfo_link")
+        pre = Pre("Euro.Truck.Simulator.2.Italia-CODEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(ReleaseType.DLC, p.type)
         self.assertIn("store.steampowered.com/app/558244", p.store_links["Steam"])
 
     def test_incl_dlc_update(self):
-        p = parsing.parse_dirname("Wolfenstein.II.The.New.Colossus.Update.5.incl.DLC-CODEX", "nfo_link")
+        pre = Pre("Wolfenstein.II.The.New.Colossus.Update.5.incl.DLC-CODEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(ReleaseType.UPDATE, p.type)
         self.assertIn("store.steampowered.com/app/612880", p.store_links["Steam"])
 
     def test_incl_dlc_release(self):
-        p = parsing.parse_dirname("Mutiny.Incl.DLC-DARKSiDERS", "nfo_link")
+        pre = Pre("Mutiny.Incl.DLC-DARKSiDERS", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(ReleaseType.GAME, p.type)
 
     def test_score_steam(self):
-        p1 = parsing.parse_dirname("BioShock_Infinite-FLT", "nfo_link")
+        pre = Pre("BioShock_Infinite-FLT", "nfo_link", datetime.now())
+        p1 = parsing.parse_pre(pre)
         self.assertIn("store.steampowered.com/app/8870", p1.store_links["Steam"])
 
-        p2 = parsing.parse_dirname("Duke.Nukem.Forever.Complete-PLAZA", "nfo_link")
+        pre = Pre("Duke.Nukem.Forever.Complete-PLAZA", "nfo_link", datetime.now())
+        p2 = parsing.parse_pre(pre)
         self.assertIn("store.steampowered.com/app/57900", p2.store_links["Steam"])
 
         self.assertGreater(p1.score, p2.score)
 
     def test_non_steam(self):
-        p = parsing.parse_dirname("Battlefield.1.REPACK-CPY", "nfo_link")
+        pre = Pre("Battlefield.1.REPACK-CPY", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("www.origin.com/usa/en-us/store/battlefield/battlefield-1", p.store_links["Origin"])
         self.assertEqual(-1, p.score)
         self.assertEqual(-1, p.num_reviews)
 
     def test_gog_exclusive(self):
         # TODO: Actually use GOG API (gog.update_info)
-        p = parsing.parse_dirname("Dungeons.and.Dragons.Dragonshard.v2.0.0.10.Multilingual-DELiGHT", "nfo_link")
+        pre = Pre("Dungeons.and.Dragons.Dragonshard.v2.0.0.10.Multilingual-DELiGHT", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("gog.com/game/dungeons_dragons_dragonshard", p.store_links["GOG"])
         self.assertEqual(-1, p.score)
 
     def test_gog_exclusive2(self):
-        p = parsing.parse_dirname("Diablo.GOG.Classic-KaliMaaShaktiDe", "nfo_link")
+        pre = Pre("Diablo.GOG.Classic-KaliMaaShaktiDe", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("gog.com/game/diablo", p.store_links["GOG"])
 
     def test_score_non_steam(self):
-        p = parsing.parse_dirname("Ode.RIP.MULTI12-SiMPLEX", "nfo_link")
+        pre = Pre("Ode.RIP.MULTI12-SiMPLEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(-1, p.score)
 
     def test_tags(self):
-        p = parsing.parse_dirname("The.Curious.Expedition.v1.3.7.1.MULTI.7.RIP-Unleashed", "nfo_link")
+        pre = Pre("The.Curious.Expedition.v1.3.7.1.MULTI.7.RIP-Unleashed", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("gog.com/game/curious_expedition_the", p.store_links["GOG"])
         self.assertEqual(["MULTI.7", "RIP"], p.tags)
 
-    def test_skip_software(self):
-        p = parsing.parse_dirname("Tecplot.RS.2017.R1.v1.2.85254.X64-AMPED", "nfo_link")
-        self.assertIsNone(p)
-
     def test_steam_package(self):
-        p = parsing.parse_dirname("Farming.Simulator.17.Platinum.Edition.Update.v1.5.3-BAT", "nfo_link")
+        pre = Pre("Farming.Simulator.17.Platinum.Edition.Update.v1.5.3-BAT", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("Farming Simulator 17 - Platinum Edition", p.game_name)
         self.assertEqual(ReleaseType.UPDATE, p.type)
         self.assertIn("store.steampowered.com/sub/202103", p.store_links["Steam"])
 
     def test_steam_package_with_dlc_first(self):
-        p = parsing.parse_dirname("The.Witcher.3.Wild.Hunt.Game.of.The.Year.Edition-RELOADED", "nfo_link")
+        pre = Pre("The.Witcher.3.Wild.Hunt.Game.of.The.Year.Edition-RELOADED", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("The Witcher 3: Wild Hunt - Game of the Year Edition", p.game_name)
         self.assertEqual(ReleaseType.GAME, p.type)
         self.assertIn("store.steampowered.com/sub/124923", p.store_links["Steam"])
 
     def test_steam_bundle(self):
-        p = parsing.parse_dirname("Valve.Complete.Pack-FAKE", "nfo_link")
+        pre = Pre("Valve.Complete.Pack-FAKE", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("Valve.Complete.Pack-FAKE", p.dirname)
         self.assertEqual("Valve Complete Pack", p.game_name)
         self.assertEqual("Windows", p.platform)
@@ -131,29 +165,35 @@ class ParseDirnameTestCase(unittest.TestCase):
 
     def test_steam_denuvo(self):
         # "denuvo" occurs in the Steam EULA
-        p = parsing.parse_dirname("Deus.Ex.Mankind.Divided-CPY", "nfo_link")
+        pre = Pre("Deus.Ex.Mankind.Divided-CPY", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(["DENUVO"], p.highlights)
 
         # "denuvo" occurs in the Steam DRM notice
-        p = parsing.parse_dirname("Yakuza.0-FAKE", "nfo_link")
+        pre = Pre("Yakuza.0-FAKE", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(["DENUVO"], p.highlights)
 
     def test_episode_release(self):
-        p = parsing.parse_dirname("Life.is.Strange.Before.the.Storm.Episode.3-CODEX", "nfo_link")
+        pre = Pre("Life.is.Strange.Before.the.Storm.Episode.3-CODEX", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("Life is Strange: Before the Storm Episode 3", p.game_name)
         self.assertEqual(ReleaseType.DLC, p.type)
         self.assertIn("store.steampowered.com/app/704740", p.store_links["Steam"])
 
     def test_season_and_episode_release(self):
-        p = parsing.parse_dirname("Minecraft.Story.Mode.Season.Two.Episode.5.MacOSX-RELOADED", "nfo_link")
+        pre = Pre("Minecraft.Story.Mode.Season.Two.Episode.5.MacOSX-RELOADED", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual("Minecraft Story Mode Season Two Episode 5", p.game_name)
 
     def test_build_is_update(self):
-        p = parsing.parse_dirname("DUSK.Episode.1.Build.2.6-SKIDROW", "nfo_link")
+        pre = Pre("DUSK.Episode.1.Build.2.6-SKIDROW", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertEqual(ReleaseType.UPDATE, p.type)
 
     def test_prefer_steam_to_microsoft_store(self):
-        p = parsing.parse_dirname("Forgiveness-PLAZA", "nfo_link")
+        pre = Pre("Forgiveness-PLAZA", "nfo_link", datetime.now())
+        p = parsing.parse_pre(pre)
         self.assertIn("store.steampowered.com/app/971120", p.store_links["Steam"])
 
diff --git a/tests/test_parse_releases.py b/tests/test_parse_releases.py
deleted file mode 100644
index 0d33212..0000000
--- a/tests/test_parse_releases.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import unittest
-from datetime import datetime, timedelta
-
-from dailyreleases import parsing
-from dailyreleases.parsing import Platform, ReleaseType
-from dailyreleases.predbs import PredbRelease
-
-
-class ParseReleasesTestCase(unittest.TestCase):
-
-    @classmethod
-    def setUpClass(cls) -> None:
-        cls.empty_releases = {platform: {release_type: [] for release_type in ReleaseType}
-                              for platform in Platform}
-
-    def test_dont_skip(self):
-        already_posted = set()
-        releases = [
-            PredbRelease("Aztez-DARKSiDERS", "nfo_link", datetime.now())
-        ]
-        parsed_releases = parsing.parse_releases(releases, already_posted)
-        self.assertEqual(parsed_releases[Platform.WINDOWS][ReleaseType.GAME][0].game_name, "Aztez")
-
-    def test_skip_already_posted(self):
-        already_posted = {"Aztez-DARKSiDERS"}
-        releases = [
-            PredbRelease("Aztez-DARKSiDERS", "nfo_link", datetime.now())
-        ]
-        parsed_releases = parsing.parse_releases(releases, already_posted)
-        self.assertDictEqual(parsed_releases, self.empty_releases)
-
-    def test_skip_blacklisted_word(self):
-        already_posted = set()
-        releases = [
-            PredbRelease("Anthemion.Software.DialogBlocks.v5.15.LINUX.Incl.Keygen-AMPED", "nfo_link", datetime.now())
-        ]
-        parsed_releases = parsing.parse_releases(releases, already_posted)
-        self.assertDictEqual(parsed_releases, self.empty_releases)
-
-    def test_skip_older_than_48hr(self):
-        already_posted = set()
-        releases = [
-            PredbRelease("Aztez-DARKSiDERS", "nfo_link", datetime.now() - timedelta(hours=50))
-        ]
-        parsed_releases = parsing.parse_releases(releases, already_posted)
-        self.assertDictEqual(parsed_releases, self.empty_releases)
-
-    def test_skip_no_data_for_software(self):
-        already_posted = set()
-        releases = [
-            PredbRelease("Tecplot.RS.2017.R1.v1.2.85254.X64-AMPED", "nfo_link", datetime.now())
-        ]
-        parsed_releases = parsing.parse_releases(releases, already_posted)
-        self.assertDictEqual(parsed_releases, self.empty_releases)
-
-
-if __name__ == '__main__':
-    unittest.main()
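
Usage sketch (illustrative only, not part of the patch): the snippet below exercises the renamed API end to end, using only names introduced above (Pre, ParseError, parse_pre, parse_pres). The dirname and "nfo_link" values are placeholders borrowed from the tests; the recent pre will trigger real store lookups when parsed.

from datetime import datetime, timedelta

from dailyreleases import parsing
from dailyreleases.parsing import ParseError
from dailyreleases.predbs import Pre

# Placeholder pres: one recent, and one old enough that parse_pre() should reject it.
pres = [
    Pre("Aztez-DARKSiDERS", "nfo_link", datetime.now()),
    Pre("Aztez-DARKSiDERS", "nfo_link", datetime.now() - timedelta(hours=50)),
]

# parse_pre() raises ParseError for pres it rejects (blacklisted word, older than
# 48 hours, or no store link found).
try:
    parsing.parse_pre(pres[1])
except ParseError as e:
    print(e)  # Older than 48 hours

# parse_pres() catches ParseError per pre, logs the skip, and returns the
# {Platform: {ReleaseType: [Release, ...]}} dict that generate_post() consumes.
releases = parsing.parse_pres(pres)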