Fixing pipeline run for the downloader, reworking input handling, moving prints & logging management

This commit is contained in:
Zoe Roux
2020-11-15 02:36:37 +01:00
parent c7b19920c2
commit f4441c00b4
5 changed files with 31 additions and 19 deletions

View File

@@ -1,5 +1,6 @@
import json
import logging
import shutil
import time
from typing import Callable, Union, List
@@ -23,7 +24,7 @@ class Autopipe:
self.step = 0
while True:
self.process_coordinator()
sleep_time = self.coordinator.get_input().loop_cooldown
sleep_time = self.coordinator.input.loop_cooldown
if sleep_time <= 0 or not daemon:
logging.info("Input generator finished. Closing now.")
break
@@ -46,12 +47,17 @@ class Autopipe:
return None
def process_coordinator(self):
for data in self.coordinator.get_input():
logging.info(f"Starting input manager: {self.coordinator.input.name}")
for data in self.coordinator.input:
self.step = 0
pipe = None
while pipe is None or not isinstance(pipe, Output):
pipe = self._process_input(self.coordinator, data)
data = pipe if isinstance(pipe, APData) else pipe.pipe(data)
logging.info("Pipeline finished")
if data is not None:
logging.debug(f"Output data (discarded): {json.dumps(to_dict(data), indent=4)}")
logging.separator()
def _process_input(self, coordinator: Coordinator, data: APData) -> Union[APData, Pipe]:
logging.debug(f"Data: {json.dumps(to_dict(data), indent=4)}")
@@ -61,11 +67,12 @@ class Autopipe:
logging.info(f"Using interceptor: {interceptor.__name__}")
return interceptor(data)
if len(self.pipeline) < self.step:
if len(self.pipeline) > self.step:
pipe = self.pipeline[self.step]
self.step += 1
if isinstance(self.pipeline[self.step], Pipe):
return self.pipeline[self.step]
return self.pipeline[self.step](data)
if isinstance(pipe, Pipe):
return pipe
return pipe(data)
logging.info(f"Using default handler.")
return coordinator.default_handler(data)

View File

@@ -13,10 +13,11 @@ class DownloadExample(Coordinator):
def name(cls):
return "DownloadExample"
@property
def input(self):
return RssInput(f"http://www.obsrv.com/General/ImageFeed.aspx?{self.query}",
lambda x: FileData(x.title, x["media_content"][0]["url"], False))
@property
def pipeline(self) -> List[Union[Pipe, Callable[[APData], Union[APData, Pipe]]]]:
return [Output(DownloaderPipe())]
def get_input(self):
return RssInput(f"http://www.obsrv.com/General/ImageFeed.aspx?{self.query}",
lambda x: FileData(x.title, x["media_content"][0]["url"], True))

View File

@@ -8,7 +8,6 @@ import feedparser
class RssInput(Input):
def __init__(self, url: str, mapper: Callable[[List], APData], start_from_now: bool = True):
super().__init__()
self.url = url
self.mapper = mapper
self.last_etag = None

View File

@@ -1,5 +1,7 @@
import logging
import shutil
from enum import Enum
from logging import Logger
class LogLevel(Enum):
@@ -22,12 +24,17 @@ class LogLevel(Enum):
return value
def _log(self, msg, *args, **kwargs):
class APLogger(Logger):
def trace(self, msg, *args, **kwargs):
if self.isEnabledFor(LogLevel.TRACE.value):
self._log(LogLevel.TRACE.value, msg, args, **kwargs)
def separator(self, level: LogLevel = LogLevel.INFO):
self.log(level.value, "=" * (shutil.get_terminal_size().columns - (len(level.name) + 2)))
logging.setLoggerClass(APLogger)
logging.addLevelName(LogLevel.TRACE.value, LogLevel.TRACE.name)
setattr(logging, LogLevel.TRACE.name, LogLevel.TRACE.value)
setattr(logging.getLoggerClass(), "trace", _log)
setattr(logging, "trace", lambda msg, *args, **kwargs: logging.log(LogLevel.TRACE.value, msg, *args, **kwargs))
setattr(logging, "separator", lambda level=LogLevel.INFO: logging.getLogger(__name__).separator(level))

View File

@@ -26,9 +26,6 @@ class Pipe(ABC):
class Input(ABC):
def __init__(self):
logging.info(f"Starting input manager: {self.name}")
@property
@abstractmethod
def name(self):
@@ -86,8 +83,9 @@ class Coordinator(ABC):
def name(cls):
raise NotImplementedError
@property
@abstractmethod
def get_input(self) -> Input:
def input(self) -> Input:
raise NotImplementedError
@property