Skip to content

Commit

Permalink
Merge pull request #2266 from camp00000/misc-fixes
Browse files Browse the repository at this point in the history
QoL: Miscellaneous Fixes
  • Loading branch information
dipu-bd committed Feb 12, 2024
2 parents ef08d10 + d333e20 commit 9d2b3d5
Show file tree
Hide file tree
Showing 7 changed files with 15 additions and 15 deletions.
2 changes: 1 addition & 1 deletion lncrawl/bots/telegram/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def start(self):

async def error_handler(self, update: Update, context: ContextTypes.DEFAULT_TYPE):
"""Log Errors caused by Updates."""
logger.warn("Error: %s\nCaused by: %s", context.error, update)
logger.warning("Error: %s\nCaused by: %s", context.error, update)

async def show_help(self, update: Update, context: ContextTypes.DEFAULT_TYPE):
await update.message.reply_text("Send /start to create new session.\n")
Expand Down
2 changes: 1 addition & 1 deletion lncrawl/core/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ def compress_books(self, archive_singles=False):
format="zip",
root_dir=root_dir,
)
logger.info("Compressed:", os.path.basename(archived_file))
logger.info("Compressed: %s", os.path.basename(archived_file))

if archived_file:
self.archived_outputs.append(archived_file)
10 changes: 5 additions & 5 deletions lncrawl/core/sources.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ def __load_latest_index():
except Exception as e:
if "crawlers" not in __current_index:
raise LNException("Could not fetch sources index")
logger.warn("Could not download latest index. Error: %s", e)
logger.warning("Could not download latest index. Error: %s", e)
__latest_index = __current_index


Expand Down Expand Up @@ -223,7 +223,7 @@ def __download_sources():
try:
__save_source_data(sid, data)
except Exception as e:
logger.warn("Failed to save source file. Error: %s", e)
logger.warning("Failed to save source file. Error: %s", e)


# --------------------------------------------------------------------------- #
Expand All @@ -248,7 +248,7 @@ def __import_crawlers(file_path: Path) -> List[Type[Crawler]]:
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
except Exception as e:
logger.warn("Module load failed: %s | %s", file_path, e)
logger.warning("Module load failed: %s | %s", file_path, e)
return []

language_code = ""
Expand Down Expand Up @@ -296,7 +296,7 @@ def __add_crawlers_from_path(path: Path):
return

if not path.exists():
logger.warn("Path does not exists: %s", path)
logger.warning("Path does not exists: %s", path)
return

if path.is_dir():
Expand All @@ -312,7 +312,7 @@ def __add_crawlers_from_path(path: Path):
for url in getattr(crawler, "base_url"):
crawler_list[url] = crawler
except Exception as e:
logger.warn("Could not load crawlers from %s. Error: %s", path, e)
logger.warning("Could not load crawlers from %s. Error: %s", path, e)


# --------------------------------------------------------------------------- #
Expand Down
8 changes: 4 additions & 4 deletions lncrawl/templates/browser/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,13 @@ def read_novel_info_in_scraper(self) -> None:
try:
self.novel_cover = self.parse_cover(soup)
except Exception as e:
logger.warn("Failed to parse novel cover | %s", e)
logger.warning("Failed to parse novel cover | %s", e)

try:
authors = set(list(self.parse_authors(soup)))
self.novel_author = ", ".join(authors)
except Exception as e:
logger.warn("Failed to parse novel authors | %s", e)
logger.warning("Failed to parse novel authors | %s", e)

for item in self.parse_chapter_list(soup):
if isinstance(item, Chapter):
Expand All @@ -51,13 +51,13 @@ def read_novel_info_in_browser(self) -> None:
try:
self.novel_cover = self.parse_cover_in_browser()
except Exception as e:
logger.warn("Failed to parse novel cover | %s", e)
logger.warning("Failed to parse novel cover | %s", e)

try:
authors = set(list(self.parse_authors_in_browser()))
self.novel_author = ", ".join(authors)
except Exception as e:
logger.warn("Failed to parse novel authors | %s", e)
logger.warning("Failed to parse novel authors | %s", e)

for item in self.parse_chapter_list_in_browser():
if isinstance(item, Chapter):
Expand Down
4 changes: 2 additions & 2 deletions lncrawl/templates/soup/general.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ def read_novel_info(self) -> None:
try:
self.novel_cover = self.parse_cover(soup)
except Exception as e:
logger.warn("Failed to parse novel cover | %s", e)
logger.warning("Failed to parse novel cover | %s", e)

try:
authors = set(list(self.parse_authors(soup)))
self.novel_author = ", ".join(authors)
except Exception as e:
logger.warn("Failed to parse novel authors | %s", e)
logger.warning("Failed to parse novel authors | %s", e)

for item in self.parse_chapter_list(soup):
if isinstance(item, Chapter):
Expand Down
2 changes: 1 addition & 1 deletion lncrawl/utils/pbincli.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class PBinCLIException(Exception):


def PBinCLIError(message):
logger.warn("PBinCLI Error: {}".format(message))
logger.warning("PBinCLI Error: {}".format(message))


def path_leaf(path):
Expand Down
2 changes: 1 addition & 1 deletion sources/en/d/dobelyuwai.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ def read_novel_info(self):
# try:
# self.novel_author = soup.select_one('div.entry-content > p:nth-child(2)').text.strip()
# except Exception as e:
# logger.warn('Failed to get novel auth. Error: %s', e)
# logger.warning('Failed to get novel auth. Error: %s', e)
# logger.info('%s', self.novel_author)

# Removes none TOC links from bottom of page.
Expand Down

0 comments on commit 9d2b3d5

Please sign in to comment.