Auto stash before merge of "bugfix/token_mismatch" and "origin/bugfix/token_mismatch"

2021-10-24 18:28:18 +01:00
parent 5fb70300fb
commit 6b2b0a3b5e
4 changed files with 36 additions and 33 deletions


@@ -3,7 +3,7 @@ from __future__ import annotations
from abc import ABCMeta, abstractmethod
import pathlib
import re
-from typing import Tuple
+from typing import Tuple, Literal
import jinja2
import pendulum
@@ -69,8 +69,8 @@ class ScopedPageCreator(PageCreator):
self.page_path = ""
self.filename = ""
self.extension = ""
-self.path_error: MismatchedTokenError | None = None
-self.template_error: MismatchedTokenError | None = None
+self.path_date_tokens: Tuple[int, int] = (0, 0)
+self.template_date_tokens: Tuple[int, int] = (0, 0)
def create_page(
@@ -94,7 +94,9 @@ class ScopedPageCreator(PageCreator):
)
# substitute tokens in the filepath
try:
-path = pathlib.Path(self._substitute_tokens(str(path), user_input, name))
+path = pathlib.Path(
+self._substitute_tokens(str(path), user_input, name, "path")
+)
except MismatchedTokenError as mismatched_path_error:
self.path_error = mismatched_path_error
# get the template file
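The path substitution is wrapped so that a token mismatch no longer has to abort on the spot: the exception is stored on self.path_error and create_page carries on to the template step, which gets the same treatment in the next hunk. A stripped-down sketch of that record-and-continue pattern, using stand-in names rather than the project's actual classes:

from __future__ import annotations

# Illustrative record-and-continue pattern, mirroring the try/except above.
# "TokenMismatch" and "Creator" are stand-ins, not names from this repository.
class TokenMismatch(Exception):
    pass

class Creator:
    def __init__(self) -> None:
        self.path_error: TokenMismatch | None = None

    def build_path(self, raw: str) -> str:
        try:
            return self._substitute(raw)
        except TokenMismatch as err:
            self.path_error = err  # remember the problem, keep going
            return raw

    def _substitute(self, raw: str) -> str:
        raise TokenMismatch(2, 1)  # pretend the expected/given token counts disagree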
@@ -103,7 +105,7 @@ class ScopedPageCreator(PageCreator):
template_contents = self._load_template(self.base_path, template_filename)
try:
template_contents = self._substitute_tokens(
-template_contents, user_input, name
+template_contents, user_input, name, "template"
)
except MismatchedTokenError as mismatched_template_error:
self.template_error = mismatched_template_error
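Each call site now passes a fourth argument, "path" or "template", and the matching token_type: Literal["path", "template"] parameter is added to _substitute_tokens further down. A minimal, hypothetical sketch of how such a Literal-typed flag behaves; the function name and example values here are made up, only the annotation mirrors the commit:

from __future__ import annotations

from typing import Literal, Tuple

def substitute_demo(
    tokenified_string: str,
    user_input: Tuple[str, ...],
    name: str,
    token_type: Literal["path", "template"],
) -> str:
    # the real method also swaps {inputN}, name and date tokens;
    # here we only show where the flag could steer the behaviour
    if token_type == "path":
        return tokenified_string  # path-specific handling would go here
    return tokenified_string      # template-specific handling would go here

substitute_demo("journal/{input1}.md", ("2021-10-24",), "daily", "path")     # fine
# substitute_demo("journal/{input1}.md", (), "daily", "header")  # rejected by mypy/pyright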
@@ -117,32 +119,32 @@ class ScopedPageCreator(PageCreator):
print("template_token_count > path_token_count")
return ScopedPage(path, template_contents)
-@staticmethod
-def __mismatched_token_error(
-path_error: MismatchedTokenError | None = None,
-template_error: MismatchedTokenError | None = None,
-) -> None:
-if isinstance(path_error, MismatchedTokenError):
-path_token_count = path_error.args[0]
-# logger.critical(
-# "Your config specifies %s input tokens, you gave %s " "- exiting",
-# path_error.args[0],
-# path_error.args[1],
-# )
-# raise SystemExit(1)
-if isinstance(template_error, MismatchedTokenError):
-template_token_count = template_error.args[0]
-# logger.critical(
-# "Your template specifies %s input tokens, you gave %s " "- exiting",
-# template_error.args[0],
-# template_error.args[1],
-# )
-# raise SystemExit(1)
-if path_token_count > template_token_count:
-print("path_token_count > template_token_count")
-elif template_token_count > path_token_count:
-print("template_token_count > path_token_count")
-raise SystemExit(1)
+# @staticmethod
+# def __mismatched_token_error(
+# path_error: MismatchedTokenError | None = None,
+# template_error: MismatchedTokenError | None = None,
+# ) -> None:
+# if isinstance(path_error, MismatchedTokenError):
+# path_token_count = path_error.args[0]
+# # logger.critical(
+# # "Your config specifies %s input tokens, you gave %s " "- exiting",
+# # path_error.args[0],
+# # path_error.args[1],
+# # )
+# # raise SystemExit(1)
+# if isinstance(template_error, MismatchedTokenError):
+# template_token_count = template_error.args[0]
+# # logger.critical(
+# # "Your template specifies %s input tokens, you gave %s " "- exiting",
+# # template_error.args[0],
+# # template_error.args[1],
+# # )
+# # raise SystemExit(1)
+# if path_token_count > template_token_count:
+# print("path_token_count > template_token_count")
+# elif template_token_count > path_token_count:
+# print("template_token_count > path_token_count")
+# raise SystemExit(1)
# TODO: change the annotation to include the error
def _substitute_tokens(
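This hunk swaps the live __mismatched_token_error helper for a commented-out copy, leaving only the TODO above as a pointer to future error reporting. Judging by the commented logger calls, MismatchedTokenError appears to carry the expected and supplied token counts in args[0] and args[1]; a rough, assumed illustration of that convention (the exception defined here is a stand-in, not the project's real class):

# Assumed convention, inferred from the "args[0]" / "args[1]" usages above.
class MismatchedTokenError(Exception):
    def __init__(self, expected: int, given: int) -> None:
        super().__init__(expected, given)

try:
    raise MismatchedTokenError(2, 1)
except MismatchedTokenError as err:
    expected, given = err.args
    print(f"Your config specifies {expected} input tokens, you gave {given} - exiting")
    # raise SystemExit(1)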
@@ -150,6 +152,7 @@ class ScopedPageCreator(PageCreator):
tokenified_string: str,
user_input: Tuple[str, ...] | Tuple[()],
name: str,
+token_type: Literal["path", "template"],
) -> str:
# for a tokened string, substitute input, name and date tokens
try:
@@ -170,9 +173,9 @@ class ScopedPageCreator(PageCreator):
tokenified_string = tokenified_string.replace(extracted_input, name)
return tokenified_string
-@staticmethod
+# @staticmethod
def __substitute_input_tokens(
-tokenified_string: str, user_input: Tuple[str, ...] | Tuple[()]
+self, tokenified_string: str, user_input: Tuple[str, ...] | Tuple[()]
) -> str:
# find {inputN} tokens in string
input_extraction = re.findall(r"(\{input\d*\})", tokenified_string)
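For reference, the extraction pattern on the last line matches both numbered and bare input tokens; a quick standalone check with a made-up string:

import re

tokenified_string = "{input1}/{input2}/notes-{input}.md"
print(re.findall(r"(\{input\d*\})", tokenified_string))
# prints: ['{input1}', '{input2}', '{input}']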