2019-11-20 18:27:30 +00:00
|
|
|
#!/usr/bin/env python3
|
2019-07-24 09:49:01 -04:00
|
|
|
"""
|
|
|
|
|
Generates exercise test suites using an exercise's canonical-data.json
|
|
|
|
|
(found in problem-specifications) and $exercise/.meta/template.j2.
|
|
|
|
|
If either does not exist, generation will not be attempted.
|
|
|
|
|
|
|
|
|
|
Usage:
|
|
|
|
|
generate_tests.py Generates tests for all exercises
|
|
|
|
|
generate_tests.py two-fer Generates tests for two-fer exercise
|
|
|
|
|
generate_tests.py t* Generates tests for all exercises matching t*
|
|
|
|
|
|
|
|
|
|
generate_tests.py --check Checks if test files are out of sync with templates
|
|
|
|
|
generate_tests.py --check two-fer Checks if two-fer test file is out of sync with template
|
|
|
|
|
"""
|
2019-11-20 18:27:30 +00:00
|
|
|
import sys
|
|
|
|
|
|
2021-02-03 10:52:31 -05:00
|
|
|
from githelp import Repo
|
|
|
|
|
|
2019-11-20 18:27:30 +00:00
|
|
|
# Hard minimum interpreter version: fail fast with a clear message rather
# than with confusing syntax/feature errors later in the script.
_py = sys.version_info
if _py.major < 3 or (_py.major == 3 and _py.minor < 7):
    print("Python version must be at least 3.7")
    sys.exit(1)
|
|
|
|
|
|
2019-07-24 09:49:01 -04:00
|
|
|
import argparse
|
2020-10-15 12:43:24 -04:00
|
|
|
from datetime import datetime
|
2023-07-14 15:52:15 -07:00
|
|
|
from datetime import timezone
|
2019-11-06 13:39:53 -05:00
|
|
|
import difflib
|
2019-07-24 10:54:59 -04:00
|
|
|
import filecmp
|
2019-10-29 16:44:59 -04:00
|
|
|
import importlib.util
|
2019-07-24 09:49:01 -04:00
|
|
|
import json
|
|
|
|
|
import logging
|
2022-06-08 11:26:30 -07:00
|
|
|
from pathlib import Path, PurePath, PureWindowsPath
|
2019-07-24 09:49:01 -04:00
|
|
|
import re
|
2019-07-24 10:54:59 -04:00
|
|
|
import shutil
|
2019-07-24 09:49:01 -04:00
|
|
|
from itertools import repeat
|
|
|
|
|
from string import punctuation, whitespace
|
2019-10-29 11:23:21 -03:00
|
|
|
from subprocess import check_call
|
2019-07-24 12:35:16 -04:00
|
|
|
from tempfile import NamedTemporaryFile
|
2019-11-13 15:00:17 +00:00
|
|
|
from textwrap import wrap
|
2020-10-15 12:43:24 -04:00
|
|
|
from typing import Any, Dict, List, NoReturn, Union
|
2019-07-24 09:49:01 -04:00
|
|
|
|
2023-04-07 04:20:05 -07:00
|
|
|
# Tomli was subsumed into Python 3.11.x, but was renamed to to tomllib.
|
|
|
|
|
# This avoids ci failures for Python < 3.11.2.
|
|
|
|
|
try:
|
|
|
|
|
import tomllib
|
|
|
|
|
except ModuleNotFoundError:
|
|
|
|
|
import tomli as tomllib
|
|
|
|
|
|
2019-08-07 09:45:02 -04:00
|
|
|
from jinja2 import Environment, FileSystemLoader, TemplateNotFound, UndefinedError
|
2019-11-20 19:57:18 +00:00
|
|
|
from dateutil.parser import parse
|
2019-07-24 09:49:01 -04:00
|
|
|
|
2021-02-03 10:52:31 -05:00
|
|
|
from githelp import clone_if_missing, Repo
|
2021-03-02 12:24:57 -05:00
|
|
|
from data import TestsTOML
|
2021-02-03 10:52:31 -05:00
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
# Generator version reported by the --version command-line flag.
VERSION = "0.3.0"


# Alias for the shape of parsed canonical-data.json content.
TypeJSON = Dict[str, Any]

# Upstream repository holding each exercise's canonical-data.json.
PROBLEM_SPEC_REPO = "https://github.com/exercism/problem-specifications.git"
# Local clone location used when --spec-path is not given.
DEFAULT_SPEC_LOCATION = Path(".problem-specifications")
# Splits identifiers on -, _, whitespace, or before an uppercase letter.
# NOTE(review): appears unused within this file — possibly kept for
# templates/plugins; confirm before removing.
RGX_WORDS = re.compile(r"[-_\s]|(?=[A-Z])")


logging.basicConfig()
logger = logging.getLogger("generator")
# Default to warnings only; -v/--verbose raises this to DEBUG in __main__.
logger.setLevel(logging.WARN)
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def replace_all(string: str, chars: Union[str, List[str]], rep: str) -> str:
    """Substitute every character in `chars` with `rep`, collapse runs of
    `rep` into a single occurrence, and strip `rep` from both ends.
    """
    # One-pass substitution via a translation table (each char -> rep).
    table = {ord(ch): rep for ch in chars}
    substituted = string.translate(table)
    # Collapse consecutive repetitions of the replacement string.
    collapsed = re.sub(re.escape(rep) + "+", rep, substituted)
    return collapsed.strip(rep)
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def to_snake(string: str, wordchars_only: bool = False) -> str:
    """Convert pretty much anything to snake_case.

    By default whitespace and punctuation are converted to underscores
    as well; pass wordchars_only=True to preserve them as-is.
    """
    # Insert underscores at CamelCase boundaries, then lowercase.
    with_breaks = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string)
    with_breaks = re.sub("([a-z0-9])([A-Z])", r"\1_\2", with_breaks)
    lowered = with_breaks.lower()
    if wordchars_only:
        return lowered
    # Normalize remaining separators/punctuation into single underscores.
    return replace_all(lowered, whitespace + punctuation, "_")
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def camel_case(string: str) -> str:
    """Convert pretty much anything to CamelCase."""
    # Normalize to snake_case first, then capitalize each word.
    words = to_snake(string).split("_")
    return "".join(word.title() for word in words)
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def wrap_overlong(string: str, width: int = 70) -> List[str]:
    """Break an overly long string literal into repr'd, backslash-continued
    lines no wider than `width` (for embedding in generated source).
    """
    return [f"{piece!r} \\" for piece in wrap(string, width)]
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def parse_datetime(string: str, strip_module: bool = False) -> datetime:
    """Parse a (hopefully ISO 8601) datestamp and return the repr of the
    resulting datetime object for use in a jinja2 template.

    The rendered template must then `import datetime`.  With
    strip_module=True the leading "datetime." is removed, so the template
    must instead `from datetime import datetime`.
    """
    rendered = repr(parse(string))
    if not strip_module:
        return rendered
    # Drop only the first "datetime." prefix (the module qualifier).
    return rendered.replace("datetime.", "", 1)
|
|
|
|
|
|
2020-02-25 15:48:19 +00:00
|
|
|
# Matches any backslash that does NOT begin a valid Python string escape
# sequence.  Used by escape_invalid_escapes() to double such stray
# backslashes before canonical data is rendered into generated source.
# NOTE(review): re.VERBOSE does not strip whitespace or `#` comments inside
# the [...] character class below, so those literal characters also end up
# in the negative lookahead — presumably harmless (it only widens the set
# of characters treated as "valid"); confirm against re docs.
INVALID_ESCAPE_RE = re.compile(
    r"""
    \\(?!                    # a backslash NOT followed by
        newline              # the literal newline
        |[                   # OR precisely one of
            \\               # another backslash
            '                # the single quote
            "                # the double quote
            a                # the ASCII bell
            b                # the ASCII backspace
            f                # the ASCII formfeed
            n                # the ASCII linefeed
            r                # the ASCII carriage return
            t                # the ASCII horizontal tab
            v                # the ASCII vertical tab
        ]|                   # OR
        o(?:[0-8]{1,3})      # an octal value
        |                    # OR
        x(?:[0-9A-Fa-f]{2})  # a hexadecimal value
        |                    # OR
        N                    # a unicode char name composed of
        \{                   # an opening brace
        [A-Z][A-Z\ \-]*[A-Z] # uppercase WORD, WORDs (or WORD-WORDs)
        \}                   # and a closing brace
        |                    # OR
        u(?:[0-9A-Fa-f]{4})  # a 16-bit unicode char
        |                    # OR
        U(?:[0-9A-Fa-f]{8})  # a 32-bit unicode char
    )""", flags=re.VERBOSE)
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def escape_invalid_escapes(string: str) -> str:
    """Escape invalid escape sequences found in canonical data.

    Some canonical data includes backslashes that do not form a valid
    Python escape; each such backslash is doubled so the string can be
    safely embedded in a rendered template.
    """
    return re.sub(INVALID_ESCAPE_RE, r"\\\\", string)
|
|
|
|
|
|
|
|
|
|
# Import-time sanity check: a string exercising every valid escape sequence
# must pass through escape_invalid_escapes() unchanged.
ALL_VALID = r"\newline\\\'\"\a\b\f\n\r\t\v\o123" \
            r"\xFF\N{GREATER-THAN SIGN}\u0394\U00000394"

assert ALL_VALID == escape_invalid_escapes(ALL_VALID)
|
2019-11-20 19:57:18 +00:00
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def get_tested_properties(spec: TypeJSON) -> List[str]:
    """Return the sorted set of "property" values tested by `spec`,
    descending recursively into nested case groups.
    """
    found = set()
    for case in spec["cases"]:
        if "property" in case:
            found.add(case["property"])
        if "cases" in case:
            # A nested group looks like a spec itself; recurse.
            found |= set(get_tested_properties(case))
    return sorted(found)
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def error_case(case: TypeJSON) -> bool:
    """Return True when `case` expects an error: its "expected" value is
    a dict containing an "error" key.
    """
    expected = case.get("expected")
    return isinstance(expected, dict) and "error" in expected
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def has_error_case(cases: List[TypeJSON]) -> bool:
    """Return True if any case in `cases` — including cases in nested
    groups — expects an error.
    """
    return any(
        error_case(case) or has_error_case(case.get("cases", []))
        for case in cases
    )
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def regex_replace(s: str, find: str, repl: str) -> str:
    """jinja2 filter: substitute every match of `find` in `s` with `repl`."""
    pattern = re.compile(find)
    return pattern.sub(repl, s)
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def regex_find(s: str, find: str) -> List[Any]:
    """jinja2 filter: list all non-overlapping matches of `find` in `s`."""
    pattern = re.compile(find)
    return pattern.findall(s)
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def regex_split(s: str, find: str) -> List[str]:
    """jinja2 filter: split `s` on every match of the pattern `find`."""
    pattern = re.compile(find)
    return pattern.split(s)
|
|
|
|
|
|
|
|
|
|
|
2021-03-02 12:24:57 -05:00
|
|
|
def filter_test_cases(cases: List[TypeJSON], opts: TestsTOML) -> List[TypeJSON]:
    """Return a filtered copy of `cases` keeping only those whose UUID is
    marked True in `opts`.

    Nested case groups are filtered recursively and dropped entirely when
    none of their children survive.
    """
    kept = []
    for case in cases:
        if "uuid" in case:
            uuid = case["uuid"]
            opt = opts.cases.get(uuid, None)
            if opt is None or not opt.include:
                logger.debug(f"uuid {uuid} either missing or not marked for include")
            else:
                kept.append(case)
        elif "cases" in case:
            survivors = filter_test_cases(case["cases"], opts)
            if survivors:
                # Shallow-copy the group so the original spec is untouched.
                group = dict(case)
                group["cases"] = survivors
                kept.append(group)
    return kept
|
|
|
|
|
|
|
|
|
|
|
2021-03-02 12:24:57 -05:00
|
|
|
def load_canonical(exercise: str, spec_path: Path, test_opts: TestsTOML) -> TypeJSON:
    """Load an exercise's canonical data as a nested dictionary.

    Cases are filtered by `test_opts`, and a "properties" key is added
    listing every property the remaining cases exercise.
    """
    data_file = spec_path / "exercises" / exercise / "canonical-data.json"
    with data_file.open() as handle:
        spec = json.load(handle)
    spec["cases"] = filter_test_cases(spec["cases"], test_opts)
    spec["properties"] = get_tested_properties(spec)
    return spec
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2021-01-31 16:41:22 -05:00
|
|
|
def load_additional_tests(exercise: Path) -> List[TypeJSON]:
    """Return extra test cases from <exercise>/.meta/additional_tests.json,
    or an empty list when that file does not exist.
    """
    extra_file = exercise / ".meta/additional_tests.json"
    try:
        raw = extra_file.read_text()
    except FileNotFoundError:
        return []
    return json.loads(raw).get("cases", [])
|
|
|
|
|
|
|
|
|
|
|
2020-10-15 12:43:24 -04:00
|
|
|
def format_file(path: Path) -> None:
    """Run the `black` auto-formatter in place on the file at `path`.

    Raises CalledProcessError if black exits nonzero, or
    FileNotFoundError if the black executable is not installed.
    """
    # BUGFIX: return annotation was `NoReturn`, which means "never returns
    # normally" (e.g. always raises/exits); this function returns None.
    check_call(["black", "-q", path])
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2021-02-03 10:52:31 -05:00
|
|
|
def check_template(slug: str, tests_path: Path, tmpfile: Path):
    """Diff the freshly generated test file against the committed one.

    Returns True when the two files are effectively identical, False
    (after logging an error) when they differ or either file is missing.
    The tmp file is removed before returning in every case.

    Note: the first three lines of each test file carry a generation
    timestamp that would make every diff fail, so both files are compared
    from the fourth line onward.  filecmp can still report a mismatch on
    those header lines alone; that shows up here as an *empty* diff,
    which is treated as a pass.
    """
    try:
        check_ok = True
        if not tmpfile.is_file():
            logger.debug(f"{slug}: tmp file {tmpfile} not found")
            check_ok = False
        if not tests_path.is_file():
            logger.debug(f"{slug}: tests file {tests_path} not found")
            check_ok = False
        if check_ok and not filecmp.cmp(tmpfile, tests_path):
            # Skip the 3-line timestamp header; see docstring.
            with tests_path.open() as f:
                current_lines = f.readlines()[3:]
            with tmpfile.open() as f:
                rendered_lines = f.readlines()[3:]

            diff = list(difflib.unified_diff(
                current_lines,
                rendered_lines,
                fromfile=f"[current] {tests_path.name}",
                tofile=f"[generated] {tmpfile.name}",
                lineterm="\n",
            ))
            if not diff:
                # Only the timestamp header differed: treat as a pass.
                check_ok = True
            else:
                logger.debug(f"{slug}: ##### DIFF START #####")
                for line in diff:
                    logger.debug(line.strip())
                logger.debug(f"{slug}: ##### DIFF END #####")
                check_ok = False
        if not check_ok:
            logger.error(
                f"{slug}: check failed; tests must be regenerated with bin/generate_tests.py"
            )
            return False
        logger.debug(f"{slug}: check passed")
    finally:
        # BUGFIX: unconditional unlink() raised FileNotFoundError when the
        # tmp file was already absent (the exact condition detected above);
        # only remove it if it still exists.
        if tmpfile.exists():
            logger.debug(f"{slug}: removing tmp file {tmpfile}")
            tmpfile.unlink()
    return True
|
2020-10-15 12:43:24 -04:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def generate_exercise(env: Environment, spec_path: Path, exercise: Path, check: bool = False):
    """
    Renders test suite for exercise and if check is:
    True: verifies that current tests file matches rendered
    False: saves rendered to tests file

    Returns False on render/format failure, True otherwise (including
    when generation is skipped for lack of template or canonical data).
    """
    slug = exercise.name
    meta_dir = exercise / ".meta"
    plugins_module = None
    plugins_name = "plugins"
    plugins_source = meta_dir / f"{plugins_name}.py"
    try:
        # Load the exercise's optional plugins module so the template can
        # call into it (exposed below under spec["plugins"]).
        if plugins_source.is_file():
            plugins_spec = importlib.util.spec_from_file_location(
                plugins_name, plugins_source
            )
            plugins_module = importlib.util.module_from_spec(plugins_spec)
            sys.modules[plugins_name] = plugins_module
            plugins_spec.loader.exec_module(plugins_module)
        try:
            test_opts = TestsTOML.load(meta_dir / "tests.toml")
        except FileNotFoundError:
            # No tests.toml means nothing to generate; not an error.
            logger.error(f"{slug}: tests.toml not found; skipping.")
            return True

        spec = load_canonical(slug, spec_path, test_opts)
        additional_tests = load_additional_tests(exercise)
        spec["additional_cases"] = additional_tests
        template_path = exercise.relative_to("exercises") / ".meta/template.j2"

        # See https://github.com/pallets/jinja/issues/767 for why this is needed on Windows systems.
        if "\\" in str(template_path):
            template_path = PureWindowsPath(template_path).as_posix()

        template = env.get_template(str(template_path))
        tests_path = exercise / f"{to_snake(slug)}_test.py"
        spec["has_error_case"] = has_error_case(spec["cases"])

        if plugins_module is not None:
            spec[plugins_name] = plugins_module
        logger.debug(f"{slug}: attempting render")
        rendered = template.render(**spec)
        # Render into a tmp file first so the real test file is only
        # replaced after formatting succeeds.
        with NamedTemporaryFile("w", delete=False) as tmp:
            logger.debug(f"{slug}: writing render to tmp file {tmp.name}")
            tmpfile = Path(tmp.name)
            tmp.write(rendered)
        try:
            logger.debug(f"{slug}: formatting tmp file {tmpfile}")
            format_file(tmpfile)
        except FileNotFoundError as e:
            # black executable missing; abort this exercise.
            logger.error(f"{slug}: the black utility must be installed")
            return False

        if check:
            return check_template(slug, tests_path, tmpfile)
        else:
            logger.debug(f"{slug}: moving tmp file {tmpfile}->{tests_path}")
            shutil.move(tmpfile, tests_path)
            print(f"{slug} generated at {tests_path}")
    except (TypeError, UndefinedError, SyntaxError) as e:
        # Template rendered but produced/raised errors.
        logger.debug(str(e))
        logger.error(f"{slug}: generation failed")
        return False
    except TemplateNotFound as e:
        # Exercise has no .meta/template.j2; generation is not attempted.
        logger.debug(str(e))
        logger.info(f"{slug}: no template found; skipping")
    except FileNotFoundError as e:
        # Exercise has no canonical-data.json; generation is not attempted.
        logger.debug(str(e))
        logger.info(f"{slug}: no canonical data found; skipping")
    return True
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2019-08-07 09:45:02 -04:00
|
|
|
def generate(
    exercise_glob: str,
    spec_path: Path = DEFAULT_SPEC_LOCATION,
    stop_on_failure: bool = False,
    check: bool = False,
    **_,
):
    """
    Primary entry point. Generates test files for all exercises matching exercise_glob

    Exits the process with status 1 if black is missing or any exercise
    fails to generate/check.
    """
    # black must be installed or all test files will error
    if not shutil.which("black"):
        logger.error("the black utility must be installed")
        sys.exit(1)
    # Templates are looked up relative to both config/ and exercises/.
    loader = FileSystemLoader(["config", "exercises"])
    env = Environment(loader=loader, keep_trailing_newline=True)
    # Custom filters/tests/globals available inside every template.
    env.filters["to_snake"] = to_snake
    env.filters["camel_case"] = camel_case
    env.filters["wrap_overlong"] = wrap_overlong
    env.filters["regex_replace"] = regex_replace
    env.filters["regex_find"] = regex_find
    env.filters["regex_split"] = regex_split
    env.filters["zip"] = zip
    env.filters["parse_datetime"] = parse_datetime
    env.filters["escape_invalid_escapes"] = escape_invalid_escapes
    env.globals["current_date"] = datetime.now(tz=timezone.utc).date()
    env.tests["error_case"] = error_case
    result = True
    for exercise in sorted(Path("exercises/practice").glob(exercise_glob)):
        if not generate_exercise(env, spec_path, exercise, check):
            result = False
            if stop_on_failure:
                break
    if not result:
        sys.exit(1)
|
2019-07-24 09:49:01 -04:00
|
|
|
|
|
|
|
|
|
2019-10-29 16:44:59 -04:00
|
|
|
if __name__ == "__main__":
    # Command-line interface; see the module docstring for usage examples.
    parser = argparse.ArgumentParser()
    parser.add_argument("exercise_glob", nargs="?", default="*", metavar="EXERCISE")
    parser.add_argument(
        "--version",
        action="version",
        version="%(prog)s {} for Python {}".format(VERSION, sys.version.split("\n")[0]),
    )
    parser.add_argument("-v", "--verbose", action="store_true")
    parser.add_argument(
        "-p",
        "--spec-path",
        default=DEFAULT_SPEC_LOCATION,
        type=Path,
        help=(
            "path to clone of exercism/problem-specifications " "(default: %(default)s)"
        ),
    )
    parser.add_argument("--stop-on-failure", action="store_true")
    parser.add_argument(
        "--check",
        action="store_true",
        help="check if tests are up-to-date, but do not modify test files",
    )
    opts = parser.parse_args()
    if opts.verbose:
        logger.setLevel(logging.DEBUG)
    # Clone problem-specifications on demand, then forward all parsed
    # options to generate() as keyword arguments.
    with clone_if_missing(repo=Repo.ProblemSpecifications, directory=opts.spec_path):
        generate(**opts.__dict__)
|