1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 222
|
"""Utility to generate yml files for all the parsing examples."""
import fnmatch
import multiprocessing
import os
import re
import sys
import time
from collections import defaultdict
from typing import Callable, Optional, TypeVar
import click
import yaml
from conftest import (
ParseExample,
compute_parse_tree_hash,
get_parse_fixtures,
parse_example_file,
)
from sqlfluff.core.errors import SQLParseError
S = TypeVar("S", bound="ParseExample")
def distribute_work(
    work_items: list[S],
    work_fn: Callable[[S], tuple[S, Optional[SQLParseError]]],
) -> None:
    """Distribute work across a process pool and track per-dialect progress.

    ``work_fn`` is applied to every item of ``work_items`` and must return a
    tuple of ``(item, error_or_None)`` (see ``generate_one_parse_fixture``).
    As each dialect's set of expected cases empties, a per-dialect summary
    line is printed. If any item produced an error, all collected errors are
    printed at the end and the process exits with status 1.
    """
    # Build up a dict of sets, where the key is the dialect and the set
    # contains all the expected cases. As cases return we'll check them
    # off.
    success_map = {}
    expected_cases = defaultdict(set)
    for case in work_items:
        expected_cases[case.dialect].add(case)
    errors = []
    with multiprocessing.Pool(multiprocessing.cpu_count()) as pool:
        # imap_unordered yields results as soon as each worker finishes,
        # so progress reporting isn't held up by slow files.
        for example, result in pool.imap_unordered(work_fn, work_items):
            if result is not None:
                errors.append(result)
                success_map[example] = False
            else:
                success_map[example] = True
            expected_cases[example.dialect].remove(example)
            # Check to see whether a dialect is complete
            if not expected_cases[example.dialect]:
                # It's done. Report success rate.
                local_success_map = {
                    k: v for k, v in success_map.items() if k.dialect == example.dialect
                }
                if all(local_success_map.values()):
                    print(f"{example.dialect!r} complete.\t\tAll Success ✅")
                else:
                    fail_files = [
                        k.sqlfile for k, v in local_success_map.items() if not v
                    ]
                    print(
                        f"{example.dialect!r} complete.\t\t{len(fail_files)} fails. ⚠️"
                    )
                    for fname in fail_files:
                        print(f" - {fname!r}")
    if errors:
        print(errors)
        print("FAILED TO GENERATE ALL CASES")
        sys.exit(1)
def _create_file_path(example: ParseExample, ext: str = ".yml") -> str:
    """Build the fixture path for *example*, swapping its extension for *ext*."""
    dialect, sqlfile = example
    stem, _ = os.path.splitext(sqlfile)
    return os.path.join("test", "fixtures", "dialects", dialect, stem + ext)
def _is_matching_new_criteria(example: ParseExample) -> bool:
    """Return True if the YAML fixture is missing or older than its SQL file.

    Used by the ``--new-only`` flag to skip fixtures which are already
    up to date.
    """
    yaml_path = _create_file_path(example)
    if not os.path.exists(yaml_path):
        return True
    sql_path = os.path.join(
        "test",
        "fixtures",
        "dialects",
        example.dialect,
        example.sqlfile,
    )
    # Regenerate when the SQL was modified more recently than the YAML.
    return os.path.getmtime(yaml_path) < os.path.getmtime(sql_path)
def generate_one_parse_fixture(
    example: ParseExample,
) -> tuple[ParseExample, Optional[SQLParseError]]:
    """Parse one example SQL file and write its parse tree to a YAML file.

    Returns the example along with an error (or None on success) rather
    than raising, so failures can be aggregated by ``distribute_work``.
    """
    dialect, sqlfile = example
    sql_path = _create_file_path(example, ".sql")
    try:
        tree = parse_example_file(dialect, sqlfile)
    except Exception as err:
        # Catch parsing errors, and wrap the file path into the message.
        return example, SQLParseError(f"Fatal parsing error: {sql_path}: {err}")
    # Check we don't have any base types or unparsable sections
    types = tree.type_set()
    if "base" in types:
        return example, SQLParseError(f"Unnamed base section when parsing: {sql_path}")
    if "unparsable" in types:
        return example, SQLParseError(f"Could not parse: {sql_path}")
    _hash = compute_parse_tree_hash(tree)
    # Same path as the SQL file, but with the .yml extension.
    path = _create_file_path(example)
    # Write with unix newlines so generated fixtures are identical
    # across platforms.
    with open(path, "w", newline="\n", encoding="utf8") as f:
        if not tree:
            # An empty parse tree yields an empty fixture file.
            f.write("")
            return example, None
        records = tree.as_record(code_only=True, show_raw=True)
        assert records, "TypeGuard"
        # Put the hash first so it's easy to find (and check) in the file.
        r = {"_hash": _hash, **records}
        print(
            "# YML test files are auto-generated from SQL files and should not be "
            "edited by",
            '# hand. To help enforce this, the "hash" field in the file must match '
            "a hash",
            "# computed by SQLFluff when running the tests. Please run",
            "# `python test/generate_parse_fixture_yml.py` to generate them after "
            "adding or",
            "# altering SQL files.",
            file=f,
            sep="\n",
        )
        yaml.dump(
            data=r,
            stream=f,
            default_flow_style=False,
            sort_keys=False,
            allow_unicode=True,
        )
    return example, None
def gather_file_list(
    dialect: Optional[str] = None,
    glob_match_pattern: Optional[str] = None,
    new_only: bool = False,
) -> list[ParseExample]:
    """Gather the list of files to generate fixtures for. Apply filters as required.

    Args:
        dialect: If given, restrict to examples of this dialect
            (case-insensitive).
        glob_match_pattern: If given, a glob applied to the SQL file name.
        new_only: If True, keep only examples whose YAML fixture is
            missing or stale.

    Raises:
        ValueError: If a dialect is given but no examples match it.
    """
    parse_success_examples, _ = get_parse_fixtures()
    if new_only:
        parse_success_examples = [
            example
            for example in parse_success_examples
            if _is_matching_new_criteria(example)
        ]
    if dialect:
        dialect = dialect.lower()
        # Attribute access for consistency with the other helpers.
        parse_success_examples = [
            example
            for example in parse_success_examples
            if example.dialect == dialect
        ]
        if not parse_success_examples:
            raise ValueError(f'Unknown Dialect "{dialect}"')
    if not glob_match_pattern:
        return parse_success_examples
    # Translate the glob into a regex so matching is anchored like fnmatch.
    regex = re.compile(fnmatch.translate(glob_match_pattern))
    return [
        example
        for example in parse_success_examples
        if regex.match(example.sqlfile) is not None
    ]
@click.command()
@click.option(
    # Explicit parameter name avoids shadowing the `filter` builtin while
    # keeping the CLI flag unchanged.
    "--filter",
    "-f",
    "filter_pattern",
    default=None,
    help="A glob filter to apply to file names.",
)
@click.option("--dialect", "-d", default=None, help="Filter to a given dialect.")
@click.option(
    "--new-only",
    "new_only",
    is_flag=True,
    default=False,
    help="Only create missing fixtures.",
)
def generate_parse_fixtures(
    filter_pattern: Optional[str], dialect: Optional[str], new_only: bool
):
    """Generate fixture or a subset based on dialect or filename glob match."""
    filter_str = filter_pattern or "*"
    dialect_str = dialect or "all"
    print("Match Pattern Received:")
    print(f"\tfilter={filter_str} dialect={dialect_str} new-only={new_only}")
    parse_success_examples = gather_file_list(dialect, filter_pattern, new_only)
    print(f"Found {len(parse_success_examples)} file(s) to generate")
    t0 = time.monotonic()
    try:
        distribute_work(parse_success_examples, generate_one_parse_fixture)
    except SQLParseError as err:
        # If one fails, exit early and cleanly.
        print(f"PARSING FAILED: {err}")
        sys.exit(1)
    dt = time.monotonic() - t0
    print(f"Built {len(parse_success_examples)} fixtures in {dt:.2f}s.")
def main() -> None:
    """Find all example SQL files, parse and create YAML files.

    Thin wrapper around the ``generate_parse_fixtures`` click command so
    the module can be run as a script.
    """
    generate_parse_fixtures()
if __name__ == "__main__":
main()
|