Mirror of https://github.com/github/codeql.git, synced 2025-12-16 16:53:25 +01:00
Swift: implement python schema
The information that was contained in `schema.yml` now lives in `swift/schema.py`, which allows a more integrated IDE experience for writing and navigating the schema. Another minor change is that `schema.Class` now has a `str` `group` field instead of a `pathlib.Path` `dir` one.
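As an illustration of the new format, here is a minimal, hypothetical schema written against the `defs` module exercised by the tests in this commit (`defs.string`, `defs.optional`, `defs.list`, `defs.child`, `@defs.group`, `defs.predicate`). The class names below are invented for the example and are not part of the actual `swift/schema.py`.

```python
# Hypothetical sketch of the Python schema DSL introduced by this commit.
# Class names are illustrative; the real definitions live in swift/schema.py.
from swift.codegen.lib.schema import defs

class Element:          # exactly one root class (a class with no bases) is allowed
    pass

@defs.group("expr")     # classes can be assigned to an output group/subdirectory
class Expr(Element):
    kind: defs.string                      # 1-to-1 property
    index: defs.optional[defs.int]         # optional property

class CallExpr(Expr):
    arguments: defs.list["Expr"] | defs.child   # repeated child property, referenced by name
    is_async: defs.predicate                    # predicate (flag) property
```

Loading such a module through the `schema.load_file`/`schema.load` functions shown in the diff below yields the same `schema.Class` objects the YAML parser used to produce, with `group` replacing the old `dir` field.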
swift/.pep8 (new file, +3 lines)
@@ -0,0 +1,3 @@
[pep8]
ignore = E302
max_line_length = 120
@@ -3,6 +3,18 @@ load("@rules_pkg//:install.bzl", "pkg_install")
load("//:defs.bzl", "codeql_platform")
load("//misc/bazel:pkg_runfiles.bzl", "pkg_runfiles")

filegroup(
name = "schema",
srcs = ["schema.py"],
visibility = ["//swift:__subpackages__"],
)

filegroup(
name = "schema_includes",
srcs = glob(["*.dbscheme"]),
visibility = ["//swift:__subpackages__"],
)

pkg_files(
name = "dbscheme_files",
srcs = [
@@ -1,3 +1,2 @@
[pep8]
max_line_length = 120
ignore = E302
@@ -1,26 +1,16 @@
load("@swift_codegen_deps//:requirements.bzl", "requirement")

filegroup(
name = "schema",
srcs = ["schema.yml"],
visibility = ["//swift:__subpackages__"],
)

filegroup(
name = "schema_includes",
srcs = glob(["*.dbscheme"]),
visibility = ["//swift:__subpackages__"],
)

py_binary(
name = "codegen",
srcs = ["codegen.py"],
data = [
":schema",
":schema_includes",
"//swift:schema",
"//swift:schema_includes",
"//swift/codegen/templates:cpp",
"//swift/codegen/templates:trap",
],
visibility = ["//swift:__subpackages__"],
deps = ["//swift/codegen/generators"],
deps = [
"//swift/codegen/generators",
],
)
@@ -23,7 +23,7 @@ def _parse_args() -> argparse.Namespace:
p.add_argument("--swift-dir", type=_abspath, default=paths.swift_dir,
help="the directory that should be regarded as the root of the swift codebase. Used to compute QL "
"imports and in some comments (default %(default)s)")
p.add_argument("--schema", type=_abspath, default=paths.swift_dir / "codegen/schema.yml",
p.add_argument("--schema", type=_abspath, default=paths.swift_dir / "schema.py",
help="input schema file (default %(default)s)")
p.add_argument("--dbscheme", type=_abspath, default=paths.swift_dir / "ql/lib/swift.dbscheme",
help="output file for dbscheme generation, input file for trap generation (default %(default)s)")
@@ -70,16 +70,16 @@ class Processor:
)

def get_classes(self):
ret = {pathlib.Path(): []}
ret = {'': []}
for k, cls in self._classmap.items():
ret.setdefault(cls.dir, []).append(self._get_class(cls.name))
ret.setdefault(cls.group, []).append(self._get_class(cls.name))
return ret


def generate(opts, renderer):
assert opts.cpp_output
processor = Processor(schema.load(opts.schema).classes)
processor = Processor(schema.load_file(opts.schema).classes)
out = opts.cpp_output
for dir, classes in processor.get_classes().items():
include_parent = (dir != pathlib.Path())
renderer.render(cpp.ClassList(classes, opts.schema, include_parent), out / dir / "TrapClasses")
renderer.render(cpp.ClassList(classes, opts.schema,
include_parent=bool(dir)), out / dir / "TrapClasses")
@@ -14,12 +14,11 @@ Moreover:
The type hierarchy will be translated to corresponding `union` declarations.
"""

import pathlib

import inflection

from swift.codegen.lib import schema
from swift.codegen.lib.dbscheme import *
from typing import Set, List

log = logging.getLogger(__name__)
@@ -35,7 +34,7 @@ def cls_to_dbscheme(cls: schema.Class):
""" Yield all dbscheme entities needed to model class `cls` """
if cls.derived:
yield Union(dbtype(cls.name), (dbtype(c) for c in cls.derived))
dir = cls.dir if cls.dir != pathlib.Path() else None
dir = pathlib.Path(cls.group) if cls.group else None
# output a table specific to a class only if it is a leaf class or it has 1-to-1 properties
# Leaf classes need a table to bind the `@` ids
# 1-to-1 properties are added to a class specific table
@@ -104,7 +103,7 @@ def generate(opts, renderer):
input = opts.schema
out = opts.dbscheme

data = schema.load(input)
data = schema.load_file(input)

dbscheme = Scheme(src=input.relative_to(opts.swift_dir),
includes=get_includes(data, include_dir=input.parent, swift_dir=opts.swift_dir),
@@ -51,6 +51,10 @@ class RootElementHasChildren(Error):
pass


class NoClasses(Error):
pass


def get_ql_property(cls: schema.Class, prop: schema.Property, prev_child: str = "") -> ql.Property:
args = dict(
type=prop.type if not prop.is_predicate else "predicate",
@@ -103,7 +107,7 @@ def get_ql_class(cls: schema.Class, lookup: typing.Dict[str, schema.Class]):
bases=cls.bases,
final=not cls.derived,
properties=properties,
dir=cls.dir,
dir=pathlib.Path(cls.group or ""),
ipa=bool(cls.ipa),
**pragmas,
)
@@ -127,7 +131,7 @@ def get_ql_ipa_class_db(name: str) -> ql.Synth.FinalClassDb:
def get_ql_ipa_class(cls: schema.Class):
if cls.derived:
return ql.Synth.NonFinalClass(name=cls.name, derived=sorted(cls.derived),
root=(cls.name == schema.root_class_name))
root=not cls.bases)
if cls.ipa and cls.ipa.from_class is not None:
source = cls.ipa.from_class
get_ql_ipa_class_db(source).subtract_type(cls.name)
@@ -253,12 +257,14 @@ def generate(opts, renderer):
existing |= {q for q in test_out.rglob("*.ql")}
existing |= {q for q in test_out.rglob(missing_test_source_filename)}

data = schema.load(input)
data = schema.load_file(input)

classes = {name: get_ql_class(cls, data.classes) for name, cls in data.classes.items()}
# element root is absent in tests
if schema.root_class_name in classes and classes[schema.root_class_name].has_children:
raise RootElementHasChildren
if not classes:
raise NoClasses
root = next(iter(classes.values()))
if root.has_children:
raise RootElementHasChildren(root)

imports = {}
@@ -288,10 +294,10 @@ def generate(opts, renderer):
for c in data.classes.values():
if _should_skip_qltest(c, data.classes):
continue
test_dir = test_out / c.dir / c.name
test_dir = test_out / c.group / c.name
test_dir.mkdir(parents=True, exist_ok=True)
if not any(test_dir.glob("*.swift")):
log.warning(f"no test source in {c.dir / c.name}")
log.warning(f"no test source in {test_dir.relative_to(test_out)}")
renderer.render(ql.MissingTestInstructions(),
test_dir / missing_test_source_filename)
continue
@@ -308,12 +314,12 @@ def generate(opts, renderer):
constructor_imports = []
ipa_constructor_imports = []
stubs = {}
for cls in sorted(data.classes.values(), key=lambda cls: (cls.dir, cls.name)):
for cls in sorted(data.classes.values(), key=lambda cls: (cls.group, cls.name)):
ipa_type = get_ql_ipa_class(cls)
if ipa_type.is_final:
final_ipa_types.append(ipa_type)
if ipa_type.has_params:
stub_file = stub_out / cls.dir / f"{cls.name}Constructor.qll"
stub_file = stub_out / cls.group / f"{cls.name}Constructor.qll"
if not stub_file.is_file() or _is_generated_stub(stub_file):
# stub rendering must be postponed as we might not have yet all subtracted ipa types in `ipa_type`
stubs[stub_file] = ql.Synth.ConstructorStub(ipa_type)
@@ -326,7 +332,7 @@ def generate(opts, renderer):

for stub_file, data in stubs.items():
renderer.render(data, stub_file)
renderer.render(ql.Synth.Types(schema.root_class_name, final_ipa_types, non_final_ipa_types), out / "Synth.qll")
renderer.render(ql.Synth.Types(root.name, final_ipa_types, non_final_ipa_types), out / "Synth.qll")
renderer.render(ql.ImportList(constructor_imports), out / "SynthConstructors.qll")
renderer.render(ql.ImportList(ipa_constructor_imports), out / "PureSynthConstructors.qll")
@@ -1 +1,121 @@
optional = list = int = string = boolean = predicate = include = group = child = synth = qltest = cpp = object()
from typing import Callable as _Callable, Union as _Union
from functools import singledispatch as _singledispatch
from swift.codegen.lib import schema as _schema
import inspect as _inspect


class _ChildModifier(_schema.PropertyModifier):
def modify(self, prop: _schema.Property):
if prop.type is None or prop.type[0].islower():
raise _schema.Error("Non-class properties cannot be children")
prop.is_child = True


def include(source: str):
# add to `includes` variable in calling context
_inspect.currentframe().f_back.f_locals.setdefault(
"__includes", []).append(source)


class _Pragma(_schema.PropertyModifier):
""" A class or property pragma.
For properties, it functions similarly to a `_PropertyModifier` with `|`, adding the pragma.
For schema classes it acts as a python decorator with `@`.
"""

def __init__(self, pragma):
self.pragma = pragma

def modify(self, prop: _schema.Property):
prop.pragmas.append(self.pragma)

def __call__(self, cls: type) -> type:
""" use this pragma as a decorator on classes """
if "pragmas" in cls.__dict__:  # not using hasattr as we don't want to land on inherited pragmas
cls.pragmas.append(self.pragma)
else:
cls.pragmas = [self.pragma]
return cls


class _Optionalizer(_schema.PropertyModifier):
def modify(self, prop: _schema.Property):
K = _schema.Property.Kind
if prop.kind != K.SINGLE:
raise _schema.Error(
"Optional should only be applied to simple property types")
prop.kind = K.OPTIONAL


class _Listifier(_schema.PropertyModifier):
def modify(self, prop: _schema.Property):
K = _schema.Property.Kind
if prop.kind == K.SINGLE:
prop.kind = K.REPEATED
elif prop.kind == K.OPTIONAL:
prop.kind = K.REPEATED_OPTIONAL
else:
raise _schema.Error(
"Repeated should only be applied to simple or optional property types")


class _TypeModifier:
""" Modifies types using get item notation """

def __init__(self, modifier: _schema.PropertyModifier):
self.modifier = modifier

def __getitem__(self, item):
return item | self.modifier


class _Namespace:
""" simple namespacing mechanism """

def __init__(self, **kwargs):
self.__dict__.update(kwargs)


_ClassDecorator = _Callable[[type], type]


def _annotate(**kwargs) -> _ClassDecorator:
def f(cls: type) -> type:
for k, v in kwargs.items():
setattr(cls, k, v)
return cls

return f


boolean = "boolean"
int = "int"
string = "string"

predicate = _schema.predicate_marker
optional = _TypeModifier(_Optionalizer())
list = _TypeModifier(_Listifier())

child = _ChildModifier()

qltest = _Namespace(
skip=_Pragma("qltest_skip"),
collapse_hierarchy=_Pragma("qltest_collapse_hierarchy"),
uncollapse_hierarchy=_Pragma("qltest_uncollapse_hierarchy"),
)

cpp = _Namespace(
skip=_Pragma("cpp_skip"),
)


def group(name: str = "") -> _ClassDecorator:
return _annotate(group=name)


synth = _Namespace(
from_class=lambda ref: _annotate(ipa=_schema.IpaInfo(
from_class=_schema.get_type_name(ref))),
on_arguments=lambda **kwargs: _annotate(
ipa=_schema.IpaInfo(on_arguments={k: _schema.get_type_name(t) for k, t in kwargs.items()}))
)
@@ -1,60 +1,63 @@
""" schema.yml format representation """

import pathlib
import re
import types
import typing
from dataclasses import dataclass, field
from typing import List, Set, Union, Dict, ClassVar, Optional
from typing import List, Set, Union, Dict, Optional
from enum import Enum, auto
import functools
import importlib.util
from toposort import toposort_flatten

import yaml


class Error(Exception):

def __str__(self):
return self.args[0]


root_class_name = "Element"
def _check_type(t: Optional[str], known: typing.Iterable[str]):
if t is not None and t not in known:
raise Error(f"Unknown type {t}")


@dataclass
class Property:
is_single: ClassVar = False
is_optional: ClassVar = False
is_repeated: ClassVar = False
is_predicate: ClassVar = False
class Kind(Enum):
SINGLE = auto()
REPEATED = auto()
OPTIONAL = auto()
REPEATED_OPTIONAL = auto()
PREDICATE = auto()

name: str
type: str = None
kind: Kind
name: Optional[str] = None
type: Optional[str] = None
is_child: bool = False
pragmas: List[str] = field(default_factory=list)

@property
def is_single(self) -> bool:
return self.kind == self.Kind.SINGLE

@dataclass
class SingleProperty(Property):
is_single: ClassVar = True
@property
def is_optional(self) -> bool:
return self.kind in (self.Kind.OPTIONAL, self.Kind.REPEATED_OPTIONAL)

@property
def is_repeated(self) -> bool:
return self.kind in (self.Kind.REPEATED, self.Kind.REPEATED_OPTIONAL)

@property
def is_predicate(self) -> bool:
return self.kind == self.Kind.PREDICATE


@dataclass
class OptionalProperty(Property):
is_optional: ClassVar = True


@dataclass
class RepeatedProperty(Property):
is_repeated: ClassVar = True


@dataclass
class RepeatedOptionalProperty(Property):
is_optional: ClassVar = True
is_repeated: ClassVar = True


@dataclass
class PredicateProperty(Property):
is_predicate: ClassVar = True
SingleProperty = functools.partial(Property, Property.Kind.SINGLE)
OptionalProperty = functools.partial(Property, Property.Kind.OPTIONAL)
RepeatedProperty = functools.partial(Property, Property.Kind.REPEATED)
RepeatedOptionalProperty = functools.partial(
Property, Property.Kind.REPEATED_OPTIONAL)
PredicateProperty = functools.partial(Property, Property.Kind.PREDICATE)


@dataclass
@@ -66,119 +69,161 @@ class IpaInfo:
@dataclass
class Class:
name: str
bases: List[str] = field(default_factory=set)
bases: List[str] = field(default_factory=list)
derived: Set[str] = field(default_factory=set)
properties: List[Property] = field(default_factory=list)
dir: pathlib.Path = pathlib.Path()
group: str = ""
pragmas: List[str] = field(default_factory=list)
ipa: Optional[IpaInfo] = None

@property
def final(self):
return not self.derived
def check_types(self, known: typing.Iterable[str]):
for b in self.bases:
_check_type(b, known)
for d in self.derived:
_check_type(d, known)
for p in self.properties:
_check_type(p.type, known)
if self.ipa is not None:
_check_type(self.ipa.from_class, known)
if self.ipa.on_arguments is not None:
for t in self.ipa.on_arguments.values():
_check_type(t, known)


@dataclass
class Schema:
classes: Dict[str, Class]
classes: Dict[str, Class] = field(default_factory=dict)
includes: Set[str] = field(default_factory=set)


_StrOrList = Union[str, List[str]]
predicate_marker = object()

TypeRef = Union[type, str]


def _auto_list(data: _StrOrList) -> List[str]:
if isinstance(data, list):
return data
return [data]
@functools.singledispatch
def get_type_name(arg: TypeRef) -> str:
raise Error(f"Not a schema type or string ({arg})")


def _parse_property(name: str, data: Union[str, Dict[str, _StrOrList]], is_child: bool = False):
if isinstance(data, dict):
if "type" not in data:
raise Error(f"property {name} has no type")
pragmas = _auto_list(data.pop("_pragma", []))
type = data.pop("type")
if data:
raise Error(f"unknown metadata {', '.join(data)} in property {name}")
else:
pragmas = []
type = data
if is_child and type[0].islower():
raise Error(f"children must have class type, got {type} for {name}")
if type.endswith("?*"):
return RepeatedOptionalProperty(name, type[:-2], is_child=is_child, pragmas=pragmas)
elif type.endswith("*"):
return RepeatedProperty(name, type[:-1], is_child=is_child, pragmas=pragmas)
elif type.endswith("?"):
return OptionalProperty(name, type[:-1], is_child=is_child, pragmas=pragmas)
elif type == "predicate":
return PredicateProperty(name, pragmas=pragmas)
else:
return SingleProperty(name, type, is_child=is_child, pragmas=pragmas)
@get_type_name.register
def _(arg: type):
return arg.__name__


def _parse_ipa(data: Dict[str, Union[str, Dict[str, str]]]):
return IpaInfo(from_class=data.get("from"),
on_arguments=data.get(True))  # 'on' is parsed as boolean True in yaml
@get_type_name.register
def _(arg: str):
return arg


class _DirSelector:
""" Default output subdirectory selector for generated QL files, based on the `_directories` global field"""

def __init__(self, dir_to_patterns):
self.selector = [(re.compile(p), pathlib.Path(d)) for d, p in dir_to_patterns]
self.selector.append((re.compile(""), pathlib.Path()))

def get(self, name):
return next(d for p, d in self.selector if p.search(name))
@functools.singledispatch
def _make_property(arg: object) -> Property:
if arg is predicate_marker:
return PredicateProperty()
raise Error(f"Illegal property specifier {arg}")


def load(path):
""" Parse the schema from the file at `path` """
with open(path) as input:
data = yaml.load(input, Loader=yaml.SafeLoader)
grouper = _DirSelector(data.get("_directories", {}).items())
classes = {root_class_name: Class(root_class_name)}
classes.update((cls, Class(cls, dir=grouper.get(cls))) for cls in data if not cls.startswith("_"))
for name, info in data.items():
if name.startswith("_"):
continue
if not name[0].isupper():
raise Error(f"keys in the schema file must be capitalized class names or metadata, got {name}")
cls = classes[name]
for k, v in info.items():
if not k.startswith("_"):
cls.properties.append(_parse_property(k, v))
elif k == "_extends":
cls.bases = _auto_list(v)
for base in cls.bases:
classes[base].derived.add(name)
elif k == "_dir":
cls.dir = pathlib.Path(v)
elif k == "_children":
cls.properties.extend(_parse_property(kk, vv, is_child=True) for kk, vv in v.items())
elif k == "_pragma":
cls.pragmas = _auto_list(v)
elif k == "_synth":
cls.ipa = _parse_ipa(v)
else:
raise Error(f"unknown metadata {k} for class {name}")
if not cls.bases and cls.name != root_class_name:
cls.bases = [root_class_name]
classes[root_class_name].derived.add(name)
@_make_property.register(str)
@_make_property.register(type)
def _(arg: TypeRef):
return SingleProperty(type=get_type_name(arg))


@_make_property.register
def _(arg: Property):
return arg


class PropertyModifier:
""" Modifier of `Property` objects.
Being on the right of `|` it will trigger construction of a `Property` from
the left operand.
"""

def __ror__(self, other: object) -> Property:
ret = _make_property(other)
self.modify(ret)
return ret

def modify(self, prop: Property):
raise NotImplementedError


@dataclass
class _PropertyNamer(PropertyModifier):
name: str

def modify(self, prop: Property):
prop.name = self.name.rstrip("_")


def _get_class(cls: type) -> Class:
if not isinstance(cls, type):
raise Error(f"Only class definitions allowed in schema, found {cls}")
if cls.__name__[0].islower():
raise Error(f"Class name must be capitalized, found {cls.__name__}")
if len({b.group for b in cls.__bases__ if hasattr(b, "group")}) > 1:
raise Error(f"Bases with mixed groups for {cls.__name__}")
return Class(name=cls.__name__,
bases=[b.__name__ for b in cls.__bases__ if b is not object],
derived={d.__name__ for d in cls.__subclasses__()},
# getattr to inherit from bases
group=getattr(cls, "group", ""),
# not getattr not to inherit from bases
pragmas=cls.__dict__.get("pragmas", []),
# not getattr not to inherit from bases
ipa=cls.__dict__.get("ipa", None),
properties=[
a | _PropertyNamer(n)
for n, a in cls.__annotations__.items()
],
)


def _toposort_classes_by_group(classes: typing.Dict[str, Class]) -> typing.Dict[str, Class]:
groups = {}
ret = {}

for name, cls in classes.items():
groups.setdefault(cls.dir, []).append(name)
groups.setdefault(cls.group, []).append(name)

sorted_classes = {}

for dir in sorted(groups):
group = groups[dir]
inheritance = {name: classes[name].bases for name in group}
for group, grouped in sorted(groups.items()):
inheritance = {name: classes[name].bases for name in grouped}
for name in toposort_flatten(inheritance):
sorted_classes[name] = classes[name]
ret[name] = classes[name]

return Schema(classes=sorted_classes, includes=set(data.get("_includes", [])))
return ret


def load(m: types.ModuleType) -> Schema:
includes = set()
classes = {}
known = {"int", "string", "boolean"}
known.update(n for n in m.__dict__ if not n.startswith("__"))
import swift.codegen.lib.schema.defs as defs
for name, data in m.__dict__.items():
if hasattr(defs, name):
continue
if name == "__includes":
includes = set(data)
continue
if name.startswith("__"):
continue
cls = _get_class(data)
if classes and not cls.bases:
raise Error(
f"Only one root class allowed, found second root {name}")
cls.check_types(known)
classes[name] = cls

return Schema(includes=includes, classes=_toposort_classes_by_group(classes))


def load_file(path: pathlib.Path) -> Schema:
spec = importlib.util.spec_from_file_location("schema", path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
return load(module)
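For reference, a minimal usage sketch of the loader shown above; the path is illustrative only, and in practice this is driven through the `--schema` option of `codegen.py`.

```python
# Hypothetical driver for schema.load_file; path and printing are illustrative.
import pathlib
from swift.codegen.lib import schema

data = schema.load_file(pathlib.Path("swift/schema.py"))   # exec the module, then schema.load(module)
for name, cls in data.classes.items():                     # classes come back toposorted within each group
    print(name, cls.group, [p.name for p in cls.properties])
```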
(File diff suppressed because it is too large.)
@@ -136,10 +136,10 @@ def test_classes_with_dirs(generate_grouped):
cbase = cpp.Class(name="CBase")
assert generate_grouped([
schema.Class(name="A"),
schema.Class(name="B", dir=pathlib.Path("foo")),
schema.Class(name="CBase", derived={"C"}, dir=pathlib.Path("bar")),
schema.Class(name="C", bases=["CBase"], dir=pathlib.Path("bar")),
schema.Class(name="D", dir=pathlib.Path("foo/bar/baz")),
schema.Class(name="B", group="foo"),
schema.Class(name="CBase", derived={"C"}, group="bar"),
schema.Class(name="C", bases=["CBase"], group="bar"),
schema.Class(name="D", group="foo/bar/baz"),
]) == {
".": [cpp.Class(name="A", trap_name="As", final=True)],
"foo": [cpp.Class(name="B", trap_name="Bs", final=True)],
@@ -1,9 +1,6 @@
import collections
import pathlib
import sys

import pytest

from swift.codegen.generators import dbschemegen
from swift.codegen.lib import dbscheme
from swift.codegen.test.utils import *
@@ -12,8 +9,8 @@ InputExpectedPair = collections.namedtuple("InputExpectedPair", ("input", "expec

@pytest.fixture(params=[
InputExpectedPair(pathlib.Path(), None),
InputExpectedPair(pathlib.Path("foodir"), pathlib.Path("foodir")),
InputExpectedPair(None, None),
InputExpectedPair("foodir", pathlib.Path("foodir")),
])
def dir_param(request):
return request.param
@@ -58,7 +55,7 @@ def test_includes(input, opts, generate):

def test_empty_final_class(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input),
schema.Class("Object", group=dir_param.input),
]) == dbscheme.Scheme(
src=schema_file,
includes=[],
@@ -76,7 +73,7 @@ def test_empty_final_class(generate, dir_param):

def test_final_class_with_single_scalar_field(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
schema.SingleProperty("foo", "bar"),
]),
]) == dbscheme.Scheme(
@@ -96,7 +93,7 @@ def test_final_class_with_single_scalar_field(generate, dir_param):

def test_final_class_with_single_class_field(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
schema.SingleProperty("foo", "Bar"),
]),
]) == dbscheme.Scheme(
@@ -116,7 +113,7 @@ def test_final_class_with_single_class_field(generate, dir_param):

def test_final_class_with_optional_field(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
schema.OptionalProperty("foo", "bar"),
]),
]) == dbscheme.Scheme(
@@ -144,7 +141,7 @@ def test_final_class_with_optional_field(generate, dir_param):
@pytest.mark.parametrize("property_cls", [schema.RepeatedProperty, schema.RepeatedOptionalProperty])
def test_final_class_with_repeated_field(generate, property_cls, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
property_cls("foo", "bar"),
]),
]) == dbscheme.Scheme(
@@ -172,7 +169,7 @@ def test_final_class_with_repeated_field(generate, property_cls, dir_param):

def test_final_class_with_predicate_field(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
schema.PredicateProperty("foo"),
]),
]) == dbscheme.Scheme(
@@ -198,7 +195,7 @@ def test_final_class_with_predicate_field(generate, dir_param):

def test_final_class_with_more_fields(generate, dir_param):
assert generate([
schema.Class("Object", dir=dir_param.input, properties=[
schema.Class("Object", group=dir_param.input, properties=[
schema.SingleProperty("one", "x"),
schema.SingleProperty("two", "y"),
schema.OptionalProperty("three", "z"),
@@ -276,7 +273,7 @@ def test_class_with_derived_and_single_property(generate, dir_param):
schema.Class(
name="Base",
derived={"Left", "Right"},
dir=dir_param.input,
group=dir_param.input,
properties=[
schema.SingleProperty("single", "Prop"),
]),
@@ -306,7 +303,7 @@ def test_class_with_derived_and_optional_property(generate, dir_param):
schema.Class(
name="Base",
derived={"Left", "Right"},
dir=dir_param.input,
group=dir_param.input,
properties=[
schema.OptionalProperty("opt", "Prop"),
]),
@@ -335,7 +332,7 @@ def test_class_with_derived_and_repeated_property(generate, dir_param):
assert generate([
schema.Class(
name="Base",
dir=dir_param.input,
group=dir_param.input,
derived={"Left", "Right"},
properties=[
schema.RepeatedProperty("rep", "Prop"),
@@ -126,18 +126,6 @@ def generate_tests(generate):
return func


def test_empty(generate):
assert generate([]) == {
import_file(): ql.ImportList(),
children_file(): ql.GetParentImplementation(),
ql_output_path() / "Synth.qll": ql.Synth.Types(schema.root_class_name),
ql_output_path() / "SynthConstructors.qll": ql.ImportList(),
ql_output_path() / "PureSynthConstructors.qll": ql.ImportList(),
ql_output_path() / "Raw.qll": ql.DbClasses(),
ql_output_path() / "Raw.qll": ql.DbClasses(),
}


def test_one_empty_class(generate_classes):
assert generate_classes([
schema.Class("A")
@@ -209,6 +197,7 @@ def test_single_property(generate_classes):

def test_children(generate_classes):
assert generate_classes([
schema.Class("FakeRoot"),
schema.Class("MyObject", properties=[
schema.SingleProperty("a", "int"),
schema.SingleProperty("child1", "int", is_child=True),
@@ -220,6 +209,8 @@ def test_children(generate_classes):
schema.RepeatedOptionalProperty("child4", "int", is_child=True),
]),
]) == {
"FakeRoot.qll": (ql.Stub(name="FakeRoot", base_import=gen_import_prefix + "FakeRoot"),
ql.Class(name="FakeRoot", final=True)),
"MyObject.qll": (ql.Stub(name="MyObject", base_import=gen_import_prefix + "MyObject"),
ql.Class(name="MyObject", final=True,
properties=[
@@ -227,18 +218,24 @@ def test_children(generate_classes):
tableparams=["this", "result", "_"]),
ql.Property(singular="Child1", type="int", tablename="my_objects",
tableparams=["this", "_", "result"], prev_child=""),
ql.Property(singular="B", plural="Bs", type="int", tablename="my_object_bs",
ql.Property(singular="B", plural="Bs", type="int",
tablename="my_object_bs",
tableparams=["this", "index", "result"]),
ql.Property(singular="Child2", plural="Child2s", type="int", tablename="my_object_child2s",
ql.Property(singular="Child2", plural="Child2s", type="int",
tablename="my_object_child2s",
tableparams=["this", "index", "result"], prev_child="Child1"),
ql.Property(singular="C", type="int", tablename="my_object_cs",
tableparams=["this", "result"], is_optional=True),
ql.Property(singular="Child3", type="int", tablename="my_object_child3s",
tableparams=["this", "result"], is_optional=True, prev_child="Child2"),
ql.Property(singular="D", plural="Ds", type="int", tablename="my_object_ds",
tableparams=["this", "result"], is_optional=True,
prev_child="Child2"),
ql.Property(singular="D", plural="Ds", type="int",
tablename="my_object_ds",
tableparams=["this", "index", "result"], is_optional=True),
ql.Property(singular="Child4", plural="Child4s", type="int", tablename="my_object_child4s",
tableparams=["this", "index", "result"], is_optional=True, prev_child="Child3"),
ql.Property(singular="Child4", plural="Child4s", type="int",
tablename="my_object_child4s",
tableparams=["this", "index", "result"], is_optional=True,
prev_child="Child3"),
])),
}

@@ -267,9 +264,12 @@ def test_single_properties(generate_classes):
@pytest.mark.parametrize("is_child,prev_child", [(False, None), (True, "")])
def test_optional_property(generate_classes, is_child, prev_child):
assert generate_classes([
schema.Class("FakeRoot"),
schema.Class("MyObject", properties=[
schema.OptionalProperty("foo", "bar", is_child=is_child)]),
]) == {
"FakeRoot.qll": (ql.Stub(name="FakeRoot", base_import=gen_import_prefix + "FakeRoot"),
ql.Class(name="FakeRoot", final=True)),
"MyObject.qll": (ql.Stub(name="MyObject", base_import=gen_import_prefix + "MyObject"),
ql.Class(name="MyObject", final=True, properties=[
ql.Property(singular="Foo", type="bar", tablename="my_object_foos",
@@ -282,9 +282,12 @@ def test_optional_property(generate_classes, is_child, prev_child):
@pytest.mark.parametrize("is_child,prev_child", [(False, None), (True, "")])
def test_repeated_property(generate_classes, is_child, prev_child):
assert generate_classes([
schema.Class("FakeRoot"),
schema.Class("MyObject", properties=[
schema.RepeatedProperty("foo", "bar", is_child=is_child)]),
]) == {
"FakeRoot.qll": (ql.Stub(name="FakeRoot", base_import=gen_import_prefix + "FakeRoot"),
ql.Class(name="FakeRoot", final=True)),
"MyObject.qll": (ql.Stub(name="MyObject", base_import=gen_import_prefix + "MyObject"),
ql.Class(name="MyObject", final=True, properties=[
ql.Property(singular="Foo", plural="Foos", type="bar", tablename="my_object_foos",
@@ -296,9 +299,13 @@ def test_repeated_property(generate_classes, is_child, prev_child):
@pytest.mark.parametrize("is_child,prev_child", [(False, None), (True, "")])
def test_repeated_optional_property(generate_classes, is_child, prev_child):
assert generate_classes([
schema.Class("FakeRoot"),
schema.Class("MyObject", properties=[
schema.RepeatedOptionalProperty("foo", "bar", is_child=is_child)]),
]) == {

"FakeRoot.qll": (ql.Stub(name="FakeRoot", base_import=gen_import_prefix + "FakeRoot"),
ql.Class(name="FakeRoot", final=True)),
"MyObject.qll": (ql.Stub(name="MyObject", base_import=gen_import_prefix + "MyObject"),
ql.Class(name="MyObject", final=True, properties=[
ql.Property(singular="Foo", plural="Foos", type="bar", tablename="my_object_foos",
@@ -325,9 +332,9 @@ def test_predicate_property(generate_classes):
@pytest.mark.parametrize("is_child,prev_child", [(False, None), (True, "")])
def test_single_class_property(generate_classes, is_child, prev_child):
assert generate_classes([
schema.Class("Bar"),
schema.Class("MyObject", properties=[
schema.SingleProperty("foo", "Bar", is_child=is_child)]),
schema.Class("Bar"),
]) == {
"MyObject.qll": (ql.Stub(name="MyObject", base_import=gen_import_prefix + "MyObject"),
ql.Class(
@@ -344,13 +351,13 @@ def test_single_class_property(generate_classes, is_child, prev_child):


def test_class_dir(generate_classes):
dir = pathlib.Path("another/rel/path")
dir = "another/rel/path"
assert generate_classes([
schema.Class("A", derived={"B"}, dir=dir),
schema.Class("A", derived={"B"}, group=dir),
schema.Class("B", bases=["A"]),
]) == {
f"{dir}/A.qll": (ql.Stub(name="A", base_import=gen_import_prefix + "another.rel.path.A"),
ql.Class(name="A", dir=dir)),
ql.Class(name="A", dir=pathlib.Path(dir))),
"B.qll": (ql.Stub(name="B", base_import=gen_import_prefix + "B"),
ql.Class(name="B", final=True, bases=["A"],
imports=[stub_import_prefix + "another.rel.path.A"])),
@@ -360,14 +367,14 @@ def test_class_dir(generate_classes):
def test_root_element_cannot_have_children(generate_classes):
with pytest.raises(qlgen.RootElementHasChildren):
generate_classes([
schema.Class(schema.root_class_name, properties=[schema.SingleProperty("x", is_child=True)])
schema.Class('A', properties=[schema.SingleProperty("x", is_child=True)])
])


def test_class_dir_imports(generate_import_list):
dir = pathlib.Path("another/rel/path")
dir = "another/rel/path"
assert generate_import_list([
schema.Class("A", derived={"B"}, dir=dir),
schema.Class("A", derived={"B"}, group=dir),
schema.Class("B", bases=["A"]),
]) == ql.ImportList([
stub_import_prefix + "B",
@@ -383,7 +390,7 @@ def test_format(opts, generate, renderer, run_mock):
pathlib.Path("bar.qll"),
pathlib.Path("y", "baz.txt"),
]
generate([])
generate([schema.Class('A')])
assert run_mock.mock_calls == [
mock.call(["my_fake_codeql", "query", "format", "--in-place", "--", "x/foo.ql", "bar.qll"],
stderr=subprocess.PIPE, text=True),
@@ -400,11 +407,11 @@ def test_format_error(opts, generate, renderer, run_mock):
pathlib.Path("y", "baz.txt"),
]
with pytest.raises(qlgen.FormatError):
generate([])
generate([schema.Class('A')])


def test_empty_cleanup(generate, renderer):
generate([])
generate([schema.Class('A')])
assert renderer.mock_calls[-1] == mock.call.cleanup(set())


@@ -423,7 +430,7 @@ def test_non_empty_cleanup(opts, generate, renderer):
write(test_a)
write(test_b)
write(test_c)
generate([])
generate([schema.Class('A')])
assert renderer.mock_calls[-1] == mock.call.cleanup(
{ql_a, ql_b, stub_a, test_a, test_b})

@@ -463,7 +470,7 @@ def test_test_source_present(opts, generate_tests):
def test_test_source_present_with_dir(opts, generate_tests):
write(opts.ql_test_output / "foo" / "A" / "test.swift")
assert generate_tests([
schema.Class("A", dir=pathlib.Path("foo")),
schema.Class("A", group="foo"),
]) == {
"foo/A/A.ql": ql.ClassTester(class_name="A"),
}
@@ -3,333 +3,403 @@ import sys
import pytest

from swift.codegen.test.utils import *

root_name = schema.root_class_name
from swift.codegen.lib.schema import defs


@pytest.fixture
def load(tmp_path):
file = tmp_path / "schema.yml"
def test_empty_schema():
@schema.load
class data:
pass

def ret(yml):
write(file, yml)
return schema.load(file)

return ret
assert data.classes == {}
assert data.includes == set()


def test_empty_schema(load):
ret = load("{}")
assert ret.classes == {root_name: schema.Class(root_name)}
assert ret.includes == set()
def test_one_empty_class():
@schema.load
class data:
class MyClass:
pass


def test_one_empty_class(load):
ret = load("""
MyClass: {}
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'MyClass'}),
'MyClass': schema.Class('MyClass', bases=[root_name]),
assert data.classes == {
'MyClass': schema.Class('MyClass'),
}


def test_two_empty_classes(load):
ret = load("""
MyClass1: {}
MyClass2: {}
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'MyClass1', 'MyClass2'}),
'MyClass1': schema.Class('MyClass1', bases=[root_name]),
'MyClass2': schema.Class('MyClass2', bases=[root_name]),
}
def test_two_empty_classes():
@schema.load
class data:
class MyClass1:
pass

class MyClass2(MyClass1):
pass

def test_two_empty_chained_classes(load):
ret = load("""
MyClass1: {}
MyClass2:
_extends: MyClass1
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'MyClass1'}),
'MyClass1': schema.Class('MyClass1', bases=[root_name], derived={'MyClass2'}),
assert data.classes == {
'MyClass1': schema.Class('MyClass1', derived={'MyClass2'}),
'MyClass2': schema.Class('MyClass2', bases=['MyClass1']),
}


def test_empty_classes_diamond(load):
ret = load("""
A: {}
B: {}
C:
_extends:
- A
- B
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A', 'B'}),
'A': schema.Class('A', bases=[root_name], derived={'C'}),
'B': schema.Class('B', bases=[root_name], derived={'C'}),
'C': schema.Class('C', bases=['A', 'B']),
}
def test_no_external_bases():
class A:
pass


def test_dir(load):
ret = load("""
A:
_dir: other/dir
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], dir=pathlib.Path("other/dir")),
}


def test_directory_filter(load):
ret = load("""
_directories:
first/dir: '[xy]'
second/dir: foo$
third/dir: bar$
Afoo: {}
Bbar: {}
Abar: {}
Bfoo: {}
Ax: {}
Ay: {}
A: {}
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'Afoo', 'Bbar', 'Abar', 'Bfoo', 'Ax', 'Ay', 'A'}),
'Afoo': schema.Class('Afoo', bases=[root_name], dir=pathlib.Path("second/dir")),
'Bbar': schema.Class('Bbar', bases=[root_name], dir=pathlib.Path("third/dir")),
'Abar': schema.Class('Abar', bases=[root_name], dir=pathlib.Path("third/dir")),
'Bfoo': schema.Class('Bfoo', bases=[root_name], dir=pathlib.Path("second/dir")),
'Ax': schema.Class('Ax', bases=[root_name], dir=pathlib.Path("first/dir")),
'Ay': schema.Class('Ay', bases=[root_name], dir=pathlib.Path("first/dir")),
'A': schema.Class('A', bases=[root_name], dir=pathlib.Path()),
}


def test_directory_filter_override(load):
ret = load("""
_directories:
one/dir: ^A$
A:
_dir: other/dir
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], dir=pathlib.Path("other/dir")),
}


def test_lowercase_rejected(load):
with pytest.raises(schema.Error):
load("aLowercase: {}")
@schema.load
class data:
class MyClass(A):
pass


def test_digit_rejected(load):
def test_no_multiple_roots():
with pytest.raises(schema.Error):
load("1digit: {}")
@schema.load
class data:
class MyClass1:
pass

class MyClass2:
pass


def test_properties(load):
ret = load("""
A:
one: string
two: int?
three: bool*
four: x?*
five: predicate
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
def test_empty_classes_diamond():
@schema.load
class data:
class A:
pass

class B(A):
pass

class C(A):
pass

class D(B, C):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B', 'C'}),
'B': schema.Class('B', bases=['A'], derived={'D'}),
'C': schema.Class('C', bases=['A'], derived={'D'}),
'D': schema.Class('D', bases=['B', 'C']),
}


#
def test_group():
@schema.load
class data:
@defs.group("xxx")
class A:
pass

assert data.classes == {
'A': schema.Class('A', group="xxx"),
}


def test_group_is_inherited():
@schema.load
class data:
class A:
pass

class B(A):
pass

@defs.group('xxx')
class C(A):
pass

class D(B, C):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B', 'C'}),
'B': schema.Class('B', bases=['A'], derived={'D'}),
'C': schema.Class('C', bases=['A'], derived={'D'}, group='xxx'),
'D': schema.Class('D', bases=['B', 'C'], group='xxx'),
}


def test_no_mixed_groups_in_bases():
with pytest.raises(schema.Error):
@schema.load
class data:
class A:
pass

@defs.group('x')
class B(A):
pass

@defs.group('y')
class C(A):
pass

class D(B, C):
pass

#
def test_lowercase_rejected():
with pytest.raises(schema.Error):
@schema.load
class data:
class aLowerCase:
pass


def test_properties():
@schema.load
class data:
class A:
one: defs.string
two: defs.optional[defs.int]
three: defs.list[defs.boolean]
four: defs.list[defs.optional[defs.string]]
five: defs.predicate

assert data.classes == {
'A': schema.Class('A', properties=[
schema.SingleProperty('one', 'string'),
schema.OptionalProperty('two', 'int'),
schema.RepeatedProperty('three', 'bool'),
schema.RepeatedOptionalProperty('four', 'x'),
schema.RepeatedProperty('three', 'boolean'),
schema.RepeatedOptionalProperty('four', 'string'),
schema.PredicateProperty('five'),
]),
}


def test_element_properties(load):
ret = load("""
Element:
x: string
""")
assert ret.classes == {
root_name: schema.Class(root_name, properties=[
schema.SingleProperty('x', 'string'),
def test_class_properties():
class A:
pass

@schema.load
class data:
class A:
pass

class B(A):
one: A
two: defs.optional[A]
three: defs.list[A]
four: defs.list[defs.optional[A]]

assert data.classes == {
'A': schema.Class('A', derived={'B'}),
'B': schema.Class('B', bases=['A'], properties=[
schema.SingleProperty('one', 'A'),
schema.OptionalProperty('two', 'A'),
schema.RepeatedProperty('three', 'A'),
schema.RepeatedOptionalProperty('four', 'A'),
]),
}


def test_children(load):
ret = load("""
A:
a: string
b: B*
_children:
c: C
d: D*
e: E?
f: F?*
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.SingleProperty('a', 'string'),
schema.RepeatedProperty('b', 'B'),
schema.SingleProperty('c', 'C', is_child=True),
schema.RepeatedProperty('d', 'D', is_child=True),
schema.OptionalProperty('e', 'E', is_child=True),
schema.RepeatedOptionalProperty('f', 'F', is_child=True),
def test_string_reference_class_properties():
@schema.load
class data:
class A:
one: "A"
two: defs.optional["A"]
three: defs.list["A"]
four: defs.list[defs.optional["A"]]

assert data.classes == {
'A': schema.Class('A', properties=[
schema.SingleProperty('one', 'A'),
schema.OptionalProperty('two', 'A'),
schema.RepeatedProperty('three', 'A'),
schema.RepeatedOptionalProperty('four', 'A'),
]),
}


@pytest.mark.parametrize("type", ["string", "int", "boolean", "predicate"])
def test_builtin_and_predicate_children_not_allowed(load, type):
@pytest.mark.parametrize("spec", [lambda t: t, lambda t: defs.optional[t], lambda t: defs.list[t],
lambda t: defs.list[defs.optional[t]]])
def test_string_reference_dangling(spec):
with pytest.raises(schema.Error):
load(f"""
A:
_children:
x: {type}
""")
@schema.load
class data:
class A:
x: spec("B")


def test_property_with_explicit_type(load):
ret = load("""
A:
x:
type: string*
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.RepeatedProperty('x', 'string'),
def test_children():
@schema.load
class data:
class A:
one: "A" | defs.child
two: defs.optional["A"] | defs.child
three: defs.list["A"] | defs.child
four: defs.list[defs.optional["A"]] | defs.child

assert data.classes == {
'A': schema.Class('A', properties=[
schema.SingleProperty('one', 'A', is_child=True),
schema.OptionalProperty('two', 'A', is_child=True),
schema.RepeatedProperty('three', 'A', is_child=True),
schema.RepeatedOptionalProperty('four', 'A', is_child=True),
]),
}


def test_property_with_explicit_type_and_pragmas(load):
ret = load("""
A:
x:
type: string*
_pragma: [foo, bar]
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.RepeatedProperty('x', 'string', pragmas=["foo", "bar"]),
@pytest.mark.parametrize("spec", [defs.string, defs.int, defs.boolean, defs.predicate])
def test_builtin_and_predicate_children_not_allowed(spec):
with pytest.raises(schema.Error):
@schema.load
class data:
class A:
x: spec | defs.child


_pragmas = [(defs.qltest.skip, "qltest_skip"),
(defs.qltest.collapse_hierarchy, "qltest_collapse_hierarchy"),
(defs.qltest.uncollapse_hierarchy, "qltest_uncollapse_hierarchy"),
(defs.cpp.skip, "cpp_skip"),
]


@pytest.mark.parametrize("pragma,expected", _pragmas)
def test_property_with_pragma(pragma, expected):
@schema.load
class data:
class A:
x: defs.string | pragma

assert data.classes == {
'A': schema.Class('A', properties=[
schema.SingleProperty('x', 'string', pragmas=[expected]),
]),
}


def test_property_with_explicit_type_and_one_pragma(load):
ret = load("""
A:
x:
type: string*
_pragma: foo
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.RepeatedProperty('x', 'string', pragmas=["foo"]),
def test_property_with_pragmas():
spec = defs.string
for pragma, _ in _pragmas:
spec |= pragma

@schema.load
class data:
class A:
x: spec

assert data.classes == {
'A': schema.Class('A', properties=[
schema.SingleProperty('x', 'string', pragmas=[expected for _, expected in _pragmas]),
]),
}


def test_property_with_explicit_type_and_unknown_metadata(load):
@pytest.mark.parametrize("pragma,expected", _pragmas)
def test_class_with_pragma(pragma, expected):
@schema.load
class data:
@pragma
class A:
pass

assert data.classes == {
'A': schema.Class('A', pragmas=[expected]),
}

def test_class_with_pragmas():
def apply_pragmas(cls):
for p, _ in _pragmas:
p(cls)

@schema.load
class data:
class A:
pass

apply_pragmas(A)

assert data.classes == {
'A': schema.Class('A', pragmas=[e for _, e in _pragmas]),
}


def test_ipa_from_class():
@schema.load
class data:
class A:
pass

@defs.synth.from_class(A)
class B(A):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B'}),
'B': schema.Class('B', bases=['A'], ipa=schema.IpaInfo(from_class="A")),
}

def test_ipa_from_class_ref():
@schema.load
class data:
@defs.synth.from_class("B")
class A:
pass

class B(A):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B'}, ipa=schema.IpaInfo(from_class="B")),
'B': schema.Class('B', bases=['A']),
}

def test_ipa_from_class_dangling():
with pytest.raises(schema.Error):
load("""
A:
x:
type: string*
_what_is_this: [foo, bar]
""")
@schema.load
class data:
@defs.synth.from_class("X")
class A:
pass


def test_property_with_dict_without_explicit_type(load):
def test_ipa_class_on():
@schema.load
class data:
class A:
pass

@defs.synth.on_arguments(a=A, i=defs.int)
class B(A):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B'}),
'B': schema.Class('B', bases=['A'], ipa=schema.IpaInfo(on_arguments={'a': 'A', 'i': 'int'})),
}

def test_ipa_class_on_ref():
class A:
pass

@schema.load
class data:
@defs.synth.on_arguments(b="B", i=defs.int)
class A:
pass

class B(A):
pass

assert data.classes == {
'A': schema.Class('A', derived={'B'}, ipa=schema.IpaInfo(on_arguments={'b': 'B', 'i': 'int'})),
'B': schema.Class('B', bases=['A']),
}

def test_ipa_class_on_dangling():
with pytest.raises(schema.Error):
load("""
A:
x:
typo: string*
""")


def test_class_with_pragmas(load):
ret = load("""
A:
x: string*
_pragma: [foo, bar]
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.RepeatedProperty('x', 'string'),
], pragmas=["foo", "bar"]),
}


def test_class_with_one_pragma(load):
ret = load("""
A:
x: string*
_pragma: foo
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'A'}),
'A': schema.Class('A', bases=[root_name], properties=[
schema.RepeatedProperty('x', 'string'),
], pragmas=["foo"]),
}


def test_class_with_unknown_metadata(load):
with pytest.raises(schema.Error):
load("""
A:
x: string*
_foobar: yeah
""")


def test_ipa_class_from(load):
ret = load("""
MyClass:
_synth:
from: A
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'MyClass'}),
'MyClass': schema.Class('MyClass', bases=[root_name], ipa=schema.IpaInfo(from_class="A")),
}


def test_ipa_class_on(load):
ret = load("""
MyClass:
_synth:
on:
x: A
y: int
""")
assert ret.classes == {
root_name: schema.Class(root_name, derived={'MyClass'}),
'MyClass': schema.Class('MyClass', bases=[root_name], ipa=schema.IpaInfo(on_arguments={"x": "A", "y": "int"})),
}


# TODO rejection tests and implementation for malformed `_synth` clauses
@schema.load
class data:
@defs.synth.on_arguments(s=defs.string, a="A", i=defs.int)
class B:
pass


if __name__ == '__main__':
@@ -37,7 +37,7 @@ def override_paths(tmp_path):
@pytest.fixture
def input(opts, tmp_path):
opts.schema = tmp_path / schema_file
with mock.patch("swift.codegen.lib.schema.load") as load_mock:
with mock.patch("swift.codegen.lib.schema.load_file") as load_mock:
load_mock.return_value = schema.Schema([])
yield load_mock.return_value
assert load_mock.mock_calls == [
@@ -4,10 +4,6 @@ _dirs = ("", "decl/", "expr/", "pattern/", "stmt/", "type/")

genrule(
name = "cppgen",
srcs = [
"//swift/codegen:schema",
"//swift/codegen:schema_includes",
],
outs = [
"generated/swift.dbscheme",
"generated/TrapTags.h",
@@ -20,7 +16,6 @@ genrule(
cmd = " ".join([
"$(location //swift/codegen)",
"--generate=dbscheme,trap,cpp",
"--schema $(location //swift/codegen:schema)",
"--dbscheme $(RULEDIR)/generated/swift.dbscheme",
"--cpp-output $(RULEDIR)/generated",
]),
@@ -4,4 +4,3 @@
sourceLocationPrefix(
string prefix: string ref
);
@@ -1,6 +1,6 @@
// generated by codegen/codegen.py

// from codegen/prefix.dbscheme
// from prefix.dbscheme
/**
 * The source location of the snapshot.
 */
@@ -9,8 +9,7 @@ sourceLocationPrefix(
);


// from codegen/schema.yml
// from schema.py

@element =
@callable
(File diff suppressed because it is too large.)