From 3a8cc83cbe465b798647b92357eff2d52454f3dc Mon Sep 17 00:00:00 2001 From: SoniEx2 Date: Sun, 16 Feb 2020 19:35:47 -0300 Subject: Squashed commit of the following: * Move things around a bit more * Add project structure details to HACKING.md * Create setup.py * Split off A Boneless Datastructure Language --- HACKING.md | 25 +++ abdl/__init__.py | 528 ----------------------------------------------- ganarchy/__init__.py | 15 +- ganarchy/__main__.py | 7 +- ganarchy/cli/__init__.py | 21 ++ ganarchy/cli/debug.py | 70 +++++++ ganarchy/config.py | 52 +++++ ganarchy/debug.py | 36 ---- requirements.txt | 8 +- requirements_test.txt | 18 ++ setup.py | 5 + testing/test_abdl.py | 199 ------------------ testing/test_examples.py | 26 --- testing/test_ops_abdl.py | 161 --------------- 14 files changed, 209 insertions(+), 962 deletions(-) create mode 100644 HACKING.md delete mode 100644 abdl/__init__.py create mode 100644 ganarchy/cli/__init__.py create mode 100644 ganarchy/cli/debug.py delete mode 100644 ganarchy/debug.py create mode 100644 setup.py delete mode 100644 testing/test_abdl.py delete mode 100644 testing/test_examples.py delete mode 100644 testing/test_ops_abdl.py diff --git a/HACKING.md b/HACKING.md new file mode 100644 index 0000000..ef461fa --- /dev/null +++ b/HACKING.md @@ -0,0 +1,25 @@ +Hacking GAnarchy +================ + +Project Structure +----------------- + +`requirements.txt` lists known-good, frozen dependencies. if needed or desired, install dependencies listed in setup.py directly. + +GAnarchy uses GAnarchy-based dependencies. they are identified by `gan$COMMIT` names. e.g. in requirements.txt: + +``` +-e git+https://soniex2.autistic.space/git-repos/abdl.git@1b26ad799217af7e187fdae78e862a6bf46e5591#egg=gan0f74bd87a23b515b45da7e6f5d9cc82380443dab +``` + +or in setup.py: + +``` +install_requires=[ + "gan0f74bd87a23b515b45da7e6f5d9cc82380443dab", # a boneless datastructure library +] +``` + +(the comment is just a hint for humans to read) + +note however that not all forks are compatible with the project. requirements.txt provides known-good versions. diff --git a/abdl/__init__.py b/abdl/__init__.py deleted file mode 100644 index 8dde742..0000000 --- a/abdl/__init__.py +++ /dev/null @@ -1,528 +0,0 @@ -# A Boneless Datastructure Language -# Copyright (C) 2019 Soni L. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . - -"""A Boneless Datastructure Language, version 2.0.1. - -ABDL expressions are regex-like constructs for matching and validating object structures. They can be used -with JSON and similar formats, and even self-referential data structures. - -Language Reference: - - ABDL expressions have the ability to iterate, index, validate and filter data structures. This is - done with the use of the syntax elements listed below. - - Syntax Elements: - - An ABDL expression is a sequence of zero or more sequences starting with arrows followed by zero or - more subvalues. 
- - An arrow is ``->`` and indicates indexing/iteration (Mappings, Sequences, Sets). It must be followed - by a variable, literal, parameter, regex or key match. - - A variable is a string of alphanumeric characters, not starting with a digit. It may be followed by a - literal, parameter, regex, key match, or one or more type tests. A ``(key, value)`` tuple containing - the corresponding matched element will be identified by this name in the results dict. - - A literal is a string delimited by single quotes (use ``%'`` to escape ``'`` and ``%%`` to escape ``%``). - A literal can be made "non-validating" by appending an ``?`` after it. It may be followed by one or more - type tests. It is exactly equivalent to indexing an object with a string key. - - A parameter is the symbol ``$`` followed by a string of alphanumeric characters, not starting with - a digit. A parameter can be made "non-validating" by appending an ``?`` after it. It may be followed by - one or more type tests. It is exactly equivalent to indexing an object with an arbitrary object key. - - A regex is an RE, as defined by the ``re`` module, delimited by forward slashes (use ``%/`` to escape - ``/`` and ``%%`` to escape ``%``). A regex can be made "non-validating" by appending an ``?`` after it. - It may be followed by one or more type tests. It attempts to match each key in the object. - - A type test is ``:`` followed by a parameter. A type test can be made "non-validating" by appending - an ``?`` after the ``:``. It attempts to match the type of each matched value in the object. - - A key match is an ABDL expression enclosed in ``[`` and ``]``, optionally prefixed with one or more type - tests. This matches keys (including the type tests). - - A subvalue is an ABDL expression enclosed in ``(`` and ``)``. This allows matching multiple values on - the same object. - - Some syntax elements can be validating or non-validating. Validating syntax elements will raise a - :py:exc:`abdl.ValidationError` whenever a non-matching element is encountered, whereas non-validating - ones will skip them. Note that it is possible for a validating syntax element to still yield results - before raising a :py:exc:`abdl.ValidationError`, so one needs to be careful when writing code where such - behaviour could result in a security vulnerability. - - Examples: - - >>> import abdl - >>> for m in abdl.match("->X:?$dict->Y", {"foo": 1, "bar": {"baz": 2}}, {'dict': dict}): - ... print(m['X'][0], m['Y'][0], m['Y'][1]) - bar baz 2 - - >>> pat = abdl.compile('''-> 'projects'? - ... -> commit /[0-9a-fA-F]{40}|[0-9a-fA-F]{64}/? :?$dict - ... -> url :?$dict - ... -> branch :?$dict''', {'dict': dict}) - >>> data = {"projects": { - ... "385e734a52e13949a7a5c71827f6de920dbfea43": { - ... "https://soniex2.autistic.space/git-repos/ganarchy.git": {"HEAD": {"active": True}} - ... } - ... }} - >>> for m in pat.match(data): - ... print(m['commit'][0], m['url'][0], m['branch'][0], m['branch'][1]) - 385e734a52e13949a7a5c71827f6de920dbfea43 https://soniex2.autistic.space/git-repos/ganarchy.git HEAD {'active': True} - - (If ``:?$dict`` wasn't present, a TypeError would be raised when trying to iterate the ``1`` from ``"foo": 1``.) -""" - -import re - -from collections.abc import Mapping, Sequence, Iterator, Set - -class DeprecationError(Exception): - """Raised for deprecated features, if they are disabled. 
- - This class controls warning/error behaviour of deprecated features.""" - #enable_key_match_compat = False - #warn_key_match_compat = False - - @classmethod - def warn_all(cls): - """Enables all deprecation warnings.""" - pass - -class PatternError(Exception): - """Raised for invalid input or output expressions.""" - # TODO implement formatting - - def __init__(self, msg, pattern, defs, pos, toks): - self.msg = msg - self.pattern = pattern - self.defs = defs - self.pos = pos - self._toks = toks # internal use - - def _normalize(self, pattern, defs): - if pattern is not None: - if self.pattern is not None: - raise ValueError("Attempt to normalize normalized pattern") - else: - self.pattern = pattern - if defs is not None: - if self.defs is not None: - raise ValueError("Attempt to normalize normalized defs") - else: - self.defs = defs - - @classmethod - def _str_escape(cls, s, pos, toks): - raise cls("Error in string escape", None, None, pos, toks) - - @classmethod - def _str_end(cls, s, pos, toks): - raise cls("Unfinished string", None, None, pos, toks) - - @classmethod - def _re_escape(cls, s, pos, toks): - raise cls("Error in regex escape", None, None, pos, toks) - - @classmethod - def _re_end(cls, s, pos, toks): - raise cls("Unfinished regex", None, None, pos, toks) - - @classmethod - def _unexpected_tok(cls, s, pos, toks): - raise cls("Unexpected token", None, None, pos, toks) - -class ValidationError(Exception): - """Raised when the object tree doesn't validate against the given pattern.""" - # FIXME TODO? - -class _PatternElement: - def on_not_in_key(self, frame, path, defs): - raise NotImplementedError - - def on_in_key(self, frame, path, defs): - raise NotImplementedError - - def collect_params(self, res: list): - pass - -class _Arrow(_PatternElement): - def on_not_in_key(self, frame, path, defs): - assert not path[-1].empty - path.append(_Holder(key=None, value=None, name=None, parent=path[-1].value, empty=True)) - return False - -class _StringKey(_PatternElement): - def __init__(self, toks): - self.key = toks[0] - self.skippable = toks[1] == '?' - - def on_in_key(self, frame, path, defs): - return self.on_not_in_key(frame, path, defs) - - def on_not_in_key(self, frame, path, defs): - path[-1].iterator = self.extract(path[-1].parent) - path[-1].empty = False - return True - - def extract(self, obj): - try: - yield (self.key, obj[self.key]) - except (TypeError, IndexError, KeyError): - if not self.skippable: - raise ValidationError - -class _RegexKey(_PatternElement): - def __init__(self, toks): - self.key = toks[0] - self.compiled = re.compile(self.key) - self.skippable = toks[1] == '?' - - def on_in_key(self, frame, path, defs): - return self.on_not_in_key(frame, path, defs) - - def on_not_in_key(self, frame, path, defs): - filtered_iterator = self.filter(path[-1].iterator) - del path[-1].iterator - path[-1].iterator = filtered_iterator - del filtered_iterator - path[-1].empty = False - return True - - def filter(self, it): - for el in it: - try: - if self.compiled.search(el[0]): - yield el - elif not self.skippable: - raise ValidationError - except TypeError: - if not self.skippable: - raise ValidationError - -class _KeySubtree(_PatternElement): - def __init__(self, toks): - self.key = toks[0] - self.skippable = toks[1] == '?' 
- - def on_not_in_key(self, frame, path, defs): - path[-1].subtree = True - filtered_iterator = self.filter(path[-1].iterator, defs) - del path[-1].iterator - path[-1].iterator = filtered_iterator - del filtered_iterator - path[-1].empty = False - return True - - def filter(self, it, defs): - for x in it: - for y in _match_helper(self.key, defs, x[0]): - yield (y, x[1]) - - def collect_params(self, res: list): - for sub in self.key: - sub.collect_params(res) - -class _ValueSubtree(_PatternElement): - def __init__(self, toks): - self.key = toks[0] - self.skippable = toks[1] == '?' - - def on_not_in_key(self, frame, path, defs): - assert not path[-1].empty - path.append(_Holder(key=None, value=None, name=None, parent=path[-1].value, empty=False, subtree=True)) - path[-1].iterator = self.filter(path[-1].parent, defs) - return True - - def filter(self, parent, defs): - for x in _match_helper(self.key, defs, parent): - yield (x, parent) - - def collect_params(self, res: list): - for sub in self.key: - sub.collect_params(res) - -class _Ident(_PatternElement): - def __init__(self, toks): - self.key = toks[0] - - def on_not_in_key(self, frame, path, defs): - path[-1].name = self.key - path[-1].empty = False - return True - -class _Param(_PatternElement): - def __init__(self, toks): - assert isinstance(toks[1], _Ident) - self.skippable = toks[0] == '?' - self.key = toks[1].key - - def on_in_key(self, frame, path, defs): - return self.on_not_in_key(frame, path, defs) - - def on_not_in_key(self, frame, path, defs): - path[-1].iterator = self.extract(path[-1].parent, defs[self.key]) - path[-1].empty = False - return True - - def extract(self, obj, key): - try: - yield (key, obj[key]) - except (TypeError, IndexError, KeyError): - if not self.skippable: - raise ValidationError - - def collect_params(self, res: list): - res.append(self.key) - - def get_value(self, defs): - return defs[self.key] - -class _Ty(_PatternElement): - def __init__(self, toks): - assert isinstance(toks[1], _Ident) - self.skippable = toks[0] == '?' - self.key = toks[1].key - - def on_in_key(self, frame, path, defs): - filtered_iterator = self.filter(path[-1].iterator, defs[self.key]) - del path[-1].iterator - path[-1].iterator = filtered_iterator - del filtered_iterator - path[-1].empty = False - return True - - def on_not_in_key(self, frame, path, defs): - assert len(path) == 1 - if isinstance(path[-1].value, defs[self.key]): - return False - elif not self.skippable: - raise ValidationError - path.clear() - return False - - def filter(self, it, ty): - for el in it: - # this may TypeError if ty is not a type nor a tuple of types - # but that's actually the programmer's error - if isinstance(el[1], ty): - yield el - elif not self.skippable: - # and this one is for actual validation - raise ValidationError - - def collect_params(self, res: list): - res.append(self.key) - -class _End(_PatternElement): - def on_in_key(self, frame, path, defs): - try: - path[-1].next() - return False - except StopIteration: - path.pop() - while frame.prev() and not isinstance(frame.current_op, _End): - pass - if not frame.prev(): - # FIXME? - path.clear() - return True # FIXME? 
- -def _build_syntax(): - from pyparsing import Suppress, Literal, Forward, CharsNotIn, StringEnd, Combine, Optional, Group, Word, srange, Empty - # original regex order: arrow, type/parameter/identifier, string, regex, failure - # better syntax: "arrow" changes from "value" to "key" and thus you need at least one key match before an arrow - subtree = Forward() - # where relevant, enforces match behaviour (skippable object tree branch vs required object tree branch) - skippable = Optional("?", default="") - # r"|'(?:%'|%%|%(?P.|$)|[^%'])*?(?:'|(?P$))\??" # string literals - str_literal = (Combine(Suppress("'") - + (Suppress("%") + "'" | Suppress("%") + "%" | Literal("%") + (CharsNotIn("") | StringEnd()).setParseAction(PatternError._str_escape) | CharsNotIn("%'"))[...] - + (Suppress("'") | StringEnd().setParseAction(PatternError._str_end))) + skippable).setParseAction(lambda toks: [_StringKey(toks)]) - # r"|/(?:%/|%%|%(?P.|$)|[^%/])*?(?:/|(?P$))\??" # regex - re_literal = (Combine(Suppress("/") - + (Suppress("%") + "/" | Suppress("%") + "%" | Literal("%") + (CharsNotIn("") | StringEnd()).setParseAction(PatternError._re_escape) | CharsNotIn("%/"))[...] - + (Suppress("/") | StringEnd().setParseAction(PatternError._re_end))) + skippable).setParseAction(lambda toks: [_RegexKey(toks)]) - arrow = Literal("->").setParseAction(lambda: [_Arrow()]) - # r"|(?::\??)?\$?[A-Za-z][A-Za-z0-9]*" # identifiers, parameters and type matches - identifier = Word(srange("[A-Za-z_]"), srange("[A-Za-z0-9_]")).setParseAction(lambda toks: [_Ident(toks)]) - parameter = (Suppress("$") + skippable + identifier).setParseAction(lambda toks: [_Param(toks)]) - ty = (Suppress(":") + skippable + Suppress("$") + identifier).setParseAction(lambda toks: [_Ty(toks)]) - # support for objects-as-keys - keysubtree = (Suppress("[") + Group(ty[...] + subtree) + (Suppress("]") | CharsNotIn("").setParseAction(PatternError._unexpected_tok) | StringEnd().setParseAction(PatternError._unexpected_tok)) + Optional("?", default="")).setParseAction(lambda toks: [_KeySubtree(toks)]) - # represents key matching - switches from "key" to "value" - tag = (identifier + Optional(parameter | str_literal | re_literal | keysubtree) | parameter | str_literal | re_literal | keysubtree) + ty[...] + Empty().setParseAction(lambda: [_End()]) - # multiple value matching - valuesubtree = (Suppress("(") + Group(subtree) + (Suppress(")") | CharsNotIn("").setParseAction(PatternError._unexpected_tok) | StringEnd().setParseAction(PatternError._unexpected_tok)) + Optional("?", default="")).setParseAction(lambda toks: [_ValueSubtree(toks)]) - # arrow and tag, value subtree - subtree <<= (arrow + tag)[...] + (valuesubtree + Empty().setParseAction(lambda: [_End()]))[...] 
- return ((subtree | CharsNotIn("").setParseAction(PatternError._unexpected_tok)) + StringEnd()).parseWithTabs() - -_built_syntax = _build_syntax() - -def _pairs(o): - if isinstance(o, Mapping): - return iter(o.items()) - elif isinstance(o, Sequence): - return iter(enumerate(o, 0)) - elif isinstance(o, Set): - return iter(((e, e) for e in o)) - else: - # maybe there's more stuff I can implement later - raise TypeError - -class _Holder: - def __init__(self, key, value, name, parent=None, it=None, empty=False, subtree=False): - self.name = name - self.key = key - self.value = value - self.empty = empty - self._it = it - self.parent = parent - self.subtree = subtree - - @property - def iterator(self): - if self._it is None: - self._it = _pairs(self.parent) - return self._it - - @iterator.setter - def iterator(self, value): - assert self._it is None - self._it = value - - @iterator.deleter - def iterator(self): - self._it = None - - def next(self): - self.key, self.value = next(self.iterator) - -class _Frame: - def __init__(self, ops): - self.ops = ops - self.pc = -1 - - def next(self): - pc = self.pc + 1 - if pc >= len(self.ops): - return False - self.pc = pc - return True - - @property - def current_op(self): - return self.ops[self.pc] - - def prev(self): - pc = self.pc - 1 - if pc < 0: - return False - self.pc = pc - return True - -def _match_helper(ops, defs, tree): - frame = _Frame(ops) - - path = [_Holder(key=None, value=tree, parent=None, it=iter(()), name=None)] - in_key = False - while path: - if not frame.next(): - assert not path[-1].empty - res = {} - for h in path: - if h.subtree: - for name, kv in h.key.items(): - res[name] = kv - elif h.name is not None: - res[h.name] = (h.key, h.value) - yield res - assert len(path) == 1 or isinstance(frame.current_op, _End) - frame.prev() - in_key = True - else: - op = frame.current_op - if in_key: - in_key = op.on_in_key(frame, path, defs) - else: - in_key = op.on_not_in_key(frame, path, defs) - -class Pattern: - """A compiled pattern object. - - Warning: - Do not create instances of this class manually. Use :py:func:`abdl.compile`. - - """ - - def __init__(self, pattern, defs): - try: - self._ops = _built_syntax.parseString(pattern) - except PatternError as e: - e._normalize(pattern, defs) - raise - else: - self._params = [] - for op in self._ops: - op.collect_params(self._params) - self._defs = {param: defs[param] for param in self._params} - - def match(self, obj): - """Matches this compiled pattern against the given object. - - Args: - obj: The object to match against. - - Returns: - An iterator. This iterator yields ``(key, value)`` pairs - wrapped in a dict for each variable in the pattern. - - """ - return _match_helper(self._ops, self._defs, obj) - -def compile(pattern, defs={}): - """Compiles the pattern and returns a compiled :py:class:`abdl.Pattern` object. - - Args: - pattern (str): The pattern. Refer to module-level documentation for - pattern syntax. - defs (dict): The parameter list. Used by parameters in the pattern. - - Returns: - Pattern: A compiled pattern object. - - """ - # TODO caching - return Pattern(pattern, defs) - -def match(pattern, obj, defs={}): - """Matches the pattern against the given obj. - - This method is equivalent to ``abdl.compile(pattern, defs).match(obj)``. - - Args: - pattern (str): The pattern. Refer to module-level documentation for - pattern syntax. - obj: The object to match against. - defs (dict): The parameter list. Used by parameters in the pattern. - - Returns: - An iterator. 
This iterator yields ``(key, value)`` pairs - wrapped in a dict for each variable in the pattern. - - """ - return compile(pattern, defs).match(obj) diff --git a/ganarchy/__init__.py b/ganarchy/__init__.py index 0f13b44..6d24994 100644 --- a/ganarchy/__init__.py +++ b/ganarchy/__init__.py @@ -32,7 +32,7 @@ import requests from collections import defaultdict from urllib.parse import urlparse -import ganarchy.config +import ganarchy.config as m_ganarchy_config MIGRATIONS = { "toml-config": ( @@ -210,10 +210,8 @@ def get_env(): env.filters['tomle'] = env.filters['tomlescape'] return env - -@click.group() -def ganarchy(): - pass +# FIXME +from ganarchy.cli import main as ganarchy @ganarchy.command() def initdb(): @@ -457,12 +455,11 @@ class Config: # TODO re.compile("(^" + "|^".join(map(re.escape, domains)) + "|" + "|".join(map(re.escape, suffixes) + ")$") if base: # FIXME is remove=remove the right thing to do? - self._update_projects(base.projects, remove=remove, sanitize=False) # already sanitized - projects = config_data.get('projects', {}) - self._update_projects(projects, remove=remove) + self._update_projects({'projects': base.projects}, remove=remove, sanitize=False) # already sanitized + self._update_projects(config_data, remove=remove) def _update_projects(self, projects, remove, sanitize=True): - m = (ganarchy.config.CONFIG_PATTERN_SANITIZE if sanitize else ganarchy.config.CONFIG_PATTERN).match(projects) + m = (m_ganarchy_config.CONFIG_REPOS_SANITIZE if sanitize else m_ganarchy_config.CONFIG_REPOS).match(projects) for v in m: commit, repo_url, branchname, options = v['commit'][0], v['url'][0], v['branch'][0], v['branch'][1] try: diff --git a/ganarchy/__main__.py b/ganarchy/__main__.py index b270251..ca17cb7 100644 --- a/ganarchy/__main__.py +++ b/ganarchy/__main__.py @@ -15,9 +15,12 @@ # along with this program. If not, see . # The base CLI +import ganarchy.cli + +# FIXME this shouldn't be here import ganarchy # Additional CLI commands -import ganarchy.debug +import ganarchy.cli.debug -ganarchy.ganarchy(prog_name='ganarchy') +ganarchy.cli.main(prog_name='ganarchy') diff --git a/ganarchy/cli/__init__.py b/ganarchy/cli/__init__.py new file mode 100644 index 0000000..9effabb --- /dev/null +++ b/ganarchy/cli/__init__.py @@ -0,0 +1,21 @@ +# This file is part of GAnarchy - decentralized project hub +# Copyright (C) 2019 Soni L. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . + +import click + +@click.group() +def main(): + pass diff --git a/ganarchy/cli/debug.py b/ganarchy/cli/debug.py new file mode 100644 index 0000000..10e91e5 --- /dev/null +++ b/ganarchy/cli/debug.py @@ -0,0 +1,70 @@ +# This file is part of GAnarchy - decentralized project hub +# Copyright (C) 2019 Soni L. 
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+import click
+import qtoml
+
+import ganarchy
+import ganarchy.cli
+import ganarchy.config
+
+@ganarchy.cli.main.group()
+def debug():
+    pass
+
+@debug.command()
+def paths():
+    click.echo('Config home: {}'.format(ganarchy.config_home))
+    click.echo('Additional config search path: {}'.format(ganarchy.config_dirs))
+    click.echo('Cache home: {}'.format(ganarchy.cache_home))
+    click.echo('Data home: {}'.format(ganarchy.data_home))
+
+@debug.command()
+def configs():
+    def print_conf(conf):
+        click.echo("\tRepos:")
+        for i, repo in enumerate(ganarchy.config.CONFIG_REPOS.match({'projects': conf.projects})):
+            click.echo("\t\t{}.".format(i))
+            click.echo("\t\t\tProject: {}".format(repo['commit'][0]))
+            click.echo("\t\t\tURI: {}".format(repo['url'][0]))
+            click.echo("\t\t\tBranch: {}".format(repo['branch'][0]))
+            click.echo("\t\t\tActive: {}".format(repo['branch'][1] == {'active': True}))
+
+    click.echo("Breaking down the configs.")
+    conf = None
+    # reverse order is intentional
+    for d in reversed(ganarchy.config_dirs):
+        click.echo("Config: {}/config.toml".format(d))
+        try:
+            f = open(d + "/config.toml", 'r', encoding='utf-8', newline='')
+            conf = ganarchy.Config(f, conf)
+            click.echo("Updated entries:")
+            print_conf(conf)
+            f.close()
+        except (OSError, UnicodeDecodeError, qtoml.decoder.TOMLDecodeError) as e:
+            click.echo("\tError: {}".format(e))
+    try:
+        click.echo("Config: {}/config.toml".format(ganarchy.config_home))
+        f = open(ganarchy.config_home + "/config.toml", 'r', encoding='utf-8', newline='')
+        conf = ganarchy.Config(f, conf)
+        click.echo("Updated entries:")
+        print_conf(conf)
+        click.echo("-----")
+        click.echo("\tTitle: {}".format(conf.title))
+        click.echo("\tBase URI: {}".format(conf.base_url))
+        f.close()
+    except (OSError, UnicodeDecodeError, qtoml.decoder.TOMLDecodeError) as e:
+        click.echo("\tError: {}".format(e))
diff --git a/ganarchy/config.py b/ganarchy/config.py
index 154447b..ae1615d 100644
--- a/ganarchy/config.py
+++ b/ganarchy/config.py
@@ -155,3 +155,55 @@ class RemoteConfigSource(ConfigSource):
         for r in CONFIG_PATTERN_SANITIZE.match(self.tomlobj):
             yield (r['commit'][0], r['url'][0], r['branch'][0], r['branch'][1])
 
+class ConfigManager:
+    def __init__(self):
+        # FIXME ???
+ self.sources = [] + +# class Config: +# def __init__(self, toml_file, base=None, remove=True): +# self.projects = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(dict)))) +# config_data = qtoml.load(toml_file) +# self.remote_configs = config_data.get('config_srcs', []) +# self.title = config_data.get('title', '') +# self.base_url = config_data.get('base_url', '') +# # TODO blocked domains (but only read them from config_data if remove is True) +# self.blocked_domains = [] +# self.blocked_domain_suffixes = [] +# self.blocked_domains.sort() +# self.blocked_domain_suffixes.sort(key=lambda x: x[::-1]) +# # FIXME remove duplicates and process invalid entries +# self.blocked_domains = tuple(self.blocked_domains) +# self.blocked_domain_suffixes = tuple(self.blocked_domain_suffixes) # MUST be tuple +# # TODO re.compile("(^" + "|^".join(map(re.escape, domains)) + "|" + "|".join(map(re.escape, suffixes) + ")$") +# if base: +# # FIXME is remove=remove the right thing to do? +# self._update_projects(base.projects, remove=remove, sanitize=False) # already sanitized +# projects = config_data.get('projects', {}) +# self._update_projects(projects, remove=remove) +# +# def _update_projects(self, projects, remove, sanitize=True): +# m = (m_ganarchy_config.CONFIG_PATTERN_SANITIZE if sanitize else m_ganarchy_config.CONFIG_PATTERN).match(projects) +# for v in m: +# commit, repo_url, branchname, options = v['commit'][0], v['url'][0], v['branch'][0], v['branch'][1] +# try: +# u = urlparse(repo_url) +# if not u: +# raise ValueError +# # also raises for invalid ports, see https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlparse +# # "Reading the port attribute will raise a ValueError if an invalid port is specified in the URL. [...]" +# if u.port == 0: +# raise ValueError +# if u.scheme not in ('http', 'https'): +# raise ValueError +# if (u.hostname in self.blocked_domains) or (u.hostname.endswith(self.blocked_domain_suffixes)): +# raise ValueError +# except ValueError: +# continue +# if branchname == "HEAD": +# branchname = None +# active = options.get('active', None) +# if active not in (True, False): +# continue +# branch = self.projects[commit][repo_url][branchname] +# branch['active'] = active or (branch.get('active', False) and not remove) diff --git a/ganarchy/debug.py b/ganarchy/debug.py deleted file mode 100644 index 1310549..0000000 --- a/ganarchy/debug.py +++ /dev/null @@ -1,36 +0,0 @@ -# This file is part of GAnarchy - decentralized project hub -# Copyright (C) 2019 Soni L. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
- -import click - -import ganarchy -import ganarchy.config - -@ganarchy.ganarchy.group() -def debug(): - pass - -@debug.command() -def paths(): - click.echo('Config home: {}'.format(ganarchy.config_home)) - click.echo('Additional config search path: {}'.format(ganarchy.config_dirs)) - click.echo('Cache home: {}'.format(ganarchy.cache_home)) - click.echo('Data home: {}'.format(ganarchy.data_home)) - -@debug.command() -def configs(): - pass - diff --git a/requirements.txt b/requirements.txt index 2f964c0..0fd42f6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,11 @@ +-e git+https://soniex2.autistic.space/git-repos/abdl.git@1b26ad799217af7e187fdae78e862a6bf46e5591#egg=gan0f74bd87a23b515b45da7e6f5d9cc82380443dab +certifi==2019.11.28 +chardet==3.0.4 Click==7.0 +idna==2.8 Jinja2==2.10.1 -qtoml==0.2.4 +MarkupSafe==1.1.1 pyparsing==2.4.2 +qtoml==0.2.4 requests==2.22.0 +urllib3==1.25.7 diff --git a/requirements_test.txt b/requirements_test.txt index 737d0df..ebe51db 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1,3 +1,21 @@ +astroid==2.3.3 +atomicwrites==1.3.0 +attrs==19.3.0 +decompyle3==3.3.2 hypothesis==4.42.7 +isort==4.3.21 +lazy-object-proxy==1.4.3 +mccabe==0.6.1 +more-itertools==7.2.0 +packaging==19.2 +pluggy==0.13.1 +py==1.8.0 +pylint==2.4.4 pytest==5.2.2 pytest-sphinx==0.2.2 +six==1.13.0 +spark-parser==1.8.9 +uncompyle6==3.6.2 +wcwidth==0.1.7 +wrapt==1.11.2 +xdis==4.2.2 diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..a14854e --- /dev/null +++ b/setup.py @@ -0,0 +1,5 @@ +import setuptools + +setuptools.setup(packages=["gan385e734a52e13949a7a5c71827f6de920dbfea43"], install_requires=[ + "gan0f74bd87a23b515b45da7e6f5d9cc82380443dab", # a boneless datastructure library + "Click", "Jinja2", "qtoml", "requests"]) diff --git a/testing/test_abdl.py b/testing/test_abdl.py deleted file mode 100644 index 4f5241f..0000000 --- a/testing/test_abdl.py +++ /dev/null @@ -1,199 +0,0 @@ -# Tests abdl.py - -import abdl - -import hypothesis -import hypothesis.strategies as st - -import collections.abc - -import re - -import traceback - - -abdl.DeprecationError.enable_key_match_compat = False - -# use abdl's _pairs for consistency. 
-pairs = abdl._pairs - -# do not put integers, floats, etc here -# do not put bytes, they iterate as integers -hashables = st.deferred(lambda: st.text() | st.frozensets(hashables) | st.lists(hashables).map(tuple)) -values = st.deferred(lambda: hashables | objtree) -objtree = st.deferred(lambda: st.text() | st.dictionaries(hashables, values) | st.lists(values) | st.sets(hashables) | st.lists(hashables).map(tuple)) - -# note: use all() so as to not eat all the RAM :p - -class LogAndCompare: - def __init__(self, left, right): - self._itl = left - self._itr = right - self.left = [] - self.right = [] - def __iter__(self): - return self - def __next__(self): - try: - left = next(self._itl) - except abdl.ValidationError as e: - e.tb = traceback.format_exc() - left = e - try: - right = next(self._itr) - except abdl.ValidationError as e: - e.tb = traceback.format_exc() - right = e - self.left.append(left) - self.right.append(right) - return left == right or (type(left), type(right)) == (abdl.ValidationError,)*2 - def __repr__(self): - return "LogAndCompare(left=" + repr(self.left) + ", right=" + repr(self.right) + ")" - - -@hypothesis.given(objtree, st.just(abdl.compile("->X"))) -def test_basic_iterator(foo, pat): - assert all(LogAndCompare(pat.match(foo), map(lambda x: {"X": x}, pairs(foo)))) - -@hypothesis.given(objtree, st.just(abdl.compile("->X->Y"))) -def test_two_depths(foo, pat): - def deep(foo): - for x in pairs(foo): - for y in pairs(x[1]): - yield {"X": x, "Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->X->Y->Z->W"))) -def test_four_depths(foo, pat): - def deep(foo): - for x in pairs(foo): - for y in pairs(x[1]): - for z in pairs(y[1]): - for w in pairs(z[1]): - yield {"X": x, "Y": y, "Z": z, "W": w} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(st.dictionaries(st.text(), st.text()) | st.sets(st.text()), st.just(abdl.compile("->/.../"))) -def test_regex(foo, pat): - # no bindings on this one :< - def deep(foo): - for x in pairs(foo): - if re.search("...", x[0]): - yield {} - else: - raise abdl.ValidationError - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->/.../?"))) -def test_regex_skippable_vs_objtree(foo, pat): - assert all(LogAndCompare(pat.match(foo), ({} for x in pairs(foo) if isinstance(x[0], str) and re.search("...", x[0])))) - -@hypothesis.given(st.dictionaries(st.text(), st.text()) | st.sets(st.text()), st.just(abdl.compile("->/.../->Y"))) -def test_regex_and_bind(foo, pat): - def deep(foo): - for x in pairs(foo): - if re.search("...", x[0]): - for y in pairs(x[1]): - yield {"Y": y} - else: - raise abdl.ValidationError - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->/.../?->Y"))) -def test_regex_skippable_and_bind_vs_objtree(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[0], str) and re.search("...", x[0]): - for y in pairs(x[1]): - yield {"Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->/^...$/?->Y"))) -def test_regex_anchored_skippable_and_bind_vs_objtree(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[0], str) and re.search("^...$", x[0]): - for y in pairs(x[1]): - yield {"Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->''?->Y"))) -def test_empty_literal_vs_objtree(foo, pat): - def 
deep(foo): - for x in pairs(foo): - if x[0] == '': - for y in pairs(x[1]): - yield {"Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -defs = {'a': (dict, list, set)} -@hypothesis.given(objtree, st.just(abdl.compile("->X:?$a->Y", defs=defs))) -def test_type(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[1], (dict, list, set)): - for y in pairs(x[1]): - yield {"X": x, "Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -defs = {'a': (dict, list, set), 'b': (dict, set), 'c': dict} -@hypothesis.given(objtree, st.just(abdl.compile("->X:?$a:?$b:?$c->Y", defs=defs))) -def test_multi_type(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[1], dict): - for y in pairs(x[1]): - yield {"X": x, "Y": y} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -defs = {'a': (dict, list, set), 'b': (dict, set), 'c': dict} -@hypothesis.given(objtree, st.just(abdl.compile("->X:$a:$b:$c->Y", defs=defs))) -@hypothesis.settings(suppress_health_check=[hypothesis.HealthCheck.too_slow]) -def test_multi_type_with_validation_errors(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[1], dict): - for y in pairs(x[1]): - yield {"X": x, "Y": y} - else: - raise abdl.ValidationError - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(st.dictionaries(st.frozensets(st.text()), st.text()), st.just(abdl.compile("->[:?$sets->A]->D", {'sets': collections.abc.Set}))) -def test_subtree_partial(foo, pat): - def deep(foo): - for x in pairs(foo): - if isinstance(x[0], collections.abc.Set): - for a in pairs(x[0]): - for d in pairs(x[1]): - yield {"A": a, "D": d} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -@hypothesis.given(objtree, st.just(abdl.compile("->X->$a->Z", {'a': '0'}))) -def test_param(foo, pat): - def deep(foo): - for x in pairs(foo): - try: - y = x['0'] - except (TypeError, IndexError, KeyError): - raise abdl.ValidationError - else: - for z in pairs(y): - yield {"X": x, "Z": z} - assert all(LogAndCompare(pat.match(foo), deep(foo))) - -def test_basic_value_subtree(): - matcher = abdl.match("(->foo'foo')(->bar'bar')", {'foo': 1, 'bar': 2}) - assert list(matcher) == [{'foo': ('foo', 1), 'bar': ('bar', 2)}] - -# FIXME -#@hypothesis.given(objtree, st.text()) -#def test_exhaustive(foo, pat): -# hypothesis.assume(not re.match("^%s+$", pat)) -# hypothesis.assume(pat) -# try: -# compiled = abdl.compile(pat) -# print(pat) -# except abdl.PatternError: -# hypothesis.assume(False) -# compiled.match(foo) diff --git a/testing/test_examples.py b/testing/test_examples.py deleted file mode 100644 index 02532cc..0000000 --- a/testing/test_examples.py +++ /dev/null @@ -1,26 +0,0 @@ -import abdl - -def test_basic_example(): - m = next(abdl.match("->X:?$dict->Y", {"foo": 1, "bar": {"baz": 2}}, {'dict': dict})) - assert m['X'][0] == 'bar' - assert m['Y'][0] == 'baz' - assert m['Y'][1] == 2 - -def test_basic_2(): - m = next(abdl.match("->'projects':?$d->P/[0-9a-fA-F]{40}|[0-9a-fA-F]{64}/?:?$d->U:?$d->B", {"projects": {"385e734a52e13949a7a5c71827f6de920dbfea43": {"https://soniex2.autistic.space/git-repos/ganarchy.git": {"HEAD": {"active": True}}}}}, {'d': dict})) - assert m['P'][0] == "385e734a52e13949a7a5c71827f6de920dbfea43" - assert m['U'][0] == "https://soniex2.autistic.space/git-repos/ganarchy.git" - assert m['B'][0] == "HEAD" - assert m['B'][1] == {"active": True} - -def test_spaces(): - pat = abdl.compile("""-> 'projects'? - -> commit /[0-9a-fA-F]{40}|[0-9a-fA-F]{64}/? 
:?$dict - -> url :?$dict - -> branch :?$dict""", {'dict': dict}) - data = {"projects": {"385e734a52e13949a7a5c71827f6de920dbfea43": {"https://soniex2.autistic.space/git-repos/ganarchy.git": {"HEAD": {"active": True}}}}} - m = next(pat.match(data)) - assert m['commit'][0] == "385e734a52e13949a7a5c71827f6de920dbfea43" - assert m['url'][0] == "https://soniex2.autistic.space/git-repos/ganarchy.git" - assert m['branch'][0] == "HEAD" - assert m['branch'][1] == {"active": True} diff --git a/testing/test_ops_abdl.py b/testing/test_ops_abdl.py deleted file mode 100644 index fe50369..0000000 --- a/testing/test_ops_abdl.py +++ /dev/null @@ -1,161 +0,0 @@ -# Tests abdl.py internals - -import abdl - -import re - -class OpHelper: - def __init__(self, pat, ops=None): - self.pat = pat - if not ops: - self.ops = pat._ops - else: - self.ops = ops - self.pos = -1 - - def done(self): - assert self.pos + 1 == len(self.ops) - - def __enter__(self): - self.pos += 1 - first = self.pos - assert not isinstance(self.ops[first], abdl._End) - while not isinstance(self.ops[self.pos], abdl._End): - self.pos += 1 - assert isinstance(self.ops[self.pos], abdl._End) - return self.ops[first:self.pos] - - def __exit__(self, exc_type, exc_value, traceback): - pass - -def expect_types(seq, *tys): - assert len(seq) == len(tys) - assert(all(map(lambda x: isinstance(*x), zip(seq, tys)))) - -def expect_idents(oph, *idents): - for ident in idents: - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._Ident) - assert ops[1].key == ident - -def test_empty_iterator_pattern(): - oph = OpHelper(abdl.compile("")) - oph.done() - -def test_four_depths_pattern(): - oph = OpHelper(abdl.compile("->X->Y->Z->W")) - expect_idents(oph, "X", "Y", "Z", "W") - oph.done() - -def test_regex_pattern(): - oph = OpHelper(abdl.compile("->/.../")) - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._RegexKey) - assert ops[1].key == '...' - assert ops[1].compiled == re.compile('...') - assert ops[1].skippable == False - oph.done() - -def test_regex_skippable_pattern(): - oph = OpHelper(abdl.compile("->/.../?")) - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._RegexKey) - assert ops[1].key == '...' - assert ops[1].compiled == re.compile('...') - assert ops[1].skippable == True - oph.done() - -def test_regex_and_bind_pattern(): - oph = OpHelper(abdl.compile("->/.../->Y")) - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._RegexKey) - assert ops[1].key == '...' 
- assert ops[1].compiled == re.compile('...') - assert ops[1].skippable == False - expect_idents(oph, "Y") - oph.done() - -def test_empty_literal_skippable_and_bind_pattern(): - oph = OpHelper(abdl.compile("->''?->Y")) - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._StringKey) - assert ops[1].key == '' - assert ops[1].skippable == True - expect_idents(oph, "Y") - oph.done() - -def test_type_pattern(): - oph = OpHelper(abdl.compile("->X:?$a->Y", defs={'a': (dict, list, set)})) - assert oph.pat._defs['a'] == (dict, list, set) - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._Ident, abdl._Ty) - assert ops[1].key == 'X' - assert ops[2].key == 'a' - assert ops[2].skippable == True - expect_idents(oph, "Y") - oph.done() - -def test_multi_type_pattern(): - oph = OpHelper(abdl.compile("->X:$a:?$b:?$c->Y", defs={'a': (dict, list, set), 'b': (dict, set), 'c': dict})) - assert oph.pat._defs['a'] == (dict, list, set) - assert oph.pat._defs['b'] == (dict, set) - assert oph.pat._defs['c'] == dict - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._Ident, abdl._Ty, abdl._Ty, abdl._Ty) - assert ops[1].key == 'X' - assert ops[2].key == 'a' - assert ops[2].skippable == False - assert ops[3].key == 'b' - assert ops[3].skippable == True - assert ops[4].key == 'c' - assert ops[4].skippable == True - expect_idents(oph, "Y") - oph.done() - -def test_key_subtree_pattern(): - oph = OpHelper(abdl.compile("->[:?$set->A]->D", defs={'set': set})) - assert oph.pat._defs['set'] == set - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._KeySubtree) - oph2 = OpHelper(None, ops=ops[1].key) - with oph2 as ops2: - expect_types(ops2, abdl._Ty, abdl._Arrow, abdl._Ident) - assert ops2[0].key == 'set' - assert ops2[0].skippable == True - assert ops2[2].key == 'A' - oph2.done() - expect_idents(oph, "D") - oph.done() - -def test_param_pattern(): - oph = OpHelper(abdl.compile("->X->$a->Z", defs={'a': '0'})) - assert oph.pat._defs['a'] == '0' - expect_idents(oph, "X") - with oph as ops: - expect_types(ops, abdl._Arrow, abdl._Param) - assert ops[1].key == 'a' - assert ops[1].skippable == False - expect_idents(oph, "Z") - oph.done() - -def test_value_subtree_pattern(): - oph = OpHelper(abdl.compile("(->foo'foo')(->bar'bar')")) - with oph as ops: - expect_types(ops, abdl._ValueSubtree) - oph2 = OpHelper(None, ops=ops[0].key) - with oph2 as ops2: - expect_types(ops2, abdl._Arrow, abdl._Ident, abdl._StringKey) - assert ops2[1].key == 'foo' - assert ops2[2].key == 'foo' - assert ops2[2].skippable == False - oph2.done() - with oph as ops: - expect_types(ops, abdl._ValueSubtree) - oph2 = OpHelper(None, ops=ops[0].key) - with oph2 as ops2: - expect_types(ops2, abdl._Arrow, abdl._Ident, abdl._StringKey) - assert ops2[1].key == 'bar' - assert ops2[2].key == 'bar' - assert ops2[2].skippable == False - oph2.done() - oph.done() -- cgit 1.4.1
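
For context on the `gan$COMMIT` scheme described in HACKING.md above: the `install_requires` entry can only resolve if the split-off dependency publishes its distribution under that same name. The real setup.py lives in the abdl repository referenced in requirements.txt and is not part of this patch; the sketch below is a guess at what it presumably looks like, with `name`, `version`, and `install_requires` inferred from the `#egg=` fragment and the removed abdl/__init__.py rather than confirmed.

```
# Hypothetical setup.py for the split-off ABDL repository (not part of this
# patch). It is assumed to register the distribution under its gan$COMMIT
# name so that GAnarchy's install_requires entry can resolve it.
import setuptools

setuptools.setup(
    name="gan0f74bd87a23b515b45da7e6f5d9cc82380443dab",  # matches the #egg= fragment in requirements.txt
    version="2.0.1",                  # version stated in the removed abdl docstring
    packages=["abdl"],                # the importable package keeps its human-readable name
    install_requires=["pyparsing"],   # abdl builds its parser with pyparsing
)
```

requirements.txt then pins that name to a specific known-good commit of a specific fork via the `-e git+...@COMMIT#egg=...` line, which is why HACKING.md points people at requirements.txt rather than at setup.py alone.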