first commit
This commit is contained in:
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,274 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Translate an options data structure into command line args
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import sys
|
||||
import inspect
|
||||
import argparse
|
||||
import functools
|
||||
import collections
|
||||
import pop.hub
|
||||
|
||||
__virtualname__ = "args"
|
||||
__contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
class ActionWrapper:
    """
    This class wraps argparse.Action instances in order to mark arguments passed
    on CLI as explicitly passed.

    Every attribute access (except ``_action``) is proxied to the wrapped
    action, so argparse treats the wrapper exactly like the real action.
    """

    def __init__(self, action):
        # The real argparse.Action instance being proxied
        self._action = action
        # Copy __name__/__doc__ etc. from the action onto the wrapper
        functools.update_wrapper(self, action)

    def __call__(self, parser, namespace, values, option_string):
        """
        Record that this option was explicitly given on the CLI, then
        delegate to the wrapped action.
        """
        # Let's store the call to this option as an explicit CLI call for later
        # use when overwriting any configuration settings on file with those
        # from CLI
        if getattr(parser, "_explicit_cli_args_", None) is None:
            setattr(parser, "_explicit_cli_args_", set())
        parser._explicit_cli_args_.add(
            self._action.dest
        )  # pylint: disable=protected-access
        # Carry on regular operation
        return self._action(parser, namespace, values, option_string)

    def __getattribute__(self, name):
        # Only "_action" is resolved on the wrapper itself; everything else
        # (dest, option_strings, nargs, ...) is proxied to the real action.
        if name == "_action":
            return object.__getattribute__(self, name)
        # Proxy any attribute's search to the _action instance
        return getattr(self._action, name)

    def __repr__(self):
        return repr(self._action)
|
||||
|
||||
|
||||
class ActionClassWrapper:
    """
    This class wraps argparse.Action classes in order to mark arguments passed
    on CLI as explicitly passed.

    Instantiating the wrapped class yields an :class:`ActionWrapper` around the
    freshly created action instance.
    """

    def __init__(self, klass):
        # The argparse.Action subclass being proxied
        self._klass = klass

    def __call__(self, *args, **kwargs):
        # Construct the real action, then wrap the instance so CLI usage
        # gets recorded on the parser
        return ActionWrapper(self._klass(*args, **kwargs))

    def __repr__(self):
        return repr(self._klass)

    def __getattribute__(self, name):
        # Only "_klass" lives on the wrapper; all other lookups are proxied
        if name == "_klass":
            return object.__getattribute__(self, name)
        # Proxy any attributes search to the _klass instance
        return getattr(self._klass, name)
|
||||
|
||||
|
||||
class ArgumentParser(argparse.ArgumentParser):
    """
    ArgumentParser subclass that tracks which options were explicitly passed
    on the command line (as opposed to filled in from defaults).
    """

    def register(self, name, value, obj):  # pylint: disable=arguments-differ
        """
        Intercept action-class registration so every registered action is
        wrapped in :class:`ActionClassWrapper`.
        """
        if name == "action":
            # Let's wrap it on our action class wrapper so we can latter store
            # which options were explicitly passed from CLI
            return super(ArgumentParser, self).register(
                name, value, ActionClassWrapper(obj)
            )
        return super(ArgumentParser, self).register(name, value, obj)

    def parse_known_args(self, args=None, namespace=None):
        """
        Parse args as usual, then copy the set of explicitly-passed option
        dests (collected by the action wrappers) onto the namespace as
        ``_explicit_cli_args_``.
        """
        namespace, arg_strings = super().parse_known_args(args, namespace)
        explicit_cli_args = getattr(self, "_explicit_cli_args_", set())
        if "_explicit_cli_args_" not in namespace:
            setattr(namespace, "_explicit_cli_args_", set())
        namespace._explicit_cli_args_.update(explicit_cli_args)
        return namespace, arg_strings
|
||||
|
||||
|
||||
def __init__(hub: "pop.hub.Hub"):
    """
    Set up the local memory copy of the parser.

    Creates the ``hub.conf._mem["args"]`` dict that later holds the parser,
    subparser container and subparser map.
    """
    hub.conf._mem["args"] = {}
|
||||
|
||||
|
||||
def _init_parser(hub: "pop.hub.Hub", opts):
    """
    Create the shared ArgumentParser exactly once, passing through any
    kwargs supplied under the special ``_argparser_`` key in ``opts``.
    """
    mem = hub.conf._mem["args"]
    if "parser" not in mem:
        # Instantiate the parser on first use only
        mem["parser"] = ArgumentParser(**opts.get("_argparser_", {}))
|
||||
|
||||
|
||||
def _keys(opts):
    """
    Return the option names ordered for display.

    Ordering is by ``display_priority`` (missing priority sorts last); for a
    plain dict the option name is used as a tiebreaker, while an OrderedDict
    keeps insertion order among equal priorities (sorted() is stable).
    """

    def _priority(name):
        return opts[name].get("display_priority", sys.maxsize)

    if isinstance(opts, collections.OrderedDict):
        return sorted(opts, key=_priority)
    return sorted(opts, key=lambda name: (_priority(name), name))
|
||||
|
||||
|
||||
def subs(hub: "pop.hub.Hub", opts):
    """
    Set up sub parsers, if using sub parsers this needs to be called
    before calling setup.

    opts dict:
        <sub_title>:
            [desc]: 'Some subparser'
            help: 'subparser!'
    """
    _init_parser(hub, opts)
    mem = hub.conf._mem["args"]
    # The chosen subcommand is stored on the namespace as _subparser_
    mem["sub"] = mem["parser"].add_subparsers(dest="_subparser_")
    mem["subs"] = {}
    for name in _keys(opts):
        if name in ("_argparser_",):
            # Reserved key carrying ArgumentParser kwargs, not a subcommand
            continue
        spec = opts[name]
        extra = {}
        if "help" in spec:
            extra["help"] = spec["help"]
        if "desc" in spec:
            extra["description"] = spec["desc"]
        mem["subs"][name] = mem["sub"].add_parser(name, **extra)
    return {"result": True, "return": True}
|
||||
|
||||
|
||||
def setup(hub: "pop.hub.Hub", opts):
    """
    Take in a pre-defined opts dict and translate it to args

    opts dict:
        <arg>:
            [group]: foo
            [default]: bar
            [action]: store_true
            [options]: # arg will be turned into --arg
                - '-A'
                - '--cheese'
            [choices]:
                - foo
                - bar
                - baz
            [nargs]: +
            [type]: int
            [dest]: cheese
            help: Some great help message

    Returns ``{"result": True, "return": defaults}`` where ``defaults`` maps
    each destination name to its configured default value.
    """
    _init_parser(hub, opts)
    defaults = {}
    groups = {}       # named argument groups, created lazily
    ex_groups = {}    # mutually exclusive groups, created lazily
    for arg in _keys(opts):
        if arg in ("_argparser_",):
            # Reserved key holding ArgumentParser kwargs, not an option
            continue
        comps = opts[arg]
        # NOTE: pop() mutates the caller's opts dict — the "positional"
        # marker is consumed here so it never reaches add_argument()
        positional = comps.pop("positional", False)
        if positional:
            args = [arg]
        else:
            # arg "foo_bar" becomes "--foo-bar"; extra option strings come
            # from the "options" list, split into short (-x) and long forms
            long_opts = ["--{}".format(arg.replace("_", "-"))]
            short_opts = []
            for o_str in comps.get("options", []):
                if not o_str.startswith("--") and o_str.startswith("-"):
                    short_opts.append(o_str)
                    continue
                long_opts.append(o_str)
            args = short_opts + long_opts
        kwargs = {}
        kwargs["action"] = action = comps.get("action", None)

        if action is None:
            # Non existing option defaults to a StoreAction in argparse
            action = hub.conf._mem["args"]["parser"]._registry_get(
                "action", action
            )  # pylint: disable=protected-access

        # Use the action's __init__ signature to decide which of the comps
        # keys are legal add_argument() kwargs for this action
        if isinstance(action, str):
            signature = inspect.signature(
                hub.conf._mem["args"]["parser"]._registry_get("action", action).__init__
            )  # pylint: disable=protected-access
        else:
            signature = inspect.signature(action.__init__)

        for param in signature.parameters:
            if param == "self" or param not in comps:
                continue
            if param == "dest":
                kwargs["dest"] = comps.get("dest", arg)
                continue
            if param == "help":
                kwargs["help"] = comps.get("help", "THIS NEEDS SOME DOCUMENTATION!!")
                continue
            if param == "default":
                # Remember the default under its destination name; note this
                # deliberately falls through to also set kwargs["default"]
                defaults[comps.get("dest", arg)] = comps[param]
            kwargs[param] = comps[param]

        # Route the argument: named group, exclusive group, subparser(s),
        # or the top-level parser (first match wins)
        if "group" in comps:
            group = comps["group"]
            if group not in groups:
                groups[group] = hub.conf._mem["args"]["parser"].add_argument_group(
                    group
                )
            groups[group].add_argument(*args, **kwargs)
            continue
        if "ex_group" in comps:
            group = comps["ex_group"]
            if group not in ex_groups:
                ex_groups[group] = hub.conf._mem["args"][
                    "parser"
                ].add_mutually_exclusive_group()
            ex_groups[group].add_argument(*args, **kwargs)
            continue
        if "sub" in comps:
            subs = comps["sub"]
            if not isinstance(subs, list):
                subs = [subs]
            for sub in subs:
                sparse = hub.conf._mem["args"]["subs"].get(sub)
                if not sparse:
                    # Maybe raise exception here? Malformed config?
                    continue
                sparse.add_argument(*args, **kwargs)
            continue
        hub.conf._mem["args"]["parser"].add_argument(*args, **kwargs)
    return {"result": True, "return": defaults}
|
||||
|
||||
|
||||
def parse(
    hub: "pop.hub.Hub", args=None, namespace=None, only_parse_known_arguments=False
):
    """
    Parse the command line options.

    When ``only_parse_known_arguments`` is set, unknown arguments do not
    error out; they are returned under the ``_unknown_args_`` key instead.
    """
    parser = hub.conf._mem["args"]["parser"]
    if only_parse_known_arguments:
        parsed, leftover = parser.parse_known_args(args, namespace)
        result = parsed.__dict__
        result["_unknown_args_"] = leftover
    else:
        result = parser.parse_args(args, namespace).__dict__
    return {"result": True, "return": result}
|
||||
|
||||
|
||||
def render(hub: "pop.hub.Hub", defaults, cli_opts, explicit_cli_args):
    """
    For options specified as such, take the string passed into the cli and
    render it using the specified render flag.

    Only options the user explicitly passed on the CLI are rendered; each
    one is run through ``hub.conf.<renderer>.render``.
    """
    for name in explicit_cli_args:
        renderer = defaults.get(name, {}).get("render")
        if renderer:
            cli_opts[name] = hub.pop.ref.last(f"conf.{renderer}.render")(
                cli_opts[name]
            )
    return cli_opts
|
||||
@@ -0,0 +1,56 @@
|
||||
"""
|
||||
Used to take care of the options that end in `_dir`. The assumption is that
|
||||
`_dir` options need to be treated differently. They need to verified to exist
|
||||
and they need to be rooted based on the user, root option etc.
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import pop.hub
|
||||
|
||||
|
||||
def roots(hub: "pop.hub.Hub", default_root, f_opts, root_dir):
    """
    Detect the root dir data and apply it.

    Rewrites every ``*_dir`` option (except ``root_dir`` itself) in
    ``f_opts`` so it lives under the effective root:
    the explicit ``root_dir`` when given, or ``~/.<import>/`` when running
    as a non-privileged user.

    :param default_root: the configured default for root_dir
    :param f_opts: {import_name: {option: value}} — mutated in place
    :param root_dir: the root_dir value actually in effect
    """
    os_root = os.path.abspath(os.sep)
    root = os_root
    change = False
    non_priv = False
    if hasattr(os, "geteuid"):
        # Only effective UID 0 counts as privileged; on platforms without
        # geteuid (e.g. Windows) no re-rooting happens unless root_dir is set
        if not os.geteuid() == 0:
            change = True
            non_priv = True
    if root_dir and root_dir != default_root:
        root = root_dir
        change = True
    if not root.endswith(os.sep):
        root = f"{root}{os.sep}"
    if change:
        for imp in f_opts:
            for key in f_opts[imp]:
                if key == "root_dir":
                    continue
                if key.endswith("_dir"):
                    if non_priv:
                        # Non-root users get per-import dot-dirs in $HOME
                        # NOTE(review): assumes $HOME is set — TODO confirm
                        root = os.path.join(os.environ["HOME"], f".{imp}{os.sep}")
                        if imp in f_opts[imp][key]:
                            # Strip "<imp>/" from the path so the dot-dir
                            # replaces it rather than nesting it again
                            a_len = len(imp) + 1
                            f_opts[imp][
                                key
                            ] = f"{os_root}{f_opts[imp][key][f_opts[imp][key].index(imp)+a_len:]}"
                    # Re-root only the first occurrence of the os root
                    f_opts[imp][key] = f_opts[imp][key].replace(os_root, root, 1)
|
||||
|
||||
|
||||
def verify(hub: "pop.hub.Hub", opts):
    """
    Verify that the environment and all named directories in the
    configuration exist, creating any missing ``*_dir`` directories.

    ``root_dir`` and ``config_dir`` are skipped: they are inputs, not
    directories this code owns.

    :param opts: a single import's {option: value} dict
    """
    for key in opts:
        if key in ("root_dir", "config_dir"):
            continue
        if key.endswith("_dir"):
            # exist_ok=True removes the isdir/makedirs race the original
            # check-then-create pattern had (TOCTOU), and also creates
            # intermediate directories
            os.makedirs(opts[key], exist_ok=True)
|
||||
@@ -0,0 +1,120 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Configuration file core loading functions
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import glob
|
||||
import fnmatch
|
||||
import pop.hub
|
||||
|
||||
__virtualname__ = "file"
|
||||
__contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def load_file(hub: "pop.hub.Hub", paths, defaults=None, overrides=None, includes=True):
    """
    Load a single configuration file (or several).

    :param paths: list of paths, or a comma separated string of paths;
        globs are expanded
    :param defaults: dict merged in first (lowest priority)
    :param overrides: dict merged in last (highest priority)
    :param includes: process include/include_dir statements found in the data
    """
    opts = {}
    if isinstance(defaults, dict):
        opts.update(defaults)
    if not isinstance(paths, list):
        paths = paths.split(",")
    # Expand globs; matches are appended after the literal entries
    matched = []
    for pattern in paths:
        matched.extend(glob.glob(pattern))
    paths.extend(matched)
    for path in paths:
        loader = hub.conf._loader
        if loader == "yaml":
            opts.update(hub.conf.yaml.load(path))
        elif loader == "json":
            opts.update(hub.conf.json.load(path))
        elif loader == "toml":
            opts.update(hub.conf.toml.load(path))
    if includes:
        hub.conf.file.proc_include(opts)
    if isinstance(overrides, dict):
        opts.update(overrides)
    return opts
|
||||
|
||||
|
||||
def load_dir(
    hub,
    confdir,
    defaults=None,
    overrides=None,
    includes=True,
    recurse=False,
    pattern=None,
):
    """
    Load takes a directory location to scan for configuration files. These
    files will be read in. The defaults dict defines what
    configuration options should exist if not found in the confdir. Overrides
    are configuration options which should be included regardless of whether
    those options existed before. If includes is set to True, then the
    statements 'include' and 'include_dir' found in either the defaults or
    in configuration files are processed.

    :param confdir: directory, list of directories, or comma separated
        string of directories; globs are expanded
    :param recurse: walk subdirectories instead of the top level only
    :param pattern: optional fnmatch pattern files must match
    """
    opts = {}
    if not isinstance(confdir, list):
        confdir = confdir.split(",")
    confdirs = []
    for dirs in confdir:
        if not isinstance(dirs, (list, tuple)):
            dirs = [dirs]
        for dir_ in dirs:
            confdirs.extend(glob.glob(dir_))
    if isinstance(defaults, dict):
        opts.update(defaults)
    paths = []
    for dir_ in confdirs:
        dirpaths = []
        if os.path.isdir(dir_):
            if not recurse:
                for fn_ in os.listdir(dir_):
                    path = os.path.join(dir_, fn_)
                    if os.path.isdir(path):
                        # Don't process directories
                        continue
                    if pattern and not fnmatch.fnmatch(fn_, pattern):
                        continue
                    dirpaths.append(path)
            else:
                for root, dirs, files in os.walk(dir_):
                    for fn_ in files:
                        path = os.path.join(root, fn_)
                        if pattern and not fnmatch.fnmatch(fn_, pattern):
                            continue
                        dirpaths.append(path)

        # Sort confdir directory paths like:
        # /b.txt
        # /c.txt
        # /a/x.txt
        # /b/x.txt
        paths.extend(sorted(dirpaths, key=lambda p: (p.count(os.path.sep), p)))
    # BUG FIX: `includes` was previously passed positionally and landed in
    # load_file's `defaults` parameter; pass it by keyword
    opts.update(hub.conf.file.load_file(paths, includes=includes))
    if isinstance(overrides, dict):
        opts.update(overrides)
    return opts
|
||||
|
||||
|
||||
def proc_include(hub: "pop.hub.Hub", opts):
    """
    process include and include_dir

    Both keys are popped from ``opts``, their referenced config is merged
    in, and the merged result is scanned again in case the included files
    themselves contain include statements.
    """
    again = False
    if opts.get("include_dir"):
        opts.update(hub.conf.file.load_dir(opts.pop("include_dir")))
        again = True
    if opts.get("include"):
        opts.update(hub.conf.file.load_file(opts.pop("include")))
        again = True
    if again:
        # Recurse until no include statements remain
        hub.conf.file.proc_include(opts)
    return opts
|
||||
@@ -0,0 +1,6 @@
|
||||
def __init__(hub):
    """
    Load the subdirs for conf.

    Initializes the sub's shared ``_mem`` scratch space and recursively
    loads all conf sub-plugins onto ``hub.conf``.
    """
    hub.__._mem = {}
    hub.pop.sub.load_subdirs(hub.conf)
|
||||
@@ -0,0 +1,141 @@
|
||||
"""
|
||||
Integrate is used to pull config data from multiple sources and merge it into
|
||||
the hub. Once it is merged then when a sub is loaded the respective config data
|
||||
is loaded into the sub as `OPTS`
|
||||
"""
|
||||
# Take an *args list of modules to import and look for conf.py
|
||||
# Import conf.py if present
|
||||
# After gathering all dicts, modify them to merge CLI options
|
||||
#
|
||||
# Import python libs
|
||||
import importlib
|
||||
import copy
|
||||
import os
|
||||
|
||||
|
||||
def _ex_final(confs, final, override, key_to_ref, ops_to_ref):
    """
    Scan the configuration datasets, create the final config
    value, and detect collisions.

    :param confs: {import_name: {option_name: option_spec}}
    :param final: output — flat {cli_key: option_spec}
    :param override: {"<import>.<key>": {"key": ..., "options": [...]}}
        remaps colliding options to new keys/option strings
    :param key_to_ref: output — {cli_key: ["<import>.<key>", ...]}
    :param ops_to_ref: output — {option_string: ["<import>.<key>", ...]}
    """
    for arg in confs:
        for key in confs[arg]:
            ref = f"{arg}.{key}"
            if ref in override:
                s_key = override[ref]["key"]
                s_opts = override[ref]["options"]
            else:
                s_key = key
                s_opts = confs[arg][key].get("options", [])
            s_opts.append(f"--{s_key}")
            final[s_key] = confs[arg][key]
            if s_opts:
                final[s_key]["options"] = s_opts
            if s_key in key_to_ref:
                key_to_ref[s_key].append(ref)
            else:
                key_to_ref[s_key] = [ref]
            for opt in s_opts:
                if opt in ops_to_ref:
                    ops_to_ref[opt].append(ref)
                else:
                    # BUG FIX: the original did `ops_to_ref = [ref]`, which
                    # rebound the local name and silently discarded the
                    # mapping, breaking option-string collision detection
                    ops_to_ref[opt] = [ref]
|
||||
|
||||
|
||||
def load(
    hub,
    imports,
    override=None,
    cli=None,
    roots=False,
    loader="json",
    logs=True,
    version=True,
):
    """
    This function takes a list of python packages to load and look for
    respective configs. The configs are then loaded in a non-collision
    way modifying the cli options dynamically.
    The args look for the named <package>.conf python module and then
    looks for dictionaries named after the following convention:

    override = {'<package>.key': 'key': 'new_key', 'options': ['--option1', '--option2']}

    CONFIG: The main configuration for this package - loads to hub.OPT['<import>']
    CLI_CONFIG: Loaded only if this is the only import or if specified in the cli option
    SUBS: Used to define the subcommands, only loaded if this is the cli config

    :raises KeyError: when two packages claim the same key or option string
    """
    if override is None:
        override = {}
    if isinstance(imports, str):
        if cli is None:
            cli = imports
        imports = [imports]
    primary = imports[0] if cli is None else cli
    confs = {}
    final = {}
    collides = []
    key_to_ref = {}
    ops_to_ref = {}
    subs = {}
    for imp in imports:
        try:
            cmod = importlib.import_module(f"{imp}.conf")
        except ImportError:
            # Packages without a conf module contribute nothing
            continue
        if hasattr(cmod, "CONFIG"):
            # deepcopy so the package's module-level dicts are never mutated
            confs[imp] = copy.deepcopy(cmod.CONFIG)
        if cli == imp:
            if hasattr(cmod, "CLI_CONFIG"):
                confs[imp].update(copy.deepcopy(cmod.CLI_CONFIG))
            if hasattr(cmod, "SUBS"):
                subs = copy.deepcopy(cmod.SUBS)
    if logs:
        # Logging options act as defaults; the primary's own config wins
        lconf = hub.conf.log.init.conf(primary)
        lconf.update(confs[primary])
        confs[primary] = lconf
    if version:
        vconf = hub.conf.version.CONFIG
        vconf.update(confs[primary])
        confs[primary] = vconf
    _ex_final(confs, final, override, key_to_ref, ops_to_ref)
    # Collision detection. (Cleaned up: removed an unused counter variable
    # and a pointless per-key list copy from the original implementation.)
    for opt, refs in ops_to_ref.items():
        if len(refs) > 1:
            collides.append({opt: refs})
    for key, refs in key_to_ref.items():
        if len(refs) > 1:
            collides.append({key: refs})
    if collides:
        raise KeyError(collides)
    opts = hub.conf.reader.read(final, subs, loader=loader)
    # Re-namespace the flat opts back onto their originating packages.
    # This will be put into an immutable data type before it is passed on
    f_opts = {}
    for key in opts:
        if key == "_subparser_":
            f_opts["_subparser_"] = opts["_subparser_"]
            continue
        for ref in key_to_ref[key]:
            imp = ref[: ref.rindex(".")]
            local_key = ref[ref.rindex(".") + 1 :]
            if imp not in f_opts:
                f_opts[imp] = {}
            f_opts[imp][local_key] = opts[key]
    if roots:
        root_dir = f_opts.get(cli, {}).get("root_dir")
        hub.conf.dirs.roots(
            final.get("root_dir", {}).get("default", os.path.abspath(os.sep)),
            f_opts,
            root_dir,
        )
    for imp in f_opts:
        hub.conf.dirs.verify(f_opts[imp])
    hub.OPT = hub.pop.data.imap(f_opts)
    if logs:
        log_plugin = hub.OPT[primary].get("log_plugin")
        getattr(hub, f"conf.log.{log_plugin}.setup")(hub.OPT[primary])
    if hub.OPT[primary].get("version"):
        hub.conf.version.run(primary)
|
||||
@@ -0,0 +1,34 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the JSON loader interface
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import json
|
||||
import pop.hub
|
||||
|
||||
__virtualname__ = "json"
|
||||
__contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def __virtual__(hub):
    """
    Always load: the json module is part of the standard library.
    """
    return True
|
||||
|
||||
|
||||
def load(hub: "pop.hub.Hub", path):
    """
    Use json to read in a file.

    Returns an empty dict when the file does not exist.
    """
    try:
        with open(path, "r") as fobj:
            return json.load(fobj)
    except FileNotFoundError:
        return {}
|
||||
|
||||
|
||||
def render(hub: "pop.hub.Hub", val):
    """
    Take the string and render it as json, returning the parsed value.
    """
    parsed = json.loads(val)
    return parsed
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,23 @@
|
||||
# Import python libs
|
||||
import logging
|
||||
import pop.hub
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
def setup(hub: "pop.hub.Hub", conf: Dict[str, Any]):
|
||||
"""
|
||||
Given the configuration data set up the logger
|
||||
"""
|
||||
level = hub.conf.log.LEVELS.get(conf["log_level"].lower(), logging.INFO)
|
||||
root = logging.getLogger("")
|
||||
root.setLevel(level)
|
||||
cf = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(level)
|
||||
ch.setFormatter(cf)
|
||||
root.addHandler(ch)
|
||||
ff = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
|
||||
fh = logging.FileHandler(conf["log_file"])
|
||||
fh.setLevel(level)
|
||||
fh.setFormatter(ff)
|
||||
root.addHandler(fh)
|
||||
@@ -0,0 +1,62 @@
|
||||
"""
|
||||
This sub is used to set up logging for pop projects and injects logging
|
||||
options into conf making it easy to add robust logging
|
||||
"""
|
||||
# Import python libs
|
||||
import logging
|
||||
import pop.hub
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
def __init__(hub: "pop.hub.Hub"):
    """
    Set up variables used by the log subsystem.

    Publishes the name -> logging-level lookup table used when resolving
    the ``log_level`` option.
    """
    hub.conf.log.LEVELS = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warning": logging.WARNING,
        "error": logging.ERROR,
        "critical": logging.CRITICAL,
    }
|
||||
|
||||
|
||||
def conf(hub: "pop.hub.Hub", name: str) -> Dict[str, Any]:
    """
    Return the conf dict for logging, this should be merged OVER by the loaded
    config dict(s).

    :param name: the application name, used for the default log file name
    """
    # TODO: Make this more robust to handle more logging interfaces
    grp = "Logging Options"
    return {
        "log_file": {
            "default": f"{name}.log",
            "help": "The location of the log file",
            "group": grp,
        },
        "log_level": {
            "default": "warning",
            "help": "Set the log level, either quiet, info, warning, or error",
            "group": grp,
        },
        "log_fmt_logfile": {
            "default": "%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s] %(message)s",
            "help": "The format to be given to log file messages",
            "group": grp,
        },
        "log_fmt_console": {
            "default": "[%(levelname)-8s] %(message)s",
            "help": "The log formatting used in the console",
            "group": grp,
        },
        "log_datefmt": {
            "default": "%H:%M:%S",
            "help": "The date format to display in the logs",
            "group": grp,
        },
        "log_plugin": {
            "default": "basic",
            "help": "The logging plugin to use",
            "group": grp,
        },
    }
|
||||
@@ -0,0 +1,37 @@
|
||||
"""
|
||||
The os module is used to gather configuration options from the OS facility
|
||||
to send configuration options into applications. In the case of Unix like
|
||||
systems this translates to the environment variables. On Windows systems
|
||||
this translates to the registry.
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import pop.hub
|
||||
|
||||
__virtualname__ = "os"
|
||||
|
||||
|
||||
def __virtual__(hub):
    """
    Don't load on Windows, this is for *nix style platforms.
    """
    # TODO: detect if windows
    return True
|
||||
|
||||
|
||||
def gather(hub: "pop.hub.Hub", defaults):
    """
    Iterate over the default config data and look for os: True/str options. When set
    gather the option from environment variables if present.

    An ``os`` value of True means "use the option name upper-cased as the
    variable name"; a string value names the variable directly (also
    upper-cased).

    :param defaults: the full default configuration dict
    :return: {option_name: env_value} for every variable that was set
    """
    ret = {}
    for key, comps in defaults.items():
        # idiom fix: was `if not "os" in ...`
        if "os" not in comps:
            continue
        os_var = comps["os"]
        if os_var is True:
            os_var = key
        os_var = os_var.upper()
        value = os.environ.get(os_var)
        if value is not None:
            ret[key] = value
    return ret
|
||||
@@ -0,0 +1,114 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The reader module is used to read the config data. This will read in cli
|
||||
arguments and merge them with config fie arguments.
|
||||
"""
|
||||
# Import python libs
|
||||
import warnings
|
||||
|
||||
# Priority order: cli, config, cli_defaults
|
||||
|
||||
__virtualname__ = "reader"
|
||||
__contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def _merge_dicts(opts, updates, os_opts, explicit_cli_args):
    """
    recursively merge updates into opts

    Priority (highest wins): explicit CLI values, then config-file values
    already in ``opts``, then CLI defaults. Environment values in
    ``os_opts`` overwrite existing ``opts`` keys first.

    :param opts: base dict (config-file + defaults) — mutated in place
    :param updates: CLI-parsed values
    :param os_opts: values gathered from the environment
    :param explicit_cli_args: set of dests the user actually typed on the CLI
    :return: the merged ``opts`` dict
    """
    # Environment values win over config-file values (but not over
    # explicit CLI — those are applied below and overwrite again)
    for key, val in os_opts.items():
        if not val:
            # Don't use empty os vals
            continue
        if key in opts:
            opts[key] = val
    for key, val in updates.items():
        if isinstance(val, dict) and isinstance(opts.get(key), dict):
            # NOTE(review): the full os_opts/explicit set is passed into
            # nested levels — env/CLI keys apply at every depth; confirm
            # this flattening is intended
            _merge_dicts(opts.get(key, {}), val, os_opts, explicit_cli_args)
        elif val is not None:
            if key not in opts:
                # The key is not in opts(from config file), let's add it
                opts[key] = val
                continue

            # We already have a value for the key in opts
            if opts[key] == val:
                # The value is the same, carry on
                continue

            if key in explicit_cli_args:
                # We have a value for the key in opts(from config file) but
                # this option was explicitly passed on the CLI, ie, it's not
                # a default value.
                # Overwrite what's in opts
                opts[key] = val
                continue
    return opts
|
||||
|
||||
|
||||
def read(
    hub,
    defaults,
    subs=None,
    loader="json",
    process_cli=True,
    process_cli_known_args_only=False,
    args=None,
    namespace=None,
):
    """
    Pass in the default options dict to use
    :param defaults: the full default configuration dict
    :param subs: optional subparser definitions (registered before setup)
    :param loader: config file format ("json", "yaml" or "toml")
    :param process_cli: Process the passed args or sys.argv
    :param process_cli_known_args_only: Tells the ArgumentParser to only process known arguments
    :param args: Arguments to pass to ArgumentParser
    :param namespace: argparse.Namespace to pass to ArgumentParser
    :return: options merged from cli, config file(s), environment and defaults
    """
    msg = "Pop-config is the new means to load configs in pop, reader.read will be removed in pop 13"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    hub.conf._loader = loader
    if subs:
        hub.conf.args.subs(subs)
    # Build the argparse parser; returns the {dest: default} map
    opts = hub.conf.args.setup(defaults)["return"]
    os_opts = hub.conf.os.gather(defaults)
    if process_cli is True:
        cli_opts = hub.conf.args.parse(args, namespace, process_cli_known_args_only)[
            "return"
        ]
    else:
        cli_opts = {}
    # Dests the user actually typed, collected by the action wrappers
    explicit_cli_args = cli_opts.pop("_explicit_cli_args_", set())
    cli_opts = hub.conf.args.render(defaults, cli_opts, explicit_cli_args)
    kwargs = {}
    # Due to the order of priorities and the representation of defaults in the
    # Argparser we need to manually check if the config option values are from
    # the cli or from defaults
    f_func = False
    if "config_dir" in cli_opts:
        if cli_opts["config_dir"]:
            kwargs["confdir"] = cli_opts["config_dir"]
        else:
            kwargs["confdir"] = opts["config_dir"]
        if "config_recurse" in cli_opts:
            if cli_opts["config_recurse"]:
                kwargs["recurse"] = cli_opts["config_recurse"]
            else:
                kwargs["recurse"] = opts["config_recurse"]
        # If the config_dir configuration dictionary provides a configuration
        # file pattern to read, pass it along
        kwargs["pattern"] = defaults["config_dir"].get("pattern")
        f_func = hub.conf.file.load_dir
    elif "config" in cli_opts:
        if cli_opts["config"]:
            kwargs["paths"] = cli_opts["config"]
        else:
            kwargs["paths"] = opts["config"]
        f_func = hub.conf.file.load_file
    # Render args before config parsing
    if f_func:
        f_opts = f_func(**kwargs)
        opts.update(f_opts)
        return _merge_dicts(opts, cli_opts, os_opts, explicit_cli_args)
    else:
        return _merge_dicts(opts, cli_opts, os_opts, explicit_cli_args)
|
||||
@@ -0,0 +1,41 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the yaml loader interface
|
||||
"""
|
||||
import pop.hub
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import toml
|
||||
|
||||
HAS_TOML = True
|
||||
except ImportError:
|
||||
HAS_TOML = False
|
||||
|
||||
__virtualname__ = "toml"
|
||||
# __contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def __virtual__(hub: "pop.hub.Hub"):
    """
    Only load this sub when the third-party ``toml`` package is importable.
    """
    if HAS_TOML:
        return True
    return (False, "TOML could not be loaded")
|
||||
|
||||
|
||||
def load(hub: "pop.hub.Hub", path):
    """
    use toml to read in a file

    Returns an empty dict when the file does not exist.
    """
    try:
        # BUG FIX: the original did `toml.load(fp_.read())`, passing the
        # file *contents* where toml.load expects a path or file object,
        # and opened the file in binary mode where toml expects text.
        with open(path, "r") as fp_:
            return toml.load(fp_)
    except FileNotFoundError:
        pass
    return {}
|
||||
|
||||
|
||||
def render(hub: "pop.hub.Hub", val):
    """
    Take the string and render it as TOML, returning the parsed value.
    """
    return toml.loads(val)
|
||||
@@ -0,0 +1,25 @@
|
||||
"""
|
||||
Support embedding version number lookup into cli
|
||||
"""
|
||||
# IMport python libs
|
||||
import importlib
|
||||
import pop.hub
|
||||
import sys
|
||||
|
||||
|
||||
CONFIG = {
|
||||
"version": {
|
||||
"default": False,
|
||||
"action": "store_true",
|
||||
"help": "Display version information",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
def run(hub: "pop.hub.Hub", primary):
    """
    Check the version number and then exit.

    Imports ``<primary>.version``, prints ``<primary> <version>`` and
    terminates the process with exit code 0.
    """
    mod = importlib.import_module(f"{primary}.version")
    print(f"{primary} {mod.version}")
    sys.exit(0)
|
||||
@@ -0,0 +1,42 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the yaml loader interface
|
||||
"""
|
||||
|
||||
# Import third party libs
|
||||
import pop.hub
|
||||
|
||||
try:
|
||||
import yaml
|
||||
|
||||
HAS_YAML = True
|
||||
except ImportError:
|
||||
HAS_YAML = False
|
||||
|
||||
__virtualname__ = "yaml"
|
||||
__contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def __virtual__(hub):
    """
    Only load this sub when PyYAML is importable.
    """
    if HAS_YAML:
        return True
    return (False, "PyYaml could not be loaded")
|
||||
|
||||
|
||||
def load(hub: "pop.hub.Hub", path):
    """
    use yaml to read in a file

    Returns an empty dict when the file does not exist.
    """
    try:
        with open(path, "rb") as stream:
            return yaml.safe_load(stream.read())
    except FileNotFoundError:
        return {}
|
||||
|
||||
|
||||
def render(hub: "pop.hub.Hub", val):
    """
    Take the string and render it as YAML, returning the parsed value.
    """
    return yaml.safe_load(val)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,23 @@
|
||||
"""
|
||||
Convenience wrappers to make using the conf system as easy and seamless as possible
|
||||
"""
|
||||
import pop.hub
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
def integrate(
    hub: "pop.hub.Hub",
    # String annotation: the original `List[str] or str` evaluated at def
    # time to just `List[str]` (`or` returns the first truthy operand) and
    # Union is not imported in this module
    imports: "Union[List[str], str]",
    override: Dict[str, Any] = None,
    cli: str = None,
    roots: bool = None,
    loader: str = "json",
    logs: bool = True,
):
    """
    Load the conf sub and run the integrate sequence.

    :param imports: package name or list of package names to scan for conf
    :param override: option remapping passed through to integrate.load
    :param cli: the package whose CLI config drives the command line
    :param roots: apply root_dir re-rooting
    :param loader: config file format
    :param logs: set up logging options
    """
    hub.pop.sub.add("pop.mods.conf")
    hub.conf.integrate.load(
        imports, override, cli=cli, roots=roots, loader=loader, logs=logs
    )
|
||||
@@ -0,0 +1,18 @@
|
||||
import pop.hub
|
||||
from typing import List
|
||||
|
||||
|
||||
def load(
    hub: "pop.hub.Hub",
    sources: List[str],
    cli: str = None,
    dyne_name: str = None,
    loader: str = "yaml",
    parse_cli: bool = True,
):
    """
    Use the pop-config system to load up a fresh configuration for this project
    from the included conf.py file.

    :param sources: project names whose conf.py definitions to gather
    :param cli: which project's CLI options to expose
    :param dyne_name: dynamic namespace to load config for
    :param loader: render loader used for config files (default "yaml")
    :param parse_cli: whether command line arguments are parsed
    """
    # Ensure the "config" dynamic sub is on the hub, then delegate
    hub.pop.sub.add(dyne_name="config")
    hub.config.integrate.load(sources, cli, dyne_name, loader, parse_cli)
|
||||
@@ -0,0 +1,376 @@
|
||||
import collections
|
||||
import collections.abc as abc
|
||||
import copy
|
||||
import inspect
|
||||
import logging
|
||||
import pop.contract as contract
|
||||
import pop.hub
|
||||
import sys
|
||||
from typing import Any, Dict, Iterable, Iterator, List
|
||||
|
||||
log = logging.getLogger(__name__)


# Shorter names these functions are exposed under on the hub
# (presumably consumed by the pop loader — e.g. hub.pop.data.imap):
__func_alias__ = {
    "immutable_namespaced_map": "imap",
    "mutable_namespaced_map": "map",
    "owner_writeable_namespaced_map": "omap",
}
|
||||
|
||||
|
||||
def immutable_namespaced_map(
    hub: "pop.hub.Hub", init: Dict[str, Any], **kwargs
) -> abc.Mapping:
    """
    Build an :class:`IMAP` — a read-only mapping with attribute access —
    from ``init``; keyword arguments are merged into the initial data.

    NOTE(review): the original return annotation was ``abc.MutableMapping``,
    but IMAP subclasses ``abc.Mapping`` and rejects all mutation.
    """
    return IMAP(init_=init, **kwargs)
|
||||
|
||||
|
||||
class IMAP(abc.Mapping):
    """
    An immutable dict-like mapping with attribute (namespaced) access.

    Values are frozen on construction: nested dicts become IMAPs and
    non-str/bytes iterables become tuples. No values can be changed
    after initialization.
    """

    def __init__(self, init_: Dict[str, Any], **c_kwargs):
        """
        :param init_: A dictionary from which to inherit data; keyword
            arguments override keys from ``init_``.
        """
        # Merge into a fresh dict instead of mutating the caller's ``init_``
        # (the original called init_.update(**c_kwargs), a surprising side
        # effect on the argument).
        merged = dict(init_)
        merged.update(c_kwargs)
        values = {}
        for k, v in merged.items():
            # Use concrete/ABC types instead of the deprecated typing
            # aliases (isinstance against typing.Dict/typing.Iterable).
            if isinstance(v, dict):
                # Freeze nested mappings recursively
                values[k] = IMAP(init_=v)
            elif isinstance(v, (tuple, int, str, bytes)):
                values[k] = v
            elif isinstance(v, abc.Iterable):
                # Lists, sets, generators, etc. become immutable tuples
                values[k] = tuple(v)
            else:
                values[k] = v
        # __setattr__ is disabled on purpose, so store via the parent class.
        # The name must stay mangled ("_IMAP__store") to match self.__store.
        super().__setattr__("_IMAP__store", values)
        log.debug("Initialized immutable namespaced map")

    def __setattr__(self, k: str, v: Any):
        # Immutability: reject every attribute assignment
        raise TypeError(
            f"{self.__class__.__name__} does not support attribute assignment"
        )

    def __getattr__(self, k: str):
        # Dunder/private names resolve normally; everything else is a lookup
        # in the frozen store (namespaced access).
        if k.startswith("_"):
            return super().__getattribute__(k)
        return self.__store[k]

    def __getitem__(self, k: str) -> Any:
        return self.__store[k]

    def __contains__(self, k: str) -> bool:
        return k in self.__store

    def __iter__(self):
        return iter(self.__store)

    def __len__(self) -> int:
        # Directly on the dict; len(keys()) was redundant
        return len(self.__store)

    def __copy__(self) -> Dict[str, Any]:
        """
        Return a plain, mutable dict copy — nested IMAPs are unpacked so
        it's plain dicts all the way down.
        """
        ret = {}
        for k, v in self.__store.items():
            ret[k] = v.__copy__() if isinstance(v, IMAP) else v
        return ret

    def __repr__(self):
        return repr(copy.copy(self))
|
||||
|
||||
|
||||
def mutable_namespaced_map(hub: "pop.hub.Hub", dict_: Dict[str, Any] = None) -> "MAP":
    """
    Build a :class:`MAP` — a mutable, auto-vivifying mapping with attribute
    access — optionally seeded from ``dict_``.
    """
    return MAP(dict_=dict_)
|
||||
|
||||
|
||||
class WriteLockError(Exception):
    """
    Raised when a key already owned by one Contracted function is written
    by a different function (see OwnerWriteableMapping._set).
    """

    pass
|
||||
|
||||
|
||||
class MAP(abc.MutableMapping):
    """
    MAP is a key-value store that allows for setting/getting
    by either dot or dictionary lookup notation ('.' or '[k]')

    Sub-keys will be created on assignment:

    `map.foo.bar.baz = True` will auto-create foo and bar as MAPs

    while doing

    `map.foo.bar.baz` before assignment will not create foo, bar or baz.

    :param dict_: similar to dict(dict_), initialize using dict_
    """

    def __init__(self, dict_: Dict[str, Any], parent: "MAP" = None):
        # Write through __dict__ directly: __setattr__ routes to _set, which
        # rejects keys starting with "_".
        self.__dict__["_store"] = {}
        self.__dict__["_parent"] = parent
        if dict_:
            # MutableMapping.update funnels through __setitem__/_set, so
            # nested dicts get wrapped as MAPs too
            self.update(dict_)

    def _set(self, k: str, v: Any):
        """
        Store k -> v, wrapping dict values as nested MAPs.
        """
        if k.startswith("_"):
            raise AttributeError("Cannot store values beginning with '_'")

        if isinstance(v, dict):
            # Cast all nested dict values as MAP so they get its benefits as well
            v = self.__class__(dict_=v, parent=self)
            self._store[k] = v
        else:
            self._store[k] = v

    def _get(self, k: str, create: bool = False):
        """
        Look up k. Missing keys return an UninitializedValue placeholder
        (or, when create=True, auto-create an empty nested MAP).
        """
        if k.startswith("_"):
            return super().__getattribute__(k)
        try:
            if k not in self._store:
                if not create:
                    # Placeholder that supports pending writes but blows up
                    # on most other uses (defined later in this module)
                    return UninitializedValue([k], self)
                self._set(k, self.__class__())
            return self._store[k]
        except Exception as e:
            # Attribute protocol expects AttributeError, not arbitrary errors
            raise AttributeError(*e.args)

    def get(self, k: str, default: Any = None) -> Any:
        """
        dict.get-style access: default instead of an UninitializedValue.
        """
        if k in self._store:
            return getattr(self, k)
        else:
            return default

    def __setitem__(self, k: str, v: Any):
        self._set(k, v)

    def __delitem__(self, k: str):
        """
        Cleanup method required by abc.ABC; silently ignores missing keys.
        """
        if k in self._store:
            del self._store[k]

    def __delattr__(self, k: str):
        self.__delitem__(k)

    def __getitem__(self, k: str) -> Any:
        return self._get(k)

    def __getattr__(self, k: str) -> Any:
        return self._get(k)

    def __setattr__(self, k: str, v: Any):
        self._set(k, v)

    def __contains__(self, k: str) -> Any:
        return k in self._store

    def __len__(self) -> int:
        return len(self._store)

    def __iter__(self) -> Iterator[Any]:
        return iter(self._store)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self)})"

    def __str__(self) -> str:
        return str(self._store)

    def __copy__(self) -> Dict[str, Any]:
        # The copy will be a dictionary, mangle it all you want
        ret = {}
        for k, v in self._store.items():
            if isinstance(v, MAP):
                ret[k] = v.__copy__()
            else:
                ret[k] = v
        return ret
|
||||
|
||||
|
||||
class UninitializedValueError(Exception):
    """
    Raised by UninitializedValue when an unset MAP key is used for anything
    other than a pending write (see UninitializedValue._blowup).
    """

    pass
|
||||
|
||||
|
||||
class UninitializedValue:
    """
    We want you to be able to create data on the MAP by doing something like:

    MAP.foo.bar = True

    However, we want uses of un-initialized values to blow up, not auto-create.

    We build up a potentially pending write using a special object that can only be written.
    Any other use should blow up.

    """

    def __init__(self, path: List[str], map_: MAP):
        # The path I'm looking up on the map; write via __dict__ because
        # __setattr__ is repurposed to perform the pending write.
        self.__dict__["_path"] = path
        self.__dict__["_map"] = map_

    def _blowup(self):
        # Report the full dotted path that was accessed while unset
        raise UninitializedValueError(
            f"Access of uninitialized value '{'.'.join(self.__dict__['_path'])}'"
        )

    def __getattribute__(self, item: str):
        # Three cases: "get" falls through to the real method; regular names
        # extend the pending path; unexpected internals blow up.
        if item == "get":
            pass
        elif not item.startswith("_"):  # return a new PendingWrite for regular lookups
            new_path = self._path[:]
            new_path.append(item)
            return UninitializedValue(new_path, self._map)
        elif item not in (
            "__class__",
            "__dict__",
            "__setattr__",
            "_blowup",
            "_map",
            "_path",
            "get",
        ):
            self._blowup()
        return super().__getattribute__(item)

    def __setattr__(self, k: str, v: Any):
        # Commit the pending write: auto-create each MAP level along the
        # recorded path, then set the final attribute on the deepest node.
        node = self._map
        for part in self._path:
            node = node._get(part, create=True)

        setattr(node, k, v)

    def __delattr__(self, k: str):
        # Deleting something that never existed is a no-op
        pass

    def __getitem__(self, item: str):
        # Subscript lookups behave like attribute lookups (extend the path)
        return getattr(self, item)

    def __setitem__(self, k: str, v: Any):
        setattr(self, k, v)

    def __delitem__(self, k: str):
        pass

    def get(self, k: str, default: Any = None) -> Any:
        # dict.get on an unset value always yields the default
        return default

    def __bool__(self) -> bool:
        # Uninitialized values always evaluate to false
        return False

    def __contains__(self, item: str) -> bool:
        # If it contained anything it would be initialized
        return False

    def __hash__(self):
        self._blowup()

    def __dir__(self):
        self._blowup()

    def __str__(self):
        self._blowup()
|
||||
|
||||
|
||||
def owner_writeable_namespaced_map(
    hub, dict_: Dict[str, Any] = None
) -> "OwnerWriteableMapping":
    """
    Build an :class:`OwnerWriteableMapping` — a MAP whose keys are
    write-locked to the first Contracted function that assigns them.
    """
    return OwnerWriteableMapping(dict_=dict_)
|
||||
|
||||
|
||||
def _stack_frames(relative_start=2):
|
||||
"""
|
||||
Efficiently access stack frames.
|
||||
:param relative_start: Starting stack depth; The default, 2 is the parent of the
|
||||
caller of stack_frames - the first function that may be unknown.
|
||||
:return: a stack frame
|
||||
"""
|
||||
if hasattr(sys, "_getframe"):
|
||||
# implementation detail of CPython, speeds things up by 100x.
|
||||
frame = sys._getframe(relative_start)
|
||||
while frame:
|
||||
yield frame
|
||||
frame = frame.f_back
|
||||
else:
|
||||
for frame_info in inspect.stack(context=0)[relative_start:]:
|
||||
yield frame_info.frame
|
||||
|
||||
|
||||
# (val, owner, lineno): the stored value, the Contracted function that first
# wrote it, and the source line of that write (see OwnerWriteableMapping._set)
WriteLockInfo = collections.namedtuple("WriteLockInfo", ["val", "owner", "lineno"])
|
||||
|
||||
|
||||
class OwnerWriteableMapping(MAP):
    """
    A MAP variant that is write-locked to the first Contracted function
    that writes to a given key (becoming the owner). Attempts to write
    to that key from other functions will receive a WriteLockError showing
    the owning Contracted function.
    """

    def __init__(
        self, dict_: Dict[str, Any] = None, parent: "OwnerWriteableMapping" = None
    ):
        super().__init__(dict_, parent)

    def _find_owner(self) -> (contract.Contracted, int):
        """
        Return the contracted responsible for assigning to this variable.
        Returns None if no such function exists.
        """
        # Walk up the call stack looking for a frame whose "self" is a
        # Contracted wrapper; depth 3 skips this method and its callers.
        for frame in _stack_frames(3):
            if isinstance(frame.f_locals.get("self"), contract.Contracted):
                contracted = frame.f_locals["self"]
                log.debug(f"Found contract '{contracted.__name__}'")
                break
            else:
                # find the lineno in the frame *before* our Contracted (the function called)
                # NOTE(review): if the very first inspected frame is a
                # Contracted, lineno is never assigned here — presumably the
                # starting depth guarantees a non-Contracted frame first;
                # confirm before relying on it.
                lineno = frame.f_lineno
        else:
            # not found
            contracted = None
            lineno = -1

        return contracted, lineno

    def _set(self, k: str, v: Any):
        """
        Store k -> v as a WriteLockInfo, enforcing first-writer ownership.
        """
        owner, lineno = self._find_owner()
        cur = self._store.get(k)
        # Allow the write when the key is new, or when the same Contracted
        # instance that first wrote it is writing again
        if cur is None or cur.owner is owner:
            if isinstance(v, abc.Mapping):
                # Nested mappings become owner-writeable maps too
                v = self.__class__(dict_=v, parent=self)
            elif isinstance(v, Iterable) and not isinstance(
                v, (tuple, str, bytes, UninitializedValue)
            ):
                v = tuple(v)  # Lists, sets, and other iterables become immutable
            super()._set(k, WriteLockInfo(v, owner, lineno))
        else:
            # Point the offender at the file:line of the original write
            file = inspect.getsourcefile(cur.owner.func)
            raise WriteLockError(
                f"'{k}' was previously assigned by '{cur.owner.__name__}' ({file}:{cur.lineno})"
            )

    def _get(self, k: str, create: bool = False) -> Any:
        # Unwrap the WriteLockInfo; UninitializedValue placeholders pass
        # through untouched so pending writes still work
        v = super()._get(k, create)
        if not isinstance(v, UninitializedValue):
            v = v.val
        return v

    def __str__(self) -> str:
        return str(self._dict())

    def _dict(self):
        """
        Return a plain dict view with all WriteLockInfo wrappers stripped.
        """
        vals = {}
        for k, v in self._store.items():
            if isinstance(v.val, self.__class__):
                vals[k] = v.val._dict()
            else:
                vals[k] = v.val
        return vals
|
||||
@@ -0,0 +1,38 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Tools to work with dicts
|
||||
"""
|
||||
# Import local libs
|
||||
import pop.dicts
|
||||
import pop.hub
|
||||
|
||||
|
||||
def traverse(hub: "pop.hub.Hub", data, key, default=None, delimiter=":"):
    """
    Traverse a dict or list using a colon-delimited (or otherwise delimited,
    using the 'delimiter' param) target string. the target 'foo:bar:0' will
    return data['foo']['bar'][0] if this value exists, and will otherwise
    return the dict in the default argument.
    function will automatically determine the target type.
    the target 'foo:bar:0' will return data['foo']['bar'][0] if data like
    {'foo':{'bar':['baz']}} , if data like {'foo':{'bar':{'0':'baz'}}}
    then return data['foo']['bar']['0']

    :param data: the structure to traverse
    :param key: the delimited target string
    :param default: value returned when the target does not exist
    :param delimiter: separator between path components (default ":")
    """
    # BUG FIX: the original passed ``default.delimiter`` (an attribute lookup
    # on the default value) instead of the two separate arguments
    # ``default`` and ``delimiter``.
    return pop.dicts.traverse(data, key, default, delimiter)
|
||||
|
||||
|
||||
def update(hub: "pop.hub.Hub", dest, upd, recursive_update=True, merge_lists=True):
    """
    Recursive version of the default dict.update

    Merges upd recursively into dest

    If recursive_update=False, will use the classic dict.update, or fall back
    on a manual merge (helpful for non-dict types like FunctionWrapper)

    If merge_lists=True, will aggregate list object types instead of replace.
    The list in ``upd`` is added to the list in ``dest``, so the resulting list
    is ``dest[key] + upd[key]``. This behavior is only activated when
    recursive_update=True.

    NOTE(review): the original docstring claimed "By default
    merge_lists=False", but the signature default is True — confirm which
    was intended.
    """
    return pop.dicts.update(dest, upd, recursive_update, merge_lists)
|
||||
@@ -0,0 +1,19 @@
|
||||
import pop.hub
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
def get(hub: "pop.hub.Hub") -> Dict[str, Any]:
    """
    Retrieve the dynamic dirs data for this hub; if dynamic dirs have not
    been gathered yet then gather them first.
    """
    # _dscan is the hub's "already scanned" flag; scan lazily on first use
    if not hub._dscan:
        hub._scan_dynamic()
    return hub._dynamic
|
||||
|
||||
|
||||
def refresh(hub: "pop.hub.Hub"):
    """
    Force a re-scan of the dynamic dirs, refreshing hub._dynamic.
    """
    hub._scan_dynamic()
|
||||
@@ -0,0 +1,156 @@
|
||||
"""
|
||||
The input module is used to translate typical input strings into the
|
||||
ref/args/kwargs used by pop when forwarding data into functions.
|
||||
"""
|
||||
# Import python libs
|
||||
import re
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
# Import third party libs
|
||||
import yaml
|
||||
import pop.hub
|
||||
|
||||
# Matches "name=value": the name is an identifier-like token (no leading
# digit, may contain word chars, dots, and dashes) and the "=" must not be
# part of "==" (the (?!=) lookahead); value is everything after the "=".
KWARG_REGEX = re.compile(r"^([^\d\W][\w.-]*)=(?!=)(.*)$", re.UNICODE)
|
||||
|
||||
|
||||
def parse(
    hub: "pop.hub.Hub",
    args: List[Any],
    condition: bool = True,
    no_parse: Tuple[str, ...] = None,
) -> Tuple[List[Any], Dict[str, Any]]:
    """
    Parse out the args and kwargs from a list of input values. Optionally,
    return the args and kwargs without passing them to condition_input().
    Don't pull args with key=val apart if it has a newline in it.

    :param args: raw positional inputs (strings, dicts, or anything else)
    :param condition: when True, run the result through _condition_input()
    :param no_parse: kwarg names whose values must NOT be yaml-rendered
    """
    if no_parse is None:
        no_parse = ()
    _args = []
    _kwargs = {}
    for arg in args:
        if isinstance(arg, str):
            if "=" in arg:
                # Looks like key=value; _parse_kwarg returns (None, None)
                # when the string doesn't actually match the kwarg pattern
                arg_name, arg_value = _parse_kwarg(arg)
                if arg_name:
                    _kwargs[arg_name] = (
                        _yamlify_arg(arg_value)
                        if arg_name not in no_parse
                        else arg_value
                    )
            else:
                _args.append(_yamlify_arg(arg))
        elif isinstance(arg, dict):
            # Yes, we're popping this key off and adding it back if
            # condition_input is called below, but this is the only way to
            # gracefully handle both CLI and API input.
            if arg.pop("__kwarg__", False) is True:
                _kwargs.update(arg)
            else:
                _args.append(arg)
        else:
            # Non-string, non-dict values pass through untouched
            _args.append(arg)
    if condition:
        return _condition_input(_args, _kwargs)
    return _args, _kwargs
|
||||
|
||||
|
||||
def _yamlify_arg(arg: Any) -> Any:
    """
    yaml.safe_load the arg, but preserve the original string whenever YAML
    parsing would lose or distort its content.
    """
    if not isinstance(arg, str):
        return arg

    if arg.strip() == "":
        # Because YAML loads empty (or all whitespace) strings as None, we
        # return the original string
        # >>> import yaml
        # >>> yaml.load('') is None
        # True
        # >>> yaml.load(' ') is None
        # True
        return arg

    elif "_" in arg and all([x in "0123456789_" for x in arg.strip()]):
        # When the stripped string includes just digits and underscores, the
        # underscores are ignored and the digits are combined together and
        # loaded as an int. We don't want that, so return the original value.
        return arg

    try:
        original_arg = arg
        if "#" in arg:
            # Only yamlify if it parses into a non-string type, to prevent
            # loss of content due to # as comment character
            parsed_arg = yaml.safe_load(arg)
            if isinstance(parsed_arg, str) or parsed_arg is None:
                return arg
            return parsed_arg
        if arg == "None":
            # Short-circuit: render the literal "None" as the None object
            arg = None
        else:
            arg = yaml.safe_load(arg)

        if isinstance(arg, dict):
            # dicts must be wrapped in curly braces
            if isinstance(original_arg, str) and not original_arg.startswith("{"):
                return original_arg
            else:
                return arg

        elif isinstance(arg, list):
            # lists must be wrapped in brackets
            if isinstance(original_arg, str) and not original_arg.startswith("["):
                return original_arg
            else:
                return arg

        elif arg is None or isinstance(arg, (list, float, int, str)):
            # yaml.safe_load will load '|' as '', don't let it do that.
            if arg == "" and original_arg in ("|",):
                return original_arg
            # yaml.safe_load will treat '#' as a comment, so a value of '#'
            # will become None. Keep this value from being stomped as well.
            elif arg is None and original_arg.strip().startswith("#"):
                return original_arg
            else:
                return arg
        else:
            # we don't support this type (dates, binary, etc.) — keep the
            # original string
            return original_arg
    except Exception:
        # In case anything goes wrong (malformed YAML, etc.), fall back to
        # the untouched input rather than raising
        return original_arg
|
||||
|
||||
|
||||
def _parse_kwarg(string_: str) -> Tuple:
    """
    Parses the string and looks for the following kwarg format:
    "{argument name}={argument value}"
    For example: "my_message=Hello world"
    Returns the kwarg name and value, or (None, None) if the regex was not
    matched.
    """
    match = KWARG_REGEX.match(string_)
    if match is None:
        return None, None
    return match.groups()
|
||||
|
||||
|
||||
def _condition_input(args: List[Any], kwargs: Dict[str, Any]) -> List[str]:
|
||||
"""
|
||||
Return a single arg structure for the publisher to safely use
|
||||
"""
|
||||
ret = []
|
||||
for arg in args:
|
||||
if isinstance(arg, int):
|
||||
ret.append(str(arg))
|
||||
else:
|
||||
ret.append(arg)
|
||||
if isinstance(kwargs, dict) and kwargs:
|
||||
kw_ = {"__kwarg__": True}
|
||||
for key, val in kwargs.items():
|
||||
kw_[key] = val
|
||||
return ret + [kw_]
|
||||
return ret
|
||||
@@ -0,0 +1,173 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
The main interface for management of the aio loop
|
||||
"""
|
||||
# Import python libs
|
||||
import asyncio
|
||||
import os
|
||||
import pop.hub
|
||||
import signal
|
||||
import functools
|
||||
from typing import Callable, Iterable, Generator
|
||||
|
||||
__virtualname__ = "loop"
|
||||
|
||||
|
||||
def __virtual__(hub: "pop.hub.Hub"):
|
||||
return True
|
||||
|
||||
|
||||
def create(hub: "pop.hub.Hub"):
    """
    Create the loop at hub.pop.Loop (idempotent: does nothing if it exists).

    Also creates hub.pop.loop.FUT_QUE, the queue that _holder/await_futures
    drain to clean up ensured futures.
    """
    if not hasattr(hub.pop, "Loop"):
        hub.pop.loop.FUT_QUE = asyncio.Queue()
        if os.name == "nt":
            # NOTE(review): asyncio._get_running_loop is a private API;
            # asyncio.get_running_loop()/get_event_loop() are the public
            # equivalents — confirm before upgrading Python versions.
            hub.pop.Loop = asyncio._get_running_loop()
            if hub.pop.Loop is not None:
                return
            # The default event loop on Windows, "SelectorEventLoop" has certain limitations
            # ProactorEventLoop makes use of Window's I/O Completion Ports:
            # https://docs.microsoft.com/en-ca/windows/win32/fileio/i-o-completion-ports
            hub.pop.Loop = asyncio.ProactorEventLoop()
            asyncio.set_event_loop(hub.pop.Loop)
        else:
            hub.pop.Loop = asyncio.get_event_loop()
|
||||
|
||||
|
||||
def call_soon(hub: "pop.hub.Hub", ref: str, *args, **kwargs):
    """
    Schedule the function at ``ref`` to be called when the loop has time.
    This needs to be called after the creation of the loop.
    """
    fun = hub.pop.ref.get_func(ref)
    # Bind the arguments now; the loop invokes the callback with none
    bound = functools.partial(fun, *args, **kwargs)
    hub.pop.Loop.call_soon(bound)
|
||||
|
||||
|
||||
def ensure_future(hub: "pop.hub.Hub", ref: str, *args, **kwargs):
    """
    Schedule the coroutine at ``ref`` to run when the loop has time. This
    needs to be called after the creation of the loop. The future is also
    registered with the hold queue so it is cleanly awaited in the
    background when it completes.
    """
    coro_fun = getattr(hub, ref)
    future = asyncio.ensure_future(coro_fun(*args, **kwargs))

    def _queue_done(fut):
        # Hand the finished future to the hold queue for final awaiting
        hub.pop.loop.FUT_QUE.put_nowait(fut)

    future.add_done_callback(_queue_done)
    return future
|
||||
|
||||
|
||||
def start(
    hub: "pop.hub.Hub",
    *coros,
    hold: bool = False,
    sigint: Callable = None,
    sigterm: Callable = None,
):
    """
    Start a loop that will run until complete.

    :param coros: the coroutines to gather and run
    :param hold: also run the _holder coroutine, which keeps the loop open
        and awaits futures queued by ensure_future
    :param sigint: optional async handler scheduled on SIGINT
    :param sigterm: optional async handler scheduled on SIGTERM
    """
    hub.pop.loop.create()
    if sigint:
        s = signal.SIGINT
        # Default s=s binds the signal number now (avoids late binding)
        hub.pop.Loop.add_signal_handler(s, lambda s=s: asyncio.create_task(sigint(s)))
    if sigterm:
        s = signal.SIGTERM
        hub.pop.Loop.add_signal_handler(s, lambda s=s: asyncio.create_task(sigterm(s)))
    if hold:
        coros = list(coros)
        coros.append(_holder(hub))
    try:
        # DO NOT CHANGE THIS CALL TO run_forever! If we do that then the tracebacks
        # do not get resolved.
        return hub.pop.Loop.run_until_complete(asyncio.gather(*coros))
    except KeyboardInterrupt as e:  # noqa: F841 -- `e` is unbound but kept as-is
        print("Caught keyboard interrupt. Canceling...")
        hub.pop.Loop.close()
|
||||
|
||||
|
||||
async def _holder(hub: "pop.hub.Hub"):
    """
    Just a sleeping while loop to hold the loop open while it runs until
    complete: blocks on the FUT_QUE and awaits each future that
    ensure_future's done-callback pushes onto it.
    """
    while True:
        future = await hub.pop.loop.FUT_QUE.get()
        await future
|
||||
|
||||
|
||||
async def await_futures(hub: "pop.hub.Hub"):
    """
    Drain the hold queue, manually awaiting each completed future.

    Used to clean up futures when the loop was not started with hold=True,
    so that ensured futures can still be awaited on demand.
    """
    que = hub.pop.loop.FUT_QUE
    while not que.empty():
        fut = await que.get()
        await fut
|
||||
|
||||
|
||||
async def kill(hub: "pop.hub.Hub", wait: int or float = 0):
    """
    Close out the loop: stop it after ``wait`` seconds, then close it once
    it is no longer running.
    """
    await asyncio.sleep(wait)
    hub.pop.Loop.stop()
    # NOTE(review): this loop has no break — once the loop is closed it
    # keeps calling close() every second, and this coroutine never
    # completes. If the loop running this coroutine is the one stopped,
    # the coroutine is simply frozen at the sleep below. Confirm intent.
    while True:
        if not hub.pop.Loop.is_running():
            hub.pop.Loop.close()
        await asyncio.sleep(1)
|
||||
|
||||
|
||||
async def as_yielded(hub: "pop.hub.Hub", gens: Iterable[Generator]):
    """
    Concurrently run multiple async generators and yield the next yielded
    value from the soonest yielded generator.

    async def many():
        for n in range(10):
            yield os.urandom(6).hex()

    async def run():
        gens = []
        for n in range(10):
            gens.append(many())
        async for y in as_yielded(gens):
            print(y)
    """
    # Random sentinel marking "all generators exhausted"; compared with ==
    # below, so a generator yielding these exact 32 bytes would end early
    # (vanishingly unlikely)
    fin = os.urandom(32)
    que = asyncio.Queue()
    fs = []  # NOTE(review): unused — left as-is
    to_clean = []

    async def _yield(gen):
        # Funnel one generator's output into the shared queue
        async for comp in gen:
            await que.put(comp)

    async def _ensure(coros):
        # Drive all the funnels to completion
        for f in asyncio.as_completed(coros):
            await f

    async def _set_done():
        await que.put(fin)

    def _done(future):
        # All generators finished: push the sentinel to unblock the reader
        to_clean.append(asyncio.ensure_future(_set_done()))

    coros = []
    for gen in gens:
        coros.append(_yield(gen))
    f = asyncio.ensure_future(_ensure(coros))
    f.add_done_callback(_done)
    while True:
        ret = await que.get()
        if ret == fin:
            break
        yield ret
    # Await the sentinel-pushing task(s) so nothing is left dangling
    for c in to_clean:
        await c
|
||||
@@ -0,0 +1,46 @@
|
||||
"""
|
||||
Used to resolve resolutions to paths on the hub
|
||||
"""
|
||||
import pop.hub
|
||||
from typing import List
|
||||
|
||||
|
||||
def last(hub: "pop.hub.Hub", ref: str) -> "pop.hub.Sub":
    """
    Takes a string that references the desired ref and returns the last
    object called out in that ref.
    """
    chain = hub.pop.ref.path(ref)
    return chain[-1]
|
||||
|
||||
|
||||
def path(hub: "pop.hub.Hub", ref: str) -> List["pop.hub.Sub"]:
    """
    Return the chain of objects from the hub down to the named ref.

    ``ref`` may be a dot-delimited string or an already-split iterable of
    attribute names.
    """
    if isinstance(ref, str):
        chunks = ref.split(".")
    else:
        chunks = ref
    ret = [hub]
    for chunk in chunks:
        # Each step descends one attribute from the previous object
        ret.append(getattr(ret[-1], chunk))
    return ret
|
||||
|
||||
|
||||
def create(hub: "pop.hub.Hub", ref: str, obj: object):
    """
    Create an attribute at a given target using just a ref string and the
    object to be saved at said location. The desired location must already
    exist!

    :param hub: The redistributed pop central hub
    :param ref: The dot delimited string referencing the target location to
        create the given object on the hub
    :param obj: The object to store at the given reference point
    """
    if "." not in ref:
        # Top-level ref: store directly on the hub
        setattr(hub, ref, obj)
        return
    # Split off the final component; everything before it must resolve
    parent_ref, _, leaf = ref.rpartition(".")
    parent = hub.pop.ref.last(parent_ref)
    setattr(parent, leaf, obj)
|
||||
@@ -0,0 +1,360 @@
|
||||
"""
|
||||
Seed a new project with a directory tree and first files
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import pop.hub
|
||||
|
||||
SETUP = """#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import shutil
|
||||
from setuptools import setup, Command
|
||||
|
||||
NAME = "%%NAME%%"
|
||||
DESC = ""
|
||||
|
||||
# Version info -- read without importing
|
||||
_locals = {}
|
||||
with open("{}/version.py".format(NAME)) as fp:
|
||||
exec(fp.read(), None, _locals)
|
||||
VERSION = _locals["version"]
|
||||
SETUP_DIRNAME = os.path.dirname(__file__)
|
||||
if not SETUP_DIRNAME:
|
||||
SETUP_DIRNAME = os.getcwd()
|
||||
|
||||
with open("README.rst", encoding="utf-8") as f:
|
||||
LONG_DESC = f.read()
|
||||
|
||||
with open("requirements.txt") as f:
|
||||
REQUIREMENTS = f.read().splitlines()
|
||||
|
||||
|
||||
class Clean(Command):
|
||||
user_options = []
|
||||
|
||||
def initialize_options(self):
|
||||
pass
|
||||
|
||||
def finalize_options(self):
|
||||
pass
|
||||
|
||||
def run(self):
|
||||
for subdir in (NAME, "tests"):
|
||||
for root, dirs, files in os.walk(
|
||||
os.path.join(os.path.dirname(__file__), subdir)
|
||||
):
|
||||
for dir_ in dirs:
|
||||
if dir_ == "__pycache__":
|
||||
shutil.rmtree(os.path.join(root, dir_))
|
||||
|
||||
|
||||
def discover_packages():
|
||||
modules = []
|
||||
for package in (NAME,):
|
||||
for root, _, files in os.walk(os.path.join(SETUP_DIRNAME, package)):
|
||||
pdir = os.path.relpath(root, SETUP_DIRNAME)
|
||||
modname = pdir.replace(os.sep, ".")
|
||||
modules.append(modname)
|
||||
return modules
|
||||
|
||||
|
||||
setup(
|
||||
name=NAME,
|
||||
author="",
|
||||
author_email="",
|
||||
url="",
|
||||
version=VERSION,
|
||||
install_requires=REQUIREMENTS,
|
||||
description=DESC,
|
||||
long_description=LONG_DESC,
|
||||
long_description_content_type="text/x-rst",
|
||||
python_requires=">=3.6",
|
||||
classifiers=[
|
||||
"Operating System :: OS Independent",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
"Programming Language :: Python :: 3.8",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
],
|
||||
packages=discover_packages(),
|
||||
entry_points={"console_scripts": ["%%NAME%% = %%NAME%%.scripts:start",],},
|
||||
cmdclass={"clean": Clean},
|
||||
)
|
||||
"""
|
||||
|
||||
PYPROJ = r"""[tool.black]
|
||||
line-length = 88
|
||||
target-version = ['py36', 'py37', 'py38']
|
||||
include = '\.pyi?$'
|
||||
exclude = '''
|
||||
(
|
||||
/(
|
||||
\.eggs
|
||||
| \.git
|
||||
| \.hg
|
||||
| \.mypy_cache
|
||||
| \.tox
|
||||
| \.venv
|
||||
| _build
|
||||
| buck-out
|
||||
| build
|
||||
| dist
|
||||
)/
|
||||
)
|
||||
'''
|
||||
"""
|
||||
|
||||
PRECOM = r"""---
|
||||
minimum_pre_commit_version: 1.15.2
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v2.5.0
|
||||
hooks:
|
||||
- id: check-merge-conflict # Check for files that contain merge conflict strings.
|
||||
language_version: python3
|
||||
- id: trailing-whitespace # Trims trailing whitespace.
|
||||
args: [--markdown-linebreak-ext=md]
|
||||
language_version: python3
|
||||
- id: mixed-line-ending # Replaces or checks mixed line ending.
|
||||
args: [--fix=lf]
|
||||
language_version: python3
|
||||
- id: end-of-file-fixer # Makes sure files end in a newline and only a newline.
|
||||
exclude: tests/fake_.*\.key
|
||||
language_version: python3
|
||||
- id: check-ast # Simply check whether files parse as valid python.
|
||||
language_version: python3
|
||||
- id: check-yaml
|
||||
- id: check-json
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 19.10b0
|
||||
hooks:
|
||||
- id: black
|
||||
language_version: python3
|
||||
"""
|
||||
|
||||
ENTRY = """entry_points={
|
||||
'console_scripts': [
|
||||
'%%NAME%% = %%NAME%%.scripts:start',
|
||||
],
|
||||
},"""
|
||||
|
||||
SCRIPT = """#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pop.hub
|
||||
|
||||
|
||||
def start():
|
||||
hub = pop.hub.Hub()
|
||||
hub.pop.sub.add(dyne_name="%%NAME%%")
|
||||
hub.%%NAME%%.init.cli()
|
||||
"""
|
||||
|
||||
INIT = """def __init__(hub):
|
||||
# Remember not to start your app in the __init__ function
|
||||
# This function should just be used to set up the plugin subsystem
|
||||
# Add another function to call from your run.py to start the app
|
||||
pass
|
||||
|
||||
|
||||
def cli(hub):
|
||||
hub.pop.config.load(["%%NAME%%"], cli="%%NAME%%")
|
||||
print("%%NAME%% works!")
|
||||
"""
|
||||
|
||||
REQ = "pop\n"
|
||||
|
||||
CONF = """CLI_CONFIG = {}
|
||||
CONFIG = {}
|
||||
SUBCOMMANDS = {}
|
||||
DYNE = {
|
||||
"%%NAME%%": ["%%NAME%%"],
|
||||
%%DYNE%%}
|
||||
"""
|
||||
|
||||
VER = """# -*- coding: utf-8 -*-
|
||||
version = "1"\n
|
||||
"""
|
||||
|
||||
|
||||
def new(hub: "pop.hub.Hub"):
    """
    Given the option in hub.opts "seed_name" create a directory tree for a
    new pop project.

    hub.opts["type"] == "v" produces a "vertical" (plugin-only) layout with
    no script/run/init scaffolding; anything else produces the full app
    layout.
    """
    # All mk* helpers resolve paths relative to hub.PATH
    hub.PATH = os.getcwd()
    name = hub.opts["seed_name"]
    for dyne in hub.opts["dyne"]:
        hub.pop.seed.mkdir(name, dyne)
        hub.pop.seed.mkdir(name, dyne, "contracts")
    if hub.opts["type"] == "v":
        # Vertical project: package dir + metadata only, no entry point
        hub.pop.seed.mkdir(name)
        hub.pop.seed.mksetup(name, entry=False)
        hub.pop.seed.mkversion(name)
        hub.pop.seed.mkconf(name)
        hub.pop.seed.mkreq(name)
        hub.pop.seed.mkreadme(name)
    else:
        # Full application layout, including console script and run.py
        hub.pop.seed.mkdir(name, name)
        hub.pop.seed.mkdir(name, name, "contracts")
        hub.pop.seed.mksetup(name)
        hub.pop.seed.mkscript(name)
        hub.pop.seed.mkversion(name)
        hub.pop.seed.mkconf(name)
        hub.pop.seed.mkreq(name)
        hub.pop.seed.mkrun(name)
        hub.pop.seed.mkinit(name)
        hub.pop.seed.mkreadme(name)
    hub.pop.seed.mkproj()
    hub.pop.seed.mkprecom()
    hub.pop.seed.print_post(name)
|
||||
|
||||
|
||||
def mkdir(hub: "pop.hub.Hub", *args):
    """
    Create the named directory (and each intermediate directory) under
    hub.PATH.

    :param args: path components joined onto hub.PATH one at a time; every
        component in the chain is created if it does not already exist
    """
    path = hub.PATH
    for dir_ in args:
        path = os.path.join(path, dir_)
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except OSError:
                # Best effort: report the failure and keep going so later
                # components still get a chance to be created
                print("Failed to make {}".format(path))
|
||||
|
||||
|
||||
def mkreq(hub: "pop.hub.Hub", name: str):
    """
    Write the default requirements.txt for the new project.

    :param name: accepted for signature parity with the other mk* helpers;
        the file always lands at <hub.PATH>/requirements.txt
    """
    req_path = os.path.join(hub.PATH, "requirements.txt")
    with open(req_path, "w+") as fp:
        fp.write(REQ)
|
||||
|
||||
|
||||
def mksetup(hub: "pop.hub.Hub", name: str, entry: bool = True):
    """
    Create and write out a setup.py file

    :param name: the project name substituted into the SETUP template
    :param entry: when True, render a console_scripts entry point into the
        template; when False the %%ENTRY%% placeholder is simply removed
    """
    path = os.path.join(hub.PATH, "setup.py")
    setup_str = SETUP.replace("%%NAME%%", name)
    if entry:
        # Point the console script at <name>.scripts:start, normalizing
        # dashes to underscores so the target is an importable package name
        setup_str = setup_str.replace(
            "%%ENTRY%%",
            ENTRY.replace(
                "%%NAME%%.scripts:start", f"{name.replace('-', '_')}.scripts:start"
            ),
        )
        # NOTE(review): %%ENTRY%% was already consumed by the replace above,
        # so this second substitution looks like a leftover no-op — confirm
        # whether it can be removed
        setup_str = setup_str.replace("%%ENTRY%%", ENTRY.replace("%%NAME%%", name))
    else:
        setup_str = setup_str.replace("%%ENTRY%%", "")
    with open(path, "w+") as fp:
        fp.write(setup_str)
|
||||
|
||||
|
||||
def mkscript(hub: "pop.hub.Hub", name: str):
    """
    Create and write out the project's scripts.py entry-point module from
    the SCRIPT template
    """
    path = os.path.join(hub.PATH, name, "scripts.py")
    script_str = SCRIPT.replace("%%NAME%%", name)
    with open(path, "w+") as fp:
        fp.write(script_str)
|
||||
|
||||
|
||||
def mkrun(hub: "pop.hub.Hub", name: str):
    """
    Create the convenience run.py script allowing the project to
    be executed from the local directory
    """
    # Reuse the entry-point template and append an immediate start() call
    body = SCRIPT.replace("%%NAME%%", name) + "\n\nstart()\n"
    with open(os.path.join(hub.PATH, "run.py"), "w+") as fp:
        fp.write(body)
|
||||
|
||||
|
||||
def mkinit(hub: "pop.hub.Hub", name: str):
    """
    Write the initial init.py plugin module for the new project.
    """
    # The plugin module lives at <project>/<name>/<name>/init.py
    target = os.path.join(hub.PATH, name, name, "init.py")
    with open(target, "w+") as fp:
        fp.write(INIT.replace("%%NAME%%", name))
|
||||
|
||||
|
||||
def mkversion(hub: "pop.hub.Hub", name: str):
    """
    Write the generated version.py file for the project.
    """
    target = os.path.join(hub.PATH, name, "version.py")
    with open(target, "w+") as fp:
        fp.write(VER)
|
||||
|
||||
|
||||
def mkconf(hub: "pop.hub.Hub", name: str):
    """
    Create the conf.py file, expanding %%DYNE%% into one DYNE entry per
    requested dynamic namespace
    """
    path = os.path.join(hub.PATH, name, "conf.py")
    dyne_str = ""
    for dyne in hub.opts["dyne"]:
        # Each extra dyne gets its own '"<dyne>": ["<dyne>"],' line in DYNE
        dyne_str += f'    "{dyne}": ["{dyne}"],\n'
    conf_str = CONF.replace("%%NAME%%", name)
    conf_str = conf_str.replace("%%DYNE%%", dyne_str)
    with open(path, "w+") as fp:
        fp.write(conf_str)
|
||||
|
||||
|
||||
def mkreadme(hub: "pop.hub.Hub", name: str):
    """
    Create and write out the project's README.rst containing the project
    name as an RST title
    """
    path = os.path.join(hub.PATH, "README.rst")
    # RST titles need the over/underline to be at least as long as the text
    bar = "=" * len(name)
    readme_str = f"{bar}\n{name.upper()}\n{bar}\n"
    with open(path, "w+") as fp:
        fp.write(readme_str)
|
||||
|
||||
|
||||
def mkproj(hub: "pop.hub.Hub"):
    """
    Create the pyproject.toml file
    """
    target = os.path.join(hub.PATH, "pyproject.toml")
    # Append mode: preserve anything already present in pyproject.toml
    with open(target, "a+") as fp:
        fp.write(PYPROJ)
|
||||
|
||||
|
||||
def mkprecom(hub: "pop.hub.Hub"):
    """
    Create the pre-commit configuration file
    """
    target = os.path.join(hub.PATH, ".pre-commit-config.yaml")
    with open(target, "w+") as fp:
        fp.write(PRECOM)
|
||||
|
||||
|
||||
def print_post(hub: "pop.hub.Hub", name: str):
    """
    Print a message after the run to document how to enable
    things like pre-commit
    """
    lines = [
        f"Congratulations! You now have a project set up called {name}!",
        "This project can be executed by calling the run.py script:",
        "    python3 run.py",
        "This project has been set up with pre-commit hooks for code checks and black.",
        'First set up your source control environment with "git init" or "hg init".',
        "Then enable these checks in your git checkout:",
        "    pip install pre-commit",
        "    pre-commit install",
        "To run pre-commit manually, execute:",
        "    pre-commit run --all-files",
    ]
    # One write keeps the banner contiguous even with interleaved output
    print("\n".join(lines))
|
||||
@@ -0,0 +1,253 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Control and add subsystems to the running daemon hub
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
from typing import Generator, List, Tuple
|
||||
|
||||
# Import pop libs
|
||||
import pop.hub
|
||||
|
||||
|
||||
def add(
    hub: pop.hub.Hub,
    pypath: List[str] or str = None,
    subname: str = None,
    sub: pop.hub.Sub = None,
    static: List[str] or str = None,
    contracts_pypath: List[str] or str = None,
    contracts_static: List[str] or str = None,
    default_contracts: List[str] or str = None,
    virtual: bool = True,
    dyne_name: str = None,
    omit_start: Tuple[str] = ("_",),
    omit_end: Tuple[str] = (),
    omit_func: bool = False,
    omit_class: bool = True,
    omit_vars: bool = False,
    mod_basename: str = "pop.sub",
    stop_on_failures: bool = False,
    load_all: bool = True,
    recursive_contracts_static: List[str] or str = None,
    default_recursive_contracts: List[str]
    or str = None,  # TODO: Not str, pretty sure -W. Werner, 2020-10-20
):
    """
    Add a new subsystem to the hub

    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :param sub: The sub to use as the root to add to
    :param pypath: One or many python paths which will be imported
    :param static: Directories that can be explicitly passed
    :param contracts_pypath: Load additional contract paths
    :param contracts_static: Load additional contract paths from a specific directory
    :param default_contracts: Specifies that a specific contract plugin will be applied as a default to all plugins
    :param virtual: Toggle whether or not to process __virtual__ functions
    :param dyne_name: The dynamic name to use to look up paths to find plugins -- linked to conf.py
    :param omit_start: Allows you to pass in a tuple of characters that would omit the loading of any object
        I.E. Any function starting with an underscore will not be loaded onto a plugin
        (You should probably never change this)
    :param omit_end: Allows you to pass in a tuple of characters that would omit the loading of an object
        (You should probably never change this)
    :param omit_func: bool: Don't load any functions
    :param omit_class: bool: Don't load any classes
    :param omit_vars: bool: Don't load any vars
    :param mod_basename: str: Manipulate the location in sys.modules that the plugin will be loaded to.
        Allow plugins to be loaded into a separate namespace.
    :param stop_on_failures: If any module fails to load for any reason, stacktrace and do not continue loading this sub
    :param load_all: Load all the plugins on the sub
    """
    # Derive the sub's name from whichever source option was provided:
    # pypath wins, then static, then dyne_name, unless subname is explicit
    if pypath:
        pypath = pop.hub.ex_path(pypath)
        subname = subname if subname else pypath[0].split(".")[-1]
    elif static:
        subname = subname if subname else os.path.basename(static)
    if dyne_name:
        subname = subname if subname else dyne_name
    # Nested subs attach to the passed-in sub, otherwise to the hub itself
    root = sub or hub
    root._subs[subname] = pop.hub.Sub(
        hub,
        subname,
        root,
        pypath,
        static,
        contracts_pypath,
        contracts_static,
        default_contracts,
        virtual,
        dyne_name,
        omit_start,
        omit_end,
        omit_func,
        omit_class,
        omit_vars,
        mod_basename,
        stop_on_failures,
        # A nested sub inherits whether its parent's __sub_virtual__ passed
        sub_virtual=getattr(root, "_subvirt", True),
        recursive_contracts_static=recursive_contracts_static,
        default_recursive_contracts=default_recursive_contracts,
    )
    # init the sub (init.py:__init__) after it can be referenced on the hub!
    root._subs[subname]._sub_init()
    # Keep deterministic iteration order over the root's subs
    root._iter_subs = sorted(root._subs.keys())
    if load_all:
        root._subs[subname]._load_all()
    # Register any aliases the sub declared so they resolve to this subname
    for alias in root._subs[subname]._alias:
        root._sub_alias[alias] = subname
|
||||
|
||||
|
||||
def remove(hub: pop.hub.Hub, subname: str):
    """
    Remove a pop from the hub, run the shutdown if needed

    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    """
    if hasattr(hub, subname):
        # Give the sub's init module a chance to clean up before removal
        init_mod = getattr(getattr(hub, subname), "init", None)
        shutdown = getattr(init_mod, "shutdown", None)
        if shutdown is not None:
            shutdown()
        hub._remove_subsystem(subname)
|
||||
|
||||
|
||||
def load_all(hub: pop.hub.Hub, subname: str) -> bool:
    """
    Load all modules under a given pop

    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :return: True when the named sub exists and was loaded, else False
    """
    # Guard clause: nothing to do when the sub is absent
    if not hasattr(hub, subname):
        return False
    getattr(hub, subname)._load_all()
    return True
|
||||
|
||||
|
||||
def get_dirs(hub: pop.hub.Hub, sub: pop.hub.Sub) -> List[str]:
    """
    Return a list of directories that contain the modules for this subname

    :param hub: The redistributed pop central hub
    :param sub: The pop object that contains the loaded module data
    """
    # The Sub tracks its module search paths on its private _dirs attribute
    return sub._dirs
|
||||
|
||||
|
||||
def iter_subs(
    hub: pop.hub.Hub, sub: pop.hub.Sub, recurse: bool = False
) -> Generator[pop.hub.Sub, None, None]:
    """
    Return an iterator that will traverse just the subs. This is useful for
    nested subs

    :param hub: The redistributed pop central hub
    :param recurse: Recursively iterate over nested subs
    """
    for child_name in sorted(sub._subs):
        child = sub._subs[child_name]
        # Skip subs whose __sub_virtual__ check failed
        if not child._sub_virtual:
            continue
        yield child
        if recurse and hasattr(child, "_subs"):
            yield from hub.pop.sub.iter_subs(child, recurse)
|
||||
|
||||
|
||||
def load_subdirs(hub: pop.hub.Hub, sub: pop.hub.Sub, recurse: bool = False):
    """
    Given a sub, load all subdirectories found under the sub into a lower namespace

    :param hub: The redistributed pop central hub
    :param sub: The pop object that contains the loaded module data
    :param recurse: Recursively iterate over nested subs
    """
    # Don't descend into subs whose __sub_virtual__ check failed
    if not sub._sub_virtual:
        return
    dirs = hub.pop.sub.get_dirs(sub)
    roots = {}
    # Collect same-named subdirectories across all of the sub's dirs so that
    # they merge into one nested sub with multiple static paths
    for dir_ in dirs:
        for fn in os.listdir(dir_):
            if fn.startswith("_"):
                continue
            if fn == "contracts":
                # contracts dirs hold contract plugins, not nested subs
                continue
            full = os.path.join(dir_, fn)
            if not os.path.isdir(full):
                continue
            if fn not in roots:
                roots[fn] = [full]
            else:
                roots[fn].append(full)
    for name, sub_dirs in roots.items():
        # Load er up!
        # The nested sub inherits all loader options from its parent
        hub.pop.sub.add(
            subname=name,
            sub=sub,
            static=sub_dirs,
            virtual=sub._virtual,
            omit_start=sub._omit_start,
            omit_end=sub._omit_end,
            omit_func=sub._omit_func,
            omit_class=sub._omit_class,
            omit_vars=sub._omit_vars,
            mod_basename=sub._mod_basename,
            stop_on_failures=sub._stop_on_failures,
        )
        if recurse:
            if isinstance(getattr(sub, name), pop.hub.Sub):
                # Called via the hub, so the hub argument is auto-injected
                hub.pop.sub.load_subdirs(getattr(sub, name), recurse)
|
||||
|
||||
|
||||
def reload(hub: pop.hub.Hub, subname: str):
    """
    Instruct the hub to reload the modules for the given sub. This does not call
    the init.new function or remove sub level variables. But it does re-read the
    directory list and re-initialize the loader causing all modules to be re-evaluated
    when started.

    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :return: True when the sub exists and was re-prepared, else False
    """
    # Guard clause: unknown subs cannot be reloaded
    if not hasattr(hub, subname):
        return False
    getattr(hub, subname)._prepare()
    return True
|
||||
|
||||
|
||||
def extend(
    hub: pop.hub.Hub,
    subname: str,
    pypath: List[str] or str = None,
    static: List[str] or str = None,
    contracts_pypath: List[str] or str = None,
    contracts_static: List[str] or str = None,
) -> bool:
    """
    Extend the directory lookup for a given sub. Any of the directory lookup
    arguments can be passed.

    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :param pypath: One or many python paths which will be imported
    :param static: Directories that can be explicitly passed
    :param contracts_pypath: Load additional contract paths
    :param contracts_static: Load additional contract paths from a specific directory
    :return: True when the sub exists and was extended, else False
    """
    if not hasattr(hub, subname):
        return False
    sub = getattr(hub, subname)
    # Map each optional argument onto the Sub attribute it extends
    extensions = (
        ("_pypath", pypath),
        ("_static", static),
        ("_contracts_pypath", contracts_pypath),
        ("_contracts_static", contracts_static),
    )
    for attr, paths in extensions:
        if paths:
            getattr(sub, attr).extend(pop.hub.ex_path(paths))
    # Re-initialize the loader so the new directories take effect
    sub._prepare()
    return True
|
||||
@@ -0,0 +1,373 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Provides tools to help unit test projects using pop.
|
||||
For now, provides mock Hub instances.
|
||||
"""
|
||||
# Import python libs
|
||||
import inspect
|
||||
import copy
|
||||
from asyncio import iscoroutinefunction
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
HAS_TEST = False
|
||||
from asynctest.mock import create_autospec, Mock
|
||||
|
||||
HAS_TEST = True
|
||||
except (ImportError, ModuleNotFoundError):
|
||||
try:
|
||||
from mock import create_autospec as mock_create_autospec, Mock
|
||||
|
||||
HAS_TEST = True
|
||||
|
||||
def create_autospec(spec, *args, **kwargs):
|
||||
if iscoroutinefunction(spec):
|
||||
raise Exception(
|
||||
"MockHub requires asynctest in order to mock async functions"
|
||||
)
|
||||
return mock_create_autospec(spec, *args, **kwargs)
|
||||
|
||||
except (ImportError, ModuleNotFoundError):
|
||||
...
|
||||
|
||||
# Import pop libs
|
||||
from pop.contract import Contracted
|
||||
from pop.loader import LoadedMod
|
||||
from pop.hub import Hub, Sub
|
||||
from typing import Any, Callable, Tuple
|
||||
|
||||
|
||||
def __virtual__(hub: Hub) -> Tuple[bool, str]:
    """Only load this sub when a mocking backend was importable above."""
    return HAS_TEST, "Async pop testing libs are not available"
|
||||
|
||||
|
||||
class _LookUpTable:
|
||||
def __init__(self, *args, **kwargs):
|
||||
self._lut = {}
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
def __contains__(self, key: str):
|
||||
return id(key) in self._lut
|
||||
|
||||
def __setitem__(self, key: str, value: Any):
|
||||
self._lut[id(key)] = value
|
||||
|
||||
def __getitem__(self, key: str):
|
||||
return self._lut[id(key)]
|
||||
|
||||
def __delitem__(self, key: str):
|
||||
del self._lut[id(key)]
|
||||
|
||||
def __len__(self):
|
||||
return len(self._lut)
|
||||
|
||||
|
||||
class _LazyPop:
    """
    Base for the test-hub variants below. Wraps a real Hub/Sub/LoadedMod and
    lazily mirrors its attributes: each attribute starts as the __Lazy
    sentinel and is replaced on first access by a mock (or a nested _LazyPop)
    via _orig_to_attr(). A shared identity-keyed look-up table (lut) maps
    each real object to its test-side counterpart so repeated access returns
    the same wrapper.
    """

    # Real loader types that get wrapped in a nested _LazyPop, never mocked
    __lazy_classes = [Hub, Sub, LoadedMod]
    _hub_id = object()  # just a unique object for our lut storage
    _lazy_hub_id = object()

    class __Lazy:
        # Sentinel marking "not yet resolved"; replaced on first access
        pass

    def __init__(self, obj, lut=None):
        if isinstance(obj, Hub):
            # Root of the tree: create the shared lut and seed it with the
            # real hub, this wrapper, and the hub->wrapper mapping
            lut = _LookUpTable()
            lut[self._hub_id] = obj
            lut[self._lazy_hub_id] = self
            lut[obj] = self
        elif isinstance(obj, Sub):
            # Make sure the sub's modules exist before we enumerate them
            obj._load_all()

        self.__lut = lut
        self.__obj = obj
        # Pre-populate every visible attribute with the lazy sentinel
        for attr_name in self.__attr_names():
            setattr(self, attr_name, _LazyPop.__Lazy)

    def _hub(self):
        # The real, backing hub
        return self.__lut[self._hub_id]

    def _lazy_hub(self):
        # The root test-hub wrapper
        return self.__lut[self._lazy_hub_id]

    def __attr_names(self):
        # TODO: '_' - is this actually right? what should I really expose?
        attrs = [attr for attr in self.__obj.__dict__ if not attr.startswith("_")]

        # Add the names the loader exposes dynamically per object type
        if isinstance(self.__obj, Hub):
            attrs += list(self.__obj._subs)
        elif isinstance(self.__obj, Sub):
            attrs += list(self.__obj._loaded)
            attrs += list(self.__obj._subs)
        elif isinstance(self.__obj, LoadedMod):
            attrs += list(self.__obj._attrs)
        else:
            raise Exception(
                "Standard objects should not be lazy: {}".format(str(self.__obj))
            )

        return attrs

    def _find_subs(self):
        """Breadth-first list of (dotted_path, sub) for every sub on the hub."""
        i = 0
        subs = [(s._subname, s) for s in self._hub()]
        while i < len(subs):
            for child in subs[i][1]._subs:
                subs.append((".".join([subs[i][0], child]), getattr(subs[i][1], child)))
            i += 1
        return subs

    def _find_module_from_file(self, file):
        """Resolve a source filename to (dotted_path, loaded_module) via _vmap."""
        for path, sub in self._find_subs():
            try:
                mod = sub._vmap[file]
                return ".".join([path, mod]), getattr(sub, mod)
            except (AttributeError, KeyError):
                pass
        else:
            raise Exception("Module not loaded on hub.")

    def _reset(self):
        # A potential issue - we don't do reference counting, so it's *possible*
        # that an object we're clearing out has been accessed via two places on
        # the test hub:
        #
        # mock_hub.OBJ.return_value = True
        # mock_hub.pop.OBJ
        #
        # If VAL is the same object on the real hub, and is accessed from *both*
        # places on a test hub, mock_hub.pop._reset() will break the coupling -
        # mock_hub.VAL and mock_hub.pop.VAL will then refer to different objects.
        # Fixing this requires storing back-references in the LUT. (TODO)
        # Problems arising this should be rare and also fairly obvious.

        # first, reset items with entries in the LUT
        items = list(self.__dict__.items())
        for k, v in items:
            orig = getattr(self.__obj, k, None)
            if orig in self.__lut and orig is not self.__obj:
                if isinstance(v, _LazyPop):
                    v._reset()
                del self.__lut[orig]
                setattr(self, k, _LazyPop.__Lazy)

        # now remove any assignments that *wouldn't* be in the lut
        # (attrs that weren't assigned on the backing object)
        fresh_obj = self.__class__(self.__obj, lut={})
        fresh_obj_keys = list(self.__dict__.keys())
        for k in fresh_obj_keys:
            if k not in fresh_obj.__dict__:
                del self.__dict__[k]

    def __setattr__(self, key, value):
        if isinstance(value, _LazyPop) and value.__class__ is not self.__class__:
            if value.__obj in self.__lut:
                # we've previously touched this item, we need to clear it out.
                self.__lut[value.__obj]._reset()
            # we are constructing a hybrid _LazyPop - copy the type of the value but nothing else
            value = value.__class__(value.__obj, self.__lut)
            self.__lut[value.__obj] = value
        if isinstance(value, Contracted) and value.hub is not self._lazy_hub():
            # We need to update the Contracted to use our parent hub
            value = copy.copy(value)
            value.hub = self._lazy_hub()
            # we don't update the lut because we don't know what the *original is *
        if value.__class__ in self.__lazy_classes and "__obj" not in key:
            raise TypeError(
                "Mixing of real and test hubs is not supported. "
                "Contracteds (hub.sub.mod.func) are supported."
            )

        super().__setattr__(key, value)

    def __getattribute__(self, item):
        if item and not item.strip("_"):  # only contains underscores, resolve 'this'
            # e.g. hub._ is "this module", hub.__ is one namespace up, etc.;
            # the caller's filename identifies which module "this" is
            stack = inspect.stack(0)
            file = stack[1].filename
            path, mod = self._find_module_from_file(file)

            # go up N steps
            parts = path.split(".")
            resolved_path = parts[0 : len(parts) - len(item) + 1]
            if resolved_path:
                orig = getattr(self._hub(), ".".join(resolved_path))
            else:
                orig = self._hub()

            # find/create attr, return
            attr = self._orig_to_attr(orig)
            self.__lut[orig] = attr
            return attr

        if "." in item:
            # Support dotted lookups: getattr(hub, "sub.mod.fn")
            result = self
            for part in item.split(".").copy():
                result = getattr(result, part)
            return result

        attr = super().__getattribute__(item)

        if attr is _LazyPop.__Lazy:
            # First access: materialize the wrapper/mock for the real value
            orig = getattr(self.__obj, item)
            attr = self._orig_to_attr(orig)

            self.__lut[orig] = attr
            super().__setattr__(item, attr)  # bypass our custom setattr

        return attr

    def _orig_to_attr(self, orig):
        """Map a real object to its test-side counterpart (cached in the lut)."""
        if orig in self.__lut:
            attr = self.__lut[orig]
        elif [True for cls in self.__lazy_classes if isinstance(orig, cls)]:
            attr = self.__class__(orig, self.__lut)
        elif isinstance(orig, Contracted):
            attr = self._mock_function(orig)
        else:
            attr = self._mock_attr(orig)
        return attr

    def _mock_attr(self, a):
        # Plain values become strict autospec mocks
        return create_autospec(a, spec_set=True)

    def _mock_function(self, f):
        # Subclasses decide how hub functions are wrapped/mocked
        raise NotImplementedError()
|
||||
|
||||
|
||||
def strip_hub(f: Callable) -> Hub:
    """
    returns a no-op function with the same function signature... minus the
    first parameter (hub).
    """
    prefix = "async " if inspect.iscoroutinefunction(f) else ""
    rendered = []
    for name, param in inspect.signature(f).parameters.items():
        if param.kind is inspect.Parameter.VAR_POSITIONAL:
            rendered.append(f"*{name}")
        elif param.kind is inspect.Parameter.VAR_KEYWORD:
            rendered.append(f"**{name}")
        else:
            piece = name
            # Any default becomes a placeholder so the arity still matches
            if param.default is not inspect.Parameter.empty:
                piece += '="has default"'
            rendered.append(piece)
    # Drop the first parameter (hub) and build the stand-in source
    src = prefix + "def {}({}): pass".format(f.__name__, ", ".join(rendered[1:]))

    scope = {}
    exec(src, scope)

    return scope[f.__name__]
|
||||
|
||||
|
||||
def mock_hub(hub: Hub) -> "MockHub":
    """Convenience factory returning a MockHub mirroring ``hub``."""
    return MockHub(hub)
|
||||
|
||||
|
||||
class MockHub(_LazyPop):
    """
    Provides mocks mirroring a real hub::

        hub.sub.mod.fn() # mock
        hub.sub.mod.attr # mock
    """

    def _mock_function(self, f: Contracted) -> Callable:
        # Autospec the function minus its hub argument so call signatures
        # match what plugin code actually passes through the hub
        mocked = create_autospec(strip_hub(f.func), spec_set=True)
        mocked.__signature__ = f.signature
        return mocked
|
||||
|
||||
|
||||
def fn_hub(hub: Hub) -> "NoContractHub":
    """Convenience factory returning a NoContractHub mirroring ``hub``."""
    return NoContractHub(hub)
|
||||
|
||||
|
||||
class NoContractHub(_LazyPop):
    """
    Provides access to real functions, bypassing contracts and mocking attributes::

        hub.sub.mod.fn() # executes real function, no contracts
        hub.sub.mod.attr # mock
    """

    def _mock_function(self, f: Contracted) -> Contracted:
        # Re-wrap the real function with no contracts attached, bound to the
        # lazy hub so nested hub lookups stay inside the test namespace
        return Contracted(
            hub=self._lazy_hub(),
            contracts=None,
            func=f.func,
            ref=f.ref,
            name=f.__name__,
        )
|
||||
|
||||
|
||||
def mock_contracted(contract_hub, c: Contracted) -> Contracted:
    """
    Return a Contracted whose wrapped function is a strict autospec mock but
    whose contracts, ref, name and function metadata are preserved.
    """
    mocked = create_autospec(c.func, spec_set=True)
    mocked.__signature__ = c.signature  # required for python 3.6
    mocked.__module__ = c.func.__module__
    # Deep-copy function attributes so tests cannot mutate the original
    mocked.__dict__.update(copy.deepcopy(c.func.__dict__))
    return Contracted(contract_hub, c.contracts, mocked, c.ref, c.__name__)
|
||||
|
||||
|
||||
class ContractHub(_LazyPop):
    """
    Runs a call through the contract system, but the function is a mock. Mostly useful for integration tests:

        hub.sub.mod.fn() # executes mock function, real contracts
        hub.sub.mod.attr # mock

    You can verify what parameters are passed to a function after going through loaded contracts::

        contract_hub.sub.mod.fn('foo')
        assert contract_hub.sub.mod.fn.called_with('bar')

    --------------------------------

    You can view or modify the contracts that will be executed on one function for a test - but first:
    MODIFYING CONTRACTS THIS WAY IS NOT SAFE ON REAL HUBS AND OTHER TESTING HUB VARIANTS!

    I have previously thought of modifying contracts with mocks, only to realize what I really want is to
    unit test a specific contract. Think twice before using this functionality.

    --------------------------------

    The contract modules are visible via hub.sub.mod.fn.contracts, and the contract functions that will
    be called, wrapping fn are visible via hub.sub.mod.fn.contract_functions. It is safe to modify the
    contracts list or contract_functions dict only on a ContractHub.

    Examine that the first contract function to be called is 'foo.pre_fn', then bypass it::

        assert contract_hub.sub.mod.fn.contract_functions['pre'][0].__module__ is 'foo'
        assert contract_hub.sub.mod.fn.contract_functions['pre'][0].__name__ is 'pre_fn'
        hub.sub.mod.fn.contract_functions['pre'][0] = create_autospec(hub.sub.mod.fn.contract_functions['pre'][0])

    Assert that one contract will be called before another::

        assert contract_hub.sub.mod.fn.contracts.index(contract1) < contract_hub.sub.mod.fn.contracts.index(contract2)
    """

    def _mock_function(self, f: Contracted) -> Contracted:
        # Keep the real contracts, swap the wrapped function for a mock
        return mock_contracted(self._lazy_hub(), f)
|
||||
|
||||
|
||||
def mock_attr_hub(hub: Hub) -> "MockAttrHub":
    """Convenience factory returning a MockAttrHub mirroring ``hub``."""
    return MockAttrHub(hub)
|
||||
|
||||
|
||||
class MockAttrHub(_LazyPop):
    """
    Provides an almost-normal hub.

    Contracts and functions are executed, but all attrs are mocked.
    """

    def _mock_function(self, f) -> Contracted:
        # Re-bind the real function AND its real contracts to the lazy hub
        return Contracted(
            hub=self._lazy_hub(),
            contracts=f.contracts,
            func=f.func,
            ref=f.ref,
            name=f.__name__,
        )
|
||||
@@ -0,0 +1,18 @@
|
||||
"""
|
||||
Routines to verify the working environment etc.
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import pop.hub
|
||||
|
||||
|
||||
def env(hub: "pop.hub.Hub"):
    """
    Verify that the directories specified in the system exist
    """
    # Any option named *_dir is treated as a directory path to ensure
    for key in hub.opts:
        if not key.endswith("_dir"):
            continue
        try:
            os.makedirs(hub.opts[key])
        except OSError:
            # Directory already exists (or cannot be created) - best effort
            pass
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,141 @@
|
||||
"""
|
||||
The Proc sub is used to spin up worker processes that run hub referenced
|
||||
coroutines.
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import sys
|
||||
import atexit
|
||||
import itertools
|
||||
import asyncio
|
||||
import subprocess
|
||||
|
||||
# Import third party libs
|
||||
import msgpack
|
||||
import pop.hub
|
||||
|
||||
|
||||
def __init__(hub: "pop.hub.Hub"):
    """
    Create constants used by the client and server side of procs
    """
    # Byte sequence used to delimit msgpack payloads on the socket stream
    hub.proc.DELIM = b"d\xff\xcfCO)\xfe="
    # Payload-type flags -- presumably D = data and I = instruction; confirm
    # against the worker-side protocol implementation
    hub.proc.D_FLAG = b"D"
    hub.proc.I_FLAG = b"I"
    # Worker pools by name, their round-robin iterators, and per-pool
    # bookkeeping (loaded subs, return socket, socket dir)
    hub.proc.Workers = {}
    hub.proc.WorkersIter = {}
    hub.proc.WorkersTrack = {}
|
||||
|
||||
|
||||
def _get_cmd(hub: "pop.hub.Hub", ind, ref, ret_ref, sock_dir):
|
||||
"""
|
||||
Return the shell command to execute that will start up the worker
|
||||
"""
|
||||
code = "import sys; "
|
||||
code += "import pop.hub; "
|
||||
code += "hub = pop.hub.Hub(); "
|
||||
code += 'hub.pop.sub.add("pop.mods.proc"); '
|
||||
code += f'hub.proc.worker.start("{sock_dir}", "{ind}", "{ref}", "{ret_ref}")'
|
||||
cmd = f"{sys.executable} -c '{code}'"
|
||||
return cmd
|
||||
|
||||
|
||||
def mk_proc(hub: "pop.hub.Hub", ind, workers, ret_ref, sock_dir):
    """
    Create the process and add it to the passed in workers dict at the
    specified index

    :param ind: index/key the new worker is stored under in ``workers``
    :param workers: dict tracking per-worker metadata (ref, path, proc, pid)
    :param ret_ref: socket name the worker uses to send results back
    :param sock_dir: directory holding the pool's unix sockets
    """
    # Random socket name keeps concurrent workers from colliding
    ref = os.urandom(3).hex() + ".sock"
    workers[ind] = {"ref": ref}
    workers[ind]["path"] = os.path.join(sock_dir, ref)
    cmd = _get_cmd(hub, ind, ref, ret_ref, sock_dir)
    # shell=True so the single-quoted python one-liner is parsed by the shell
    workers[ind]["proc"] = subprocess.Popen(cmd, shell=True)
    workers[ind]["pid"] = workers[ind]["proc"].pid
|
||||
|
||||
|
||||
async def pool(
    hub: "pop.hub.Hub", num, name: str = "Workers", callback=None, sock_dir=None
):
    """
    Create a new local pool of process based workers

    :param num: The number of processes to add to this pool
    :param name: key under hub.proc.Workers where this pool is stored
    :param callback: The pop ref to call when the process communicates
        back
    :param sock_dir: directory that will hold the pool's unix sockets
        NOTE(review): the default of None would make os.path.join below
        raise TypeError -- callers appear to always pass it; confirm
    """
    # Socket the workers use to push results back to this process
    ret_ref = os.urandom(3).hex() + ".sock"
    ret_sock_path = os.path.join(sock_dir, ret_ref)
    # Lazily create the atexit-based process tracker on first pool
    if not hasattr(hub.proc, "Tracker"):
        hub.proc.init.mk_tracker()
    workers = {}
    if callback:
        await asyncio.start_unix_server(
            hub.proc.init.ret_work(callback), path=ret_sock_path
        )
    for ind in range(num):
        hub.proc.init.mk_proc(ind, workers, ret_ref, sock_dir)
    # Round-robin iterator used to dispatch work across the pool
    w_iter = itertools.cycle(workers)
    hub.proc.Workers[name] = workers
    hub.proc.WorkersIter[name] = w_iter
    hub.proc.WorkersTrack[name] = {"subs": [], "ret_ref": ret_ref, "sock_dir": sock_dir}
    # Poll until every worker has created its socket before returning
    up = set()
    while True:
        for ind in workers:
            if os.path.exists(workers[ind]["path"]):
                up.add(ind)
        if len(up) == num:
            break
        await asyncio.sleep(0.01)
    # TODO: This seems to be spawning extra procs, this should be fixed
    # asyncio.ensure_future(hub.proc.init.maintain(name))
|
||||
|
||||
|
||||
async def maintain(hub: "pop.hub.Hub", name):
    """
    Keep an eye on these processes: poll the named worker pool and respawn
    any worker process that has exited.

    :param name: the worker pool name used in hub.proc.Workers
    """
    workers = hub.proc.Workers[name]
    # mk_proc requires the pool's return-socket ref and socket dir; the
    # original call omitted them, which raised a TypeError on every respawn
    track = hub.proc.WorkersTrack[name]
    while True:
        for ind, data in workers.items():
            # Popen.poll() returns None while the child is still running and
            # the exit code once it has terminated; only respawn dead workers
            # (the original test was inverted and respawned live ones)
            if data["proc"].poll() is not None:
                hub.proc.init.mk_proc(ind, workers, track["ret_ref"], track["sock_dir"])
        await asyncio.sleep(2)
|
||||
|
||||
|
||||
def mk_tracker(hub: "pop.hub.Hub"):
    """
    Create the process tracker, this simply makes a data structure to hold
    process references and sets them to be terminated when the system is
    shutdown.
    """
    hub.proc.Tracker = True
    # Ensure every spawned worker is terminated at interpreter exit
    atexit.register(hub.proc.init.clean)
|
||||
|
||||
|
||||
def clean(hub: "pop.hub.Hub"):
    """
    Clean up the processes registered in the tracker by terminating every
    worker process in every pool.
    """
    # The pool names are not needed here; iterate the values directly
    # (the original bound an unused `name` via .items())
    for workers in hub.proc.Workers.values():
        for data in workers.values():
            data["proc"].terminate()
|
||||
|
||||
|
||||
def ret_work(hub: "pop.hub.Hub", callback):
    """
    Build the asyncio unix-server connection handler that feeds returned
    worker payloads into ``callback``.

    :param callback: coroutine awaited with the decoded payload; its return
        value is msgpack-encoded and written back to the peer
    :return: the ``work(reader, writer)`` coroutine handler
    """
    async def work(reader, writer):
        """
        Process the incoming work
        """
        # Frames are delimited by hub.proc.DELIM; strip it before decoding
        inbound = await reader.readuntil(hub.proc.DELIM)
        inbound = inbound[: -len(hub.proc.DELIM)]
        payload = msgpack.loads(inbound, raw=False)
        ret = await callback(payload)
        # Encode the response and append the frame delimiter
        ret = msgpack.dumps(ret, use_bin_type=True)
        ret += hub.proc.DELIM
        writer.write(ret)
        await writer.drain()
        writer.close()

    return work
|
||||
@@ -0,0 +1,180 @@
|
||||
"""
|
||||
Execute functions or load subs on the workers in the named worker pool
|
||||
"""
|
||||
# import python libs
|
||||
import asyncio
|
||||
import os
|
||||
|
||||
# Import third party libs
|
||||
import msgpack
|
||||
import pop.hub
|
||||
|
||||
|
||||
async def add_sub(hub: "pop.hub.Hub", worker_name, *args, **kwargs):
    """
    Instruct every worker in the named pool to load the given sub.

    Accepts the same arguments as ``hub.pop.sub.add``.

    :return: dict mapping each worker index to the final response chunk
        it sent back
    """
    responses = {}
    payload = {"fun": "sub", "args": args, "kwargs": kwargs}
    # TODO: Make these futures to the run at the same time
    for ind, worker in hub.proc.Workers[worker_name].items():
        async for chunk in hub.proc.run.send(worker, payload):
            responses[ind] = chunk
    # Remember the sub so processes added later can replay it (see add_proc)
    hub.proc.WorkersTrack[worker_name]["subs"].append(
        {"args": args, "kwargs": kwargs}
    )
    return responses
|
||||
|
||||
|
||||
async def add_proc(hub: "pop.hub.Hub", worker_name):
    """
    Add a single process to the worker pool, also make sure that the new
    process is up with a live socket and has every previously added sub
    loaded before returning.

    :param worker_name: the name of an existing worker pool
    :return: the index the new process was registered under
    """
    # grab and extrapolate the data we need
    ret_ref = hub.proc.WorkersTrack[worker_name]["ret_ref"]
    sock_dir = hub.proc.WorkersTrack[worker_name]["sock_dir"]
    workers = hub.proc.Workers[worker_name]
    # Default to one past the current pool size...
    ind = len(workers) + 1
    # ...but reuse a gap left by a removed worker if one exists.
    # NOTE(review): this scan keeps the LAST free slot found, not the
    # first — confirm that is intentional.
    for s_ind in range(len(workers) + 1):
        if s_ind not in workers:
            ind = s_ind
    hub.proc.init.mk_proc(ind, workers, ret_ref, sock_dir)
    # Make sure the process is up with a live socket — busy-wait on the
    # socket path appearing on disk
    while True:
        if os.path.exists(workers[ind]["path"]):
            break
        await asyncio.sleep(0.01)
    # Add all of the subs that have been added to processes in this pool
    for sub in hub.proc.WorkersTrack[worker_name]["subs"]:
        payload = {"fun": "sub", "args": sub["args"], "kwargs": sub["kwargs"]}
        async for chunk in hub.proc.run.send(workers[ind], payload):
            # Drain the response stream; the reply itself is not inspected
            pass
    return ind
|
||||
|
||||
|
||||
async def pub(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute the given function reference on ALL workers in the given pool.

    The sub providing ``func_ref`` must already be loaded on the workers
    via ``hub.proc.run.add_sub``.

    :return: dict mapping each worker index to its final response chunk
    """
    results = {}
    payload = {"fun": "run", "ref": func_ref, "args": args, "kwargs": kwargs}
    # TODO: Make these futures to the run at the same time
    for ind, worker in hub.proc.Workers[worker_name].items():
        async for chunk in hub.proc.run.send(worker, payload):
            results[ind] = chunk
    return results
|
||||
|
||||
|
||||
async def set_attr(hub: "pop.hub.Hub", worker_name, ref, value):
    """
    Set the given attribute at the given hub location on every worker
    process in the named pool.

    :return: dict mapping each worker index to its final response chunk
    """
    payload = {"fun": "setattr", "ref": ref, "value": value}
    results = {}
    # TODO: Make these futures to the run at the same time
    for ind, worker in hub.proc.Workers[worker_name].items():
        async for chunk in hub.proc.run.send(worker, payload):
            results[ind] = chunk
    return results
|
||||
|
||||
|
||||
async def ind_func(hub: "pop.hub.Hub", worker_name, _ind, func_ref, *args, **kwargs):
    """
    Execute the function on the worker at the given index within the named
    pool and return the first response chunk.
    """
    target = hub.proc.Workers[worker_name][_ind]
    payload = {"fun": "run", "ref": func_ref, "args": args, "kwargs": kwargs}
    # A plain function call produces a single reply; return it immediately
    async for reply in hub.proc.run.send(target, payload):
        return reply
|
||||
|
||||
|
||||
async def func(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute the given function reference on one worker chosen from the
    pool's rotation and return its result.

    The sub providing ``func_ref`` must already be loaded on the workers
    via ``hub.proc.run.add_sub``.
    """
    _, pending = await hub.proc.run.track_func(
        worker_name, func_ref, *args, **kwargs
    )
    return await pending
|
||||
|
||||
|
||||
async def track_func(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Pick the next worker in the pool rotation and start the call there.

    :return: ``(ind, coro)`` — the index chosen and an awaitable for the
        function's return value
    """
    ind = next(hub.proc.WorkersIter[worker_name])
    pending = hub.proc.run.ind_func(worker_name, ind, func_ref, *args, **kwargs)
    return ind, pending
|
||||
|
||||
|
||||
async def gen(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute a generator function reference on one worker chosen from the
    pool's rotation, yielding each chunk the worker streams back.

    Like `func` the sub needs to be made available to all workers first.
    """
    _, remote_iter = await hub.proc.run.track_gen(
        worker_name, func_ref, *args, **kwargs
    )
    async for piece in remote_iter:
        yield piece
|
||||
|
||||
|
||||
async def track_gen(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Choose the next worker in the pool rotation and start a streamed call.

    :return: ``(ind, aiter)`` — the index chosen and the async iterator
        that yields the generator's chunks
    """
    ind = next(hub.proc.WorkersIter[worker_name])
    stream = hub.proc.run.ind_gen(worker_name, ind, func_ref, *args, **kwargs)
    return ind, stream
|
||||
|
||||
|
||||
async def ind_gen(hub: "pop.hub.Hub", worker_name, _ind, func_ref, *args, **kwargs):
    """
    Run the named generator on the worker at the given index, yielding each
    streamed chunk as it arrives.
    """
    target = hub.proc.Workers[worker_name][_ind]
    payload = {"fun": "gen", "ref": func_ref, "args": args, "kwargs": kwargs}
    async for piece in hub.proc.run.send(target, payload):
        yield piece
|
||||
|
||||
|
||||
async def send(hub: "pop.hub.Hub", worker, payload):
    """
    Send the given payload to the given worker over its unix socket and
    yield each response chunk from the remote.

    The worker terminates its stream with a message flagged ``D_FLAG``;
    chunks flagged otherwise are yielded as they arrive. If the final
    message is the only one, its decoded body is yielded so a plain
    function call still produces a value.
    """
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=worker["path"])
    try:
        writer.write(mp)
        await writer.drain()
        only_final = True
        while True:
            raw = await reader.readuntil(hub.proc.DELIM)
            body = raw[: -len(hub.proc.DELIM)]
            # The last byte before the delimiter is the iteration flag
            i_flag = body[-1:]
            ret = msgpack.loads(body[:-1], raw=False)
            if i_flag == hub.proc.D_FLAG:
                # break for the end of the sequence
                break
            yield ret
            only_final = False
        if only_final:
            yield ret
    finally:
        # The original never closed the connection, leaking a socket per
        # call; always close it, even if the consumer abandons the stream
        writer.close()
|
||||
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
This module is used to manage the process started up by the pool. Work in this
|
||||
module is used to manage the worker process itself and not other routines on
|
||||
the hub this process was derived from
|
||||
|
||||
This is an exec, not a fork! This is a fresh memory space!
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
import types
|
||||
import asyncio
|
||||
import pop.hub
|
||||
|
||||
# Import third party libs
|
||||
import msgpack
|
||||
|
||||
# TODO: The workers should detect if their controlling process dies and terminate by themselves
|
||||
# The controlling process will kill them when it exists, but if it exists hard then the workers
|
||||
# Should be able to also clean themselves up
|
||||
|
||||
|
||||
def start(hub: "pop.hub.Hub", sock_dir, ind, ref, ret_ref):
    """
    Entry point called by the startup script to create a worker process.

    Records the socket locations and worker index on the hub, then starts
    the event loop with the hold and server coroutines.

    :NOTE: This is a new process started from the shell, it does not have any
        of the process namespace from the creating process.
        This is an EXEC, NOT a FORK!
    """
    hub.proc.SOCK_DIR = sock_dir
    hub.proc.REF = ref
    hub.proc.RET_REF = ret_ref
    hub.proc.IND = ind
    # Derive the concrete socket paths from the dir + refs
    hub.proc.SOCK_PATH = os.path.join(sock_dir, ref)
    hub.proc.RET_SOCK_PATH = os.path.join(sock_dir, ret_ref)
    hub.pop.loop.start(hub.proc.worker.hold(), hub.proc.worker.server())
|
||||
|
||||
|
||||
async def hold(hub: "pop.hub.Hub"):
    """
    Keep the event loop alive indefinitely by sleeping in a loop.
    """
    sleep_interval = 60
    while True:
        await asyncio.sleep(sleep_interval)
|
||||
|
||||
|
||||
async def server(hub: "pop.hub.Hub"):
    """
    Bind the worker's unix socket and serve inbound work requests.
    """
    handler = hub.proc.worker.work
    await asyncio.start_unix_server(handler, path=hub.proc.SOCK_PATH)
|
||||
|
||||
|
||||
async def work(hub: "pop.hub.Hub", reader, writer):
    """
    Handle one inbound request on the worker's socket.

    Decodes the msgpack payload, dispatches on ``payload["fun"]`` to the
    matching handler, then writes the msgpack-encoded reply terminated
    with the done flag and the delimiter.
    """
    inbound = await reader.readuntil(hub.proc.DELIM)
    inbound = inbound[: -len(hub.proc.DELIM)]
    # msgpack dropped the `encoding` kwarg in 1.0; `raw=False` is its default
    if msgpack.version < (1, 0, 0):
        payload = msgpack.loads(inbound, encoding="utf-8")
    else:
        payload = msgpack.loads(inbound)
    if "fun" not in payload:
        ret = {"err": "Invalid format"}
    elif payload["fun"] == "sub":
        # Time to add a sub to the hub!
        try:
            hub.proc.worker.add_sub(payload)
            ret = {"status": True}
        except Exception as exc:
            ret = {"status": False, "exc": str(exc)}
    elif payload["fun"] == "run":
        # Time to do some work!
        try:
            ret = await hub.proc.worker.run(payload)
        except Exception as exc:
            ret = {"status": False, "exc": str(exc)}
    elif payload["fun"] == "gen":
        ret = await hub.proc.worker.gen(payload, reader, writer)
    elif payload["fun"] == "setattr":
        ret = await hub.proc.worker.set_attr(payload)
    else:
        # Previously an unknown fun fell through and replied with the b""
        # sentinel; surface an explicit error like the other failure paths
        ret = {"err": "Unknown function: {}".format(payload["fun"])}
    ret = msgpack.dumps(ret, use_bin_type=True)
    ret += hub.proc.D_FLAG
    ret += hub.proc.DELIM
    writer.write(ret)
    await writer.drain()
    writer.close()
|
||||
|
||||
|
||||
def add_sub(hub: "pop.hub.Hub", payload):
    """
    Load a new sub onto this worker's hub using the args/kwargs carried in
    the payload.
    """
    sub_args = payload["args"]
    sub_kwargs = payload["kwargs"]
    hub.pop.sub.add(*sub_args, **sub_kwargs)
|
||||
|
||||
|
||||
async def gen(hub: "pop.hub.Hub", payload, reader, writer):
    """
    Run a generator and yield back the returns. Supports a generator and an
    async generator.

    Each chunk is msgpack-encoded, tagged with ``I_FLAG`` (iteration) plus
    the delimiter, and written to the stream immediately; the caller
    (``work``) then writes the final ``D_FLAG`` message. If the ref turns
    out to be a coroutine or a plain function, its single result is
    returned for ``work`` to send instead.
    """
    ref = payload.get("ref")
    args = payload.get("args", [])
    kwargs = payload.get("kwargs", {})
    # Resolve the dotted ref on this worker's hub and invoke it
    ret = hub.pop.ref.last(ref)(*args, **kwargs)
    if isinstance(ret, types.AsyncGeneratorType):
        async for chunk in ret:
            # Stream each chunk tagged as an intermediate (I_FLAG) message
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif isinstance(ret, types.GeneratorType):
        for chunk in ret:
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif asyncio.iscoroutine(ret):
        return await ret
    else:
        return ret
    # The generator branches fall through to here; the empty string becomes
    # the body of the final D_FLAG message written by `work`
    return ""
|
||||
|
||||
|
||||
async def run(hub: "pop.hub.Hub", payload):
    """
    Resolve the ref named in the payload, call it with the supplied
    args/kwargs, and return the result (awaiting it if it is a coroutine).
    """
    target = hub.pop.ref.last(payload.get("ref"))
    result = target(*payload.get("args", []), **payload.get("kwargs", {}))
    if asyncio.iscoroutine(result):
        result = await result
    return result
|
||||
|
||||
|
||||
async def set_attr(hub: "pop.hub.Hub", payload):
    """
    Create the named ref on this worker's hub with the supplied value.
    """
    target_ref = payload.get("ref")
    new_value = payload.get("value")
    hub.pop.ref.create(target_ref, new_value)
|
||||
|
||||
|
||||
async def ret(hub: "pop.hub.Hub", payload):
    """
    Send a return payload to the spawning process. This return will be tagged
    with the index of the process that returned it.

    Connects to the pool's return socket, writes the tagged payload, then
    waits for and decodes the controller's acknowledgement.
    """
    # Tag the payload with this worker's index so the controller knows the origin
    payload = {"ind": hub.proc.IND, "payload": payload}
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=hub.proc.RET_SOCK_PATH)
    writer.write(mp)
    await writer.drain()
    # Wait for the controller's delimited response before closing
    ret = await reader.readuntil(hub.proc.DELIM)
    ret = ret[: -len(hub.proc.DELIM)]
    writer.close()
    # msgpack < 1.0 needs the explicit encoding kwarg; >= 1.0 defaults to raw=False
    if msgpack.version < (1, 0, 0):
        return msgpack.loads(ret, encoding="utf-8")
    else:
        return msgpack.loads(ret)
|
||||
Reference in New Issue
Block a user