first commit

This commit is contained in:
2020-11-03 18:30:14 -08:00
commit 31d8522470
1881 changed files with 345408 additions and 0 deletions

View File

@@ -0,0 +1 @@
# -*- coding: utf-8 -*-

View File

@@ -0,0 +1,298 @@
# -*- coding: utf-8 -*-
"""
Contracts to enforce loader objects
"""
# Import python libs
import asyncio
import functools
import inspect
import os
from collections import namedtuple
from types import ModuleType
from typing import Dict, Iterable, List
# Import pop libs
import pop.exc
import pop.hub
import pop.verify
class ContractedContext(
    namedtuple(
        "ContractedContext",
        ("func", "args", "kwargs", "signature", "ret", "cache", "ref"),
    )
):
    """
    Contracted function calling context.

    Immutable record handed to pre/call/post contract functions so they can
    inspect the pending call (and mutate ``args``/``kwargs``/``cache`` in
    place) before/after the wrapped function runs.
    """
    def __new__(
        cls,
        func: functools.partial,
        args: Iterable,
        kwargs: Dict,
        signature,
        ref,
        ret=None,
        cache=None,
    ): # pylint: disable=too-many-arguments
        # The cache dict must be created per instance; a dict default
        # argument would be shared across every context
        if cache is None:
            cache = {}
        # NOTE: the namedtuple field order is (..., ret, cache, ref) while
        # the parameter list ends (ref, ret, cache) -- keep this explicit
        # positional re-ordering in sync with the field tuple above
        return super(ContractedContext, cls).__new__(
            cls, func, list(args), kwargs, signature, ret, cache, ref
        )
    def get_argument(self, name):
        """
        Return the value corresponding to a function argument after binding the contract context
        argument and keyword arguments to the function signature.
        """
        return self.get_arguments()[name]
    def get_arguments(self):
        """
        Return a dictionary of all arguments that will be passed to the function and their
        values, including default arguments.
        """
        # Bind lazily and memoize the BoundArguments in the shared cache dict
        if "__bound_signature__" not in self.cache:
            try:
                self.cache["__bound_signature__"] = self.signature.bind(
                    *self.args, **self.kwargs
                )
            except TypeError as e:
                # Only translate the TypeError into a BindError when it was
                # raised by Signature.bind itself (frame inside inspect.py),
                # not by arbitrary user code further down the stack
                for frame in inspect.trace(0):
                    if frame.function == "bind" and frame.filename.endswith(
                        os.sep + "inspect.py"
                    ):
                        raise pop.exc.BindError(e)
                raise
            # Apply any default values from the signature
            self.cache["__bound_signature__"].apply_defaults()
        return self.cache["__bound_signature__"].arguments
def load_contract(
    contracts: List["Contracted"],
    default_contracts: List[str],
    mod: ModuleType,
    name: str,
) -> List:
    """
    Gather the contract modules that apply to the named loader module.

    :param contracts: The contracts sub to pull contract modules from
    :param default_contracts: The contracts that have been marked as defaults
    :param mod: A loader module
    :param name: The name of the module to get from the loader
    :return: The list of contract modules that apply to ``mod``
    """
    raws = []
    if not contracts:
        return raws
    loaded_contracts = []
    # A contract module named after the target module always applies
    if hasattr(contracts, name):
        loaded_contracts.append(name)
        raws.append(getattr(contracts, name))
    # An "init" contract module applies to every module in the sub
    if hasattr(contracts, "init"):
        loaded_contracts.append("init")
        raws.append(getattr(contracts, "init"))
    # Then any sub-wide default contracts not already collected
    if default_contracts:
        for contract in default_contracts:
            if contract in loaded_contracts:
                continue
            loaded_contracts.append(contract)
            raws.append(getattr(contracts, contract))
    # Finally, modules can opt in to extra contracts via __contracts__,
    # given either as a list/tuple or a comma separated string
    if hasattr(mod, "__contracts__"):
        cnames = getattr(mod, "__contracts__")
        if not isinstance(cnames, (list, tuple)):
            cnames = cnames.split(",")
        for cname in cnames:
            if cname in contracts:
                if cname in loaded_contracts:
                    continue
                loaded_contracts.append(cname)
                raws.append(getattr(contracts, cname))
    return raws
class Wrapper:
    """
    Base wrapper around a loaded callable that preserves the callable's
    metadata and exposes it under an alias name.
    """
    def __init__(self, func: functools.partial, ref: str, name: str):
        """
        :param func: The contracted function to call
        :param ref: The reference to the function on the hub
        :param name: An alias for the function
        """
        self.__dict__.update(
            getattr(func, "__dict__", {})
        )  # do this first so we later overwrite any conflicts
        self.func = func
        self.ref = ref
        self.__name__ = name
        self.signature = inspect.signature(self.func)
        self._sig_errors = []
        self.__wrapped__ = func

    def __call__(self, *args, **kwargs):
        # BUGFIX: propagate the wrapped function's return value; the
        # original called self.func but discarded the result, so every
        # call through a bare Wrapper returned None
        return self.func(*args, **kwargs)

    def __repr__(self):
        return "<{} func={}.{}>".format(
            self.__class__.__name__, self.func.__module__, self.__name__
        )
class Contracted(Wrapper):
    """
    Wrap a function that has contracts attached to it and run the contract
    routines (pre, call, post) around every invocation.
    """

    def __init__(
        self,
        hub: "pop.hub.Hub",
        contracts: List[Wrapper],
        func: functools.partial,
        ref: str,
        name: str,
    ):
        super().__init__(func, ref, name)
        self.hub = hub
        self.contracts = contracts if contracts else []
        self._load_contracts()

    def _get_contracts_by_type(self, contract_type: str = "pre") -> List[Wrapper]:
        """
        Collect the contract functions of the given type for this function.

        :param contract_type: One of "call", "pre", "post", or "sig"
        """
        specific_name = f"{contract_type}_{self.__name__}"
        found = []
        for contract in self.contracts:
            # A function-specific contract (e.g. pre_myfunc) is gathered
            # before the generic one (e.g. pre) for each contract module
            for attr in (specific_name, contract_type):
                if hasattr(contract, attr):
                    found.append(getattr(contract, attr))
        return found

    def _load_contracts(self):
        # TODO:
        # if Contracted - only allow regular pre/post
        # if ContractedAsync - allow coroutines and functions
        # if ContractedAsyncGen - allow coroutines and functions
        pre = self._get_contracts_by_type("pre")
        call = self._get_contracts_by_type("call")
        post = self._get_contracts_by_type("post")
        # Only a single "call" contract is ever honored
        self.contract_functions = {"pre": pre, "call": call[:1], "post": post}
        # TODO: write test for stack-like behavior (reverse "pre")
        self._has_contracts = any(self.contract_functions.values())

    def __call__(self, *args, **kwargs):
        args = (self.hub,) + args
        if not self._has_contracts:
            # Fast path: nothing to wrap, call straight through
            return self.func(*args, **kwargs)
        ctx = ContractedContext(self.func, args, kwargs, self.signature, self.ref)
        for pre in self.contract_functions["pre"]:
            pre(ctx)
        if self.contract_functions["call"]:
            ret = self.contract_functions["call"][0](ctx)
        else:
            ret = self.func(*ctx.args, **ctx.kwargs)
        # post contracts may replace the return value by returning non-None
        for post in self.contract_functions["post"]:
            replaced = post(ctx._replace(ret=ret))
            if replaced is not None:
                ret = replaced
        return ret
class ContractedAsyncGen(Contracted):
    """
    Contracted variant wrapping async generator functions; contracts run
    around the generator and every yielded chunk is forwarded to the caller.
    """
    async def __call__(self, *args, **kwargs):
        args = (self.hub,) + args
        if not self._has_contracts:
            # Fast path: just delegate to the wrapped async generator
            async for chunk in self.func(*args, **kwargs):
                yield chunk
            return
        contract_context = ContractedContext(
            self.func, args, kwargs, self.signature, self.ref
        )
        for fn in self.contract_functions["pre"]:
            pre_ret = fn(contract_context)
            # pre contracts may be plain functions or coroutine functions
            if asyncio.iscoroutine(pre_ret):
                await pre_ret
        chunk = None
        if self.contract_functions["call"]:
            async for chunk in self.contract_functions["call"][0](contract_context):
                yield chunk
        else:
            async for chunk in self.func(
                *contract_context.args, **contract_context.kwargs
            ):
                yield chunk
        # post contracts only ever see the last chunk the generator yielded
        ret = chunk
        for fn in self.contract_functions["post"]:
            if isinstance(fn, ContractedAsync):
                post_ret = await fn(contract_context._replace(ret=ret))
            else:
                post_ret = fn(contract_context._replace(ret=ret))
            # NOTE(review): post results update ret, but an async generator
            # cannot return a value to the caller -- the final ret is dropped
            if post_ret is not None:
                ret = post_ret
class ContractedAsync(Contracted):
    """
    Contracted variant wrapping coroutine functions; the wrapped call and
    any async contracts are awaited.
    """
    async def __call__(self, *args, **kwargs):
        args = (self.hub,) + args
        if not self._has_contracts:
            # Fast path: nothing to wrap, await the function directly
            return await self.func(*args, **kwargs)
        contract_context = ContractedContext(
            self.func, args, kwargs, self.signature, self.ref
        )
        for fn in self.contract_functions["pre"]:
            pre_ret = fn(contract_context)
            # pre contracts may be plain functions or coroutine functions
            if asyncio.iscoroutine(pre_ret):
                await pre_ret
        if self.contract_functions["call"]:
            ret = await self.contract_functions["call"][0](contract_context)
        else:
            ret = await self.func(*contract_context.args, **contract_context.kwargs)
        # post contracts may replace the return value by returning non-None
        for fn in self.contract_functions["post"]:
            if isinstance(fn, ContractedAsync):
                post_ret = await fn(contract_context._replace(ret=ret))
            else:
                post_ret = fn(contract_context._replace(ret=ret))
            if post_ret is not None:
                ret = post_ret
        return ret
def create_contracted(
    hub: "pop.hub.Hub",
    contracts: List[Wrapper],
    func: functools.partial,
    ref: str,
    name: str,
) -> Contracted:
    """
    Build the Contracted flavor that matches the wrapped callable.

    :param hub: The redistributed pop central hub
    :param contracts: Contracts functions to add to the sub
    :param func: The contracted function to call
    :param ref: The reference to the function on the hub
    :param name: The name of the module to get from the loader
    """
    # A callable is at most one of these, so the check order is irrelevant
    if inspect.isasyncgenfunction(func):
        wrapper_cls = ContractedAsyncGen
    elif asyncio.iscoroutinefunction(func):
        wrapper_cls = ContractedAsync
    else:
        wrapper_cls = Contracted
    return wrapper_cls(hub, contracts, func, ref, name)

View File

@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
"""
Tools to work with dicts
"""
import dict_tools.update
import dict_tools.data
import warnings
__virtualname__ = "dicts"
def update(*args, **kwargs):
    """
    Deprecated shim: delegate to ``dict_tools.update.update``.
    """
    warnings.warn("Use `dict_tools.update.update()` instead", DeprecationWarning)
    return dict_tools.update.update(*args, **kwargs)
def traverse(*args, **kwargs):
    """
    Deprecated shim: delegate to ``dict_tools.data.traverse_dict_and_list``.
    """
    warnings.warn(
        "Use `dict_tools.data.traverse_dict_and_list()` instead", DeprecationWarning
    )
    return dict_tools.data.traverse_dict_and_list(*args, **kwargs)

View File

@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*-
"""
Find directories
"""
# Import pop libs
import pop.dicts
# Import python libs
import os
import sys
import importlib
from typing import Any, Dict, Iterable, List
def dir_list(
    subname: str, p_name: str, pypath: List[str] = None, static: List[str] = None
) -> List[str]:
    """
    Return the directories to look for modules in, pypath specifies files
    relative to an installed python package, static is for static dirs

    :param subname: ignored
    :param p_name: ignored
    :param pypath: One or many python paths which will be imported
    :param static: Directories that can be explicitly passed
    """
    ret = []
    # BUGFIX: both arguments default to None; iterating them directly
    # raised TypeError when either was omitted
    for path in pypath or ():
        mod = importlib.import_module(path)
        # A package's __path__ may hold several entries (namespace
        # packages, or a different layout inside a frozen executable)
        for m_path in mod.__path__:
            ret.append(m_path)
    ret.extend(static or ())
    return ret
def inline_dirs(dirs: Iterable[str], subdir: str) -> List[str]:
    """
    Collect ``subdir`` under each entry of ``dirs`` when it exists on disk.

    :param dirs: The names of configured dynamic dirs
    :param subdir: The name of the subdir to check for in the list of dynamic dirs
    :return: An extended list of dirs that includes the found subdirs
    """
    candidates = (os.path.join(base, subdir) for base in dirs)
    return [path for path in candidates if os.path.isdir(path)]
def dynamic_dirs() -> Dict[str, Any]:
    """
    Iterate over the available python package imports and look for configured
    dynamic dirs

    Scans every directory on sys.path (following .egg-link files) for a
    conf.py that mentions DYNE, executes it, and merges each declared dyne
    name's paths/CONFIG/CLI_CONFIG/SUBCOMMANDS into the returned mapping.
    """
    dirs = []
    ret = {}
    # Gather candidate package directories from sys.path
    for dir_ in sys.path:
        if not os.path.isdir(dir_):
            continue
        for sub in os.listdir(dir_):
            full = os.path.join(dir_, sub)
            if full.endswith(".egg-link"):
                # Development installs: the egg-link file holds the real path
                with open(full) as rfh:
                    dirs.append(rfh.read().strip())
            if os.path.isdir(full):
                dirs.append(full)
    for dir_ in dirs:
        conf = os.path.join(dir_, "conf.py")
        context = {}
        if not os.path.isfile(conf):
            continue
        try:
            with open(conf) as f:
                code = f.read()
                # NOTE: this exec()s conf.py files found on sys.path; only
                # trusted environments should be scanned this way
                if "DYNE" in code:
                    exec(code, context)
                else:
                    continue
        except Exception:
            # A broken conf.py must not stop the scan; skip it
            continue
        if "DYNE" in context:
            if not isinstance(context["DYNE"], dict):
                continue
            for name, paths in context["DYNE"].items():
                if not isinstance(paths, list):
                    continue
                if name not in ret:
                    ret[name] = {
                        "paths": [],
                        "CONFIG": {},
                        "CLI_CONFIG": {},
                        "SUBCOMMANDS": {},
                    }
                if "CONFIG" in context:
                    pop.dicts.update(ret[name]["CONFIG"], context["CONFIG"])
                if "CLI_CONFIG" in context:
                    pop.dicts.update(ret[name]["CLI_CONFIG"], context["CLI_CONFIG"])
                if "SUBCOMMANDS" in context:
                    pop.dicts.update(ret[name]["SUBCOMMANDS"], context["SUBCOMMANDS"])
                for path in paths:
                    ref = os.path.join(dir_, path.replace(".", os.sep))
                    # Paths from the dyne's own package sort ahead of
                    # app-merged contributions from other packages
                    if dir_.endswith(name):
                        ret[name]["paths"].insert(0, ref)
                    else:
                        ret[name]["paths"].append(ref)
    # Keep the owning package's path first, sort the rest for stable order
    for name in ret:
        if not ret[name]:
            continue
        first = ret[name]["paths"].pop(0)
        ret[name]["paths"] = sorted(ret[name]["paths"])
        ret[name]["paths"].insert(0, first)
    return ret

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
"""
Pop related exceptions
"""
class PopBaseException(Exception):
    """
    Base exception from which all of Pop's exceptions derive
    """
class PopError(PopBaseException):
    """
    General purpose pop exception to signal an error
    """
class PopLoadError(PopBaseException):
    """
    Exception raised when a pop module fails to load
    """
class PopLookupError(PopBaseException):
    """
    Exception raised when a pop module lookup fails
    """
class ContractModuleException(PopBaseException):
    """
    Exception raised when a function specified in a contract as required
    to exist is not found in the loaded module
    """
class ContractFuncException(PopBaseException):
    """
    Exception raised when a name specified in a contract as required
    to exist is found on the module but it is not a function
    """
class ContractSigException(PopBaseException):
    """
    Exception raised when a function signature is not compatible with the
    corresponding function signature found in the contract.
    """
class ProcessNotStarted(PopBaseException):
    """
    Exception raised when failing to start a process on the process manager
    """
class BindError(PopBaseException):
    """
    Exception raised when arguments for a function in a ContractedContext cannot be bound.
    Indicates invalid function arguments.
    """

View File

@@ -0,0 +1,528 @@
# -*- coding: utf-8 -*-
# Import python libs
import os
import importlib.machinery
import inspect
import logging
import secrets
import sys
# Import pop libs
import pop.dirs
import pop.scanner
import pop.loader
import pop.exc
import pop.contract
import pop.verify
from typing import Any, Dict, List, Tuple, Iterator
from types import ModuleType
EXT_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
log = logging.getLogger(__name__)
def ex_path(path: str) -> List[str]:
    """
    Normalize a path argument handed to a Sub into a list of paths.

    Accepts a comma separated string, an already-built list, or None;
    anything else (including None) yields an empty list.
    """
    if isinstance(path, str):
        return path.split(",")
    if isinstance(path, list):
        return path
    return []
class Hub:
    """
    The redistributed pop central hub. All components of the system are
    rooted to the Hub.
    """
    def __init__(self):
        self._subs = {}
        self._sub_alias = {}
        self._dynamic = {}
        self._dscan = False
        # Add the pop sub to the hub, this should always use pypath and
        # Should never be made dynamic. This is a core system sub and should
        # NOT be app-merged
        self._subs["pop"] = Sub(self, "pop", pypath="pop.mods.pop")
        self._iter_subs = sorted(self._subs.keys())
        self._iter_ind = 0
        # Set up the conf OPT structure so it is always available
        self.OPT = {}
    def __getstate__(self) -> Dict:
        # Pickle only the subs; everything else is rebuilt on unpickle
        return dict(_subs=self._subs)
    def __setstate__(self, state: Dict):
        self.__dict__.update(state)
    def __iter__(self) -> Iterator["Sub"]:
        # The local name deliberately shadows the builtin iter(): the call
        # below invokes this generator over the subs dict, sorted by name
        def iter(subs: Dict[str, Sub]):
            for sub in sorted(subs.keys()):
                yield subs[sub]
        return iter(self._subs)
    def _resolve_this(self, levels: int) -> "Hub":
        """
        This function allows for hub to pop introspective calls.
        This should only ever be called from within a hub module, otherwise
        it should stack trace, or return heaven knows what...
        :param levels: The number of frames to search for a hub reference
        """
        if hasattr(
            sys, "_getframe"
        ): # implementation detail of CPython, speeds up things by 100x.
            desired_frame = sys._getframe(3)
            contracted = desired_frame.f_locals["self"]
        else:
            call_frame = inspect.stack(0)[3]
            contracted = call_frame[0].f_locals["self"]
        # The calling Contracted carries its own dotted reference on the hub
        ref = contracted.ref.split(".")
        # (0=module, 1=module's parent etc.)
        level_offset = levels - 1
        traversed = self
        # Walk down the hub, stopping level_offset steps short of the leaf
        for i in range(len(ref) - level_offset):
            traversed = getattr(traversed, ref[i])
        return traversed
    def _remove_subsystem(self, subname: str) -> bool:
        """
        Remove the named subsystem
        :param subname: The name of a subsystem to remove
        :return True if the subsystem was successfully removed, else False
        """
        if subname in self._subs:
            # Remove the subsystem
            self._subs.pop(subname)
            # reset the iterator
            self._iter_subs = sorted(self._subs.keys())
            self._iter_ind = 0
            return True
        return False
    def _scan_dynamic(self):
        """
        Refresh the dynamic roots data used for loading app merge module roots
        """
        self._dynamic = pop.dirs.dynamic_dirs()
        self._dscan = True
    def __getattr__(self, item: str):
        """
        Resolve hub attributes: runs of underscores walk back toward the
        caller's own module tree, dotted names are ref lookups, and bare
        names resolve to subs (or sub aliases).
        """
        if item.startswith("_"):
            # A run of N underscores resolves the caller's tree N-1 levels
            # above its module (hub._ == module, hub.__ == its parent, ...)
            if item == item[0] * len(item):
                return self._resolve_this(len(item))
            else:
                return self.__getattribute__(item)
        if "." in item:
            return self.pop.ref.last(item)
        if item in self._subs:
            return self._subs[item]
        elif item in self._sub_alias:
            resolved = self._sub_alias[item]
            if resolved in self._subs:
                return self._subs[resolved]
        return self.__getattribute__(item)
class Sub:
    """
    The pop object that contains the loaded module data
    """
    def __init__(
        self,
        hub: Hub,
        subname: str,
        root: Hub or Sub = None,
        pypath: List[str] or str = None,
        static: List[str] or str = None,
        contracts_pypath: List[str] or str = None,
        contracts_static: List[str] or str = None,
        default_contracts: List[str] or str = None,
        virtual: bool = True,
        dyne_name: str = None,
        omit_start: Tuple[str] = ("_",),
        omit_end: Tuple[str] = (),
        omit_func: bool = False,
        omit_class: bool = False,
        omit_vars: bool = False,
        mod_basename: str = "",
        stop_on_failures: bool = False,
        init: bool = True,
        is_contract: bool = False,
        sub_virtual: bool = True,
        recursive_contracts_static=None,
        default_recursive_contracts=None,
    ):
        """
        :param hub: The redistributed pop central hub
        :param subname: The name that the sub is going to take on the hub
            if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
        :param root: The parent Hub or Sub this sub is nested under (defaults to the hub)
        :param pypath: One or many python paths which will be imported
        :param static: Directories that can be explicitly passed
        :param contracts_pypath: Load additional contract paths
        :param contracts_static: Load additional contract paths from a specific directory
        :param default_contracts: Specifies that a specific contract plugin will be applied as a default to all plugins
        :param virtual: Toggle whether or not to process __virtual__ functions
        :param dyne_name: The dynamic name to use to look up paths to find plugins -- linked to conf.py
        :param omit_start: Allows you to pass in a tuple of characters that would omit the loading of any object
            I.E. Any function starting with an underscore will not be loaded onto a plugin
            (You should probably never change this)
        :param omit_end: Allows you to pass in a tuple of characters that would omit the loading of an object
            (You should probably never change this)
        :param omit_func: bool: Don't load any functions
        :param omit_class: bool: Don't load any classes
        :param omit_vars: bool: Don't load any vars
        :param mod_basename: str: Manipulate the location in sys.modules that the plugin will be loaded to.
            Allow plugins to be loaded into a separate namespace.
        :param stop_on_failures: If any module fails to load for any reason, stacktrace and do not continue loading this sub
        :param init: bool: determine whether or not we process __init__ functions
        :param is_contract: Specify whether or not this sub is a contract
        :param sub_virtual: bool: Recursively ignore this sub and it's subs
        :param recursive_contracts_static: Contract dirs applied to this sub and every nested sub
        :param default_recursive_contracts: Default contracts applied recursively to nested subs
        """
        self._iter_ind = 0
        self._hub = hub
        self._root = root or hub
        self._subs = {}
        self._alias = []
        self._sub_alias = {}
        self._subname = subname
        # Normalize every path-like argument into a list of strings
        self._pypath = ex_path(pypath)
        self._static = ex_path(static)
        self._contracts_pypath = ex_path(contracts_pypath)
        self._contracts_static = ex_path(contracts_static)
        self._recursive_contracts_static = ex_path(recursive_contracts_static)
        if isinstance(default_contracts, str):
            default_contracts = [default_contracts]
        if isinstance(default_recursive_contracts, str):
            default_recursive_contracts = [default_recursive_contracts]
        self._default_recursive_contracts = default_recursive_contracts or []
        self._default_contracts = default_contracts or ()
        self._dyne_name = dyne_name
        self._virtual = virtual
        self._omit_start = omit_start
        self._sub_virtual = sub_virtual
        self._omit_end = omit_end
        self._omit_func = omit_func
        self._omit_class = omit_class
        self._omit_vars = omit_vars
        self._mod_basename = mod_basename
        self._stop_on_failures = stop_on_failures
        self._is_contract = is_contract
        self._process_init = init
        self._prepare()
    def _prepare(self):
        """
        Resolve module and contract directories and reset the loader state;
        called from __init__ and again after unpickling.
        """
        self._dirs = pop.dirs.dir_list(
            self._subname, "mods", self._pypath, self._static,
        )
        if self._dyne_name:
            self._load_dyne()
        self._contract_dirs = pop.dirs.dir_list(
            self._subname, "contracts", self._contracts_pypath, self._contracts_static,
        )
        self._contract_dirs.extend(pop.dirs.inline_dirs(self._dirs, "contracts"))
        self._recursive_contract_dirs = pop.dirs.dir_list(
            self._subname, "recursive_contracts", [], self._recursive_contracts_static,
        )
        self._recursive_contract_dirs.extend(
            pop.dirs.inline_dirs(self._dirs, "recursive_contracts")
        )
        if self._contract_dirs:
            # Contracts are themselves loaded through a (contract) Sub
            self._contracts = Sub(
                self._hub,
                f"{self._subname}.contracts",
                static=self._contract_dirs,
                is_contract=True,
            )
        else:
            self._contracts = None
        if self._recursive_contract_dirs:
            self._recursive_contracts = Sub(
                self._hub,
                f"{self._subname}.recursive_contracts",
                static=self._recursive_contract_dirs,
                is_contract=True,
            )
        else:
            # Inherit recursive contracts from the parent sub, if any
            self._recursive_contracts = getattr(
                self._root, "_recursive_contracts", None
            )
        self._name_root = self._load_name_root()
        self._scan = pop.scanner.scan(self._dirs)
        self._loaded = {}
        self._vmap = {}
        self._load_errors = {}
        self._loaded_all = False
    def _load_dyne(self):
        """
        Load up the dynamic dirs for this sub
        """
        if not self._hub._dscan:
            self._hub._scan_dynamic()
        for path in self._hub._dynamic.get(self._dyne_name, {}).get("paths", []):
            self._dirs.append(path)
    def _load_name_root(self):
        """
        Generate the root of the name to be used to apply to the loaded modules
        """
        if self._pypath:
            return self._pypath[0]
        elif self._dirs:
            # No python package anchor; use a random unique namespace root
            return secrets.token_hex()
    def __getstate__(self):
        # Pickle only the construction arguments; _prepare() rebuilds the
        # rest of the state in __setstate__
        return dict(
            _hub=self._hub,
            _subname=self._subname,
            _pypath=self._pypath,
            _static=self._static,
            _contracts_pypath=self._contracts_pypath,
            _contracts_static=self._contracts_static,
            _default_contracts=self._default_contracts,
            _virtual=self._virtual,
            _omit_start=self._omit_start,
            _omit_end=self._omit_end,
            _omit_func=self._omit_func,
            _omit_class=self._omit_class,
            _omit_vars=self._omit_vars,
            _mod_basename=self._mod_basename,
            _stop_on_failures=self._stop_on_failures,
        )
    def __setstate__(self, state: Dict):
        self.__dict__.update(state)
        self._prepare()
    def __getattr__(self, item: str):
        """
        If the item should be loaded, load it, else serve it
        """
        if item.startswith("_"):
            return self.__getattribute__(item)
        if "." in item:
            # Dotted references resolve relative to this sub
            return self._hub.pop.ref.last(f"{self._subname}.{item}")
        if item in self._loaded:
            ret = self._loaded[item]
            # If this previously errored on load, try it again,
            # it might be ready to load now
            if isinstance(ret, pop.loader.LoadError):
                ret = self._find_mod(item)
                if isinstance(ret, pop.loader.LoadError):
                    # If this is still a LoadError, process it
                    self._process_load_error(ret)
            return ret
        elif item in self._subs:
            return self._subs[item]
        elif item in self._sub_alias:
            resolved = self._sub_alias[item]
            if resolved in self._subs:
                return self._subs[resolved]
        mod = self._find_mod(item)
        if mod is None:
            raise AttributeError(f"'{self._subname}' has no attribute '{item}'")
        return mod
    def __contains__(self, item: str):
        # A sub "contains" anything that resolves as an attribute on it
        try:
            return hasattr(self, item)
        except pop.exc.PopLookupError:
            return False
    def __iter__(self) -> Iterator["Sub"]:
        self._load_all()
        # The local name deliberately shadows the builtin iter(): the call
        # below invokes this generator over the loaded modules, by name
        def iter(loaded):
            for l in sorted(loaded.keys()):
                yield loaded[l]
        return iter(self._loaded)
    def __next__(self) -> "Sub":
        self._load_all()
        if self._iter_ind == len(self._iter_keys):
            self._iter_ind = 0
            raise StopIteration
        self._iter_ind += 1
        # NOTE(review): _iter_keys is not assigned anywhere in this class --
        # confirm where it is populated before relying on __next__
        return self._loaded[self._iter_keys[self._iter_ind - 1]]
    def _sub_init(self):
        """
        Run load init.py for the sub, running '__init__' function if present
        """
        self._find_mod("init", match_only=True)
    def _process_load_error(
        self, mod: ModuleType, skip_full_stop: bool = False
    ) -> bool:
        """
        Log (and optionally raise) a load error.

        :param mod: A loaded module or a pop.loader.LoadError instance
        :param skip_full_stop: When True, never raise even if this sub was
            configured with stop_on_failures
        :return: True when mod was a hard load error, else False
        """
        if not isinstance(mod, pop.loader.LoadError):
            # This is not a LoadError, return now!
            return False
        if mod.edict["verror"]:
            # __virtual__ declined to load: informational, not a hard error
            error = "{0[msg]}: {0[verror]}".format(mod())
            if skip_full_stop is False and self._stop_on_failures is True:
                raise pop.exc.PopError(error)
            log.info(error)
            return False
        error = "{0[msg]}: {0[exception]!r}".format(mod())
        if mod.traceback:
            error += "\n" + mod.traceback
        if skip_full_stop is False and self._stop_on_failures is True:
            raise pop.exc.PopError(error)
        if mod.traceback:
            log.warning(error)
        else:
            log.info(error)
        return True
    def _find_mod(self, item: str, match_only: bool = False) -> Dict:
        """
        Find the module named item
        :param item: The module to search for (then load) from any scanned interface
        :param match_only: Only load modules whose basename matches item;
            when False, fall back to loading everything scanned
        :return a loaded mod_dict, a LoadError, or None when nothing matches
        """
        for iface in self._scan:
            for bname in self._scan[iface]:
                if os.path.basename(bname) == item:
                    self._load_item(iface, bname)
        if item in self._loaded:
            return self._loaded[item]
        if not match_only:
            # Direct lookup failed; load the remaining modules in case the
            # target is registered under a __virtualname__ alias
            for iface in self._scan:
                for bname in self._scan[iface]:
                    if self._scan[iface][bname].get("loaded"):
                        continue
                    self._load_item(iface, bname)
                    if item in self._loaded:
                        return self._loaded[item]
        # Let's see if the module being lookup is in the load errors dictionary
        if item in self._load_errors:
            # Return the LoadError
            return self._load_errors[item]
    def _load_item(self, iface: str, bname: str):
        """
        Load the named basename
        :param iface: A scanned directory type
        :param bname: The base name of the python path of a module
        """
        if iface not in self._scan:
            raise pop.exc.PopLoadError(
                "Bad call to load item, no iface {}".format(iface)
            )
        if bname not in self._scan[iface]:
            raise pop.exc.PopLoadError(
                "Bad call to load item, no bname {} in iface {}".format(bname, iface)
            )
        # The mname is the name to give the module in python's sys.modules
        # This name must be unique for every loaded module, so we use the full
        # module path sans the file extention
        mname = self._scan[iface][bname]["path"].replace(os.sep, ".")
        mname = mname[mname.index(".") + 1 : mname.rindex(".")].strip(".")
        mod = pop.loader.load_mod(mname, iface, self._scan[iface][bname]["path"],)
        if self._process_load_error(mod):
            # Remember the failure so later lookups can surface/retry it
            self._load_errors[os.path.basename(bname)] = mod
            return
        self._prep_mod(mod, iface, bname)
    def _process_vret(self, vret: Dict[str, Any]) -> bool:
        """
        :param vret: The return from a __virtual__ or __sub_virtual__ function
        :return: True if there was an error, else false
        """
        if "error" in vret:
            # Virtual Errors should not full stop pop
            self._process_load_error(vret["error"], skip_full_stop=True)
            # Store the LoadError under the __virtualname__ if defined
            self._load_errors[vret["vname"]] = vret["error"]
            return True
        else:
            return False
    def _prep_mod(self, mod: ModuleType, iface: str, bname: str):
        """
        Prepare the module!
        :param mod: A python module containing data
        :param iface: A scanned directory type
        :param bname: The base name of the python path of a module
        """
        if not self._sub_virtual:
            return
        else:
            vret = pop.loader.load_sub_virtual(self._hub, self._virtual, mod, bname)
            if self._process_vret(vret):
                # A failed __sub_virtual__ disables this entire sub
                self._sub_virtual = False
                return
        vret = pop.loader.load_virtual(self._hub, self._virtual, mod, bname)
        if self._process_vret(vret):
            return
        contracts = pop.contract.load_contract(
            self._contracts, self._default_contracts, mod, vret["name"]
        )
        # Recursive contracts combine this sub's own with any inherited
        # from the parent; a set dedupes the overlap
        recursive_contracts = set(
            pop.contract.load_contract(
                self._recursive_contracts,
                self._default_recursive_contracts,
                mod,
                vret["name"],
            )
        )
        if getattr(self._root, "_recursive_contracts", None):
            recursive_contracts.update(
                pop.contract.load_contract(
                    self._root._recursive_contracts,
                    self._root._default_recursive_contracts,
                    mod,
                    vret["name"],
                )
            )
        recursive_contracts = list(recursive_contracts)
        name = vret["name"]
        # Strip compiled-extension suffixes (e.g. .so/.pyd) from the name
        if name.endswith(EXT_SUFFIXES):
            for ext in EXT_SUFFIXES:
                if name.endswith(ext):
                    name = name.split(ext)[0]
                    break
        mod_dict = pop.loader.prep_loaded_mod(
            self, mod, name, contracts, recursive_contracts
        )
        if name != "init":
            # Enforce signature contracts against the prepared module
            pop.verify.contract(self._hub, contracts + recursive_contracts, mod_dict)
        self._loaded[name] = mod_dict
        self._vmap[mod.__file__] = name
        # Let's mark the module as loaded
        self._scan[iface][bname]["loaded"] = True
        if self._process_init:
            # Now that the module has been added to the sub, call mod_init
            pop.loader.mod_init(self, mod, name)
    def _load_all(self):
        """
        Load all modules found during the scan.
        .. attention:: This completely disables the lazy loader behavior of pop
        """
        if self._loaded_all is True:
            return
        for iface in self._scan:
            for bname in self._scan[iface]:
                if self._scan[iface][bname].get("loaded"):
                    continue
                self._load_item(iface, bname)
        self._loaded_all = True

View File

@@ -0,0 +1,401 @@
# -*- coding: utf-8 -*-
"""
Load the files detected from the scanner
"""
# Import Python libs
import asyncio
import os
import sys
import inspect
import importlib
import importlib.util
import importlib.machinery
import traceback as stdlib_traceback
import types
from typing import Any, Dict, List, Tuple
# Import pop libs
import pop.exc
import pop.contract
class LoadError(Exception):
    """
    Errors from the loader are contained herein

    A LoadError stands in for a module that failed to load: calling it
    returns the error data, and any attribute access resolves to a callable
    that raises a PopError describing the original failure.
    """
    # NOTE(review): __slots__ has no effect here since Exception instances
    # still carry a __dict__
    __slots__ = ("edict", "traceback")
    def __init__(self, msg, exception=None, traceback=None, verror=None):
        """
        :param msg: Human readable description of the failure
        :param exception: The exception that caused the failure, if any
        :param traceback: The formatted traceback string, if any
        :param verror: The __virtual__ error message when the module
            declined to load
        """
        self.edict = {
            "msg": msg,
            "exception": exception,
            "verror": verror,
        }
        self.traceback = traceback
    def __call__(self):
        """
        Return the error cases
        """
        return self.edict
    def __getattr__(self, attr):
        """
        Resolve any attribute of the failed module to a callable that
        raises the stored load error
        """
        return self.__calling_load_error__
    def __calling_load_error__(
        self, *args, **kwargs
    ): # pylint: disable=unused-argument
        """
        Raise a PopError describing why the module failed to load
        """
        if self.edict["verror"]:
            error = "{0[msg]}: {0[verror]}".format(self())
            raise pop.exc.PopError(error)
        error = "{0[msg]}: {0[exception]!r}".format(self())
        if self.traceback:
            error += "\n" + self.traceback
        raise pop.exc.PopError(error)
    def __repr__(self):
        return "<{} edict={!r}>".format(self.__class__.__name__, self.edict)
def load_mod(modname: str, form: str, path: str) -> "LoadedMod":
    """
    Load a single module by dispatching to the loader function (e.g.
    ``python`` or ``ext``) named by ``form`` in this module.

    :param modname: The name of the module to get from the loader
    :param form: The name of the loader function in this module to use
    :param path: The filesystem path of the module to load
    """
    # Look the loader function up on this very module by name
    this = sys.modules[__name__]
    return getattr(this, form)(modname, path)
def _generate_module(name: str) -> types.ModuleType:
"""
Generate a module at runtime and insert it in sys.modules
:param name: The name of the module to create
"""
if name in sys.modules:
return sys.modules[name]
code = "'''POP sub auto generated parent module for {0}'''".format(
name.split(".")[-1]
)
# Create a new module that is not entered into sys.modules
module = types.ModuleType(name)
exec(code, module.__dict__) # pylint: disable=exec-used
sys.modules[name] = module
return module
def _populate_sys_modules(mod: str):
    """
    This is a hack to populate sys.modules with the modules that pop loads
    while making sure that parent modules have the attribute for the child
    modules.

    :param mod: A dotted module path such as ``"a.b.c"`` -- a string, not a
        module object (the original annotation said "LoadedMod", but the
        value is split on ".")
    """
    mod_parts = mod.split(".")
    imp_mod = mod_parts.pop(0)
    gen_mod = _generate_module(imp_mod)
    # Walk the remaining parts, creating each child module and linking it
    # onto its parent so attribute traversal works (a.b, a.b.c, ...)
    for part in mod_parts:
        imp_mod += "." + part
        gen_child_mod = _generate_module(imp_mod)
        setattr(gen_mod, part, gen_child_mod)
        gen_mod = gen_child_mod
def ext(modname: str, path: str) -> "LoadedMod" or LoadError:
    """
    Attempt to load a compiled extension module.

    :param modname: The dotted name of the module to load (the last segment
        is stripped before loading)
    :param path: The filesystem path of the extension file
    :return: The loaded module, or a LoadError describing the failure
    """
    modname = ".".join(modname.split(".")[:-1])
    if modname in sys.modules:
        return sys.modules[modname]
    # Make sure every parent package exists in sys.modules first
    _populate_sys_modules(modname)
    try:
        efl = importlib.machinery.ExtensionFileLoader(modname, path)
        mod = efl.create_module(importlib.util.find_spec(modname))
        # BUGFIX: exec_module() always returns None, so the original
        # "return efl.exec_module(mod)" returned None instead of the module
        efl.exec_module(mod)
        return mod
    except Exception as exc: # pylint: disable=broad-except
        return LoadError(
            "Failed to load python module {} at path {}".format(modname, path),
            exception=exc,
            traceback=stdlib_traceback.format_exc(),
        )
def python(modname: str, path: str) -> "LoadedMod" or LoadError:
    """
    Attempt to load the named python source module.

    :param modname: The name to register the module under
    :param path: The filesystem path of the source file to load
    :return: The loaded module (or the cached one from sys.modules), or a
        LoadError describing why loading failed
    """
    # An already-imported module is served straight from the cache
    if modname in sys.modules:
        return sys.modules[modname]
    try:
        loader = importlib.machinery.SourceFileLoader(modname, path)
        return loader.load_module()
    except Exception as exc: # pylint: disable=broad-except
        # Any failure is wrapped so the caller can defer error handling
        return LoadError(
            "Failed to load python module {} at path {}".format(modname, path),
            exception=exc,
            traceback=stdlib_traceback.format_exc(),
        )
def _base_name(bname: str, mod: "LoadedMod") -> Tuple[str, str]:
"""
Find the basename and alias of a loader module
:param bname: The base name of the mod's path
:param mod: A loader module or a LoadError if the module didn't load
"""
base_name = os.path.basename(bname)
if "." in base_name:
base_name = base_name.split(".")[0]
return base_name, getattr(mod, "__virtualname__", base_name)
def _load_virtual(
    hub: "pop.hub.Hub",
    virtual: bool,
    mod: "LoadedMod" or LoadError,
    bname: str,
    vtype: str,
) -> Dict[str, Any]:
    """
    Run the virtual function to name the module and check for all loader
    errors
    :param hub: The redistributed pop central hub
    :param virtual: Toggle whether or not to process __virtual__ functions
    :param mod: A loader module or a LoadError if the module didn't load
    :param bname: The base name of the mod's path
    :param vtype: The name of the virtual function to call on the module I.E. __virtual__ or __sub_virtual__
    :return: {"name": ...} on success; {"name", "vname", "error"} on failure
    """
    base_name, name = _base_name(bname, mod)
    if isinstance(mod, LoadError):
        # The mod is a LoadError instance.
        # Return the load error with name as the base_name because another
        # module is still allowed to load under the same __virtualname__
        # but also return the vname information
        return {"name": base_name, "vname": name, "error": mod}
    if not virtual:
        # __virtual__ is not to be processed. Return now!
        return {"name": base_name}
    if not hasattr(mod, vtype):
        # No __virtual__ processing is required.
        # Return the mod's name as the defined __virtualname__ if defined,
        # else, the base_name
        return {"name": name}
    try:
        vret = getattr(mod, vtype)(hub)
        # If the __virtual__ function was asynchronous then run it in an asyncio loop.
        # BUG FIX: get_running_loop() raises RuntimeError when no loop is
        # running, and run_until_complete() raises when one *is* running, so
        # the original get_running_loop().run_until_complete() combination
        # could never succeed. Use get_event_loop() to obtain (or create) a
        # usable loop instead.
        if asyncio.iscoroutine(vret):
            vret = asyncio.get_event_loop().run_until_complete(vret)
    except Exception as exc:  # pylint: disable=broad-except
        err = LoadError(
            "Virtual threw exception in mod {}".format(bname),
            exception=exc,
            traceback=stdlib_traceback.format_exc(),
        )
        # Return the load error with name as the base_name because another
        # module is still allowed to load under the same __virtualname__
        # but also return the vname information
        return {"name": base_name, "vname": name, "error": err}
    # A tuple return is (status, error-message)
    verror = vret
    if isinstance(vret, tuple):
        if len(vret) > 1:
            verror = vret[1]
        vret = vret[0]
    if vret is True:
        # No problems occurred, module is allowed to load
        # Return the mod's name as the defined __virtualname__ if defined,
        # else, the base_name
        return {"name": name}
    if vret is False:
        # __virtual__ explicitly disabled the loading of this module
        err = LoadError("Module {} returned virtual FALSE".format(bname), verror=verror)
        # Return the load error with name as the base_name because another
        # module is still allowed to load under the same __virtualname__
        # but also return the vname information
        return {"name": base_name, "vname": name, "error": err}
    # Anything else besides True/False should be considered a LoadError
    err = LoadError("Module {} returned virtual error".format(bname), verror=verror)
    # Return the load error with name as the base_name because another
    # module is still allowed to load under the same __virtualname__
    # but also return the vname information
    return {"name": base_name, "vname": name, "error": err}
def load_virtual(
    hub: "pop.hub.Hub", virtual: bool, mod: "LoadedMod" or LoadError, bname: str
) -> Dict[str, Any]:
    """
    Run the __virtual__ function to name the module and check for all loader errors
    :param hub: The redistributed pop central hub
    :param virtual: Toggle whether or not to process __virtual__ functions
    :param mod: A loader module or a LoadError if the module didn't load
    :param bname: The base name of the mod's path
    """
    vtype = "__virtual__"
    return _load_virtual(hub, virtual, mod, bname, vtype)
def load_sub_virtual(
    hub: "pop.hub.Hub", virtual: bool, mod: "LoadedMod" or LoadError, bname: str
) -> Dict[str, Any]:
    """
    Run the __sub_virtual__ function to name the module and check for all loader errors
    :param hub: The redistributed pop central hub
    :param virtual: Toggle whether or not to process __virtual__ functions
    :param mod: A loader module or a LoadError if the module didn't load
    :param bname: The base name of the mod's path
    """
    _, vname = _base_name(bname, mod)
    # Only the init module of a sub may declare __sub_virtual__
    if vname == "init":
        return _load_virtual(hub, virtual, mod, bname, "__sub_virtual__")
    return {"name": vname}
def mod_init(sub: "pop.hub.Sub", mod: "LoadedMod", mod_name: str):
    """
    Process module's __init__ function if defined
    :param sub: The pop object that contains the loaded module data
    :param mod: A loader module
    :param mod_name: The name of the module to get from the loader
    """
    if "__init__" in dir(mod):
        # Wrap the module's __init__ as a Contracted so contracts apply to it
        init = pop.contract.Contracted(
            sub._hub,
            contracts=[],
            func=mod.__init__,
            ref=f"{sub._subname}.{mod_name}",
            name="__init__",
        )
        ret = init()
        # If the __init__ function was asynchronous then run it in an asyncio loop.
        # BUG FIX: get_running_loop() raises when no loop is running and
        # run_until_complete() raises when one is, so the original
        # get_running_loop().run_until_complete() could never succeed.
        # Use get_event_loop() to obtain (or create) a usable loop.
        if asyncio.iscoroutine(ret):
            asyncio.get_event_loop().run_until_complete(ret)
def sub_alias(this_sub: "pop.hub.Sub", mod: "LoadedMod", mod_name: str):
    """
    Apply any __sub_alias__ names declared by a sub's init module locally so
    they can be gathered into the higher level object on the hub.
    :param this_sub: The pop object that contains the loaded module data
    :param mod: A loader module
    :param mod_name: The name of the module to get from the loader
    """
    # Aliases may only be declared by the init module of a sub
    if mod_name != "init":
        return
    alias = getattr(mod, "__sub_alias__", [])
    if alias:
        this_sub._alias = alias
def prep_loaded_mod(
    this_sub: "pop.hub.Sub",
    mod: "LoadedMod",
    mod_name: str,
    contracts: List[pop.contract.Wrapper],
    recursive_contracts: List[pop.contract.Wrapper],
) -> "LoadedMod":
    """
    Read the attributes of a python module and create a LoadedMod, which resolves
    aliases and omits objects that should not be exposed.
    :param this_sub: The pop object that contains the loaded module data
    :param mod: A loader module
    :param mod_name: The name of the module to get from the loader
    :param contracts: Contracts functions to add to the sub
    :param recursive_contracts: Contracts inherited from parent subs
    :return: The populated (or updated) LoadedMod for mod_name
    """
    # Re-use an existing LoadedMod so repeated loads accumulate attributes
    lmod = this_sub._loaded.get(mod_name, LoadedMod(mod_name))
    ref = f"{this_sub._subname}.{mod_name}"  # getattr(hub, ref) should resolve to this module
    sub_alias(this_sub, mod, mod_name)
    # __load__ restricts the exposed attributes; otherwise scan everything
    for attr in getattr(mod, "__load__", dir(mod)):
        # __func_alias__ maps an attribute name to its public name
        name = getattr(mod, "__func_alias__", {}).get(attr, attr)
        func = getattr(mod, attr)
        if not this_sub._omit_vars:
            # Anything that is not a function or class is exposed as a var
            if (
                not inspect.isfunction(func)
                and not inspect.isclass(func)
                and type(func).__name__ != "cython_function_or_method"
            ):
                lmod._vars[name] = func
                lmod._attrs[name] = func
                continue
        # Skip names matching the sub's omission prefixes/suffixes
        if attr.startswith(this_sub._omit_start):
            continue
        if attr.endswith(this_sub._omit_end):
            continue
        if (
            inspect.isfunction(func)
            or inspect.isbuiltin(func)
            or type(func).__name__ == "cython_function_or_method"
        ):
            # Wrap every function in a Contracted so contracts are enforced
            obj = pop.contract.create_contracted(
                this_sub._hub, contracts + recursive_contracts, func, ref, name
            )
            if not this_sub._omit_func:
                if this_sub._pypath and not func.__module__.startswith(mod.__name__):
                    # We're only interested in functions defined in this module, not
                    # imported functions
                    continue
                lmod._funcs[name] = obj
                lmod._attrs[name] = obj
        else:
            klass = func
            if not this_sub._omit_class and inspect.isclass(klass):
                # We're only interested in classes defined in this module, not
                # imported classes
                if not klass.__module__.startswith(mod.__name__):
                    continue
                lmod._classes[name] = klass
                lmod._attrs[name] = klass
    return lmod
class LoadedMod(types.ModuleType):
    """
    Module wrapper stored on a sub. Exposes the loaded functions, variables
    and classes through attribute access and supports custom sequencing --
    iterating a LoadedMod yields its functions in sorted name order.
    """

    def __init__(self, name: str):
        super().__init__(name)
        # Separate registries for each kind of exposed attribute
        self._vars = {}
        self._funcs = {}
        self._classes = {}
        self._attrs = {}

    def __getattr__(self, item: str):
        # Only reached for names not found via normal lookup
        if item in self._attrs:
            return self._attrs[item]
        raise AttributeError(item)

    def __iter__(self):
        return iter(self._funcs[key] for key in sorted(self._funcs))

    def __dir__(self):
        # TODO: This should return finite set attrs as well as dunder attrs
        return list(self._attrs) + ["__name__", "_vars", "_funcs", "_classes", "_attrs"]

View File

@@ -0,0 +1,274 @@
# -*- coding: utf-8 -*-
"""
Translate an options data structure into command line args
"""
# Import python libs
import sys
import inspect
import argparse
import functools
import collections
import pop.hub
__virtualname__ = "args"
__contracts__ = [__virtualname__]
class ActionWrapper:
    """
    This class wraps argparse.Action instances in order to mark arguments passed
    on CLI as explicitly passed
    """

    def __init__(self, action):
        # Keep the wrapped Action; update_wrapper mirrors its metadata so the
        # wrapper presents itself like the original action object
        self._action = action
        functools.update_wrapper(self, action)

    def __call__(self, parser, namespace, values, option_string):
        # Let's store the call to this option as an explicit CLI call for later
        # use when overwriting any configuration settings on file with those
        # from CLI
        if getattr(parser, "_explicit_cli_args_", None) is None:
            setattr(parser, "_explicit_cli_args_", set())
        parser._explicit_cli_args_.add(
            self._action.dest
        )  # pylint: disable=protected-access
        # Carry on regular operation
        return self._action(parser, namespace, values, option_string)

    def __getattribute__(self, name):
        # Only "_action" resolves on the wrapper itself; every other
        # attribute lookup is delegated to the wrapped Action instance
        if name == "_action":
            return object.__getattribute__(self, name)
        # Proxy any attribute's search to the _action instance
        return getattr(self._action, name)

    def __repr__(self):
        return repr(self._action)
class ActionClassWrapper:
    """
    This class wraps argparse.Action classes in order to mark arguments passed
    on CLI as explicitly passed
    """

    def __init__(self, klass):
        # The wrapped Action class (not an instance)
        self._klass = klass

    def __call__(self, *args, **kwargs):
        # Instantiating the "class" yields an ActionWrapper around a real
        # instance, so every CLI invocation gets recorded
        return ActionWrapper(self._klass(*args, **kwargs))

    def __repr__(self):
        return repr(self._klass)

    def __getattribute__(self, name):
        # Only "_klass" resolves on the wrapper; everything else is proxied
        if name == "_klass":
            return object.__getattribute__(self, name)
        # Proxy any attributes search to the _klass instance
        return getattr(self._klass, name)
class ArgumentParser(argparse.ArgumentParser):
    """
    An ArgumentParser that records which options were explicitly passed on
    the command line, exposing them on the namespace as _explicit_cli_args_.
    """

    def register(self, name, value, obj):  # pylint: disable=arguments-differ
        # Wrap every registered action class so each CLI invocation of an
        # option records its dest for later config-overriding decisions
        if name != "action":
            return super().register(name, value, obj)
        return super().register(name, value, ActionClassWrapper(obj))

    def parse_known_args(self, args=None, namespace=None):
        namespace, extras = super().parse_known_args(args, namespace)
        if "_explicit_cli_args_" not in namespace:
            setattr(namespace, "_explicit_cli_args_", set())
        namespace._explicit_cli_args_.update(
            getattr(self, "_explicit_cli_args_", set())
        )
        return namespace, extras
def __init__(hub: "pop.hub.Hub"):
    """
    Prepare the in-memory storage that the argparse wrapper functions of
    this plugin share.
    """
    hub.conf._mem["args"] = {}
def _init_parser(hub: "pop.hub.Hub", opts):
    """
    Lazily create the shared ArgumentParser, honoring any ``_argparser_``
    constructor options supplied in the opts dict.
    """
    mem = hub.conf._mem["args"]
    if "parser" not in mem:
        # Instantiate the parser
        mem["parser"] = ArgumentParser(**opts.get("_argparser_", {}))
def _keys(opts):
    """
    Return the option names in display order: sorted by ``display_priority``
    (missing priorities sort last). OrderedDicts keep insertion order as the
    tiebreak; plain dicts fall back to alphabetical.
    """

    def _prio(key):
        return opts[key].get("display_priority", sys.maxsize)

    if isinstance(opts, collections.OrderedDict):
        # Stable sort preserves insertion order among equal priorities
        return sorted(list(opts), key=_prio)
    return sorted(opts, key=lambda k: (_prio(k), k))
def subs(hub: "pop.hub.Hub", opts):
    """
    Set up sub parsers; if using sub parsers this needs to be called
    before calling setup.
    opts dict:
    <sub_title>:
    [desc]: 'Some subparser'
    help: 'subparser!'
    """
    _init_parser(hub, opts)
    mem = hub.conf._mem["args"]
    mem["sub"] = mem["parser"].add_subparsers(dest="_subparser_")
    mem["subs"] = {}
    for arg in _keys(opts):
        if arg == "_argparser_":
            continue
        comps = opts[arg]
        kwargs = {}
        if "help" in comps:
            kwargs["help"] = comps["help"]
        if "desc" in comps:
            kwargs["description"] = comps["desc"]
        mem["subs"][arg] = mem["sub"].add_parser(arg, **kwargs)
    return {"result": True, "return": True}
def setup(hub: "pop.hub.Hub", opts):
    """
    Take in a pre-defined opts dict and translate it to args
    opts dict:
    <arg>:
    [group]: foo
    [default]: bar
    [action]: store_true
    [options]: # arg will be turned into --arg
    - '-A'
    - '--cheese'
    [choices]:
    - foo
    - bar
    - baz
    [nargs]: +
    [type]: int
    [dest]: cheese
    help: Some great help message

    :return: {"result": True, "return": mapping of dest -> declared default}
    """
    _init_parser(hub, opts)
    defaults = {}
    groups = {}
    ex_groups = {}
    for arg in _keys(opts):
        if arg in ("_argparser_",):
            continue
        comps = opts[arg]
        # NOTE: pop() mutates the caller's comps dict
        positional = comps.pop("positional", False)
        if positional:
            args = [arg]
        else:
            # Build the option strings: the arg name becomes --arg-name, any
            # extra "options" entries are sorted into short/long flags
            long_opts = ["--{}".format(arg.replace("_", "-"))]
            short_opts = []
            for o_str in comps.get("options", []):
                if not o_str.startswith("--") and o_str.startswith("-"):
                    short_opts.append(o_str)
                    continue
                long_opts.append(o_str)
            args = short_opts + long_opts
        kwargs = {}
        kwargs["action"] = action = comps.get("action", None)
        if action is None:
            # Non existing option defaults to a StoreAction in argparse
            action = hub.conf._mem["args"]["parser"]._registry_get(
                "action", action
            )  # pylint: disable=protected-access
        # Introspect the action's __init__ to learn which keywords it accepts
        if isinstance(action, str):
            signature = inspect.signature(
                hub.conf._mem["args"]["parser"]._registry_get("action", action).__init__
            )  # pylint: disable=protected-access
        else:
            signature = inspect.signature(action.__init__)
        # Copy over only the comps entries the action's signature accepts
        for param in signature.parameters:
            if param == "self" or param not in comps:
                continue
            if param == "dest":
                kwargs["dest"] = comps.get("dest", arg)
                continue
            if param == "help":
                kwargs["help"] = comps.get("help", "THIS NEEDS SOME DOCUMENTATION!!")
                continue
            if param == "default":
                # Remember declared defaults so the reader can distinguish
                # them from values explicitly passed on the CLI
                defaults[comps.get("dest", arg)] = comps[param]
            kwargs[param] = comps[param]
        # The argument lands in exactly one place: a named group, a mutually
        # exclusive group, one or more subparsers, or the top-level parser
        if "group" in comps:
            group = comps["group"]
            if group not in groups:
                groups[group] = hub.conf._mem["args"]["parser"].add_argument_group(
                    group
                )
            groups[group].add_argument(*args, **kwargs)
            continue
        if "ex_group" in comps:
            group = comps["ex_group"]
            if group not in ex_groups:
                ex_groups[group] = hub.conf._mem["args"][
                    "parser"
                ].add_mutually_exclusive_group()
            ex_groups[group].add_argument(*args, **kwargs)
            continue
        if "sub" in comps:
            subs = comps["sub"]
            if not isinstance(subs, list):
                subs = [subs]
            for sub in subs:
                sparse = hub.conf._mem["args"]["subs"].get(sub)
                if not sparse:
                    # Maybe raise exception here? Malformed config?
                    continue
                sparse.add_argument(*args, **kwargs)
            continue
        hub.conf._mem["args"]["parser"].add_argument(*args, **kwargs)
    return {"result": True, "return": defaults}
def parse(
    hub: "pop.hub.Hub", args=None, namespace=None, only_parse_known_arguments=False
):
    """
    Run the shared parser over the command line (or the given args) and
    return the parsed options as a dict. When only_parse_known_arguments is
    set, unrecognized arguments are collected under ``_unknown_args_``.
    """
    parser = hub.conf._mem["args"]["parser"]
    if only_parse_known_arguments:
        opts, unknown = parser.parse_known_args(args, namespace)
        opts_dict = opts.__dict__
        opts_dict["_unknown_args_"] = unknown
    else:
        opts_dict = parser.parse_args(args, namespace).__dict__
    return {"result": True, "return": opts_dict}
def render(hub: "pop.hub.Hub", defaults, cli_opts, explicit_cli_args):
    """
    For options specified as such, take the string passed into the cli and
    render it using the render plugin named in the option's "render" key.
    Only explicitly-passed CLI values are rendered.
    """
    for key in explicit_cli_args:
        renderer = defaults.get(key, {}).get("render")
        if not renderer:
            continue
        ref = f"conf.{renderer}.render"
        cli_opts[key] = hub.pop.ref.last(ref)(cli_opts[key])
    return cli_opts

View File

@@ -0,0 +1,56 @@
"""
Used to take care of the options that end in `_dir`. The assumption is that
`_dir` options need to be treated differently. They need to verified to exist
and they need to be rooted based on the user, root option etc.
"""
# Import python libs
import os
import pop.hub
def roots(hub: "pop.hub.Hub", default_root, f_opts, root_dir):
    """
    Detect the root dir data and apply it
    :param default_root: The declared default for the root_dir option
    :param f_opts: {import_name: {option_key: value}} resolved config; *_dir
        values are rewritten in place
    :param root_dir: The root_dir value from the active config, if any
    """
    os_root = os.path.abspath(os.sep)
    root = os_root
    change = False
    non_priv = False
    if hasattr(os, "geteuid"):
        if not os.geteuid() == 0:
            # Non-root user: dirs get relocated under ~/.{import}/
            change = True
            non_priv = True
    if root_dir and root_dir != default_root:
        # A custom root_dir was configured; re-root all *_dir options there
        root = root_dir
        change = True
    if not root.endswith(os.sep):
        root = f"{root}{os.sep}"
    if change:
        for imp in f_opts:
            for key in f_opts[imp]:
                if key == "root_dir":
                    continue
                if key.endswith("_dir"):
                    if non_priv:
                        # NOTE(review): assumes HOME is set in the
                        # environment -- confirm for daemonized use
                        root = os.path.join(os.environ["HOME"], f".{imp}{os.sep}")
                    if imp in f_opts[imp][key]:
                        # Strip everything up to and including the import
                        # name segment, then re-anchor the path at os_root
                        a_len = len(imp) + 1
                        f_opts[imp][
                            key
                        ] = f"{os_root}{f_opts[imp][key][f_opts[imp][key].index(imp)+a_len:]}"
                    # Re-root the path by replacing the leading os_root once
                    f_opts[imp][key] = f_opts[imp][key].replace(os_root, root, 1)
def verify(hub: "pop.hub.Hub", opts):
    """
    Verify that every configured ``*_dir`` directory exists, creating any
    that are missing. ``root_dir`` and ``config_dir`` are exempt.
    """
    exempt = ("root_dir", "config_dir")
    for key in opts:
        if key in exempt:
            continue
        if not key.endswith("_dir"):
            continue
        if not os.path.isdir(opts[key]):
            os.makedirs(opts[key])

View File

@@ -0,0 +1,120 @@
# -*- coding: utf-8 -*-
"""
Configuration file core loading functions
"""
# Import python libs
import os
import glob
import fnmatch
import pop.hub
__virtualname__ = "file"
__contracts__ = [__virtualname__]
def load_file(hub: "pop.hub.Hub", paths, defaults=None, overrides=None, includes=True):
    """
    Load configuration from one or more files. *paths* may be a list or a
    comma separated string; each entry is glob-expanded. Later files win on
    key conflicts; *defaults* seed the result and *overrides* are applied
    last. When *includes* is set, include/include_dir directives are
    processed.
    """
    opts = {}
    if isinstance(defaults, dict):
        opts.update(defaults)
    if not isinstance(paths, list):
        paths = paths.split(",")
    globbed = []
    for fn_ in paths:
        globbed.extend(glob.glob(fn_))
    paths.extend(globbed)
    loader = hub.conf._loader
    for fn_ in paths:
        if loader == "yaml":
            opts.update(hub.conf.yaml.load(fn_))
        elif loader == "json":
            opts.update(hub.conf.json.load(fn_))
        elif loader == "toml":
            opts.update(hub.conf.toml.load(fn_))
    if includes:
        hub.conf.file.proc_include(opts)
    if isinstance(overrides, dict):
        opts.update(overrides)
    return opts
def load_dir(
    hub,
    confdir,
    defaults=None,
    overrides=None,
    includes=True,
    recurse=False,
    pattern=None,
):
    """
    Load takes a directory location to scan for configuration files. These
    files will be read in. The defaults dict defines what
    configuration options should exist if not found in the confdir. Overrides
    are configuration options which should be included regardless of whether
    those options existed before. If includes is set to True, then the
    statements 'include' and 'include_dir' found in either the defaults or
    in configuration files are processed.
    :param confdir: A directory (or list / comma-separated string of
        directories, glob patterns allowed) to scan
    :param recurse: Walk into subdirectories when True
    :param pattern: An optional fnmatch pattern files must match
    """
    opts = {}
    if not isinstance(confdir, list):
        confdir = confdir.split(",")
    confdirs = []
    for dirs in confdir:
        if not isinstance(dirs, (list, tuple)):
            dirs = [dirs]
        for dir_ in dirs:
            confdirs.extend(glob.glob(dir_))
    if isinstance(defaults, dict):
        opts.update(defaults)
    paths = []
    for dir_ in confdirs:
        dirpaths = []
        if os.path.isdir(dir_):
            if not recurse:
                for fn_ in os.listdir(dir_):
                    path = os.path.join(dir_, fn_)
                    if os.path.isdir(path):
                        # Don't process directories
                        continue
                    if pattern and not fnmatch.fnmatch(fn_, pattern):
                        continue
                    dirpaths.append(path)
            else:
                for root, _dirs, files in os.walk(dir_):
                    for fn_ in files:
                        path = os.path.join(root, fn_)
                        if pattern and not fnmatch.fnmatch(fn_, pattern):
                            continue
                        dirpaths.append(path)
        # Sort confdir directory paths like:
        # /b.txt
        # /c.txt
        # /a/x.txt
        # /b/x.txt
        paths.extend(sorted(dirpaths, key=lambda p: (p.count(os.path.sep), p)))
    # BUG FIX: ``includes`` was previously passed positionally, which landed
    # it in load_file's ``defaults`` parameter -- the flag was silently
    # ignored and include processing could never be disabled from here.
    opts.update(hub.conf.file.load_file(paths, includes=includes))
    if isinstance(overrides, dict):
        opts.update(overrides)
    return opts
def proc_include(hub: "pop.hub.Hub", opts):
    """
    Recursively expand ``include`` and ``include_dir`` directives found in
    the opts dict, merging the referenced files and directories into it.
    """
    again = False
    if opts.get("include_dir"):
        opts.update(hub.conf.file.load_dir(opts.pop("include_dir")))
        again = True
    if opts.get("include"):
        opts.update(hub.conf.file.load_file(opts.pop("include")))
        again = True
    if again:
        # Newly merged data may itself carry include directives
        hub.conf.file.proc_include(opts)
    return opts

View File

@@ -0,0 +1,6 @@
def __init__(hub):
    """
    Load the subdirs for conf
    """
    # ``hub.__`` refers to this plugin's own sub (conf) on the hub;
    # _mem is the shared scratch space used by the conf plugins
    hub.__._mem = {}
    hub.pop.sub.load_subdirs(hub.conf)

View File

@@ -0,0 +1,141 @@
"""
Integrate is used to pull config data from multiple sources and merge it into
the hub. Once it is merged then when a sub is loaded the respective config data
is loaded into the sub as `OPTS`
"""
# Take an *args list of modules to import and look for conf.py
# Import conf.py if present
# After gathering all dicts, modify them to merge CLI options
#
# Import python libs
import importlib
import copy
import os
def _ex_final(confs, final, override, key_to_ref, ops_to_ref):
    """
    Scan the configuration datasets, create the final config
    value, and detect collisions
    :param confs: {import_name: {option_key: definition}} gathered configs
    :param final: out-param; merged option definitions keyed by final key
    :param override: {"<import>.<key>": {"key": ..., "options": [...]}} remaps
    :param key_to_ref: out-param; final key -> list of "<import>.<key>" refs
    :param ops_to_ref: out-param; cli option string -> list of refs
    """
    for arg in confs:
        for key in confs[arg]:
            ref = f"{arg}.{key}"
            if ref in override:
                s_key = override[ref]["key"]
                s_opts = override[ref]["options"]
            else:
                s_key = key
                s_opts = confs[arg][key].get("options", [])
            s_opts.append(f"--{s_key}")
            final[s_key] = confs[arg][key]
            if s_opts:
                final[s_key]["options"] = s_opts
            if s_key in key_to_ref:
                key_to_ref[s_key].append(ref)
            else:
                key_to_ref[s_key] = [ref]
            for opt in s_opts:
                if opt in ops_to_ref:
                    ops_to_ref[opt].append(ref)
                else:
                    # BUG FIX: this previously rebound the local name
                    # (``ops_to_ref = [ref]``) instead of assigning the dict
                    # entry, discarding the caller's mapping and corrupting
                    # option-collision detection for all later options.
                    ops_to_ref[opt] = [ref]
def load(
    hub,
    imports,
    override=None,
    cli=None,
    roots=False,
    loader="json",
    logs=True,
    version=True,
):
    """
    This function takes a list of python packages to load and look for
    respective configs. The configs are then loaded in a non-collision
    way modifying the cli options dynamically.
    The args look for the named <package>.conf python module and then
    looks for dictionaries named after the following convention:
    override = {'<package>.key': 'key': 'new_key', 'options': ['--option1', '--option2']}
    CONFIG: The main configuration for this package - loads to hub.OPT['<import>']
    CLI_CONFIG: Loaded only if this is the only import or if specified in the cli option
    SUBS: Used to define the subcommands, only loaded if this is the cli config
    """
    if override is None:
        override = {}
    if isinstance(imports, str):
        if cli is None:
            cli = imports
        imports = [imports]
    primary = imports[0] if cli is None else cli
    confs = {}
    final = {}
    collides = []
    key_to_ref = {}
    ops_to_ref = {}
    subs = {}
    for imp in imports:
        try:
            cmod = importlib.import_module(f"{imp}.conf")
        except ImportError:
            continue
        if hasattr(cmod, "CONFIG"):
            # Deepcopy so later mutation never leaks into the source module
            confs[imp] = copy.deepcopy(cmod.CONFIG)
        if cli == imp:
            if hasattr(cmod, "CLI_CONFIG"):
                confs[imp].update(copy.deepcopy(cmod.CLI_CONFIG))
            if hasattr(cmod, "SUBS"):
                subs = copy.deepcopy(cmod.SUBS)
    if logs:
        # Logging options are defaults: the primary project's config wins
        lconf = hub.conf.log.init.conf(primary)
        lconf.update(confs[primary])
        confs[primary] = lconf
    if version:
        # FIX: deepcopy the shared CONFIG dict so the in-place update below
        # does not pollute hub.conf.version.CONFIG for subsequent loads
        vconf = copy.deepcopy(hub.conf.version.CONFIG)
        vconf.update(confs[primary])
        confs[primary] = vconf
    _ex_final(confs, final, override, key_to_ref, ops_to_ref)
    # Any option string or final key claimed by more than one ref collides
    for opt, refs in ops_to_ref.items():
        if len(refs) > 1:
            collides.append({opt: refs})
    for key, refs in key_to_ref.items():
        if len(refs) > 1:
            collides.append({key: refs})
    if collides:
        raise KeyError(collides)
    opts = hub.conf.reader.read(final, subs, loader=loader)
    # This will be put into an immutable data type before it is passed on
    f_opts = {}
    for key in opts:
        if key == "_subparser_":
            f_opts["_subparser_"] = opts["_subparser_"]
            continue
        # Fan the flat option back out to every import that declared it
        for ref in key_to_ref[key]:
            imp = ref[: ref.rindex(".")]
            local_key = ref[ref.rindex(".") + 1 :]
            if imp not in f_opts:
                f_opts[imp] = {}
            f_opts[imp][local_key] = opts[key]
    if roots:
        root_dir = f_opts.get(cli, {}).get("root_dir")
        hub.conf.dirs.roots(
            final.get("root_dir", {}).get("default", os.path.abspath(os.sep)),
            f_opts,
            root_dir,
        )
    for imp in f_opts:
        hub.conf.dirs.verify(f_opts[imp])
    hub.OPT = hub.pop.data.imap(f_opts)
    if logs:
        log_plugin = hub.OPT[primary].get("log_plugin")
        getattr(hub, f"conf.log.{log_plugin}.setup")(hub.OPT[primary])
    if hub.OPT[primary].get("version"):
        hub.conf.version.run(primary)

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
"""
Define the JSON loader interface
"""
# Import python libs
import json
import pop.hub
__virtualname__ = "json"
__contracts__ = [__virtualname__]
def __virtual__(hub):
    """
    The JSON loader relies only on the standard library, so it always loads.
    """
    return True
def load(hub: "pop.hub.Hub", path):
    """
    Read the file at *path* and parse it as JSON. A missing file yields an
    empty dict instead of raising.
    """
    try:
        with open(path, "r") as fp_:
            return json.loads(fp_.read())
    except FileNotFoundError:
        return {}
def render(hub: "pop.hub.Hub", val):
    """
    Parse the given string as JSON and return the resulting data structure.
    """
    return json.loads(val)

View File

@@ -0,0 +1,23 @@
# Import python libs
import logging
import pop.hub
from typing import Any, Dict
def setup(hub: "pop.hub.Hub", conf: Dict[str, Any]):
"""
Given the configuration data set up the logger
"""
level = hub.conf.log.LEVELS.get(conf["log_level"].lower(), logging.INFO)
root = logging.getLogger("")
root.setLevel(level)
cf = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
ch = logging.StreamHandler()
ch.setLevel(level)
ch.setFormatter(cf)
root.addHandler(ch)
ff = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
fh = logging.FileHandler(conf["log_file"])
fh.setLevel(level)
fh.setFormatter(ff)
root.addHandler(fh)

View File

@@ -0,0 +1,62 @@
"""
This sub is used to set up logging for pop projects and injects logging
options into conf making it easy to add robust logging
"""
# Import python libs
import logging
import pop.hub
from typing import Any, Dict
def __init__(hub: "pop.hub.Hub"):
    """
    Initialize the log subsystem by publishing the mapping of level names to
    stdlib logging levels on the hub.
    """
    levels = {
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warning": logging.WARNING,
        "error": logging.ERROR,
        "critical": logging.CRITICAL,
    }
    hub.conf.log.LEVELS = levels
def conf(hub: "pop.hub.Hub", name: str) -> Dict[str, Any]:
    """
    Build the default logging configuration options for the project *name*;
    these defaults are merged OVER by the loaded config dict(s).
    """
    # TODO: Make this more robust to handle more logging interfaces
    group = "Logging Options"
    ldict = {}
    ldict["log_file"] = {
        "default": f"{name}.log",
        "help": "The location of the log file",
        "group": group,
    }
    ldict["log_level"] = {
        "default": "warning",
        "help": "Set the log level, either quiet, info, warning, or error",
        "group": group,
    }
    ldict["log_fmt_logfile"] = {
        "default": "%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s] %(message)s",
        "help": "The format to be given to log file messages",
        "group": group,
    }
    ldict["log_fmt_console"] = {
        "default": "[%(levelname)-8s] %(message)s",
        "help": "The log formatting used in the console",
        "group": group,
    }
    ldict["log_datefmt"] = {
        "default": "%H:%M:%S",
        "help": "The date format to display in the logs",
        "group": group,
    }
    ldict["log_plugin"] = {
        "default": "basic",
        "help": "The logging plugin to use",
        "group": group,
    }
    return ldict

View File

@@ -0,0 +1,37 @@
"""
The os module is used to gather configuration options from the OS facility
to send configuration options into applications. In the case of Unix like
systems this translates to the environment variables. On Windows systems
this translates to the registry.
"""
# Import python libs
import os
import pop.hub
__virtualname__ = "os"
def __virtual__(hub):
    """
    Don't load on Windows; this plugin targets *nix style platforms where
    config comes from environment variables.
    """
    # TODO: detect if windows
    return True
def gather(hub: "pop.hub.Hub", defaults):
    """
    Collect option values from environment variables for every option that
    declares an ``os`` key. ``os: True`` uses the option name itself; a
    string value names the variable directly. Names are uppercased before
    the environment lookup.
    """
    ret = {}
    for key, comps in defaults.items():
        if "os" not in comps:
            continue
        env_key = key if comps["os"] is True else comps["os"]
        env_key = env_key.upper()
        if env_key in os.environ:
            ret[key] = os.environ[env_key]
    return ret

View File

@@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
"""
The reader module is used to read the config data. This will read in cli
arguments and merge them with config fie arguments.
"""
# Import python libs
import warnings
# Priority order: cli, config, cli_defaults
__virtualname__ = "reader"
__contracts__ = [__virtualname__]
def _merge_dicts(opts, updates, os_opts, explicit_cli_args):
    """
    recursively merge updates into opts

    Effective precedence (highest first): explicitly-passed CLI values,
    config-file values (already present in *opts*), non-empty environment
    values (*os_opts*), then CLI defaults from *updates*.
    :param opts: config-file options, mutated in place and returned
    :param updates: parsed CLI options (values may be argparse defaults)
    :param os_opts: values gathered from the environment
    :param explicit_cli_args: dests the user actually passed on the CLI
    """
    # Environment values override config-file values, but only for keys the
    # config already defines and only when the env value is non-empty
    for key, val in os_opts.items():
        if not val:
            # Don't use empty os vals
            continue
        if key in opts:
            opts[key] = val
    for key, val in updates.items():
        if isinstance(val, dict) and isinstance(opts.get(key), dict):
            # NOTE(review): the full os_opts mapping is re-applied at every
            # nesting level -- confirm nested keys that share a top-level
            # name are meant to pick up the same environment values
            _merge_dicts(opts.get(key, {}), val, os_opts, explicit_cli_args)
        elif val is not None:
            if key not in opts:
                # The key is not in opts(from config file), let's add it
                opts[key] = val
                continue
            # We already have a value for the key in opts
            if opts[key] == val:
                # The value is the same, carry on
                continue
            if key in explicit_cli_args:
                # We have a value for the key in opts(from config file) but
                # this option was explicitly passed on the CLI, ie, it's not
                # a default value.
                # Overwrite what's in opts
                opts[key] = val
                continue
    return opts
def read(
    hub,
    defaults,
    subs=None,
    loader="json",
    process_cli=True,
    process_cli_known_args_only=False,
    args=None,
    namespace=None,
):
    """
    Pass in the default options dict to use
    :param defaults: the declared option definitions to set up and read
    :param subs: optional subparser definitions, set up before the options
    :param loader: which file loader plugin to use (json/yaml/toml)
    :param process_cli: Process the passed args or sys.argv
    :param process_cli_known_args_only: Tells the ArgumentParser to only process known arguments
    :param args: Arguments to pass to ArgumentParser
    :param namespace: argparse.Namespace to pass to ArgumentParser
    :return: options
    """
    msg = "Pop-config is the new means to load configs in pop, reader.read will be removed in pop 13"
    warnings.warn(msg, DeprecationWarning, stacklevel=2)
    hub.conf._loader = loader
    if subs:
        hub.conf.args.subs(subs)
    # setup returns the declared defaults; parse returns what the CLI gave us
    opts = hub.conf.args.setup(defaults)["return"]
    os_opts = hub.conf.os.gather(defaults)
    if process_cli is True:
        cli_opts = hub.conf.args.parse(args, namespace, process_cli_known_args_only)[
            "return"
        ]
    else:
        cli_opts = {}
    # Track which options were genuinely typed on the CLI (vs defaults)
    explicit_cli_args = cli_opts.pop("_explicit_cli_args_", set())
    cli_opts = hub.conf.args.render(defaults, cli_opts, explicit_cli_args)
    kwargs = {}
    # Due to the order of priorities and the representation of defaults in the
    # Argparser we need to manually check if the config option values are from
    # the cli or from defaults
    f_func = False
    if "config_dir" in cli_opts:
        # Prefer the CLI-supplied location, fall back to the declared default
        if cli_opts["config_dir"]:
            kwargs["confdir"] = cli_opts["config_dir"]
        else:
            kwargs["confdir"] = opts["config_dir"]
        if "config_recurse" in cli_opts:
            if cli_opts["config_recurse"]:
                kwargs["recurse"] = cli_opts["config_recurse"]
            else:
                kwargs["recurse"] = opts["config_recurse"]
        # If the config_dir configuration dictionary provides a configuration
        # file pattern to read, pass it along
        kwargs["pattern"] = defaults["config_dir"].get("pattern")
        f_func = hub.conf.file.load_dir
    elif "config" in cli_opts:
        if cli_opts["config"]:
            kwargs["paths"] = cli_opts["config"]
        else:
            kwargs["paths"] = opts["config"]
        f_func = hub.conf.file.load_file
    # Render args before config parsing
    if f_func:
        f_opts = f_func(**kwargs)
        opts.update(f_opts)
        return _merge_dicts(opts, cli_opts, os_opts, explicit_cli_args)
    else:
        return _merge_dicts(opts, cli_opts, os_opts, explicit_cli_args)

View File

@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
"""
Define the yaml loader interface
"""
import pop.hub
# Import third party libs
try:
import toml
HAS_TOML = True
except ImportError:
HAS_TOML = False
__virtualname__ = "toml"
# __contracts__ = [__virtualname__]
def __virtual__(hub: "pop.hub.Hub"):
    """
    Only load this plugin when the third-party ``toml`` package imported at
    module scope is available.
    """
    if not HAS_TOML:
        return (False, "TOML could not be loaded")
    return True
def load(hub: "pop.hub.Hub", path):
    """
    Read the file at *path* and parse it as TOML. A missing file yields an
    empty dict instead of raising.
    """
    try:
        # BUG FIX: the file was opened in binary mode and the raw bytes were
        # passed to toml.load(), which expects a filename, list, or file
        # object -- every parse attempt failed. Read the text and parse it
        # with toml.loads() instead.
        with open(path, "r") as fp_:
            return toml.loads(fp_.read())
    except FileNotFoundError:
        pass
    return {}
def render(hub: "pop.hub.Hub", val):
    """
    Parse the given string as TOML and return the resulting data structure.
    """
    return toml.loads(val)

View File

@@ -0,0 +1,25 @@
"""
Support embedding version number lookup into cli
"""
# IMport python libs
import importlib
import pop.hub
import sys
# Config schema merged in by conf.integrate so every project gets a
# ``--version`` flag (store_true, off by default).
CONFIG = {
    "version": {
        "default": False,
        "action": "store_true",
        "help": "Display version information",
    }
}
def run(hub: "pop.hub.Hub", primary):
    """
    Print the version of the *primary* project and exit the process.
    Imports ``<primary>.version`` and reads its ``version`` attribute.
    """
    mod = importlib.import_module(f"{primary}.version")
    print(f"{primary} {mod.version}")
    sys.exit(0)

View File

@@ -0,0 +1,42 @@
# -*- coding: utf-8 -*-
"""
Define the yaml loader interface
"""
# Import third party libs
import pop.hub
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
__virtualname__ = "yaml"
__contracts__ = [__virtualname__]
def __virtual__(hub):
    """
    Only load this plugin when the third-party ``yaml`` package imported at
    module scope is available.
    """
    if not HAS_YAML:
        return (False, "PyYaml could not be loaded")
    return True
def load(hub: "pop.hub.Hub", path):
    """
    Read the file at *path* and parse it with yaml.safe_load. A missing file
    yields an empty dict instead of raising.
    """
    try:
        with open(path, "rb") as fp_:
            return yaml.safe_load(fp_.read())
    except FileNotFoundError:
        return {}
def render(hub: "pop.hub.Hub", val):
    """
    Parse the given string as YAML (safe_load) and return the result.
    """
    return yaml.safe_load(val)

View File

@@ -0,0 +1,23 @@
"""
Convenience wrappers to make using the conf system as easy and seamless as possible
"""
import pop.hub
from typing import Any, Dict, List
def integrate(
    hub: "pop.hub.Hub",
    imports: List[str] or str,
    override: Dict[str, Any] = None,
    cli: str = None,
    roots: bool = None,
    loader: str = "json",
    logs: bool = True,
):
    """
    Convenience wrapper: add the conf sub to the hub, then run the config
    integrate sequence with the supplied options.
    """
    hub.pop.sub.add("pop.mods.conf")
    hub.conf.integrate.load(
        imports, override, cli=cli, roots=roots, loader=loader, logs=logs
    )

View File

@@ -0,0 +1,18 @@
import pop.hub
from typing import List
def load(
    hub: "pop.hub.Hub",
    sources: List[str],
    cli: str = None,
    dyne_name: str = None,
    loader: str = "yaml",
    parse_cli: bool = True,
):
    """
    Use the pop-config system to load up a fresh configuration for this project
    from the included conf.py file.

    Convenience wrapper: adds the "config" dyne onto the hub and delegates to
    ``hub.config.integrate.load`` with the same arguments.
    """
    hub.pop.sub.add(dyne_name="config")
    hub.config.integrate.load(sources, cli, dyne_name, loader, parse_cli)

View File

@@ -0,0 +1,376 @@
import collections
import collections.abc as abc
import copy
import inspect
import logging
import pop.contract as contract
import pop.hub
import sys
from typing import Any, Dict, Iterable, Iterator, List
log = logging.getLogger(__name__)
__func_alias__ = {
"immutable_namespaced_map": "imap",
"mutable_namespaced_map": "map",
"owner_writeable_namespaced_map": "omap",
}
def immutable_namespaced_map(
    hub: "pop.hub.Hub", init: Dict[str, Any], **kwargs
) -> abc.MutableMapping:
    """
    Build an immutable namespaced map from ``init`` plus any extra kwargs.
    """
    imap = IMAP(init_=init, **kwargs)
    return imap
class IMAP(abc.Mapping):
    """
    An abstract base class that implements the interface of a `dict` but is immutable.
    Items can be retrieved via namespacing (attribute access).
    No values can be changed after initialization; nested dicts are
    recursively wrapped as IMAPs and other iterables are frozen to tuples.
    """
    def __init__(self, init_: Dict[str, Any], **c_kwargs):
        """
        :param init_: A dictionary from which to inherit data
        :param c_kwargs: extra key/value pairs merged over ``init_``
        """
        # Bug fix: merge into a new dict instead of init_.update(**c_kwargs),
        # which mutated the caller's dictionary as a side effect.
        merged = {**init_, **c_kwargs}
        values = {}
        for k, v in merged.items():
            if isinstance(v, Dict):
                values[k] = IMAP(init_=v)
            elif isinstance(v, (tuple, int, str, bytes)):
                values[k] = v
            elif isinstance(v, Iterable):
                # Lists, sets, generators, etc. become immutable tuples
                values[k] = tuple(v)
            else:
                values[k] = v
        # __setattr__ is borked (on purpose) so we have to call it from super() right here
        super().__setattr__("_IMAP__store", values)
        log.debug("Initialized immutable namespaced map")
    def __setattr__(self, k: str, v: Any):
        # Attribute writes are forbidden: this map is immutable
        raise TypeError(
            f"{self.__class__.__name__} does not support attribute assignment"
        )
    def __getattr__(self, k: str):
        if k.startswith("_"):
            return super().__getattribute__(k)
        else:
            return self.__store[k]
    def __getitem__(self, k: str) -> Any:
        return self.__store[k]
    def __contains__(self, k: str) -> bool:
        return k in self.__store
    def __iter__(self):
        return iter(self.__store)
    def __len__(self) -> int:
        return len(self.__store)
    def __copy__(self) -> Dict[str, Any]:
        # Copying produces a plain (mutable) dict, recursively unpacking
        # nested IMAPs so that it's turtles all the way down
        ret = {}
        for k, v in self.__store.items():
            if isinstance(v, IMAP):
                ret[k] = v.__copy__()
            else:
                ret[k] = v
        return ret
    def __repr__(self):
        return repr(copy.copy(self))
def mutable_namespaced_map(hub: "pop.hub.Hub", dict_: Dict[str, Any] = None) -> "MAP":
    """
    Build a mutable namespaced map, optionally seeded from ``dict_``.
    """
    nmap = MAP(dict_=dict_)
    return nmap
class WriteLockError(Exception):
    """
    Raised when a write-locked mapping key is assigned by a function other
    than the one that originally wrote (and thus owns) it.
    """

    pass
class MAP(abc.MutableMapping):
    """
    MAP is a key-value store that allows for setting/getting
    by either dot or dictionary lookup notation ('.' or '[k]')
    Sub-keys will be created on assignment:
    `map.foo.bar.baz = True` will auto-create foo and bar as MAPs
    while doing
    `map.foo.bar.baz` before assignment will not create foo, bar or baz.
    :param dict_: similar to dict(dict_), initialize using dict_
    """
    def __init__(self, dict_: Dict[str, Any] = None, parent: "MAP" = None):
        # Write straight into __dict__ to bypass our own __setattr__,
        # which forbids keys starting with "_"
        self.__dict__["_store"] = {}
        self.__dict__["_parent"] = parent
        if dict_:
            # Existing dictionaries might have properties that need wrapped as well
            self.update(dict_)
    def _set(self, k: str, v: Any):
        # Shared write path for both attribute and item assignment
        if k.startswith("_"):
            raise AttributeError("Cannot store values beginning with '_'")
        if isinstance(v, dict):
            # Cast all nested dict values as MAP so they get it's benefits as well
            v = self.__class__(dict_=v, parent=self)
            self._store[k] = v
        else:
            self._store[k] = v
    def _get(self, k: str, create: bool = False):
        # Shared read path; missing keys yield an UninitializedValue
        # placeholder (or are auto-created when create=True)
        if k.startswith("_"):
            return super().__getattribute__(k)
        try:
            if k not in self._store:
                if not create:
                    return UninitializedValue([k], self)
                self._set(k, self.__class__())
            return self._store[k]
        except Exception as e:
            raise AttributeError(*e.args)
    def get(self, k: str, default: Any = None) -> Any:
        # dict-style .get: returns default rather than an UninitializedValue
        if k in self._store:
            return getattr(self, k)
        else:
            return default
    def __setitem__(self, k: str, v: Any):
        self._set(k, v)
    def __delitem__(self, k: str):
        """
        Cleanup method required by abc.ABC
        """
        if k in self._store:
            del self._store[k]
    def __delattr__(self, k: str):
        self.__delitem__(k)
    def __getitem__(self, k: str) -> Any:
        return self._get(k)
    def __getattr__(self, k: str) -> Any:
        return self._get(k)
    def __setattr__(self, k: str, v: Any):
        self._set(k, v)
    def __contains__(self, k: str) -> Any:
        return k in self._store
    def __len__(self) -> int:
        return len(self._store)
    def __iter__(self) -> Iterator[Any]:
        return iter(self._store)
    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({str(self)})"
    def __str__(self) -> str:
        return str(self._store)
    def __copy__(self) -> Dict[str, Any]:
        # The copy will be a dictionary, mangle it all you want
        ret = {}
        for k, v in self._store.items():
            if isinstance(v, MAP):
                ret[k] = v.__copy__()
            else:
                ret[k] = v
        return ret
class UninitializedValueError(Exception):
    """
    Raised when an UninitializedValue placeholder is used in any way other
    than being assigned to (see UninitializedValue._blowup).
    """

    pass
class UninitializedValue:
    """
    We want you to be able to create data on the MAP by doing something like:
    MAP.foo.bar = True
    However, we want uses of un-initialized values to blow up, not auto-create.
    We build up a potentially pending write using a special object that can only be written.
    Any other use should blow up.
    """
    def __init__(self, path: List[str], map_: MAP):
        # The path I'm looking up on the map
        # (written via __dict__ to avoid our own __setattr__)
        self.__dict__["_path"] = path
        self.__dict__["_map"] = map_
    def _blowup(self):
        # Single failure point for any read-style use of this placeholder
        raise UninitializedValueError(
            f"Access of uninitialized value '{'.'.join(self.__dict__['_path'])}'"
        )
    def __getattribute__(self, item: str):
        if item == "get":
            pass
        elif not item.startswith("_"):  # return a new PendingWrite for regular lookups
            new_path = self._path[:]
            new_path.append(item)
            return UninitializedValue(new_path, self._map)
        elif item not in (
            "__class__",
            "__dict__",
            "__setattr__",
            "_blowup",
            "_map",
            "_path",
            "get",
        ):
            # Any other dunder/underscore access counts as a real use: blow up
            self._blowup()
        return super().__getattribute__(item)
    def __setattr__(self, k: str, v: Any):
        # Assignment materializes the whole pending path on the backing MAP
        node = self._map
        for part in self._path:
            node = node._get(part, create=True)
        setattr(node, k, v)
    def __delattr__(self, k: str):
        pass
    def __getitem__(self, item: str):
        return getattr(self, item)
    def __setitem__(self, k: str, v: Any):
        setattr(self, k, v)
    def __delitem__(self, k: str):
        pass
    def get(self, k: str, default: Any = None) -> Any:
        # dict-style .get on a missing value always yields the default
        return default
    def __bool__(self) -> bool:
        # Uninitialized values always evaluate to false
        return False
    def __contains__(self, item: str) -> bool:
        # If it contained anything it would be initialized
        return False
    def __hash__(self):
        self._blowup()
    def __dir__(self):
        self._blowup()
    def __str__(self):
        self._blowup()
def owner_writeable_namespaced_map(
    hub, dict_: Dict[str, Any] = None
) -> "OwnerWriteableMapping":
    """
    Build an owner-write-locked namespaced map, optionally seeded from ``dict_``.
    """
    omap = OwnerWriteableMapping(dict_=dict_)
    return omap
def _stack_frames(relative_start=2):
"""
Efficiently access stack frames.
:param relative_start: Starting stack depth; The default, 2 is the parent of the
caller of stack_frames - the first function that may be unknown.
:return: a stack frame
"""
if hasattr(sys, "_getframe"):
# implementation detail of CPython, speeds things up by 100x.
frame = sys._getframe(relative_start)
while frame:
yield frame
frame = frame.f_back
else:
for frame_info in inspect.stack(context=0)[relative_start:]:
yield frame_info.frame
WriteLockInfo = collections.namedtuple("WriteLockInfo", ["val", "owner", "lineno"])
class OwnerWriteableMapping(MAP):
    """
    A MAP variant that is write-locked to the first Contracted function
    that writes to a given key (becoming the owner). Attempts to write
    to that key from other functions will receive a WriteLockError showing
    the owning Contracted function.
    """
    def __init__(
        self, dict_: Dict[str, Any] = None, parent: "OwnerWriteableMapping" = None
    ):
        super().__init__(dict_, parent)
    def _find_owner(self) -> (contract.Contracted, int):
        """
        Return the contracted responsible for assigning to this variable.
        Returns None if no such function exists.
        """
        # NOTE(review): if the very first inspected frame already holds a
        # Contracted ``self``, ``lineno`` is returned without ever being
        # assigned (NameError) — confirm whether that depth can occur.
        for frame in _stack_frames(3):
            if isinstance(frame.f_locals.get("self"), contract.Contracted):
                contracted = frame.f_locals["self"]
                log.debug(f"Found contract '{contracted.__name__}'")
                break
            else:
                # find the lineno in the frame *before* our Contracted (the function called)
                lineno = frame.f_lineno
        else:
            # not found
            contracted = None
            lineno = -1
        return contracted, lineno
    def _set(self, k: str, v: Any):
        # First write wins ownership; later writes by the same owner succeed,
        # writes by anyone else raise WriteLockError
        owner, lineno = self._find_owner()
        cur = self._store.get(k)
        if cur is None or cur.owner is owner:
            if isinstance(v, abc.Mapping):
                v = self.__class__(dict_=v, parent=self)
            elif isinstance(v, Iterable) and not isinstance(
                v, (tuple, str, bytes, UninitializedValue)
            ):
                v = tuple(v)  # Lists, sets, and other iterables become immutable
            super()._set(k, WriteLockInfo(v, owner, lineno))
        else:
            file = inspect.getsourcefile(cur.owner.func)
            raise WriteLockError(
                f"'{k}' was previously assigned by '{cur.owner.__name__}' ({file}:{cur.lineno})"
            )
    def _get(self, k: str, create: bool = False) -> Any:
        # Unwrap the WriteLockInfo so callers see the raw stored value
        v = super()._get(k, create)
        if not isinstance(v, UninitializedValue):
            v = v.val
        return v
    def __str__(self) -> str:
        return str(self._dict())
    def _dict(self):
        # Recursively unwrap nested mappings into a plain dict of raw values
        vals = {}
        for k, v in self._store.items():
            if isinstance(v.val, self.__class__):
                vals[k] = v.val._dict()
            else:
                vals[k] = v.val
        return vals

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
"""
Tools to work with dicts
"""
# Import local libs
import pop.dicts
import pop.hub
def traverse(hub: "pop.hub.Hub", data, key, default=None, delimiter=":"):
    """
    Traverse a dict or list using a colon-delimited (or otherwise delimited,
    using the 'delimiter' param) target string. The target 'foo:bar:0' will
    return data['foo']['bar'][0] if this value exists, and will otherwise
    return the value given in the 'default' argument.
    The function will automatically determine the target type:
    the target 'foo:bar:0' will return data['foo']['bar'][0] if data is like
    {'foo':{'bar':['baz']}}; if data is like {'foo':{'bar':{'0':'baz'}}}
    then it returns data['foo']['bar']['0'].
    """
    # Bug fix: 'default' and 'delimiter' are separate arguments; the original
    # passed 'default.delimiter' (an attribute lookup) and dropped 'delimiter'.
    return pop.dicts.traverse(data, key, default, delimiter)
def update(hub: "pop.hub.Hub", dest, upd, recursive_update=True, merge_lists=True):
    """
    Recursive version of the default dict.update
    Merges upd recursively into dest
    If recursive_update=False, will use the classic dict.update, or fall back
    on a manual merge (helpful for non-dict types like FunctionWrapper)
    If merge_lists=True, will aggregate list object types instead of replace.
    The list in ``upd`` is added to the list in ``dest``, so the resulting list
    is ``dest[key] + upd[key]``. This behavior is only activated when
    recursive_update=True. This wrapper defaults merge_lists=True.
    """
    return pop.dicts.update(dest, upd, recursive_update, merge_lists)

View File

@@ -0,0 +1,19 @@
import pop.hub
from typing import Any, Dict
def get(hub: "pop.hub.Hub") -> Dict[str, Any]:
    """
    Return the hub's dynamic-dirs data; if the dynamic dirs have not been
    gathered yet, scan for them first.
    """
    if hub._dscan:
        return hub._dynamic
    hub._scan_dynamic()
    return hub._dynamic
def refresh(hub: "pop.hub.Hub"):
    """
    Refresh the dynamic dirs

    Forces a re-scan regardless of whether one has already happened.
    """
    hub._scan_dynamic()

View File

@@ -0,0 +1,156 @@
"""
The input module is used to translate typical input strings into the
ref/args/kwargs used by pop when forwarding data into functions.
"""
# Import python libs
import re
from typing import Any, Dict, List, Tuple
# Import third party libs
import yaml
import pop.hub
KWARG_REGEX = re.compile(r"^([^\d\W][\w.-]*)=(?!=)(.*)$", re.UNICODE)
def parse(
    hub: "pop.hub.Hub",
    args: List[Any],
    condition: bool = True,
    no_parse: Tuple[str] = None,
) -> Tuple[List[Any], Dict[str, Any]]:
    """
    Split a list of raw input values into positional args and kwargs.

    Strings of the form ``name=value`` become kwargs (values are YAML
    rendered unless the name appears in ``no_parse``); dicts flagged with
    ``__kwarg__`` are merged into the kwargs; everything else stays a
    positional arg. When ``condition`` is True the result is passed
    through _condition_input() before being returned.
    """
    no_parse = no_parse or ()
    pos = []
    kw = {}
    for item in args:
        if isinstance(item, str):
            if "=" in item:
                key, raw = _parse_kwarg(item)
                if key:
                    kw[key] = raw if key in no_parse else _yamlify_arg(raw)
                # NOTE: '='-containing strings that do not match the kwarg
                # pattern are dropped (mirrors the original behavior)
            else:
                pos.append(_yamlify_arg(item))
        elif isinstance(item, dict):
            # Yes, we're popping this key off and adding it back if
            # condition_input is called below, but this is the only way to
            # gracefully handle both CLI and API input.
            if item.pop("__kwarg__", False) is True:
                kw.update(item)
            else:
                pos.append(item)
        else:
            pos.append(item)
    if condition:
        return _condition_input(pos, kw)
    return pos, kw
def _yamlify_arg(arg: Any) -> Any:
    """
    yaml.safe_load the arg

    Strings are rendered through YAML so CLI input such as "1", "true" or
    "[1, 2]" becomes the corresponding Python value. Several special cases
    below deliberately return the original string instead; the ordering of
    these checks matters.
    """
    if not isinstance(arg, str):
        return arg
    if arg.strip() == "":
        # Because YAML loads empty (or all whitespace) strings as None, we
        # return the original string
        # >>> import yaml
        # >>> yaml.load('') is None
        # True
        # >>> yaml.load(' ') is None
        # True
        return arg
    elif "_" in arg and all([x in "0123456789_" for x in arg.strip()]):
        # When the stripped string includes just digits and underscores, the
        # underscores are ignored and the digits are combined together and
        # loaded as an int. We don't want that, so return the original value.
        return arg
    try:
        # Keep the pristine input around so special cases can restore it
        original_arg = arg
        if "#" in arg:
            # Only yamlify if it parses into a non-string type, to prevent
            # loss of content due to # as comment character
            parsed_arg = yaml.safe_load(arg)
            if isinstance(parsed_arg, str) or parsed_arg is None:
                return arg
            return parsed_arg
        if arg == "None":
            arg = None
        else:
            arg = yaml.safe_load(arg)
        if isinstance(arg, dict):
            # dicts must be wrapped in curly braces
            if isinstance(original_arg, str) and not original_arg.startswith("{"):
                return original_arg
            else:
                return arg
        elif isinstance(arg, list):
            # lists must be wrapped in brackets
            if isinstance(original_arg, str) and not original_arg.startswith("["):
                return original_arg
            else:
                return arg
        elif arg is None or isinstance(arg, (list, float, int, str)):
            # yaml.safe_load will load '|' as '', don't let it do that.
            if arg == "" and original_arg in ("|",):
                return original_arg
            # yaml.safe_load will treat '#' as a comment, so a value of '#'
            # will become None. Keep this value from being stomped as well.
            elif arg is None and original_arg.strip().startswith("#"):
                return original_arg
            else:
                return arg
        else:
            # we don't support this type
            return original_arg
    except Exception:
        # In case anything goes wrong...
        return original_arg
def _parse_kwarg(string_: str) -> Tuple:
"""
Parses the string and looks for the following kwarg format:
"{argument name}={argument value}"
For example: "my_message=Hello world"
Returns the kwarg name and value, or (None, None) if the regex was not
matched.
"""
try:
return KWARG_REGEX.match(string_).groups()
except AttributeError:
return None, None
def _condition_input(args: List[Any], kwargs: Dict[str, Any]) -> List[str]:
"""
Return a single arg structure for the publisher to safely use
"""
ret = []
for arg in args:
if isinstance(arg, int):
ret.append(str(arg))
else:
ret.append(arg)
if isinstance(kwargs, dict) and kwargs:
kw_ = {"__kwarg__": True}
for key, val in kwargs.items():
kw_[key] = val
return ret + [kw_]
return ret

View File

@@ -0,0 +1,173 @@
# -*- coding: utf-8 -*-
"""
The main interface for management of the aio loop
"""
# Import python libs
import asyncio
import os
import pop.hub
import signal
import functools
from typing import Callable, Iterable, Generator
__virtualname__ = "loop"
def __virtual__(hub: "pop.hub.Hub"):
return True
def create(hub: "pop.hub.Hub"):
    """
    Create the loop at hub.pop.Loop

    Idempotent: does nothing when hub.pop.Loop already exists. Also creates
    the hub.pop.loop.FUT_QUE queue used by ensure_future/_holder.
    """
    if not hasattr(hub.pop, "Loop"):
        hub.pop.loop.FUT_QUE = asyncio.Queue()
        if os.name == "nt":
            # NOTE(review): asyncio._get_running_loop() is a private API —
            # consider the public asyncio.get_running_loop()/get_event_loop()
            hub.pop.Loop = asyncio._get_running_loop()
            if hub.pop.Loop is not None:
                return
            # The default event loop on Windows, "SelectorEventLoop" has certain limitations
            # ProactorEventLoop makes use of Window's I/O Completion Ports:
            # https://docs.microsoft.com/en-ca/windows/win32/fileio/i-o-completion-ports
            hub.pop.Loop = asyncio.ProactorEventLoop()
            asyncio.set_event_loop(hub.pop.Loop)
        else:
            hub.pop.Loop = asyncio.get_event_loop()
def call_soon(hub: "pop.hub.Hub", ref: str, *args, **kwargs):
    """
    Schedule the function at ``ref`` to run on the loop as soon as it has
    time. This needs to be called after the creation of the loop.
    """
    func = hub.pop.ref.get_func(ref)
    bound = functools.partial(func, *args, **kwargs)
    hub.pop.Loop.call_soon(bound)
def ensure_future(hub: "pop.hub.Hub", ref: str, *args, **kwargs):
    """
    Schedule the coroutine at ``ref`` on the loop. This needs to be called
    after the creation of the loop. A done-callback pushes the finished
    future onto hub.pop.loop.FUT_QUE so it is cleanly awaited in the
    background (by _holder or await_futures).
    """
    coro_fun = getattr(hub, ref)
    task = asyncio.ensure_future(coro_fun(*args, **kwargs))
    task.add_done_callback(lambda fut: hub.pop.loop.FUT_QUE.put_nowait(fut))
    return task
def start(
    hub: "pop.hub.Hub",
    *coros,
    hold: bool = False,
    sigint: Callable = None,
    sigterm: Callable = None,
):
    """
    Create the loop (if needed) and run the given coroutines until complete.

    :param coros: coroutines to gather and run
    :param hold: also run the _holder task that awaits backgrounded futures
    :param sigint: async callback scheduled as a task on SIGINT
    :param sigterm: async callback scheduled as a task on SIGTERM
    """
    hub.pop.loop.create()
    if sigint:
        s = signal.SIGINT
        hub.pop.Loop.add_signal_handler(s, lambda s=s: asyncio.create_task(sigint(s)))
    if sigterm:
        s = signal.SIGTERM
        hub.pop.Loop.add_signal_handler(s, lambda s=s: asyncio.create_task(sigterm(s)))
    if hold:
        coros = list(coros)
        coros.append(_holder(hub))
    try:
        # DO NOT CHANGE THIS CALL TO run_forever! If we do that then the tracebacks
        # do not get resolved.
        return hub.pop.Loop.run_until_complete(asyncio.gather(*coros))
    except KeyboardInterrupt:  # the bound exception object was never used
        print("Caught keyboard interrupt. Canceling...")
        hub.pop.Loop.close()
async def _holder(hub: "pop.hub.Hub"):
    """
    Just a sleeping while loop to hold the loop open while it runs until
    complete

    Awaits every future pushed onto hub.pop.loop.FUT_QUE by ensure_future's
    done-callback; never returns on its own.
    """
    while True:
        future = await hub.pop.loop.FUT_QUE.get()
        await future
async def await_futures(hub: "pop.hub.Hub"):
    """
    Drain hub.pop.loop.FUT_QUE, awaiting each completed future.

    This cleans up backgrounded futures on demand when the loop was not
    opened with hold=True.
    """
    que = hub.pop.loop.FUT_QUE
    while not que.empty():
        fut = await que.get()
        await fut
async def kill(hub: "pop.hub.Hub", wait: float = 0):
    """
    Stop and close out the hub's loop after ``wait`` seconds.

    :param wait: seconds to sleep before stopping the loop
    """
    await asyncio.sleep(wait)
    hub.pop.Loop.stop()
    while True:
        if not hub.pop.Loop.is_running():
            hub.pop.Loop.close()
            # Bug fix: the original fell through to sleep(1) and looped
            # forever, re-closing the loop instead of finishing.
            return
        await asyncio.sleep(1)
async def as_yielded(hub: "pop.hub.Hub", gens: Iterable[Generator]):
    """
    Concurrently run multiple async generators and yield the next yielded
    value from the soonest yielded generator.
    async def many():
        for n in range(10):
            yield os.urandom(6).hex()
    async def run():
        gens = []
        for n in range(10):
            gens.append(many())
        async for y in as_yielded(gens):
            print(y)
    """
    # Random sentinel marking "all generators exhausted" on the queue
    fin = os.urandom(32)
    que = asyncio.Queue()
    fs = []  # (unused)
    to_clean = []
    async def _yield(gen):
        # Pump one generator's values into the shared queue
        async for comp in gen:
            await que.put(comp)
    async def _ensure(coros):
        # Drive every pump coroutine to completion
        for f in asyncio.as_completed(coros):
            await f
    async def _set_done():
        await que.put(fin)
    def _done(future):
        # Fires once all pumps finish; enqueue the sentinel from a task
        # because callbacks cannot await
        to_clean.append(asyncio.ensure_future(_set_done()))
    coros = []
    for gen in gens:
        coros.append(_yield(gen))
    f = asyncio.ensure_future(_ensure(coros))
    f.add_done_callback(_done)
    while True:
        ret = await que.get()
        if ret == fin:
            break
        yield ret
    for c in to_clean:
        # Await the sentinel-enqueue task so nothing is left pending
        await c

View File

@@ -0,0 +1,46 @@
"""
Used to resolve resolutions to paths on the hub
"""
import pop.hub
from typing import List
def last(hub: "pop.hub.Hub", ref: str) -> "pop.hub.Sub":
    """
    Resolve ``ref`` and return the final object on its path.
    """
    chain = hub.pop.ref.path(ref)
    return chain[-1]
def path(hub: "pop.hub.Hub", ref: str) -> List["pop.hub.Sub"]:
    """
    Return the chain of objects from the hub down to the named ref,
    hub first. ``ref`` may be a dotted string or a pre-split sequence.
    """
    parts = ref.split(".") if isinstance(ref, str) else ref
    chain = [hub]
    for part in parts:
        chain.append(getattr(chain[-1], part))
    return chain
def create(hub: "pop.hub.Hub", ref: str, obj: object):
    """
    Store ``obj`` on the hub at the location named by the dotted ``ref``.
    The parent location must already exist!
    :param hub: The redistributed pop central hub
    :param ref: The dot delimited string referencing the target location to
        create the given object on the hub
    :param obj: The object to store at the given reference point
    """
    if "." not in ref:
        setattr(hub, ref, obj)
        return
    parent_ref, _, leaf = ref.rpartition(".")
    parent = hub.pop.ref.last(parent_ref)
    setattr(parent, leaf, obj)

View File

@@ -0,0 +1,360 @@
"""
Seed a new project with a directory tree and first files
"""
# Import python libs
import os
import pop.hub
SETUP = """#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Import python libs
import os
import shutil
from setuptools import setup, Command
NAME = "%%NAME%%"
DESC = ""
# Version info -- read without importing
_locals = {}
with open("{}/version.py".format(NAME)) as fp:
exec(fp.read(), None, _locals)
VERSION = _locals["version"]
SETUP_DIRNAME = os.path.dirname(__file__)
if not SETUP_DIRNAME:
SETUP_DIRNAME = os.getcwd()
with open("README.rst", encoding="utf-8") as f:
LONG_DESC = f.read()
with open("requirements.txt") as f:
REQUIREMENTS = f.read().splitlines()
class Clean(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for subdir in (NAME, "tests"):
for root, dirs, files in os.walk(
os.path.join(os.path.dirname(__file__), subdir)
):
for dir_ in dirs:
if dir_ == "__pycache__":
shutil.rmtree(os.path.join(root, dir_))
def discover_packages():
modules = []
for package in (NAME,):
for root, _, files in os.walk(os.path.join(SETUP_DIRNAME, package)):
pdir = os.path.relpath(root, SETUP_DIRNAME)
modname = pdir.replace(os.sep, ".")
modules.append(modname)
return modules
setup(
name=NAME,
author="",
author_email="",
url="",
version=VERSION,
install_requires=REQUIREMENTS,
description=DESC,
long_description=LONG_DESC,
long_description_content_type="text/x-rst",
python_requires=">=3.6",
classifiers=[
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Development Status :: 5 - Production/Stable",
],
packages=discover_packages(),
entry_points={"console_scripts": ["%%NAME%% = %%NAME%%.scripts:start",],},
cmdclass={"clean": Clean},
)
"""
PYPROJ = r"""[tool.black]
line-length = 88
target-version = ['py36', 'py37', 'py38']
include = '\.pyi?$'
exclude = '''
(
/(
\.eggs
| \.git
| \.hg
| \.mypy_cache
| \.tox
| \.venv
| _build
| buck-out
| build
| dist
)/
)
'''
"""
PRECOM = r"""---
minimum_pre_commit_version: 1.15.2
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.5.0
hooks:
- id: check-merge-conflict # Check for files that contain merge conflict strings.
language_version: python3
- id: trailing-whitespace # Trims trailing whitespace.
args: [--markdown-linebreak-ext=md]
language_version: python3
- id: mixed-line-ending # Replaces or checks mixed line ending.
args: [--fix=lf]
language_version: python3
- id: end-of-file-fixer # Makes sure files end in a newline and only a newline.
exclude: tests/fake_.*\.key
language_version: python3
- id: check-ast # Simply check whether files parse as valid python.
language_version: python3
- id: check-yaml
- id: check-json
- repo: https://github.com/psf/black
rev: 19.10b0
hooks:
- id: black
language_version: python3
"""
ENTRY = """entry_points={
'console_scripts': [
'%%NAME%% = %%NAME%%.scripts:start',
],
},"""
SCRIPT = """#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pop.hub
def start():
hub = pop.hub.Hub()
hub.pop.sub.add(dyne_name="%%NAME%%")
hub.%%NAME%%.init.cli()
"""
INIT = """def __init__(hub):
# Remember not to start your app in the __init__ function
# This function should just be used to set up the plugin subsystem
# Add another function to call from your run.py to start the app
pass
def cli(hub):
hub.pop.config.load(["%%NAME%%"], cli="%%NAME%%")
print("%%NAME%% works!")
"""
REQ = "pop\n"
CONF = """CLI_CONFIG = {}
CONFIG = {}
SUBCOMMANDS = {}
DYNE = {
"%%NAME%%": ["%%NAME%%"],
%%DYNE%%}
"""
VER = """# -*- coding: utf-8 -*-
version = "1"\n
"""
def new(hub: "pop.hub.Hub"):
    """
    Scaffold a new pop project in the current directory, named by
    hub.opts["seed_name"].

    A "v" (vertical) project gets only the dyne layout; any other type also
    gets the standalone script/run.py/init.py scaffolding.
    """
    hub.PATH = os.getcwd()
    name = hub.opts["seed_name"]
    for dyne in hub.opts["dyne"]:
        hub.pop.seed.mkdir(name, dyne)
        hub.pop.seed.mkdir(name, dyne, "contracts")
    vertical = hub.opts["type"] == "v"
    if vertical:
        hub.pop.seed.mkdir(name)
        hub.pop.seed.mksetup(name, entry=False)
    else:
        hub.pop.seed.mkdir(name, name)
        hub.pop.seed.mkdir(name, name, "contracts")
        hub.pop.seed.mksetup(name)
        hub.pop.seed.mkscript(name)
    hub.pop.seed.mkversion(name)
    hub.pop.seed.mkconf(name)
    hub.pop.seed.mkreq(name)
    if not vertical:
        hub.pop.seed.mkrun(name)
        hub.pop.seed.mkinit(name)
    hub.pop.seed.mkreadme(name)
    hub.pop.seed.mkproj()
    hub.pop.seed.mkprecom()
    hub.pop.seed.print_post(name)
def mkdir(hub: "pop.hub.Hub", *args):
    """
    Create the directory named by joining hub.PATH with *args, including
    any missing intermediate directories. Failures are reported to stdout
    but never raised.
    """
    path = hub.PATH
    for dir_ in args:
        path = os.path.join(path, dir_)
        if not os.path.isdir(path):
            try:
                os.makedirs(path)
            except Exception:
                print("Failed to make {}".format(path))
                continue
        # Removed a trailing `if dir_ == "scripts" and len(args) == 1:
        # continue` — a no-op since it was the last statement of the loop.
def mkreq(hub: "pop.hub.Hub", name: str):
    """
    Write the default requirements.txt for the new project.
    """
    req_path = os.path.join(hub.PATH, "requirements.txt")
    with open(req_path, "w+") as fp:
        fp.write(REQ)
def mksetup(hub: "pop.hub.Hub", name: str, entry: bool = True):
    """
    Create and write out a setup.py file

    :param entry: when True, substitute console-script entry text for the
        %%ENTRY%% token in the template
    """
    # NOTE(review): the SETUP template above hardcodes its entry_points and
    # contains no %%ENTRY%% placeholder, so the replace() calls below are
    # no-ops and entry=False does not actually remove the console script —
    # confirm and either add the placeholder to SETUP or drop this logic.
    path = os.path.join(hub.PATH, "setup.py")
    setup_str = SETUP.replace("%%NAME%%", name)
    if entry:
        setup_str = setup_str.replace(
            "%%ENTRY%%",
            ENTRY.replace(
                "%%NAME%%.scripts:start", f"{name.replace('-', '_')}.scripts:start"
            ),
        )
        setup_str = setup_str.replace("%%ENTRY%%", ENTRY.replace("%%NAME%%", name))
    else:
        setup_str = setup_str.replace("%%ENTRY%%", "")
    with open(path, "w+") as fp:
        fp.write(setup_str)
def mkscript(hub: "pop.hub.Hub", name: str):
    """
    Create and write out the project's <name>/scripts.py console
    entry-point module from the SCRIPT template.
    """
    path = os.path.join(hub.PATH, name, "scripts.py")
    script_str = SCRIPT.replace("%%NAME%%", name)
    with open(path, "w+") as fp:
        fp.write(script_str)
def mkrun(hub: "pop.hub.Hub", name: str):
    """
    Create the convenience run.py script allowing the project to be
    executed from the local directory.
    """
    run_path = os.path.join(hub.PATH, "run.py")
    body = SCRIPT.replace("%%NAME%%", name) + "\n\nstart()\n"
    with open(run_path, "w+") as fp:
        fp.write(body)
def mkinit(hub: "pop.hub.Hub", name: str):
    """
    Create the initial init.py for the project's main sub.
    """
    init_path = os.path.join(hub.PATH, name, name, "init.py")
    with open(init_path, "w+") as fp:
        fp.write(INIT.replace("%%NAME%%", name))
def mkversion(hub: "pop.hub.Hub", name: str):
    """
    Write the project's initial version.py file.
    """
    ver_path = os.path.join(hub.PATH, name, "version.py")
    with open(ver_path, "w+") as fp:
        fp.write(VER)
def mkconf(hub: "pop.hub.Hub", name: str):
    """
    Create the project's conf.py file, filling in the name and a dyne
    entry for each value in hub.opts["dyne"].
    """
    path = os.path.join(hub.PATH, name, "conf.py")
    dyne_str = ""
    for dyne in hub.opts["dyne"]:
        dyne_str += f'    "{dyne}": ["{dyne}"],\n'
    conf_str = CONF.replace("%%NAME%%", name)
    conf_str = conf_str.replace("%%DYNE%%", dyne_str)
    with open(path, "w+") as fp:
        fp.write(conf_str)
def mkreadme(hub: "pop.hub.Hub", name: str):
    """
    Write a minimal README.rst titled with the upper-cased project name.
    """
    readme_path = os.path.join(hub.PATH, "README.rst")
    bar = "=" * len(name)
    with open(readme_path, "w+") as fp:
        fp.write(f"{bar}\n{name.upper()}\n{bar}\n")
def mkproj(hub: "pop.hub.Hub"):
    """
    Create the pyproject.toml file
    """
    path = os.path.join(hub.PATH, "pyproject.toml")
    # NOTE(review): opened in append mode ("a+") unlike the other writers'
    # "w+" — presumably to preserve an existing pyproject.toml; confirm.
    with open(path, "a+") as fp:
        fp.write(PYPROJ)
def mkprecom(hub: "pop.hub.Hub"):
    """
    Write the default .pre-commit-config.yaml for the project.
    """
    target = os.path.join(hub.PATH, ".pre-commit-config.yaml")
    with open(target, "w+") as fp:
        fp.write(PRECOM)
def print_post(hub: "pop.hub.Hub", name: str):
    """
    Print follow-up instructions after seeding: how to run the project and
    how to enable the pre-commit hooks.
    """
    lines = (
        f"Congratulations! You now have a project set up called {name}!",
        "This project can be executed by calling the run.py script:",
        "    python3 run.py",
        "This project has been set up with pre-commit hooks for code checks and black.",
        'First set up your source control environment with "git init" or "hg init".',
        "Then enable these checks in your git checkout:",
        "    pip install pre-commit",
        "    pre-commit install",
        "To run pre-commit manually, execute:",
        "    pre-commit run --all-files",
    )
    for line in lines:
        print(line)

View File

@@ -0,0 +1,253 @@
# -*- coding: utf-8 -*-
"""
Control and add subsystems to the running daemon hub
"""
# Import python libs
import os
from typing import Generator, List, Tuple
# Import pop libs
import pop.hub
# NOTE(review): annotations written as "List[str] or str" evaluate to just
# List[str]; "or" does not build a type union — consider typing.Union.
def add(
    hub: pop.hub.Hub,
    pypath: List[str] or str = None,
    subname: str = None,
    sub: pop.hub.Sub = None,
    static: List[str] or str = None,
    contracts_pypath: List[str] or str = None,
    contracts_static: List[str] or str = None,
    default_contracts: List[str] or str = None,
    virtual: bool = True,
    dyne_name: str = None,
    omit_start: Tuple[str] = ("_",),
    omit_end: Tuple[str] = (),
    omit_func: bool = False,
    omit_class: bool = True,
    omit_vars: bool = False,
    mod_basename: str = "pop.sub",
    stop_on_failures: bool = False,
    load_all: bool = True,
    recursive_contracts_static: List[str] or str = None,
    default_recursive_contracts: List[str]
    or str = None,  # TODO: Not str, pretty sure -W. Werner, 2020-10-20
):
    """
    Add a new subsystem to the hub
    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :param sub: The sub to use as the root to add to
    :param pypath: One or many python paths which will be imported
    :param static: Directories that can be explicitly passed
    :param contracts_pypath: Load additional contract paths
    :param contracts_static: Load additional contract paths from a specific directory
    :param default_contracts: Specifies that a specific contract plugin will be applied as a default to all plugins
    :param virtual: Toggle whether or not to process __virtual__ functions
    :param dyne_name: The dynamic name to use to look up paths to find plugins -- linked to conf.py
    :param omit_start: Allows you to pass in a tuple of characters that would omit the loading of any object
        I.E. Any function starting with an underscore will not be loaded onto a plugin
        (You should probably never change this)
    :param omit_end:Allows you to pass in a tuple of characters that would omit the loading of an object
        (You should probably never change this)
    :param omit_func: bool: Don't load any functions
    :param omit_class: bool: Don't load any classes
    :param omit_vars: bool: Don't load any vars
    :param mod_basename: str: Manipulate the location in sys.modules that the plugin will be loaded to.
        Allow plugins to be loaded into a separate namespace.
    :param stop_on_failures: If any module fails to load for any reason, stacktrace and do not continue loading this sub
    :param load_all: Load all the plugins on the sub
    :param recursive_contracts_static: contract paths applied recursively to nested subs
    :param default_recursive_contracts: default contracts applied recursively to nested subs
    """
    # Derive the sub's name from the first source that provides one:
    # pypath, then static, then dyne_name
    if pypath:
        pypath = pop.hub.ex_path(pypath)
        subname = subname if subname else pypath[0].split(".")[-1]
    elif static:
        # NOTE(review): static is typed "List[str] or str" — basename on a
        # list would raise, so presumably a str reaches here; confirm.
        subname = subname if subname else os.path.basename(static)
    if dyne_name:
        subname = subname if subname else dyne_name
    # Attach to the given sub if provided, otherwise directly to the hub
    root = sub or hub
    root._subs[subname] = pop.hub.Sub(
        hub,
        subname,
        root,
        pypath,
        static,
        contracts_pypath,
        contracts_static,
        default_contracts,
        virtual,
        dyne_name,
        omit_start,
        omit_end,
        omit_func,
        omit_class,
        omit_vars,
        mod_basename,
        stop_on_failures,
        sub_virtual=getattr(root, "_subvirt", True),
        recursive_contracts_static=recursive_contracts_static,
        default_recursive_contracts=default_recursive_contracts,
    )
    # init the sub (init.py:__init__) after it can be referenced on the hub!
    root._subs[subname]._sub_init()
    root._iter_subs = sorted(root._subs.keys())
    if load_all:
        root._subs[subname]._load_all()
    for alias in root._subs[subname]._alias:
        root._sub_alias[alias] = subname
def remove(hub: pop.hub.Hub, subname: str):
    """
    Remove a sub from the hub, calling its init.shutdown() first if present.
    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    """
    if not hasattr(hub, subname):
        return
    sub = getattr(hub, subname)
    init_mod = getattr(sub, "init", None)
    if init_mod is not None and hasattr(init_mod, "shutdown"):
        init_mod.shutdown()
    hub._remove_subsystem(subname)
def load_all(hub: "pop.hub.Hub", subname: str) -> bool:
    """
    Load all modules under a given sub
    :param hub: The redistributed pop central hub
    :param subname: The name the sub occupies on the hub
    :return: True if the sub exists and was loaded, False otherwise
    """
    if hasattr(hub, subname):
        sub = getattr(hub, subname)
        sub._load_all()
        return True
    else:
        return False
def get_dirs(hub: "pop.hub.Hub", sub: "pop.hub.Sub") -> List[str]:
    """
    Return the list of directories that contain the modules for this sub
    :param hub: The redistributed pop central hub (unused, kept for the loader signature)
    :param sub: The pop object that contains the loaded module data
    """
    return sub._dirs
def iter_subs(
    hub: "pop.hub.Hub", sub: "pop.hub.Sub", recurse: bool = False
) -> "Generator[pop.hub.Sub, None, None]":
    """
    Return an iterator that will traverse just the subs. This is useful for
    nested subs
    :param hub: The redistributed pop central hub
    :param sub: The sub whose children should be iterated, in sorted name order
    :param recurse: Recursively iterate over nested subs
    """
    for name in sorted(sub._subs):
        child = sub._subs[name]
        # Skip subs flagged inactive via their _sub_virtual marker
        if child._sub_virtual:
            yield child
            if recurse:
                if hasattr(child, "_subs"):
                    for nest in hub.pop.sub.iter_subs(child, recurse):
                        yield nest
def load_subdirs(hub: pop.hub.Hub, sub: pop.hub.Sub, recurse: bool = False):
    """
    Given a sub, load all subdirectories found under the sub into a lower namespace
    :param hub: The redistributed pop central hub
    :param sub: The pop object that contains the loaded module data
    :param recurse: Recursively iterate over nested subs
    """
    if not sub._sub_virtual:
        # The sub is flagged inactive; do not load anything beneath it
        return
    dirs = hub.pop.sub.get_dirs(sub)
    roots = {}
    # Gather same-named subdirectories across all of the sub's dirs so a
    # nested sub can be merged from multiple physical locations
    for dir_ in dirs:
        for fn in os.listdir(dir_):
            if fn.startswith("_"):
                continue
            if fn == "contracts":
                # "contracts" directories are reserved, never nested subs
                continue
            full = os.path.join(dir_, fn)
            if not os.path.isdir(full):
                continue
            if fn not in roots:
                roots[fn] = [full]
            else:
                roots[fn].append(full)
    for name, sub_dirs in roots.items():
        # Load er up!  Propagate the parent sub's omit/virtual settings
        hub.pop.sub.add(
            subname=name,
            sub=sub,
            static=sub_dirs,
            virtual=sub._virtual,
            omit_start=sub._omit_start,
            omit_end=sub._omit_end,
            omit_func=sub._omit_func,
            omit_class=sub._omit_class,
            omit_vars=sub._omit_vars,
            mod_basename=sub._mod_basename,
            stop_on_failures=sub._stop_on_failures,
        )
        if recurse:
            if isinstance(getattr(sub, name), pop.hub.Sub):
                hub.pop.sub.load_subdirs(getattr(sub, name), recurse)
def reload(hub: pop.hub.Hub, subname: str):
    """
    Instruct the hub to reload the modules for the given sub. This does not call
    the init.new function or remove sub level variables. But it does re-read the
    directory list and re-initialize the loader causing all modules to be re-evaluated
    when started.
    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :return: True if the sub exists and was re-prepared, False otherwise
    """
    if hasattr(hub, subname):
        sub = getattr(hub, subname)
        sub._prepare()
        return True
    else:
        return False
def extend(
    hub: pop.hub.Hub,
    subname: str,
    pypath: List[str] or str = None,
    static: List[str] or str = None,
    contracts_pypath: List[str] or str = None,
    contracts_static: List[str] or str = None,
) -> bool:
    """
    Extend the directory lookup for a given sub. Any of the directory lookup
    arguments can be passed.
    :param hub: The redistributed pop central hub
    :param subname: The name that the sub is going to take on the hub
        if nothing else is passed, it is used as the pypath (TODO make it the dyne_name not the pypath)
    :param pypath: One or many python paths which will be imported
    :param static: Directories that can be explicitly passed
    :param contracts_pypath: Load additional contract paths
    :param contracts_static: Load additional contract paths from a specific directory
    :return: False if the sub does not exist, otherwise True
    """
    # NOTE(review): the "List[str] or str" annotations evaluate to List[str];
    # consider Union[List[str], str] — kept as-is for interface compatibility
    if not hasattr(hub, subname):
        return False
    sub = getattr(hub, subname)
    if pypath:
        sub._pypath.extend(pop.hub.ex_path(pypath))
    if static:
        sub._static.extend(pop.hub.ex_path(static))
    if contracts_pypath:
        sub._contracts_pypath.extend(pop.hub.ex_path(contracts_pypath))
    if contracts_static:
        sub._contracts_static.extend(pop.hub.ex_path(contracts_static))
    # Re-initialize the loader so the new directories take effect
    sub._prepare()
    return True

View File

@@ -0,0 +1,373 @@
# -*- coding: utf-8 -*-
"""
Provides tools to help unit test projects using pop.
For now, provides mock Hub instances.
"""
# Import python libs
import inspect
import copy
from asyncio import iscoroutinefunction
# Import third party libs
# Prefer asynctest so coroutine functions can be autospec'd; fall back to the
# plain mock package, which can only mock synchronous callables
try:
    HAS_TEST = False
    from asynctest.mock import create_autospec, Mock
    HAS_TEST = True
except (ImportError, ModuleNotFoundError):
    try:
        from mock import create_autospec as mock_create_autospec, Mock
        HAS_TEST = True
        def create_autospec(spec, *args, **kwargs):
            # Shim matching asynctest's entry point that refuses async targets
            if iscoroutinefunction(spec):
                raise Exception(
                    "MockHub requires asynctest in order to mock async functions"
                )
            return mock_create_autospec(spec, *args, **kwargs)
    except (ImportError, ModuleNotFoundError):
        # Neither backend is available; __virtual__ reports via HAS_TEST
        ...
# Import pop libs
from pop.contract import Contracted
from pop.loader import LoadedMod
from pop.hub import Hub, Sub
from typing import Any, Callable, Tuple
def __virtual__(hub: Hub) -> Tuple[bool, str]:
    # Only expose this testing sub when a mocking backend could be imported
    return HAS_TEST, "Async pop testing libs are not available"
class _LookUpTable:
def __init__(self, *args, **kwargs):
self._lut = {}
super().__init__(*args, **kwargs)
def __contains__(self, key: str):
return id(key) in self._lut
def __setitem__(self, key: str, value: Any):
self._lut[id(key)] = value
def __getitem__(self, key: str):
return self._lut[id(key)]
def __delitem__(self, key: str):
del self._lut[id(key)]
def __len__(self):
return len(self._lut)
class _LazyPop:
    """
    Base class for the testing hub variants. Wraps a real Hub/Sub/LoadedMod
    and lazily swaps each attribute for a mock (or a nested _LazyPop) on
    first access, sharing one identity-keyed look-up table across the tree.
    """

    __lazy_classes = [Hub, Sub, LoadedMod]
    _hub_id = object()  # just a unique object for our lut storage
    _lazy_hub_id = object()

    class __Lazy:
        # Sentinel: attribute exists but has not been resolved into a mock yet
        pass

    def __init__(self, obj, lut=None):
        if isinstance(obj, Hub):
            # Root of the tree: create the shared LUT and self-register
            lut = _LookUpTable()
            lut[self._hub_id] = obj
            lut[self._lazy_hub_id] = self
            lut[obj] = self
        elif isinstance(obj, Sub):
            # Make sure every module on the sub is loaded before mirroring it
            obj._load_all()
        self.__lut = lut
        self.__obj = obj
        for attr_name in self.__attr_names():
            setattr(self, attr_name, _LazyPop.__Lazy)

    def _hub(self):
        # The real hub backing this test tree
        return self.__lut[self._hub_id]

    def _lazy_hub(self):
        # The root _LazyPop wrapping the real hub
        return self.__lut[self._lazy_hub_id]

    def __attr_names(self):
        # TODO: '_' - is this actually right? what should I really expose?
        attrs = [attr for attr in self.__obj.__dict__ if not attr.startswith("_")]
        if isinstance(self.__obj, Hub):
            attrs += list(self.__obj._subs)
        elif isinstance(self.__obj, Sub):
            attrs += list(self.__obj._loaded)
            attrs += list(self.__obj._subs)
        elif isinstance(self.__obj, LoadedMod):
            attrs += list(self.__obj._attrs)
        else:
            raise Exception(
                "Standard objects should not be lazy: {}".format(str(self.__obj))
            )
        return attrs

    def _find_subs(self):
        # Breadth-first walk of all subs on the real hub, paired with their
        # dotted paths from the hub root
        i = 0
        subs = [(s._subname, s) for s in self._hub()]
        while i < len(subs):
            for child in subs[i][1]._subs:
                subs.append((".".join([subs[i][0], child]), getattr(subs[i][1], child)))
            i += 1
        return subs

    def _find_module_from_file(self, file):
        # Map a source file path back to its dotted ref and module on the hub
        for path, sub in self._find_subs():
            try:
                mod = sub._vmap[file]
                return ".".join([path, mod]), getattr(sub, mod)
            except (AttributeError, KeyError):
                pass
        else:
            raise Exception("Module not loaded on hub.")

    def _reset(self):
        # A potential issue - we don't do reference counting, so it's *possible*
        # that an object we're clearing out has been accessed via two places on
        # the test hub:
        #
        # mock_hub.OBJ.return_value = True
        # mock_hub.pop.OBJ
        #
        # If VAL is the same object on the real hub, and is accessed from *both*
        # places on a test hub, mock_hub.pop._reset() will break the coupling -
        # mock_hub.VAL and mock_hub.pop.VAL will then refer to different objects.
        # Fixing this requires storing back-references in the LUT. (TODO)
        # Problems arising this should be rare and also fairly obvious.
        # first, reset items with entries in the LUT
        items = list(self.__dict__.items())
        for k, v in items:
            orig = getattr(self.__obj, k, None)
            if orig in self.__lut and orig is not self.__obj:
                if isinstance(v, _LazyPop):
                    v._reset()
                del self.__lut[orig]
                setattr(self, k, _LazyPop.__Lazy)
        # now remove any assignments that *wouldn't* be in the lut
        # (attrs that weren't assigned on the backing object)
        fresh_obj = self.__class__(self.__obj, lut={})
        fresh_obj_keys = list(self.__dict__.keys())
        for k in fresh_obj_keys:
            if k not in fresh_obj.__dict__:
                del self.__dict__[k]

    def __setattr__(self, key, value):
        if isinstance(value, _LazyPop) and value.__class__ is not self.__class__:
            if value.__obj in self.__lut:
                # we've previously touched this item, we need to clear it out.
                self.__lut[value.__obj]._reset()
            # we are constructing a hybrid _LazyPop - copy the type of the value but nothing else
            value = value.__class__(value.__obj, self.__lut)
            self.__lut[value.__obj] = value
        if isinstance(value, Contracted) and value.hub is not self._lazy_hub():
            # We need to update the Contracted to use our parent hub
            value = copy.copy(value)
            value.hub = self._lazy_hub()
            # we don't update the lut because we don't know what the *original is *
        if value.__class__ in self.__lazy_classes and "__obj" not in key:
            raise TypeError(
                "Mixing of real and test hubs is not supported. "
                "Contracteds (hub.sub.mod.func) are supported."
            )
        super().__setattr__(key, value)

    def __getattribute__(self, item):
        if item and not item.strip("_"):  # only contains underscores, resolve 'this'
            # Resolve relative refs: the caller's own module, then one parent
            # level per extra underscore
            stack = inspect.stack(0)
            file = stack[1].filename
            path, mod = self._find_module_from_file(file)
            # go up N steps
            parts = path.split(".")
            resolved_path = parts[0 : len(parts) - len(item) + 1]
            if resolved_path:
                orig = getattr(self._hub(), ".".join(resolved_path))
            else:
                orig = self._hub()
            # find/create attr, return
            attr = self._orig_to_attr(orig)
            self.__lut[orig] = attr
            return attr
        if "." in item:
            # Dotted access: resolve one path segment at a time
            result = self
            for part in item.split(".").copy():
                result = getattr(result, part)
            return result
        attr = super().__getattribute__(item)
        if attr is _LazyPop.__Lazy:
            # First touch: materialize the mock/wrapper and cache it
            orig = getattr(self.__obj, item)
            attr = self._orig_to_attr(orig)
            self.__lut[orig] = attr
            super().__setattr__(item, attr)  # bypass our custom setattr
        return attr

    def _orig_to_attr(self, orig):
        # Decide how to wrap a value coming off the real hub
        if orig in self.__lut:
            attr = self.__lut[orig]
        elif [True for cls in self.__lazy_classes if isinstance(orig, cls)]:
            attr = self.__class__(orig, self.__lut)
        elif isinstance(orig, Contracted):
            attr = self._mock_function(orig)
        else:
            attr = self._mock_attr(orig)
        return attr

    def _mock_attr(self, a):
        return create_autospec(a, spec_set=True)

    def _mock_function(self, f):
        # Subclasses decide how Contracted functions are mocked/wrapped
        raise NotImplementedError()
def strip_hub(f: Callable) -> Callable:
    """
    returns a no-op function with the same function signature... minus the first
    parameter (hub).
    :param f: The function whose signature should be mirrored
    :return: A no-op function (async if *f* is async) accepting f's parameters
        without the leading hub parameter
    """
    # Fix: the return annotation previously claimed Hub, but a plain
    # function object built via exec is returned
    if inspect.iscoroutinefunction(f):
        newf = "async "
    else:
        newf = ""
    newf += "def {}(".format(f.__name__)
    params = inspect.signature(f).parameters
    new_params = []
    for param in params:
        if params[param].kind is inspect.Parameter.VAR_POSITIONAL:
            new_params.append("*{}".format(param))
        elif params[param].kind is inspect.Parameter.VAR_KEYWORD:
            new_params.append("**{}".format(param))
        else:
            new_params.append(param)
        if params[param].default is not inspect.Parameter.empty:
            # A placeholder default; the real default value is irrelevant here
            new_params[-1] += '="has default"'
    newf += ", ".join(new_params[1:])  # skip hub
    newf += "): pass"
    scope = {}
    # The generated source is derived only from f's own signature
    exec(newf, scope)
    return scope[f.__name__]
def mock_hub(hub: Hub) -> "MockHub":
    # Convenience constructor: a hub mirror where every function is a mock
    return MockHub(hub)
class MockHub(_LazyPop):
    """
    Provides mocks mirroring a real hub::
        hub.sub.mod.fn() # mock
        hub.sub.mod.attr # mock
    """
    def _mock_function(self, f: Contracted) -> Callable:
        # Autospec against the hub-less signature so call shapes are validated
        afunc = create_autospec(strip_hub(f.func), spec_set=True)
        afunc.__signature__ = f.signature
        return afunc
def fn_hub(hub: Hub) -> "NoContractHub":
    # Convenience constructor: real functions, no contracts, mocked attrs
    return NoContractHub(hub)
class NoContractHub(_LazyPop):
    """
    Provides access to real functions, bypassing contracts and mocking attributes::
        hub.sub.mod.fn() # executes real function, no contracts
        hub.sub.mod.attr # mock
    """
    def _mock_function(self, f: Contracted) -> Contracted:
        # Re-wrap the real function on the test hub with contracts stripped
        return Contracted(
            hub=self._lazy_hub(),
            contracts=None,
            func=f.func,
            ref=f.ref,
            name=f.__name__,
        )
def mock_contracted(contract_hub, c: Contracted) -> Contracted:
    """
    Return a Contracted whose wrapped function is an autospec mock of the
    original, preserving its signature, module, and function attributes so
    the real contracts still wrap a recognizable callable.
    """
    mock_func = create_autospec(c.func, spec_set=True)
    mock_func.__signature__ = c.signature  # required for python 3.6
    mock_func.__module__ = c.func.__module__
    mock_func.__dict__.update(copy.deepcopy(c.func.__dict__))
    return Contracted(contract_hub, c.contracts, mock_func, c.ref, c.__name__)
class ContractHub(_LazyPop):
    """
    Runs a call through the contract system, but the function is a mock. Mostly useful for integration tests:
        hub.sub.mod.fn() # executes mock function, real contracts
        hub.sub.mod.attr # mock
    You can verify what parameters are passed to a function after going through loaded contracts::
        contract_hub.sub.mod.fn('foo')
        assert contract_hub.sub.mod.fn.called_with('bar')
    --------------------------------
    You can view or modify the contracts that will be executed on one function for a test - but first:
    MODIFYING CONTRACTS THIS WAY IS NOT SAFE ON REAL HUBS AND OTHER TESTING HUB VARIANTS!
    I have previously thought of modifying contracts with mocks, only to realize what I really want is to
    unit test a specific contract. Think twice before using this functionality.
    --------------------------------
    The contract modules are visible via hub.sub.mod.fn.contracts, and the contract functions that will
    be called, wrapping fn are visible via hub.sub.mod.fn.contract_functions. It is safe to modify the
    contracts list or contract_functions dict only on a ContractHub.
    Examine that the first contract function to be called is 'foo.pre_fn', then bypass it::
        assert contract_hub.sub.mod.fn.contract_functions['pre'][0].__module__ is 'foo'
        assert contract_hub.sub.mod.fn.contract_functions['pre'][0].__name__ is 'pre_fn'
        hub.sub.mod.fn.contract_functions['pre'][0] = create_autospec(hub.sub.mod.fn.contract_functions['pre'][0])
    Assert that one contract will be called before another::
        assert contract_hub.sub.mod.fn.contracts.index(contract1) < contract_hub.sub.mod.fn.contracts.index(contract2)
    """
    def _mock_function(self, f: Contracted) -> Contracted:
        # Mocked function body, real contracts (see mock_contracted)
        return mock_contracted(self._lazy_hub(), f)
def mock_attr_hub(hub: Hub) -> "MockAttrHub":
    # Convenience constructor: real functions and contracts, mocked attrs
    return MockAttrHub(hub)
class MockAttrHub(_LazyPop):
    """
    Provides an almost-normal hub.
    Contracts and functions are executed, but all attrs are mocked.
    """
    def _mock_function(self, f) -> Contracted:
        # Keep the real function AND its contracts, re-homed onto the test hub
        return Contracted(
            hub=self._lazy_hub(),
            contracts=f.contracts,
            func=f.func,
            ref=f.ref,
            name=f.__name__,
        )

View File

@@ -0,0 +1,18 @@
"""
Routines to verify the working environment etc.
"""
# Import python libs
import os
import pop.hub
def env(hub: "pop.hub.Hub"):
    """
    Ensure that every "*_dir" option in hub.opts exists as a directory,
    creating it when possible, best effort only.
    """
    dir_keys = (key for key in hub.opts if key.endswith("_dir"))
    for key in dir_keys:
        try:
            os.makedirs(hub.opts[key])
        except OSError:
            # Already exists or cannot be created; ignore either way
            pass

View File

@@ -0,0 +1,141 @@
"""
The Proc sub is used to spin up worker processes that run hub referenced
coroutines.
"""
# Import python libs
import os
import sys
import atexit
import itertools
import asyncio
import subprocess
# Import third party libs
import msgpack
import pop.hub
def __init__(hub: "pop.hub.Hub"):
    """
    Create constants used by the client and server side of procs
    """
    hub.proc.DELIM = b"d\xff\xcfCO)\xfe="  # frame delimiter for socket messages
    hub.proc.D_FLAG = b"D"  # marks a final (done) payload frame
    hub.proc.I_FLAG = b"I"  # marks an intermediate (iteration) payload frame
    hub.proc.Workers = {}  # pool name -> worker index -> worker data
    hub.proc.WorkersIter = {}  # pool name -> round-robin cycle over workers
    hub.proc.WorkersTrack = {}  # pool name -> pool bookkeeping (subs, refs)
def _get_cmd(hub: "pop.hub.Hub", ind, ref, ret_ref, sock_dir):
"""
Return the shell command to execute that will start up the worker
"""
code = "import sys; "
code += "import pop.hub; "
code += "hub = pop.hub.Hub(); "
code += 'hub.pop.sub.add("pop.mods.proc"); '
code += f'hub.proc.worker.start("{sock_dir}", "{ind}", "{ref}", "{ret_ref}")'
cmd = f"{sys.executable} -c '{code}'"
return cmd
def mk_proc(hub: "pop.hub.Hub", ind, workers, ret_ref, sock_dir):
    """
    Create the process and add it to the passed in workers dict at the
    specified index
    :param ind: The index for this worker within the workers dict
    :param workers: The dict tracking the pool's worker processes
    :param ret_ref: The socket file name workers use to send returns back
    :param sock_dir: The directory holding the pool's unix sockets
    """
    # A random socket name keeps concurrent pools from colliding
    ref = os.urandom(3).hex() + ".sock"
    workers[ind] = {"ref": ref}
    workers[ind]["path"] = os.path.join(sock_dir, ref)
    cmd = _get_cmd(hub, ind, ref, ret_ref, sock_dir)
    workers[ind]["proc"] = subprocess.Popen(cmd, shell=True)
    workers[ind]["pid"] = workers[ind]["proc"].pid
async def pool(
    hub: "pop.hub.Hub", num, name: str = "Workers", callback=None, sock_dir=None
):
    """
    Create a new local pool of process based workers
    :param num: The number of processes to add to this pool
    :param name: The key under hub.proc.Workers used to store this pool,
        defaults to "Workers"
    :param callback: The pop ref to call when the process communicates
        back
    :param sock_dir: The directory to hold the pool's unix sockets
        NOTE(review): no usable default; passing None fails in os.path.join
    """
    ret_ref = os.urandom(3).hex() + ".sock"
    ret_sock_path = os.path.join(sock_dir, ret_ref)
    if not hasattr(hub.proc, "Tracker"):
        hub.proc.init.mk_tracker()
    workers = {}
    if callback:
        await asyncio.start_unix_server(
            hub.proc.init.ret_work(callback), path=ret_sock_path
        )
    for ind in range(num):
        hub.proc.init.mk_proc(ind, workers, ret_ref, sock_dir)
    w_iter = itertools.cycle(workers)
    hub.proc.Workers[name] = workers
    hub.proc.WorkersIter[name] = w_iter
    hub.proc.WorkersTrack[name] = {"subs": [], "ret_ref": ret_ref, "sock_dir": sock_dir}
    up = set()
    # Block until every worker has created its socket file
    while True:
        for ind in workers:
            if os.path.exists(workers[ind]["path"]):
                up.add(ind)
        if len(up) == num:
            break
        await asyncio.sleep(0.01)
    # TODO: This seems to be spawning extra procs, this should be fixed
    # asyncio.ensure_future(hub.proc.init.maintain(name))
async def maintain(hub: "pop.hub.Hub", name):
    """
    Keep an eye on these processes, respawning any worker that has exited
    :param hub: The redistributed pop central hub
    :param name: The name of the worker pool in hub.proc.Workers
    """
    workers = hub.proc.Workers[name]
    # mk_proc needs the pool's return socket and socket dir to respawn
    ret_ref = hub.proc.WorkersTrack[name]["ret_ref"]
    sock_dir = hub.proc.WorkersTrack[name]["sock_dir"]
    while True:
        for ind, data in workers.items():
            # Popen.poll() returns None while the process is alive and the
            # exit code once it has died; only respawn dead workers. The old
            # "if not poll()" check restarted *live* workers instead.
            if data["proc"].poll() is not None:
                hub.proc.init.mk_proc(ind, workers, ret_ref, sock_dir)
        await asyncio.sleep(2)
def mk_tracker(hub: "pop.hub.Hub"):
    """
    Create the process tracker, this simply makes a data structure to hold
    process references and sets them to be terminated when the system is
    shutdown.
    """
    hub.proc.Tracker = True
    # Terminate all spawned workers when this process exits
    atexit.register(hub.proc.init.clean)
def clean(hub: "pop.hub.Hub"):
    """
    Terminate every worker process in every registered worker pool.
    Registered with atexit by mk_tracker so workers die with the parent.
    """
    for workers in hub.proc.Workers.values():
        for data in workers.values():
            data["proc"].terminate()
def ret_work(hub: "pop.hub.Hub", callback):
    """
    Build the unix-socket handler that feeds worker returns into the given
    callback coroutine and writes the callback's answer back to the worker
    :param callback: A coroutine function taking the decoded payload
    """
    async def work(reader, writer):
        """
        Process the incoming work
        """
        inbound = await reader.readuntil(hub.proc.DELIM)
        inbound = inbound[: -len(hub.proc.DELIM)]
        payload = msgpack.loads(inbound, raw=False)
        ret = await callback(payload)
        ret = msgpack.dumps(ret, use_bin_type=True)
        ret += hub.proc.DELIM
        writer.write(ret)
        await writer.drain()
        writer.close()
    return work

View File

@@ -0,0 +1,180 @@
"""
Execute functions or load subs on the workers in the named worker pool
"""
# import python libs
import asyncio
import os
# Import third party libs
import msgpack
import pop.hub
async def add_sub(hub: "pop.hub.Hub", worker_name, *args, **kwargs):
    """
    Tell all of the worker in the named pool to load the given sub,
    This function takes all of the same arguments as hub.pop.sub.add
    :param worker_name: The name of the worker pool in hub.proc.Workers
    :return: A dict mapping worker index to that worker's reply
    """
    ret = {}
    workers = hub.proc.Workers[worker_name]
    for ind in workers:
        payload = {"fun": "sub", "args": args, "kwargs": kwargs}
        # TODO: Make these futures to the run at the same time
        async for chunk in hub.proc.run.send(workers[ind], payload):
            ret[ind] = chunk
    # Remember the sub so workers added later can load it too (see add_proc)
    hub.proc.WorkersTrack[worker_name]["subs"].append({"args": args, "kwargs": kwargs})
    return ret
async def add_proc(hub: "pop.hub.Hub", worker_name):
    """
    Add a single process to the named worker pool, wait for its socket to
    come up, and load all previously added subs into it
    :param worker_name: The name of the worker pool in hub.proc.Workers
    :return: The index assigned to the new worker
    """
    # grab and extrapolate the data we need
    ret_ref = hub.proc.WorkersTrack[worker_name]["ret_ref"]
    sock_dir = hub.proc.WorkersTrack[worker_name]["sock_dir"]
    workers = hub.proc.Workers[worker_name]
    ind = len(workers) + 1
    # Reuse the lowest vacated index if one exists
    for s_ind in range(len(workers) + 1):
        if s_ind not in workers:
            ind = s_ind
    hub.proc.init.mk_proc(ind, workers, ret_ref, sock_dir)
    # Make sure the process is up with a live socket
    while True:
        if os.path.exists(workers[ind]["path"]):
            break
        await asyncio.sleep(0.01)
    # Add all of the subs that have been added to processes in this pool
    for sub in hub.proc.WorkersTrack[worker_name]["subs"]:
        payload = {"fun": "sub", "args": sub["args"], "kwargs": sub["kwargs"]}
        async for chunk in hub.proc.run.send(workers[ind], payload):
            pass
    return ind
async def pub(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute the given function reference on ALL the workers in the given
    worker pool and return the return data from each.
    Pass in the arguments for the function, keep in mind that the sub needs
    to be loaded into the workers for a function to be available via
    hub.proc.run.add_sub
    :return: A dict mapping worker index to that worker's return
    """
    workers = hub.proc.Workers[worker_name]
    ret = {}
    for ind in workers:
        payload = {"fun": "run", "ref": func_ref, "args": args, "kwargs": kwargs}
        # TODO: Make these futures to the run at the same time
        async for chunk in hub.proc.run.send(workers[ind], payload):
            ret[ind] = chunk
    return ret
async def set_attr(hub: "pop.hub.Hub", worker_name, ref, value):
    """
    Set the given attribute to the given location on the hub of all
    worker procs
    :param ref: The hub reference to assign to in each worker
    :param value: The value to assign (must be msgpack serializable)
    :return: A dict mapping worker index to that worker's reply
    """
    workers = hub.proc.Workers[worker_name]
    ret = {}
    for ind in workers:
        payload = {"fun": "setattr", "ref": ref, "value": value}
        # TODO: Make these futures to the run at the same time
        async for chunk in hub.proc.run.send(workers[ind], payload):
            ret[ind] = chunk
    return ret
async def ind_func(hub: "pop.hub.Hub", worker_name, _ind, func_ref, *args, **kwargs):
    """
    Execute the function on the indexed process within the named worker pool
    :param _ind: The index of the specific worker to run on
    :return: The first (final) return frame from the worker
    """
    workers = hub.proc.Workers[worker_name]
    worker = workers[_ind]
    payload = {"fun": "run", "ref": func_ref, "args": args, "kwargs": kwargs}
    async for ret in hub.proc.run.send(worker, payload):
        return ret
async def func(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute the given function reference on one worker in the given worker
    pool and return the return data.
    Pass in the arguments for the function, keep in mind that the sub needs
    to be loaded into the workers for a function to be available via
    hub.proc.run.add_sub
    """
    # The worker is picked round-robin by track_func; the index is unused here
    ind, coro = await hub.proc.run.track_func(worker_name, func_ref, *args, **kwargs)
    return await coro
async def track_func(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Run a function and return the index of the worker that the function was
    executed on and a coroutine to track
    """
    w_iter = hub.proc.WorkersIter[worker_name]
    # Round-robin selection of the next worker in the pool
    ind = next(w_iter)
    coro = hub.proc.run.ind_func(worker_name, ind, func_ref, *args, **kwargs)
    return ind, coro
async def gen(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Execute a generator function reference within one worker within the given
    worker pool.
    Like `func` the sub needs to be made available to all workers first
    """
    ind, coro = await hub.proc.run.track_gen(worker_name, func_ref, *args, **kwargs)
    async for chunk in coro:
        yield chunk
async def track_gen(hub: "pop.hub.Hub", worker_name, func_ref, *args, **kwargs):
    """
    Return an iterable coroutine and the index executed on
    """
    w_iter = hub.proc.WorkersIter[worker_name]
    # Round-robin selection of the next worker in the pool
    ind = next(w_iter)
    coro = hub.proc.run.ind_gen(worker_name, ind, func_ref, *args, **kwargs)
    return ind, coro
async def ind_gen(hub: "pop.hub.Hub", worker_name, _ind, func_ref, *args, **kwargs):
    """
    run the given iterator on the defined index
    :param _ind: The index of the specific worker to run on
    """
    workers = hub.proc.Workers[worker_name]
    worker = workers[_ind]
    payload = {"fun": "gen", "ref": func_ref, "args": args, "kwargs": kwargs}
    async for chunk in hub.proc.run.send(worker, payload):
        yield chunk
async def send(hub: "pop.hub.Hub", worker, payload):
    """
    Send the given payload to the given worker, yield iterations based on the
    returns from the remote.
    :param worker: The worker data dict (the "path" key locates its socket)
    :param payload: A msgpack-serializable dict describing the work
    """
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=worker["path"])
    writer.write(mp)
    await writer.drain()
    final_ret = True
    while True:
        ret = await reader.readuntil(hub.proc.DELIM)
        p_ret = ret[: -len(hub.proc.DELIM)]
        # The last byte before the delimiter is the frame flag (D or I)
        i_flag = p_ret[-1:]
        ret = msgpack.loads(p_ret[:-1], raw=False)
        if i_flag == hub.proc.D_FLAG:
            # break for the end of the sequence
            break
        yield ret
        final_ret = False
    if final_ret:
        # No intermediate frames were streamed; the final frame is the result
        yield ret

View File

@@ -0,0 +1,168 @@
"""
This module is used to manage the process started up by the pool. Work in this
module is used to manage the worker process itself and not other routines on
the hub this process was derived from
This is an exec, not a fork! This is a fresh memory space!
"""
# Import python libs
import os
import types
import asyncio
import pop.hub
# Import third party libs
import msgpack
# TODO: The workers should detect if their controlling process dies and terminate by themselves
# The controlling process will kill them when it exists, but if it exists hard then the workers
# Should be able to also clean themselves up
def start(hub: "pop.hub.Hub", sock_dir, ind, ref, ret_ref):
    """
    This function is called by the startup script to create a worker process
    :NOTE: This is a new process started from the shell, it does not have any
    of the process namespace from the creating process.
    This is an EXEC, NOT a FORK!
    :param sock_dir: The directory holding the pool's unix sockets
    :param ind: This worker's index within the pool
    :param ref: The socket file name this worker serves on
    :param ret_ref: The socket file name used to send returns to the parent
    """
    hub.proc.SOCK_DIR = sock_dir
    hub.proc.REF = ref
    hub.proc.SOCK_PATH = os.path.join(sock_dir, ref)
    hub.proc.RET_REF = ret_ref
    hub.proc.RET_SOCK_PATH = os.path.join(sock_dir, ret_ref)
    hub.proc.IND = ind
    # Run the keep-alive and the command server in the same loop
    hub.pop.loop.start(hub.proc.worker.hold(), hub.proc.worker.server())
async def hold(hub: "pop.hub.Hub"):
    """
    This function just holds the loop open by sleeping in a while loop
    """
    while True:
        # Nothing to do; the server coroutine handles all real work
        await asyncio.sleep(60)
async def server(hub: "pop.hub.Hub"):
    """
    Start the unix socket server to receive commands
    """
    # Every inbound connection is handled by hub.proc.worker.work
    await asyncio.start_unix_server(hub.proc.worker.work, path=hub.proc.SOCK_PATH)
async def work(hub: "pop.hub.Hub", reader, writer):
    """
    Process the incoming work
    Reads one delimited msgpack payload, dispatches on payload["fun"]
    ("sub", "run", "gen" or "setattr") and writes back the final framed reply
    """
    inbound = await reader.readuntil(hub.proc.DELIM)
    inbound = inbound[: -len(hub.proc.DELIM)]
    if msgpack.version < (1, 0, 0):
        # The encoding keyword was removed in msgpack 1.0
        payload = msgpack.loads(inbound, encoding="utf-8")
    else:
        payload = msgpack.loads(inbound)
    ret = b""
    if "fun" not in payload:
        ret = {"err": "Invalid format"}
    elif payload["fun"] == "sub":
        # Time to add a sub to the hub!
        try:
            hub.proc.worker.add_sub(payload)
            ret = {"status": True}
        except Exception as exc:
            ret = {"status": False, "exc": str(exc)}
    elif payload["fun"] == "run":
        # Time to do some work!
        try:
            ret = await hub.proc.worker.run(payload)
        except Exception as exc:
            ret = {"status": False, "exc": str(exc)}
    elif payload["fun"] == "gen":
        ret = await hub.proc.worker.gen(payload, reader, writer)
    elif payload["fun"] == "setattr":
        ret = await hub.proc.worker.set_attr(payload)
    ret = msgpack.dumps(ret, use_bin_type=True)
    # D_FLAG marks this frame as the final reply of the exchange
    ret += hub.proc.D_FLAG
    ret += hub.proc.DELIM
    writer.write(ret)
    await writer.drain()
    writer.close()
def add_sub(hub: "pop.hub.Hub", payload):
    """
    Add a new sub onto the hub for this worker
    :param payload: Dict with "args" and "kwargs" forwarded to hub.pop.sub.add
    """
    hub.pop.sub.add(*payload["args"], **payload["kwargs"])
async def gen(hub: "pop.hub.Hub", payload, reader, writer):
    """
    Run a generator and yield back the returns. Supports a generator and an
    async generator
    Intermediate values are streamed back as I_FLAG frames; the caller
    (work) sends the final D_FLAG frame with whatever this function returns
    """
    ref = payload.get("ref")
    args = payload.get("args", [])
    kwargs = payload.get("kwargs", {})
    ret = hub.pop.ref.last(ref)(*args, **kwargs)
    if isinstance(ret, types.AsyncGeneratorType):
        async for chunk in ret:
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif isinstance(ret, types.GeneratorType):
        for chunk in ret:
            rchunk = msgpack.dumps(chunk, use_bin_type=True)
            rchunk += hub.proc.I_FLAG
            rchunk += hub.proc.DELIM
            writer.write(rchunk)
            await writer.drain()
    elif asyncio.iscoroutine(ret):
        # Not a generator after all; just await and return the value
        return await ret
    else:
        return ret
    return ""
async def run(hub: "pop.hub.Hub", payload):
    """
    Execute the hub reference named in the payload with the payload's args
    and kwargs, awaiting the result if it turns out to be a coroutine.
    """
    target = hub.pop.ref.last(payload.get("ref"))
    result = target(*payload.get("args", []), **payload.get("kwargs", {}))
    if asyncio.iscoroutine(result):
        result = await result
    return result
async def set_attr(hub: "pop.hub.Hub", payload):
    """
    Assign the payload's value onto the hub at the payload's named reference.
    """
    hub.pop.ref.create(payload.get("ref"), payload.get("value"))
async def ret(hub: "pop.hub.Hub", payload):
    """
    Send a return payload to the spawning process. This return will be tagged
    with the index of the process that returned it
    :param payload: The msgpack-serializable data to report back
    :return: The decoded response from the spawning process
    """
    payload = {"ind": hub.proc.IND, "payload": payload}
    mp = msgpack.dumps(payload, use_bin_type=True)
    mp += hub.proc.DELIM
    reader, writer = await asyncio.open_unix_connection(path=hub.proc.RET_SOCK_PATH)
    writer.write(mp)
    await writer.drain()
    ret = await reader.readuntil(hub.proc.DELIM)
    ret = ret[: -len(hub.proc.DELIM)]
    writer.close()
    if msgpack.version < (1, 0, 0):
        # The encoding keyword was removed in msgpack 1.0
        return msgpack.loads(ret, encoding="utf-8")
    else:
        return msgpack.loads(ret)

View File

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
"""
Used to scan the given directories for loadable files
"""
# Import python libs
import os
import importlib.machinery
import collections
from typing import Any, Dict, Iterable
PY_END = (".py", ".pyc", ".pyo")
PYEXT_END = tuple(importlib.machinery.EXTENSION_SUFFIXES)
CYTHON_END = (".pyx",)
SKIP_DIRNAMES = ("__pycache__",)
def scan(dirs: Iterable[str]) -> Dict[str, Dict[str, Any]]:
"""
:param dirs: A list of locations to search for importables files
:return A description of importable files
"""
ret = collections.OrderedDict()
ret["python"] = collections.OrderedDict()
ret["cython"] = collections.OrderedDict()
ret["ext"] = collections.OrderedDict()
ret["imp"] = collections.OrderedDict()
for dir_ in dirs:
for fn_ in os.listdir(dir_):
_apply_scan(ret, dir_, fn_)
return ret
def _apply_scan(
ret: Dict[str, Dict[str, Any]], dir_: str, fn_: str
) -> None or Dict[str, Dict[str, Any]]:
"""
Convert the scan data into paths and refs
:param ret: The result of a scan()
:param dir_:
:param fn_:
"""
if fn_.startswith("_"):
return
if os.path.basename(dir_) in SKIP_DIRNAMES:
return
full = os.path.join(dir_, fn_)
if "." not in full:
return
bname = full[: full.rindex(".")]
if fn_.endswith(PY_END):
if bname not in ret["python"]:
ret["python"][bname] = {"path": full}
if fn_.endswith(CYTHON_END):
if bname not in ret["cython"]:
ret["cython"][bname] = {"path": full}
if fn_.endswith(PYEXT_END):
if bname not in ret["ext"]:
ret["ext"][bname] = {"path": full}

View File

@@ -0,0 +1,28 @@
#!/usr/bin/python3
import pop.hub
def pop_seed():
    """
    Entry point for the pop-seed console script: read the CLI config and
    generate a new project skeleton via hub.pop.seed.new
    """
    CONFIG = {
        "seed_name": {
            "positional": True,
            "help": "The name of the project that is being created",
        },
        "type": {
            "default": "p",
            "options": ["-t"],
            "help": 'The type of project to build, by default make a standalone project, but for a vetical app project pass a "v"',
        },
        "dyne": {
            "options": ["-d"],
            "default": [],
            "nargs": "*",
            "help": "A space delimited list of additional dynamic names for vertical app-merging",
        },
    }
    hub = pop.hub.Hub()
    hub.pop.sub.add("pop.mods.conf")
    hub.opts = hub.conf.reader.read(CONFIG)
    hub.pop.seed.new()

View File

@@ -0,0 +1,135 @@
# Import python libs
import inspect
# Import pop libs
import pop.exc
import pop.hub
import pop.loader
from typing import Any, Callable, Dict, Iterable, List
def contract(
    hub: "pop.hub.Hub", # pylint: disable=unused-argument
    raws: Iterable["pop.loader.LoadedMod"],
    mod: "pop.loader.LoadedMod",
):
    """
    Verify module level contract - functions only
    :param hub: The redistributed pop central hub
    :param raws: A list of loaded modules with contracts
    :param mod: A loader module
    :raises pop.exc.ContractSigException: When a required function is missing
        or its signature does not satisfy the contract
    """
    sig_errs = []
    sig_miss = []
    mname = mod.__name__
    for raw in raws:
        if isinstance(raw, pop.loader.LoadError):
            sig_errs.append(str(raw))
            continue
        else:
            for fun in raw._funcs:
                if fun.startswith("sig_"):
                    # A sig_NAME contract function requires NAME in the mod
                    tfun = fun[4:]
                    if tfun not in mod._funcs:
                        sig_miss.append(tfun)
                        continue
                    sig_errs.extend(sig(mod._funcs[tfun].func, raw._funcs[fun].func))
    if sig_errs or sig_miss:
        msg = ""
        if sig_errs:
            msg += f"Signature Errors in {mname}:\n"
            for err in sig_errs:
                msg += f"{err}\n"
        if sig_miss:
            msg += f"Signature Functions Missing in {mname}:\n"
            for err in sig_miss:
                msg += f"{err}\n"
        msg = msg.strip()
        raise pop.exc.ContractSigException(msg)
def sig_map(ver: Callable) -> Dict[str, Any]:
    """
    Generates the map dict for the signature verification
    :param ver: The contract "sig_" verification function to introspect
    :return: A dict describing the verifier's parameters:
        args: positional parameter names; v_pos: index of *args (-1 if absent);
        kw: names acceptable as keywords; kwargs: index of **kwargs (False if
        absent); ann: mapping of parameter name to annotation
    """
    vsig = inspect.signature(ver)
    vparams = list(vsig.parameters.values())
    vdat = {"args": [], "v_pos": -1, "kw": [], "kwargs": False, "ann": {}}
    for ind, param in enumerate(vparams):
        val = param.kind.value
        name = param.name
        if val == 0 or val == 1:
            # POSITIONAL_ONLY or POSITIONAL_OR_KEYWORD
            vdat["args"].append(name)
            if param.default is not inspect.Parameter.empty:
                # Is a KW, can be inside of **kwargs
                vdat["kw"].append(name)
        elif val == 2:
            # VAR_POSITIONAL (*args)
            vdat["v_pos"] = ind
        elif val == 3:
            # KEYWORD_ONLY
            vdat["kw"].append(name)
        elif val == 4:
            # VAR_KEYWORD (**kwargs)
            vdat["kwargs"] = ind
        if param.annotation is not inspect.Parameter.empty:
            vdat["ann"][name] = param.annotation
    return vdat


def sig(func: Callable, ver: Callable) -> List[str]:
    """
    Takes 2 functions, the first function is verified to have a parameter signature
    compatible with the second function
    :param func: The module function being validated
    :param ver: The contract "sig_" function to validate against
    :return: A list of human readable error strings, empty when compatible
    """
    errors = []
    fsig = inspect.signature(func)
    fparams = list(fsig.parameters.values())
    vdat = sig_map(ver)
    arg_len = len(vdat["args"])
    v_pos = False
    for ind, param in enumerate(fparams):
        val = param.kind.value
        name = param.name
        has_default = param.default is not inspect.Parameter.empty
        ann = param.annotation
        vann = vdat["ann"].get(name, inspect.Parameter.empty)
        if vann != ann:
            errors.append(f'Parameter, "{name}" is type "{str(ann)}" not "{str(vann)}"')
        if val == 2:
            v_pos = True
        if val == 0 or val == 1:
            if ind >= arg_len:  # Past available positional args
                if vdat["v_pos"] != -1:  # Has a *args
                    if ind >= vdat["v_pos"] and v_pos:
                        # Invalid unless it is a kw
                        if name not in vdat["kw"]:
                            # Is a kw
                            errors.append(f'Parameter "{name}" is invalid')
                        if vdat["kwargs"] is False:
                            errors.append(f'Parameter "{name}" not defined as kw only')
                    continue
                elif vdat["kwargs"] is not False and not has_default:
                    errors.append(
                        f'Parameter "{name}" is past available positional params'
                    )
                elif vdat["kwargs"] is False:
                    errors.append(
                        f'Parameter "{name}" is past available positional params'
                    )
            else:
                v_param = vdat["args"][ind]
                if v_param != name:
                    errors.append(
                        f'Parameter "{name}" does not have the correct name: {v_param}'
                    )
        if val == 2:
            if ind < vdat["v_pos"]:
                errors.append(
                    f'Parameter "{name}" is not in the correct position for *args'
                )
        if val == 3:
            if name not in vdat["kw"] and not vdat["kwargs"]:
                errors.append(f'Parameter "{name}" is not available as a kwarg')
        if val == 4:
            if vdat["kwargs"] is False:
                errors.append("Kwargs are not permitted as a parameter")
    return errors

View File

@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
# The pop release version string
version = "15"