first commit
This commit is contained in:
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,55 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Options that every pop-config CLI app exposes on its command line.
# "dyne": "__cli__" routes each option to whichever app is acting as the cli.
CLI_CONFIG = {
    "log_file": {"dyne": "__cli__"},
    "log_level": {"dyne": "__cli__"},
    "log_fmt_logfile": {"dyne": "__cli__"},
    "log_fmt_console": {"dyne": "__cli__"},
    "log_datefmt": {"dyne": "__cli__"},
    "log_plugin": {"dyne": "__cli__"},
}

# Full option definitions: default value, help text and argparse group.
CONFIG = {
    "log_file": {
        "dyne": "__cli__",
        # defaults to "<program name>.log" derived from sys.argv[0]
        "default": f"{os.path.splitext(os.path.split(sys.argv[0])[1])[0]}.log",
        "help": "The location of the log file",
        "group": "Logging Options",
    },
    "log_level": {
        "dyne": "__cli__",
        "default": "warning",
        "help": "Set the log level, either quiet, info, warning, or error",
        "group": "Logging Options",
    },
    "log_fmt_logfile": {
        "dyne": "__cli__",
        "default": "%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s] %(message)s",
        "help": "The format to be given to log file messages",
        "group": "Logging Options",
    },
    "log_fmt_console": {
        "dyne": "__cli__",
        "default": "[%(levelname)-8s] %(message)s",
        "help": "The log formatting used in the console",
        "group": "Logging Options",
    },
    "log_datefmt": {
        "dyne": "__cli__",
        "default": "%H:%M:%S",
        "help": "The date format to display in the logs",
        "group": "Logging Options",
    },
    "log_plugin": {
        "dyne": "__cli__",
        "default": "basic",
        "help": "The logging plugin to use",
        "group": "Logging Options",
    },
}

# This project defines no CLI subcommands of its own.
SUBCOMMANDS = {}

# Dynamic namespaces this project contributes to the hub.
DYNE = {
    "config": ["config"],
    "log": ["log"],
}
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,247 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Translate an options data structure into command line args
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import sys
|
||||
import inspect
|
||||
import argparse
|
||||
import dict_tools.update
|
||||
from typing import Any, Dict, List, Tuple
|
||||
|
||||
|
||||
def __init__(hub):
    # Sentinel object used to mark "this option was not passed on the cli";
    # clean_defaults() later strips values still equal to it.
    hub.config.args.DEFAULT = object()
|
||||
|
||||
|
||||
def _keys(opts):
|
||||
"""
|
||||
Return the keys in the right order
|
||||
"""
|
||||
return sorted(opts, key=lambda k: (opts[k].get("display_priority", sys.maxsize), k))
|
||||
|
||||
|
||||
def gather(
    hub, raw: Dict[str, Any], cli: str, parse_cli: bool
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """
    Return the cli arguments as they are parsed.

    :param raw: merged conf.py data for every loaded source
    :param cli: name of the project acting as the authoritative CLI
    :param parse_cli: when False, skip argparse entirely and return empty dicts
    :return: (parsed cli values, the cli option definitions used to parse them)
    """
    if not parse_cli:
        return {}, {}
    raw_cli = hub.config.args.get_cli(raw, cli)
    hub.config.args.init_parser()
    hub.config.args.subparsers(raw, cli)
    hub.config.args.setup(raw_cli)
    cli_args = hub.config.args.parse()
    # Post-process: apply per-option "render" hooks, then drop options the
    # user never passed (still set to the DEFAULT sentinel).
    cli_args = hub.config.args.render(cli_args, raw_cli)
    cli_args = hub.config.args.clean_defaults(cli_args)
    return cli_args, raw_cli
|
||||
|
||||
|
||||
def clean_defaults(hub, cli_args: Dict[str, Any]) -> Dict[str, Any]:
    """
    Strip out every option still set to the DEFAULT sentinel.

    Any key whose value is identical to ``hub.config.args.DEFAULT`` was never
    supplied on the command line and must not shadow values from other
    configuration layers.
    """
    sentinel = hub.config.args.DEFAULT
    return {key: val for key, val in cli_args.items() if val is not sentinel}
|
||||
|
||||
|
||||
def init_parser(hub):
    """
    Create the shared ArgumentParser on first use; later calls are no-ops
    and keep the existing parser instance.
    """
    hub.config.ARGS.setdefault("parser", argparse.ArgumentParser())
|
||||
|
||||
|
||||
def get_cli(hub, raw: Dict[str, Any], cli: str) -> Dict[str, Any]:
    """
    Gather the arguments that need to be parsed by the CLI.

    Each CLI_CONFIG entry of the cli project is merged with its full CONFIG
    definition and, when the entry names a "source" project, that project's
    CONFIG entry. Every resulting default is replaced with the DEFAULT
    sentinel so unset options can be detected after parsing.
    """
    ret = {}
    main = raw.get(cli, {}).get("CLI_CONFIG")
    main_raw = raw.get(cli, {}).get("CONFIG")

    for key, data in main.items():
        ret[key] = {}
        dict_tools.update.update(ret[key], data)
        if key in main_raw:
            dict_tools.update.update(ret[key], main_raw[key])
        if "source" in data:
            # pull the authoritative definition from the source project
            src = raw.get(data["source"], {}).get("CONFIG", {}).get(key)
            if src is not None:
                dict_tools.update.update(ret[key], src)
        if "default" in ret[key]:
            # sentinel marks "not passed on the cli"
            ret[key]["default"] = hub.config.args.DEFAULT
    # every CLI automatically exposes --version
    ret.update(hub.config.version.CONFIG)
    return ret
|
||||
|
||||
|
||||
def subparsers(hub, raw: Dict[str, Any], cli: str) -> bool:
    """
    Look over the data and extract and set up the subparsers for subcommands.

    The chosen subcommand is exposed after parsing under the "_subparser_"
    key; the per-subcommand parsers are cached in hub.config.ARGS["subs"].
    """
    subs = raw.get(cli, {}).get("SUBCOMMANDS")
    if not subs:
        # subcommands are optional; nothing to set up
        return True
    hub.config.ARGS["sub"] = hub.config.ARGS["parser"].add_subparsers(
        dest="_subparser_"
    )
    hub.config.ARGS["subs"] = {}
    for arg in _keys(subs):
        if arg in ("_argparser_",):
            continue
        comps = subs[arg]
        kwargs = {}
        if "help" in comps:
            kwargs["help"] = comps["help"]
        if "desc" in comps:
            kwargs["description"] = comps["desc"]
        hub.config.ARGS["subs"][arg] = hub.config.ARGS["sub"].add_parser(arg, **kwargs)
    return True
|
||||
|
||||
|
||||
def setup(hub, raw_cli: Dict[str, Any]) -> Dict[str, Any]:
    """
    Take in a pre-defined dict and translate it to args

    opts dict:
        <arg>:
            [group]: foo
            [default]: bar
            [action]: store_true
            [options]: # arg will be turned into --arg
                - '-A'
                - '--cheese'
            [choices]:
                - foo
                - bar
                - baz
            [nargs]: +
            [type]: int
            [dest]: cheese
            help: Some great help message

    :return: mapping of dest-name -> declared default, for every option that
        declared one (the parser itself carries the DEFAULT sentinel)
    """
    # TODO: This should be broken up
    defaults = {}
    groups = {}
    ex_groups = {}
    for arg in _keys(raw_cli):
        if arg in ("_argparser_",):
            continue
        comps = raw_cli[arg]
        positional = comps.pop("positional", False)
        if positional:
            args = [arg]
        else:
            # non-positional: "my_opt" becomes "--my-opt" plus any aliases
            args = [f"--{arg.replace('_', '-')}"]
            for o_str in comps.get("options", ()):
                if len(o_str) == 1:
                    o_str = f"-{o_str}"
                elif not o_str.startswith("-"):
                    o_str = f"--{o_str}"
                if o_str not in args:
                    args.append(o_str)
        kwargs = {}
        kwargs["action"] = action = comps.get("action", None)

        if action is None:
            # Non existing option defaults to a StoreAction in argparse
            action = hub.config.ARGS["parser"]._registry_get("action", action)

        # only forward kwargs the chosen Action's __init__ actually accepts
        if isinstance(action, str):
            signature = inspect.signature(
                hub.config.ARGS["parser"]._registry_get("action", action).__init__
            )
        else:
            signature = inspect.signature(action.__init__)

        for param in signature.parameters:
            if param == "self" or param not in comps:
                continue
            if param == "dest":
                kwargs["dest"] = comps.get("dest", arg)
                continue
            if param == "help":
                kwargs["help"] = comps.get("help", "THIS NEEDS SOME DOCUMENTATION!!")
                continue
            if param == "default":
                # remember the declared default separately
                defaults[comps.get("dest", arg)] = comps[param]
            kwargs[param] = comps[param]

        if "group" in comps:
            # named argument groups are created lazily and cached
            group = comps["group"]
            if group not in groups:
                groups[group] = hub.config.ARGS["parser"].add_argument_group(group)
            groups[group].add_argument(*args, **kwargs)
            continue
        if "ex_group" in comps:
            group = comps["ex_group"]
            if group not in ex_groups:
                ex_groups[group] = hub.config.ARGS[
                    "parser"
                ].add_mutually_exclusive_group()
            ex_groups[group].add_argument(*args, **kwargs)
            continue
        if "subcommands" in comps:
            subs = comps["subcommands"]
            if not isinstance(subs, list):
                subs = [subs]
            for sub in subs:
                if sub == "_global_":
                    # add to the main parser and every subcommand parser
                    if "subs" not in hub.config.ARGS:
                        continue
                    hub.config.ARGS["parser"].add_argument(*args, **kwargs)
                    for named, sparse in hub.config.ARGS["subs"].items():
                        sparse.add_argument(*args, **kwargs)
                    continue
                sparse = hub.config.ARGS.get("subs", {}).get(sub)
                if not sparse:
                    # Maybe raise exception here? Malformed config?
                    continue
                sparse.add_argument(*args, **kwargs)
            continue
        hub.config.ARGS["parser"].add_argument(*args, **kwargs)
    return defaults
|
||||
|
||||
|
||||
def parse(
    hub,
    args: List[str] = None,
    namespace: argparse.Namespace = None,
    only_parse_known_arguments: bool = False,
) -> Dict[str, Any]:
    """
    Parse the command line options.

    :param args: argument list; argparse falls back to sys.argv when None
    :param namespace: optional pre-populated argparse namespace
    :param only_parse_known_arguments: when True, unknown arguments are
        collected under the "_unknown_args_" key instead of raising
    :return: the parsed options as a plain dict
    """
    parser = hub.config.ARGS["parser"]
    if only_parse_known_arguments:
        opts, unknown = parser.parse_known_args(args, namespace)
        result = vars(opts)
        result["_unknown_args_"] = unknown
    else:
        result = vars(parser.parse_args(args, namespace))
    # remember which subcommand (if any) was selected
    hub.SUBPARSER = result.get("_subparser_", None)
    return result
|
||||
|
||||
|
||||
def render(hub, cli_args: Dict[str, Any], raw_cli: Dict[str, Any]) -> Dict[str, Any]:
    """
    Apply per-option "render" hooks to values passed on the cli.

    Only options whose definition carries a "render" key, that appear in the
    parsed args, and whose value differs from the declared default are run
    through the configured renderer.
    """
    for name, spec in raw_cli.items():
        if name not in cli_args or "render" not in spec:
            continue
        value = cli_args[name]
        if spec["default"] != value:
            # The value was changed on the cli, render it
            cli_args[name] = hub.config.render.init.process(spec["render"], value)
    return cli_args
|
||||
Binary file not shown.
@@ -0,0 +1,89 @@
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
class _DefaultOption:
    # Sentinel class marking "value still at its declared default" while the
    # real defaults are temporarily pulled out of the raw config tree.
    pass
|
||||
|
||||
|
||||
def _insert_default_placeholders(raw):
    """
    Swap every declared default for the ``_DefaultOption`` marker.

    :param raw: the merged conf.py tree, mutated in place
    :return: the saved defaults keyed ``{imp: {key: default}}`` so they can
        be restored later
    :raises KeyError: when a config entry declares no default
    """
    saved = {}
    for imp, conf in raw.items():
        saved[imp] = {}
        for key, data in conf["CONFIG"].items():
            if "default" not in data:
                raise KeyError(f"No default value for '{key}' in '{imp}'s conf.py")
            saved[imp][key] = data["default"]
            data["default"] = _DefaultOption
    return saved
|
||||
|
||||
|
||||
def _restore_raw_defaults(raw, defaults):
|
||||
# undo our modification of the raw data structure
|
||||
for imp in raw:
|
||||
for key, data in raw[imp]["CONFIG"].items():
|
||||
raw[imp]["CONFIG"][key]["default"] = defaults[imp][key]
|
||||
|
||||
|
||||
def _replace_default_placeholders(ret, defaults):
    """
    Swap any remaining ``_DefaultOption`` markers in *ret* back to the saved
    default values and return the (mutated) result.
    """
    for imp, values in ret.items():
        for key, current in values.items():
            if current is _DefaultOption:
                values[key] = defaults[imp][key]
    return ret
|
||||
|
||||
|
||||
def _reroot_paths(defaults, root):
    """
    Rewrite default path values in place so they live under *root*.

    ``root_dir`` is replaced outright; other keys are only touched when they
    end in ``_dir``/``_path``/``_file`` and hold an absolute path.
    """
    for imp in defaults:
        for key, val in defaults[imp].items():
            if key == "root_dir":
                defaults[imp][key] = root
            elif (key.endswith(("_dir", "_path", "_file"))) and os.path.isabs(
                val or ""
            ):
                # only update absolute paths for keys
                # ending in _dir, _path or _file
                defaults[imp][key] = _reroot_path(val, imp, root)
|
||||
|
||||
|
||||
def _get_root(ret, cli):
    """
    Work out the effective root directory for path re-rooting.

    :return: the configured root_dir; ``~/.<cli>`` when root_dir was left at
        its default and the process is not running as root; None when the
        roots system should stay disabled.
    """
    if "root_dir" not in ret.get(cli, {}):
        # there is no root_dir parameter, do not activate roots system
        # otherwise there would be no way to *disable* the system
        # either by leaving out root_dir, or manually specifying root_dir=/
        return None

    # root_dir is not configured, maybe use home directory
    if ret.get(cli, {})["root_dir"] is _DefaultOption:
        if hasattr(os, "geteuid") and os.geteuid() != 0:
            # unprivileged user: keep everything under ~/.<cli>
            return os.path.expanduser(f"~{os.sep}.{cli}")
        else:
            return None

    return ret.get(cli, {})["root_dir"]
|
||||
|
||||
|
||||
def _reroot_path(val, imp, new_root):
|
||||
match = re.search(f"{os.sep + imp}($|{os.sep})", val)
|
||||
if match:
|
||||
if new_root.endswith(os.sep):
|
||||
# val is guaranteed to start with '/' as it's an absolute path
|
||||
# remove one of the duplicate os separators
|
||||
return new_root[:-1] + val
|
||||
else:
|
||||
return new_root + val
|
||||
return val
|
||||
|
||||
|
||||
def pre_apply(hub, ctx):
    """
    Contract hook run before config.order.apply: pull the declared defaults
    out of the raw tree (replacing them with placeholders) and stash them on
    the contract context cache for post_apply.
    """
    kwargs = ctx.get_arguments()
    ctx.cache["root_defaults"] = _insert_default_placeholders(kwargs["raw"])
|
||||
|
||||
|
||||
def post_apply(hub, ctx):
    """
    Contract hook run after config.order.apply: restore the raw defaults,
    re-root the saved default paths when a root dir is active, and replace
    any remaining placeholders in the result with the (possibly re-rooted)
    defaults.
    """
    kwargs = ctx.get_arguments()
    _restore_raw_defaults(kwargs["raw"], ctx.cache["root_defaults"])
    root = _get_root(ctx.ret, kwargs["cli"])
    if root:
        _reroot_paths(ctx.cache["root_defaults"], root)
    _replace_default_placeholders(ctx.ret, ctx.cache["root_defaults"])
|
||||
@@ -0,0 +1,101 @@
|
||||
"""
|
||||
Find the conf.py files specified in sources
|
||||
"""
|
||||
# Import python libs
|
||||
import importlib
|
||||
import copy
|
||||
import os
|
||||
import dict_tools
|
||||
|
||||
|
||||
def _load_pyimp(hub, imp):
    """
    Load up a python path, parse it and return the conf dataset.

    :param imp: importable package name whose ``<imp>.conf`` module holds the
        sections listed in hub.config.SECTIONS
    :return: (directory containing the conf module, {imp: {section: data}})
    :raises ImportError: when ``<imp>.conf`` cannot be imported
    """
    ret = {imp: {}}
    cmod = importlib.import_module(f"{imp}.conf")
    path = os.path.dirname(cmod.__file__)
    for sec in hub.config.SECTIONS:
        # deepcopy so later mutation does not corrupt the imported module
        ret[imp][sec] = copy.deepcopy(getattr(cmod, sec, {}))
    return path, ret
|
||||
|
||||
|
||||
def load(hub, sources, dyne_names, cli):
    """
    Look over the sources list and find the correct conf.py files
    """
    # Dynamic names
    # First gather the defined sources, they are the authoritative ones
    # Then detect what the dynamic names are in the source
    # Merged the sources dyne names with any passed dyne names
    # Load up and extend the raw with all of the dynamic names
    raw = {}
    dyne = hub.pop.dyne.get()
    if not isinstance(sources, list):
        sources = [sources]
    for source in sources:
        try:
            path, data = _load_pyimp(hub, source)
        except ImportError:
            # sources without a conf.py are silently skipped
            continue
        dict_tools.update.update(raw, data)
    # merge dyne names declared by the sources with the caller-provided ones
    dnames = set(dyne_names)
    for source in raw:
        for dname in raw[source]["DYNE"]:
            dnames.add(dname)
    for name in dnames:
        if name in dyne:
            if name not in raw:
                raw[name] = {"CONFIG": {}, "CLI_CONFIG": {}}
            if "CONFIG" in dyne[name]:
                config_draw = {}
                for key, val in dyne[name]["CONFIG"].items():
                    new_dyne = val.get("dyne")
                    if new_dyne == "__cli__":
                        # "__cli__" resolves to the active cli project
                        new_dyne = cli
                    if new_dyne:
                        val["source"] = new_dyne
                    config_draw[key] = val
                    if (
                        key in dyne[name]["CLI_CONFIG"]
                        and "dyne" not in dyne[name]["CLI_CONFIG"][key]
                    ):
                        dyne[name]["CLI_CONFIG"][key]["dyne"] = new_dyne
                dict_tools.update.update(raw[cli]["CONFIG"], config_draw)
            if "CLI_CONFIG" in dyne[name]:
                cli_draw = {}
                for key, val in dyne[name]["CLI_CONFIG"].items():
                    new_dyne = val.get("dyne")
                    if new_dyne == "__cli__":
                        new_dyne = cli
                    if new_dyne:
                        val["source"] = new_dyne
                    cli_draw[key] = val
                dict_tools.update.update(raw[cli]["CLI_CONFIG"], cli_draw)
            if "SUBCOMMANDS" in dyne[name]:
                subcmd_draw = {}
                for key, val in dyne[name]["SUBCOMMANDS"].items():
                    new_dyne = val.get("dyne")
                    if new_dyne == "__cli__":
                        new_dyne = cli
                    if new_dyne:
                        val["source"] = new_dyne
                    subcmd_draw[key] = val
                # NOTE(review): raw[cli] is only guaranteed to have CONFIG and
                # CLI_CONFIG when freshly initialized above — confirm
                # SUBCOMMANDS is always present for the cli project
                dict_tools.update.update(raw[cli]["SUBCOMMANDS"], subcmd_draw)
    return raw
|
||||
|
||||
|
||||
def verify(hub, opts):
    """
    Verify that the environment and all named directories in the
    configuration exist, creating missing directories on the way.

    ``root_dir`` and ``config_dir`` are skipped — they are location prefixes
    managed elsewhere and must not be created blindly.

    :param opts: the fully layered options, ``{imp: {key: value}}``
    :raises FileExistsError: when a ``*_dir`` path exists but is not a directory
    """
    for imp in opts:
        for key, val in opts[imp].items():
            if key in ("root_dir", "config_dir"):
                continue
            if key.endswith("_dir"):
                # exist_ok avoids the isdir/makedirs TOCTOU race the old
                # check-then-create dance had
                os.makedirs(val, exist_ok=True)
|
||||
Binary file not shown.
@@ -0,0 +1,122 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Configuration file core loading functions
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import os
|
||||
import glob
|
||||
import fnmatch
|
||||
import dict_tools.update
|
||||
|
||||
|
||||
def parse(hub, raw, cli, os_vars, cli_args, loader):
    """
    Determine if a config file or a config dir has been set up and load it up!

    Entry point for the config.file sub. The effective "config" and
    "config_dir" values are resolved in priority order:
    cli argument > OS environment > declared default.

    Values loaded from the config dir override values from the single
    config file.
    """
    default = raw[cli]["CONFIG"].get("config", {}).get("default")
    default_dir = raw[cli]["CONFIG"].get("config_dir", {}).get("default")
    os_conf = os_vars.get("config", default)
    os_dir = os_vars.get("config_dir", default_dir)
    conf = cli_args.get("config", os_conf)
    dir_ = cli_args.get("config_dir", os_dir)
    # NOTE: the original had these two result names swapped (dir results in
    # "file_opts" and vice versa); the merge order below is unchanged.
    dir_opts = {}
    file_opts = {}
    if dir_:
        dir_opts = hub.config.file.init.load_dir(dir_, loader)
    if conf:
        file_opts = hub.config.file.init.load(conf, loader)
    # config-dir values take precedence over the single config file
    return dict_tools.update.update(file_opts, dir_opts)
|
||||
|
||||
|
||||
def load(hub, paths, loader, includes=True):
    """
    Load one or more configuration files and merge them into a single dict.

    Each entry in *paths* is also expanded as a glob pattern. Fixes two
    defects of the original: the caller's list is no longer mutated by the
    glob expansion, and duplicate paths (a literal path re-produced by its
    own glob) are only loaded once.

    :param paths: a path/pattern or list of paths/patterns
    :param loader: name of the renderer used to parse each file
    :param includes: when True, process include/include_dir directives
    """
    if not isinstance(paths, list):
        paths = [paths]
    # copy before extending so the caller's list is untouched
    expanded = list(paths)
    for fn in paths:
        expanded.extend(glob.glob(fn))
    opts = {}
    # dict.fromkeys() de-duplicates while preserving order
    for fn in dict.fromkeys(expanded):
        fn_data = hub.config.render.init.load_file(loader, fn)
        if includes:
            fn_data = hub.config.file.init.proc_include(fn, fn_data, loader)
        dict_tools.update.update(opts, fn_data)
    return opts
|
||||
|
||||
|
||||
def load_dir(
    hub,
    confdir,
    loader,
    includes=True,
    recurse=True,
):
    """
    Load takes a directory location to scan for configuration files. These
    files will be read in.

    :param confdir: a directory (or glob pattern), or a possibly nested list
        of them
    :param loader: name of the renderer used to parse each file
    :param includes: when True, process include/include_dir directives
    :param recurse: when True, walk subdirectories too
    """
    opts = {}
    if not isinstance(confdir, list):
        confdir = [confdir]
    confdirs = []
    for dirs in confdir:
        if not isinstance(dirs, (list, tuple)):
            dirs = [dirs]
        for dir_ in dirs:
            # each entry may be a glob pattern
            confdirs.extend(glob.glob(dir_))
    paths = []
    for dir_ in confdirs:
        dirpaths = []
        if os.path.isdir(dir_):
            if not recurse:
                for fn_ in os.listdir(dir_):
                    path = os.path.join(dir_, fn_)
                    if os.path.isdir(path):
                        # Don't process directories
                        continue
                    dirpaths.append(path)
            else:
                for root, dirs, files in os.walk(dir_):
                    for fn_ in files:
                        path = os.path.join(root, fn_)
                        dirpaths.append(path)

        # Sort confdir directory paths like:
        # /b.txt
        # /c.txt
        # /a/x.txt
        # /b/x.txt
        paths.extend(sorted(dirpaths, key=lambda p: (p.count(os.path.sep), p)))
    opts = dict_tools.update.update(
        opts, hub.config.file.init.load(paths, loader, includes)
    )
    return opts
|
||||
|
||||
|
||||
def proc_include(hub, fn, opts, loader):
    """
    Process include and include_dir

    Relative include paths are resolved against the directory of *fn*.
    Included data is merged into *opts* (included values win) and includes
    are processed recursively.
    """
    dirname = os.path.dirname(fn)
    if opts.get("include_dir"):
        idir = opts.pop("include_dir")
        if not idir.startswith(os.path.abspath(os.sep)):
            # relative include dir: anchor it next to the including file
            idir = os.path.join(dirname, idir)
        opts = dict_tools.update.update(
            opts, hub.config.file.init.load_dir(idir, loader)
        )
        # recurse with a synthetic filename inside the include dir
        hub.config.file.init.proc_include(os.path.join(idir, "f"), opts, loader)
    if opts.get("include"):
        ifn = opts.pop("include")
        if not ifn.startswith(os.path.abspath(os.sep)):
            ifn = os.path.join(dirname, ifn)
        opts = dict_tools.update.update(opts, hub.config.file.init.load(ifn, loader))
        hub.config.file.init.proc_include(ifn, opts, loader)
    return opts
|
||||
@@ -0,0 +1,9 @@
|
||||
def __init__(hub):
    """
    Load the subdirs for conf
    """
    hub.pop.sub.add(dyne_name="log")
    hub.pop.sub.load_subdirs(hub.config, recurse=True)
    # shared argparse state, populated lazily by config.args
    hub.config.ARGS = {}
    # sections read from each project's conf.py
    hub.config.SECTIONS = ("CONFIG", "CLI_CONFIG", "SUBCOMMANDS", "DYNE")
    # sections that may carry OS-environment variable lookups
    hub.config.CONFIG_SECTIONS = ("CONFIG", "CLI_CONFIG")
|
||||
@@ -0,0 +1,32 @@
|
||||
from typing import List
|
||||
|
||||
|
||||
def load(
    hub,
    sources: List[str],
    cli: str = None,
    dyne_names: List[str] = None,
    loader: str = "yaml",
    parse_cli: bool = True,
    logs: bool = True,
):
    """
    Load up the configs from the integrate system.

    Gathers defaults, config files, OS environment values and cli arguments,
    layers them in precedence order, and publishes the result on hub.OPT.

    :param sources: project name(s) whose conf.py files to load; "pop_config"
        is always added
    :param cli: the project acting as the authoritative CLI
    :param dyne_names: extra dynamic namespaces to merge in
    :param loader: renderer used for config files
    :param parse_cli: when False, skip argparse entirely
    :param logs: when True, set up the configured logging plugin
    """
    if not isinstance(sources, list):
        sources = [sources]
    else:
        # copy so the append below does not mutate the caller's list
        sources = list(sources)
    sources.append("pop_config")
    if dyne_names is None:
        dyne_names = []
    raw = hub.config.dirs.load(sources, dyne_names, cli)
    os_vars = hub.config.os.init.gather(raw)
    cli_args, raw_cli = hub.config.args.gather(raw, cli, parse_cli)
    if cli_args.get("version"):
        # prints the version and exits the process
        hub.config.version.run(cli)
    configs = hub.config.file.init.parse(raw, cli, os_vars, cli_args, loader)
    opt = hub.config.order.apply(raw, raw_cli, cli, cli_args, os_vars, configs)
    hub.OPT = hub.pop.data.imap(opt)

    if logs:
        log_plugin = hub.OPT[sources[0]].get("log_plugin")
        getattr(hub, f"log.{log_plugin}.setup")(hub.OPT[sources[0]])
|
||||
@@ -0,0 +1,31 @@
|
||||
def apply(hub, raw, raw_cli, cli, cli_args, os_vars, configs):
    """
    Layer the configuration sources in increasing precedence:
    declared defaults < config files < OS environment < cli arguments.

    :return: ``{imp: {key: value}}`` namespaced by component
    """
    ret = {}
    # 1) declared defaults from the raw conf.py data
    for imp, conf in raw.items():
        ret[imp] = {
            key: data["default"]
            for key, data in conf["CONFIG"].items()
            if "default" in data
        }
    # 2) config files — assumes the namespace approach, where components are
    # namespaced by the user (could be made pluggable later)
    for imp, values in configs.items():
        ret.setdefault(imp, {}).update(values)
    # 3) OS environment values
    for imp, values in os_vars.items():
        ret[imp].update(values)
    # 4) cli arguments win; route each to its declared source namespace
    for key, value in cli_args.items():
        if key not in raw_cli:
            continue
        target = raw_cli[key].get("source", cli)
        ret[target][key] = value
    return ret
|
||||
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,28 @@
|
||||
"""
|
||||
The os sub is used to gather configuration options from the OS facility
|
||||
to send configuration options into applications.
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
|
||||
|
||||
def gather(hub, raw):
    """
    Collect the keys that need to be found and pass them to the
    os specific loaded plugin

    :return: ``{source_project: {option: value}}`` for every option that
        declares an "os" variable name found by the system plugin
    """
    ret = {}
    for imp in raw:
        for sec in hub.config.CONFIG_SECTIONS:
            if sec not in raw[imp]:
                continue
            for key in raw[imp][sec]:
                osvar = raw[imp][sec][key].get("os", None)
                if osvar is not None:
                    val = hub.config.os.system.collect(osvar)
                    if val is not None:
                        # credit the value to the option's source project
                        src = raw[imp][sec][key].get("source", imp)
                        if src not in ret:
                            ret[src] = {}
                        ret[src][key] = val
    return ret
|
||||
@@ -0,0 +1,26 @@
|
||||
"""
|
||||
Read in keys from *NIX like oses - AKA Environement variables
|
||||
"""
|
||||
# Import python libs
|
||||
import os
|
||||
|
||||
__virtualname__ = "system"
|
||||
|
||||
|
||||
def __virtual__(hub):
    """
    Don't load on Windows, this is for *nix style platforms
    """
    # TODO: detect if not windows — currently loads everywhere
    return True
|
||||
|
||||
|
||||
def collect(hub, key):
    """
    Collect the option from an environment variable if present.

    :param key: variable name; matched against the upper-cased environment
    :return: the environment value, or None when the variable is unset
    """
    # os.environ.get replaces the membership-test-then-index dance and the
    # dead `ret = {}` local the original carried
    return os.environ.get(key.upper())
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,46 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from typing import Any, Dict, List
|
||||
|
||||
try:
|
||||
import yaml
|
||||
|
||||
HAS_YAML = True
|
||||
except ImportError:
|
||||
HAS_YAML = False
|
||||
|
||||
__virtualname__ = "cli"
|
||||
|
||||
|
||||
def __virtual__(hub):
    # This renderer parses values with PyYAML; refuse to load without it.
    if HAS_YAML:
        return True
    return (False, "PyYaml could not be loaded")
|
||||
|
||||
|
||||
def load(hub, path):
    """
    Read a config file line by line, rendering each line with the cli
    renderer, and merge the results into one dict. Returns {} when the file
    does not exist.

    NOTE(review): the file is opened in binary mode so each ``line`` is
    bytes, while ``render`` tests ``"=" in v`` against str — confirm the
    expected encoding / that this path is exercised.
    NOTE(review): ``render`` returns a list, which is then passed to
    ``dict.update`` — verify the intended structure.
    """
    try:
        with open(path, "rb") as fp_:
            ret = {}
            for line in fp_:
                ret.update(hub.config.render.cli.render(line))
            return ret
    except FileNotFoundError:
        pass
    return {}
|
||||
|
||||
|
||||
def render(hub, val: "Union[List[str], str]") -> List[Any]:
    """
    Render one or more cli strings with YAML.

    Strings of the form ``key=value`` become ``{key: parsed_value}``;
    anything else is YAML-parsed as-is.

    :param val: a single string or a list of strings
    :return: the list of parsed results

    Fixes the original annotation ``List[str] or str``, which is a value
    expression evaluating to plain ``List[str]``, not a union; the string
    form avoids needing ``Union`` in scope at definition time.
    """
    if isinstance(val, str):
        val = [val]
    ret = []
    for item in val:
        if "=" in item:
            key, raw = item.split("=", maxsplit=1)
            ret.append({key: yaml.safe_load(raw)})
        else:
            ret.append(yaml.safe_load(item))

    return ret
|
||||
@@ -0,0 +1,42 @@
|
||||
# The render process for the cli does NOT use the rend project for a couple
|
||||
# of reasons
|
||||
# 1. Config needs to be very early in the startup, therefore it cannot use
|
||||
# asyncio, all rend funcs use asyncio
|
||||
# 2. Config will not allow for template wrapping as the render is just a
|
||||
# single command line render
|
||||
|
||||
|
||||
def process(hub, renderer, value):
    """
    Take a renderer and a value, process it, and return the processed value

    This is intended to load a string through the config render system
    by dispatching to ``config.render.<renderer>.render``.
    """
    return getattr(hub, f"config.render.{renderer}.render")(value)
|
||||
|
||||
|
||||
def pipe(hub, dpipe, data):
    """
    Run *data* through every renderer named in *dpipe*, in order, feeding
    each stage's output into the next.

    Renderer names supplied as bytes (as parsed from a she-bang line) are
    decoded to str first.
    """
    result = data
    for stage in dpipe:
        name = stage.decode() if isinstance(stage, bytes) else stage
        result = hub.config.render.init.process(name, result)
    return result
|
||||
|
||||
|
||||
def load_file(hub, renderer, fn):
    """
    Load up a file with the passed renderer unless the file contains a
    renderer she-bang line

    A leading ``#!name1|name2`` line selects a render pipe instead of the
    default renderer. Empty files yield {}.
    """
    with open(fn, "rb") as rfh:
        data = rfh.read()
    if not data:
        return {}
    if data.startswith(b"#!"):
        # the she-bang names a pipe of renderers separated by "|"
        dpipe = data[2 : data.index(b"\n")].split(b"|")
        return hub.config.render.init.pipe(dpipe, data)
    else:
        return hub.config.render.init.process(renderer, data)
|
||||
@@ -0,0 +1,32 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the JSON loader interface
|
||||
"""
|
||||
|
||||
# Import python libs
|
||||
import json
|
||||
|
||||
__virtualname__ = "json"
|
||||
|
||||
|
||||
def __virtual__(hub):
    # json is in the standard library, so this renderer is always available
    return True
|
||||
|
||||
|
||||
def load(hub, path):
    """
    Use json to read in a file.

    :param path: path of the JSON file to read
    :return: the parsed data, or {} when the file does not exist
    """
    try:
        # json.load streams from the file object directly instead of the
        # read-then-loads round trip
        with open(path, "r") as fp_:
            return json.load(fp_)
    except FileNotFoundError:
        return {}
|
||||
|
||||
|
||||
def render(hub, val):
    """
    Parse the given string as JSON and return the result.
    """
    return json.loads(val)
|
||||
@@ -0,0 +1,40 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the TOML loader interface
|
||||
"""
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import toml
|
||||
|
||||
HAS_TOML = True
|
||||
except ImportError:
|
||||
HAS_TOML = False
|
||||
|
||||
__virtualname__ = "toml"
|
||||
# __contracts__ = [__virtualname__]
|
||||
|
||||
|
||||
def __virtual__(hub):
    # Only load this renderer when the third-party toml package imported.
    if HAS_TOML:
        return True
    return (False, "TOML could not be loaded")
|
||||
|
||||
|
||||
def load(hub, path):
    """
    Use toml to read in a file.

    :param path: path of the TOML file to read
    :return: the parsed data, or {} when the file does not exist

    Fixes the original ``toml.load(fp_.read())``, which handed the file's
    *bytes* to ``toml.load`` — that function expects a path or file object
    and raises on raw bytes; the file is also opened in text mode since the
    toml package works on str.
    """
    try:
        with open(path, "r") as fp_:
            return toml.load(fp_)
    except FileNotFoundError:
        pass
    return {}
|
||||
|
||||
|
||||
def render(hub, val):
    """
    Parse the given string as TOML and return the result.
    """
    return toml.loads(val)
|
||||
@@ -0,0 +1,39 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Define the yaml loader interface
|
||||
"""
|
||||
|
||||
# Import third party libs
|
||||
try:
|
||||
import yaml
|
||||
|
||||
HAS_YAML = True
|
||||
except ImportError:
|
||||
HAS_YAML = False
|
||||
|
||||
__virtualname__ = "yaml"
|
||||
|
||||
|
||||
def __virtual__(hub):
    # Only load this renderer when PyYAML imported successfully.
    if HAS_YAML:
        return True
    return (False, "PyYaml could not be loaded")
|
||||
|
||||
|
||||
def load(hub, path):
    """
    use yaml to read in a file

    :param path: path of the YAML file to read
    :return: the parsed data, or {} when the file does not exist
    """
    try:
        with open(path, "rb") as fp_:
            # safe_load: never construct arbitrary Python objects
            return yaml.safe_load(fp_.read())
    except FileNotFoundError:
        pass
    return {}
|
||||
|
||||
|
||||
def render(hub, val):
    """
    Parse the given string (or bytes) as YAML and return the result.
    """
    return yaml.safe_load(val)
|
||||
@@ -0,0 +1,24 @@
|
||||
"""
|
||||
Support embedding version number lookup into cli
|
||||
"""
|
||||
# Import python libs
|
||||
import importlib
|
||||
import sys
|
||||
|
||||
|
||||
# Every pop-config CLI automatically grows a --version flag from this entry
# (merged in by config.args.get_cli).
CONFIG = {
    "version": {
        "default": False,
        "action": "store_true",
        "help": "Display version information",
    }
}
|
||||
|
||||
|
||||
def run(hub, primary):
    """
    Print ``<primary> <version>`` and exit the process with status 0.

    The version string is read from the ``<primary>.version`` module's
    ``version`` attribute.

    :raises ImportError: when ``<primary>.version`` cannot be imported
    """
    module_path = f"{primary}.version"
    version_mod = importlib.import_module(module_path)
    print(f"{primary} {version_mod.version}")
    sys.exit(0)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,148 @@
|
||||
import asyncio
|
||||
import inspect
|
||||
import aiologger
|
||||
import aiologger.handlers.streams
|
||||
import aiologger.handlers.files
|
||||
import aiologger.handlers.base
|
||||
import aiologger.levels
|
||||
import aiologger.formatters.base
|
||||
import aiologger.records
|
||||
import logging
|
||||
import pop.contract
|
||||
import sys
|
||||
|
||||
|
||||
def __init__(hub):
    # name -> aiologger.Logger cache, filled lazily by _get_logger
    hub.log.LOGGER = {}
    # global handlers, populated by setup() and attached to every new logger
    hub.log.FILE_HANDLER = None
    hub.log.STREAM_HANDLER = None
||||
|
||||
|
||||
def _stack_frames(relative_start: int) -> inspect.FrameInfo:
    """
    Efficiently access stack frames.
    :param relative_start: Starting stack depth; The default, 2 is the parent of the
        caller of stack_frames - the first function that may be unknown.
    :return: a stack frame
    """
    if hasattr(sys, "_getframe"):
        # implementation detail of CPython, speeds things up by 100x.
        frame = sys._getframe(relative_start)
        # walk outward toward the root frame
        while frame:
            yield frame
            frame = frame.f_back
    else:
        # portable fallback; context=0 skips source-line capture for speed
        for frame_info in inspect.stack(context=0)[relative_start:]:
            yield frame_info.frame
|
||||
|
||||
|
||||
def _get_hub_ref() -> "tuple":
    """
    Walk up the call stack to find the pop Contracted wrapper that triggered
    the log call.

    :return: ``(contracted, lineno)`` for the nearest frame whose ``self``
        is a ``pop.contract.Contracted``; the bare string ``"hub"`` when no
        such frame exists.

    NOTE(review): the fallback returns a plain string while the normal path
    returns a 2-tuple — callers that unpack two values would fail on the
    fallback; confirm intended behavior. (The original ``-> str`` annotation
    only described the fallback.)
    """
    # Minimize lookup time by starting at frame 5, it will be at least that far back
    for frame in _stack_frames(5):
        if isinstance(frame.f_locals.get("self"), pop.contract.Contracted):
            contracted = frame.f_locals["self"]
            break
    else:
        # Default to the root reference
        return "hub"

    return contracted, frame.f_lineno
|
||||
|
||||
|
||||
def _get_logger(hub, name: str = "") -> aiologger.Logger:
    """
    Return the cached logger for *name*, creating it on first use with the
    configured level and any global file/stream handlers attached.
    """
    if name not in hub.log.LOGGER:
        hub.log.LOGGER[name]: aiologger.Logger = aiologger.Logger(
            name=name, loop=hub.pop.Loop
        )
        hub.log.LOGGER[name].level = hub.log.INT_LEVEL
        if hub.log.FILE_HANDLER:
            hub.log.LOGGER[name].handlers.append(hub.log.FILE_HANDLER)
        if hub.log.STREAM_HANDLER:
            hub.log.LOGGER[name].handlers.append(hub.log.STREAM_HANDLER)
    return hub.log.LOGGER[name]
|
||||
|
||||
|
||||
def log(hub, level: int, msg: str, *args, **kwargs):
    """
    Emit *msg* at *level* through the aiologger named after the calling
    hub reference.

    :param level: numeric log level; messages below ``hub.log.INT_LEVEL``
        are dropped before any record is built
    :param msg: the message (format string for *args*)
    """
    if hub.log.INT_LEVEL <= level:
        # Resolve the hub reference of the caller so the record is attributed
        # to e.g. "sub.mod.func" rather than to this wrapper.
        contract, lineno = _get_hub_ref()
        caller = f"{contract.ref}.{contract.func.__name__}"
        logger: aiologger.Logger = _get_logger(hub, caller)
        record = aiologger.records.LogRecord(
            name=caller,
            pathname=contract.func.__module__,
            lineno=lineno,
            level=0,  # We have to overwrite this in a secure way
            msg=msg,
            args=args,
            func=contract.func.__name__,
            **kwargs,
        )
        record.levelno = level
        if level == 5:
            # Custom TRACE level is not known to aiologger's level names
            record.levelname = "TRACE"
        else:
            try:
                record.levelname = aiologger.records.get_level_name(level)
            except ValueError:
                record.levelname = f"LEVEL {level}"

        # aiologger handlers may be async; schedule the coroutine on the hub loop
        ret = logger.handle(record)
        if asyncio.iscoroutine(ret):
            hub.pop.Loop.create_task(ret)
|
||||
|
||||
def setup(hub, conf):
    """
    Given the configuration data set up the async logger.

    :param conf: mapping with ``log_level``, ``log_fmt_console``,
        ``log_fmt_logfile``, ``log_datefmt`` and ``log_file`` keys
    """
    # Make sure the loop exists
    hub.pop.loop.create()
    # Use the saved root logger
    root = _get_logger(hub, name="")

    # A numeric level is used verbatim; otherwise look the name up,
    # falling back to the root logger's current level.
    raw_level = conf["log_level"].strip().lower()
    if raw_level.isdigit():
        hub.log.INT_LEVEL = int(raw_level)
    else:
        hub.log.INT_LEVEL = hub.log.LEVEL.get(raw_level, root.level)

    root.level = hub.log.INT_LEVEL

    # Console (stderr) handler
    cf = aiologger.formatters.base.Formatter(
        fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"]
    )
    ch = aiologger.handlers.streams.AsyncStreamHandler(
        formatter=cf, loop=hub.pop.Loop, stream=sys.stderr
    )
    ch._level = hub.log.INT_LEVEL
    root.add_handler(ch)
    hub.log.STREAM_HANDLER = ch

    # File handler -- uses the log-file format, not the console format
    ff = aiologger.formatters.base.Formatter(
        fmt=conf["log_fmt_logfile"], datefmt=conf["log_datefmt"]
    )
    fh = aiologger.handlers.files.AsyncFileHandler(conf["log_file"], loop=hub.pop.Loop)
    fh._level = hub.log.INT_LEVEL
    fh.formatter = ff
    root.add_handler(fh)
    hub.log.FILE_HANDLER = fh

    # Put all these functions higher up on the hub.  Level and message are
    # passed positionally so that extra positional args reach %-formatting
    # instead of colliding with the ``level``/``msg`` keyword arguments.
    hub.log.log = getattr(hub.log, "async").log
    hub.log.trace = lambda msg, *args, **kwargs: hub.log.log(5, msg, *args, **kwargs)
    hub.log.debug = lambda msg, *args, **kwargs: hub.log.log(
        aiologger.levels.LogLevel.DEBUG, msg, *args, **kwargs
    )
    hub.log.info = lambda msg, *args, **kwargs: hub.log.log(
        aiologger.levels.LogLevel.INFO, msg, *args, **kwargs
    )
    hub.log.warning = lambda msg, *args, **kwargs: hub.log.log(
        aiologger.levels.LogLevel.WARNING, msg, *args, **kwargs
    )
    hub.log.error = lambda msg, *args, **kwargs: hub.log.log(
        aiologger.levels.LogLevel.ERROR, msg, *args, **kwargs
    )
    hub.log.critical = lambda msg, *args, **kwargs: hub.log.log(
        aiologger.levels.LogLevel.CRITICAL, msg, *args, **kwargs
    )
|
||||
@@ -0,0 +1,27 @@
|
||||
import logging
|
||||
|
||||
|
||||
def setup(hub, conf):
|
||||
"""
|
||||
Given the configuration data set up the logger
|
||||
"""
|
||||
# Use the saved root logger
|
||||
root = logging.getLogger()
|
||||
|
||||
raw_level = conf["log_level"].strip().lower()
|
||||
if raw_level.isdigit():
|
||||
hub.log.INT_LEVEL = int(raw_level)
|
||||
else:
|
||||
hub.log.INT_LEVEL = hub.log.LEVEL.get(raw_level, root.level)
|
||||
|
||||
root.setLevel(hub.log.INT_LEVEL)
|
||||
cf = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
|
||||
ch = logging.StreamHandler()
|
||||
ch.setLevel(hub.log.INT_LEVEL)
|
||||
ch.setFormatter(cf)
|
||||
root.addHandler(ch)
|
||||
|
||||
ff = logging.Formatter(fmt=conf["log_fmt_console"], datefmt=conf["log_datefmt"])
|
||||
fh = logging.FileHandler(conf["log_file"])
|
||||
fh.setFormatter(ff)
|
||||
root.addHandler(fh)
|
||||
@@ -0,0 +1,35 @@
|
||||
"""
|
||||
This sub is used to set up logging for pop projects and injects logging
|
||||
options into conf making it easy to add robust logging
|
||||
"""
|
||||
# Import python libs
|
||||
import logging
|
||||
|
||||
|
||||
def __init__(hub):
    """
    Set up variables used by the log subsystem
    """
    # Register the custom TRACE level (numeric 5, more verbose than DEBUG)
    logging.addLevelName(5, "TRACE")
    hub.log.LEVEL = {
        "notset": logging.NOTSET,
        "trace": 5,
        "debug": logging.DEBUG,
        "info": logging.INFO,
        "warn": logging.WARN,
        "warning": logging.WARNING,
        "error": logging.ERROR,
        "fatal": logging.FATAL,
        "critical": logging.CRITICAL,
    }
    fallback = logging.getLogger(__name__)

    # These should be overwritten by the integrated logger, but here's a contingency
    hub.log.INT_LEVEL = fallback.getEffectiveLevel()
    hub.log.log = fallback.log
    hub.log.trace = lambda msg, *args, **kwargs: fallback.log(5, msg, *args, **kwargs)
    for method in ("debug", "info", "critical", "warning", "error"):
        setattr(hub.log, method, getattr(fallback, method))
||||
@@ -0,0 +1,10 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import pop.hub
|
||||
|
||||
|
||||
def start():
    """
    CLI entry point: create a hub, load the conf subsystem, and gather
    configuration from the command line.
    """
    hub = pop.hub.Hub()
    # Load the "conf" dynamic namespace onto the hub
    hub.pop.sub.add(dyne_name="conf")
    # Parse CLI/config options; NOTE(review): the return value is discarded --
    # presumably downstream subs read the result off the hub; confirm.
    hub.conf.init.cli()
||||
@@ -0,0 +1,2 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
version = "6.11"
|
||||
Reference in New Issue
Block a user