Get equivalent units that are compatible with the udunits2 library
(thus CF-compliant).
Parameters
----------
units : string
A string representation of the units.
prefix : string
Will be added at the beginning of the returned string
(must be a valid udunits2 expression).
suffix : string
Will be added at the end of the returned string
(must be a valid udunits2 expression).
Returns
-------
A string representation of the conforming units.
References
----------
The udunits2 package : http://www.unidata.ucar.edu/software/udunits/
Notes
-----
This function only relies on the table stored in :attr:`UNITS_MAP_CTM2CF`.
Therefore, the units string returned by this function is not certified to
be compatible with udunits2.
Examples
--------
>>> get_cfcompliant_units('molec/cm2')
'count/cm2'
>>> get_cfcompliant_units('v/v')
'1'
>>> get_cfcompliant_units('ppbC', prefix='3')
'3ppb'
def get_cfcompliant_units(units, prefix='', suffix=''):
"""
Get equivalent units that are compatible with the udunits2 library
(thus CF-compliant).
Parameters
----------
units : string
A string representation of the units.
prefix : string
Will be added at the beginning of the returned string
(must be a valid udunits2 expression).
suffix : string
Will be added at the end of the returned string
(must be a valid udunits2 expression).
Returns
-------
A string representation of the conforming units.
References
----------
The udunits2 package : http://www.unidata.ucar.edu/software/udunits/
Notes
-----
This function only relies on the table stored in :attr:`UNITS_MAP_CTM2CF`.
Therefore, the units string returned by this function is not certified to
be compatible with udunits2.
Examples
--------
>>> get_cfcompliant_units('molec/cm2')
'count/cm2'
>>> get_cfcompliant_units('v/v')
'1'
>>> get_cfcompliant_units('ppbC', prefix='3')
'3ppb'
"""
compliant_units = units
for gcunits, udunits in UNITS_MAP_CTM2CF:
        compliant_units = compliant_units.replace(gcunits, udunits)
return prefix + compliant_units + suffix
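A short sketch of the table-driven replacement; this UNITS_MAP_CTM2CF excerpt is hypothetical and only illustrates the (gcunits, udunits) pair format the loop above expects:
# Hypothetical excerpt of the replacement table (assumed values):
UNITS_MAP_CTM2CF = (
    ('molec', 'count'),  # molecules -> dimensionless count
    ('v/v', '1'),        # volume mixing ratio -> dimensionless
    ('ppbC', 'ppb'),     # ppb carbon -> plain ppb
)
print(get_cfcompliant_units('molec/cm2'))         # -> 'count/cm2'
print(get_cfcompliant_units('ppbC', prefix='3'))  # -> '3ppb'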
|
Replace characters (e.g., ':', '$', '=', '-') in a variable name that
may cause problems when used with (CF-)netCDF based packages.
Parameters
----------
varname : string
variable name.
Notes
-----
Characters replacement is based on the table stored in
:attr:`VARNAME_MAP_CHAR`.
def get_valid_varname(varname):
"""
    Replace characters (e.g., ':', '$', '=', '-') in a variable name that
    may cause problems when used with (CF-)netCDF based packages.
Parameters
----------
varname : string
variable name.
Notes
-----
Characters replacement is based on the table stored in
:attr:`VARNAME_MAP_CHAR`.
"""
vname = varname
for s, r in VARNAME_MAP_CHAR:
vname = vname.replace(s, r)
return vname
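A usage sketch with a hypothetical VARNAME_MAP_CHAR; the real (char, replacement) table is defined at module level:
# Hypothetical excerpt of the replacement table (assumed values):
VARNAME_MAP_CHAR = (
    (':', '_'),
    ('$', '_'),
    ('=', '_'),
    ('-', '_'),
)
print(get_valid_varname('IJ-AVG-$:O3'))  # -> 'IJ_AVG___O3'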
|
Given a Variable constructed from GEOS-Chem output, enforce
CF-compliant metadata and formatting.
Until a bug with lazily-loaded data and masking/scaling is resolved in
xarray, you have the option to manually mask and scale the data here.
Parameters
----------
var : xarray.Variable
A variable holding information decoded from GEOS-Chem output.
mask_and_scale : bool
If True, manually apply the 'scale' attribute to the data.
Returns
-------
out : xarray.Variable
The original variable processed to conform to CF standards
.. note::
This method borrows heavily from the ideas in ``xarray.decode_cf_variable``
def enforce_cf_variable(var, mask_and_scale=True):
""" Given a Variable constructed from GEOS-Chem output, enforce
CF-compliant metadata and formatting.
Until a bug with lazily-loaded data and masking/scaling is resolved in
xarray, you have the option to manually mask and scale the data here.
Parameters
----------
var : xarray.Variable
A variable holding information decoded from GEOS-Chem output.
mask_and_scale : bool
        If True, manually apply the 'scale' attribute to the data.
Returns
-------
out : xarray.Variable
The original variable processed to conform to CF standards
.. note::
This method borrows heavily from the ideas in ``xarray.decode_cf_variable``
"""
var = as_variable(var)
data = var._data # avoid loading by accessing _data instead of data
dims = var.dims
attrs = var.attrs.copy()
encoding = var.encoding.copy()
orig_dtype = data.dtype
# Process masking/scaling coordinates. We only expect a "scale" value
# for the units with this output.
if 'scale' in attrs:
scale = attrs.pop('scale')
attrs['scale_factor'] = scale
encoding['scale_factor'] = scale
# TODO: Once the xr.decode_cf bug is fixed, we won't need to manually
# handle masking/scaling
if mask_and_scale:
data = scale*data
# Process units
# TODO: How do we want to handle parts-per-* units? These are not part of
# the udunits standard, and the CF conventions suggest using units
# like 1e-6 for parts-per-million. But we potentially mix mass and
# volume/molar mixing ratios in GEOS-Chem output, so we need a way
# to handle that edge case.
if 'unit' in attrs:
unit = attrs.pop('unit')
unit = get_cfcompliant_units(unit)
attrs['units'] = unit
# TODO: Once the xr.decode_cf bug is fixed, we won't need to manually
# handle masking/scaling
return Variable(dims, data, attrs, encoding=encoding)
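A minimal usage sketch, assuming the units table maps 'v/v' to '1' as in the doctest above:
import numpy as np
from xarray import Variable

# Hypothetical raw variable as decoded from GEOS-Chem output:
raw = Variable(('lon',), np.array([1.0, 2.0]),
               attrs={'scale': 1e-9, 'unit': 'v/v'})
cf = enforce_cf_variable(raw)
print(cf.attrs['units'])         # -> '1' (via get_cfcompliant_units)
print(cf.attrs['scale_factor'])  # -> 1e-09
print(cf.values)                 # -> data multiplied by the scale factor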
|
Returns all entries whose publication date has passed or is unset, and
whose language matches the current language.
def published(self, check_language=True, language=None, kwargs=None,
exclude_kwargs=None):
"""
    Returns all entries whose publication date has passed or is unset,
    and whose language matches the current language.
"""
if check_language:
qs = NewsEntry.objects.language(language or get_language()).filter(
is_published=True)
else:
qs = self.get_queryset()
qs = qs.filter(
models.Q(pub_date__lte=now()) | models.Q(pub_date__isnull=True)
)
if kwargs is not None:
qs = qs.filter(**kwargs)
if exclude_kwargs is not None:
qs = qs.exclude(**exclude_kwargs)
return qs.distinct().order_by('-pub_date')
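A hypothetical call, assuming this manager is installed as NewsEntry.objects; the lookup names depend on the actual model fields:
entries = NewsEntry.objects.published(
    kwargs={'categories__slug': 'press'},
    exclude_kwargs={'author__isnull': True},
)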
|
Returns recently published news entries.
def recent(self, check_language=True, language=None, limit=3, exclude=None,
kwargs=None, category=None):
"""
    Returns recently published news entries.
"""
if category:
if not kwargs:
kwargs = {}
kwargs['categories__in'] = [category]
qs = self.published(check_language=check_language, language=language,
kwargs=kwargs)
if exclude:
qs = qs.exclude(pk=exclude.pk)
return qs[:limit]
|
Returns the meta description for the given entry.
def get_newsentry_meta_description(newsentry):
"""Returns the meta description for the given entry."""
if newsentry.meta_description:
return newsentry.meta_description
# If there is no seo addon found, take the info from the placeholders
text = newsentry.get_description()
if len(text) > 160:
return u'{}...'.format(text[:160])
return text
|
DEPRECATED: Template tag to render a placeholder from a NewsEntry object.
We no longer need this because the model no longer has a placeholders M2M
field. Just use the default ``render_placeholder`` tag.
def render_news_placeholder(context, obj, name=False, truncate=False): # pragma: nocover # NOQA
"""
    DEPRECATED: Template tag to render a placeholder from a NewsEntry object.
    We no longer need this because the model no longer has a placeholders M2M
    field. Just use the default ``render_placeholder`` tag.
"""
warnings.warn(
"render_news_placeholder is deprecated. Use render_placeholder"
" instead", DeprecationWarning, stacklevel=2)
result = ''
if context.get('request'):
if isinstance(name, int):
# If the user doesn't want to use a placeholder name, but a cut, we
# need to check if the user has used the name as a number
truncate = name
name = False
if name:
# If the name of the placeholder slot is given, get, render and
# return it!
try:
result = safe(getattr(obj, name).render(context, None))
except AttributeError:
pass
else:
# If no name is provided get the first placeholder with content
for name in ['excerpt', 'content']:
rendered = ''
try:
rendered = safe(getattr(obj, name).render(context, None))
except AttributeError:
pass
if rendered:
result = rendered
break
if truncate:
return truncatewords_html(result, truncate)
return result
|
Check if the requirement is satisfied by the marker.
This function checks for a given Requirement whether its environment marker
is satisfied on the current platform. Currently only the python version and
system platform are checked.
def _requirement_filter_by_marker(req):
# type: (pkg_resources.Requirement) -> bool
"""Check if the requirement is satisfied by the marker.
This function checks for a given Requirement whether its environment marker
is satisfied on the current platform. Currently only the python version and
system platform are checked.
"""
if hasattr(req, 'marker') and req.marker:
marker_env = {
'python_version': '.'.join(map(str, sys.version_info[:2])),
'sys_platform': sys.platform
}
if not req.marker.evaluate(environment=marker_env):
return False
return True
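A quick check of the marker filter; the semicolon marker syntax matches the requirement strings used elsewhere in this module:
import pkg_resources

req = pkg_resources.Requirement.parse("monotonic>=1.0;python_version=='2.4'")
print(_requirement_filter_by_marker(req))  # False unless running Python 2.4
req = pkg_resources.Requirement.parse("foo>=3.0")  # no marker
print(_requirement_filter_by_marker(req))  # True: kept unconditionally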
|
Find lowest required version.
Given a single Requirement, this function calculates the lowest required
version to satisfy it. If the requirement excludes a specific version, then
this version will not be used as the minimal supported version.
Examples
--------
>>> req = pkg_resources.Requirement.parse("foobar>=1.0,>2")
>>> _requirement_find_lowest_possible(req)
['foobar', '>=', '1.0']
>>> req = pkg_resources.Requirement.parse("baz>=1.3,>3,!=1.5")
>>> _requirement_find_lowest_possible(req)
['baz', '>=', '1.3']
def _requirement_find_lowest_possible(req):
# type: (pkg_resources.Requirement) -> List[str]
"""Find lowest required version.
Given a single Requirement, this function calculates the lowest required
version to satisfy it. If the requirement excludes a specific version, then
this version will not be used as the minimal supported version.
Examples
--------
>>> req = pkg_resources.Requirement.parse("foobar>=1.0,>2")
>>> _requirement_find_lowest_possible(req)
['foobar', '>=', '1.0']
>>> req = pkg_resources.Requirement.parse("baz>=1.3,>3,!=1.5")
>>> _requirement_find_lowest_possible(req)
['baz', '>=', '1.3']
"""
version_dep = None # type: Optional[str]
version_comp = None # type: Optional[str]
for dep in req.specs:
version = pkg_resources.parse_version(dep[1])
# we don't want to have a not supported version as minimal version
if dep[0] == '!=':
continue
# try to use the lowest version available
# i.e. for ">=0.8.4,>=0.9.7", select "0.8.4"
if (not version_dep or
version < pkg_resources.parse_version(version_dep)):
version_dep = dep[1]
version_comp = dep[0]
assert (version_dep is None and version_comp is None) or \
(version_dep is not None and version_comp is not None)
return [
x for x in (req.unsafe_name, version_comp, version_dep)
if x is not None]
|
Clean up a list of requirement strings (e.g. from requirements.txt) so that
it only contains entries valid for this platform, each reduced to its lowest
required version.
Example
-------
>>> from sys import version_info
>>> _requirements_sanitize([
... 'foo>=3.0',
... "monotonic>=1.0,>0.1;python_version=='2.4'",
... "bar>1.0;python_version=='{}.{}'".format(version_info[0], version_info[1])
... ])
['foo >= 3.0', 'bar > 1.0']
def _requirements_sanitize(req_list):
# type: (List[str]) -> List[str]
"""
    Clean up a list of requirement strings (e.g. from requirements.txt) so
    that it only contains entries valid for this platform, each reduced to
    its lowest required version.
Example
-------
>>> from sys import version_info
>>> _requirements_sanitize([
... 'foo>=3.0',
... "monotonic>=1.0,>0.1;python_version=='2.4'",
... "bar>1.0;python_version=='{}.{}'".format(version_info[0], version_info[1])
... ])
['foo >= 3.0', 'bar > 1.0']
"""
filtered_req_list = (
_requirement_find_lowest_possible(req) for req in
(pkg_resources.Requirement.parse(s) for s in req_list)
if _requirement_filter_by_marker(req)
)
return [" ".join(req) for req in filtered_req_list]
|
Return a coroutine function.
func: either a coroutine function or a regular function
Note a coroutine function is not a coroutine!
def _ensure_coroutine_function(func):
"""Return a coroutine function.
func: either a coroutine function or a regular function
Note a coroutine function is not a coroutine!
"""
if asyncio.iscoroutinefunction(func):
return func
else:
@asyncio.coroutine
def coroutine_function(evt):
func(evt)
yield
return coroutine_function
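Usage sketch: wrap a plain callback and drive it from an event loop (note the generator-based asyncio.coroutine style used above was removed in Python 3.11):
import asyncio

def on_event(evt):
    print("got", evt)

handler = _ensure_coroutine_function(on_event)
loop = asyncio.get_event_loop()
loop.run_until_complete(handler("ping"))  # prints: got ping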
|
Return a string uniquely identifying the event.
This string can be used to find the event in the event store UI (cf. id
attribute, which is the UUID that at time of writing doesn't let you
easily find the event).
def location(self):
"""Return a string uniquely identifying the event.
This string can be used to find the event in the event store UI (cf. id
attribute, which is the UUID that at time of writing doesn't let you
easily find the event).
"""
if self._location is None:
self._location = "{}/{}-{}".format(
self.stream,
self.type,
self.sequence,
)
return self._location
|
Return first event matching predicate, or None if none exists.
Note: 'backwards', both here and in Event Store, means 'towards the
event emitted furthest in the past'.
async def find_backwards(self, stream_name, predicate, predicate_label='predicate'):
"""Return first event matching predicate, or None if none exists.
Note: 'backwards', both here and in Event Store, means 'towards the
event emitted furthest in the past'.
"""
logger = self._logger.getChild(predicate_label)
logger.info('Fetching first matching event')
uri = self._head_uri
try:
page = await self._fetcher.fetch(uri)
except HttpNotFoundError as e:
raise StreamNotFoundError() from e
while True:
evt = next(page.iter_events_matching(predicate), None)
if evt is not None:
return evt
uri = page.get_link("next")
if uri is None:
logger.warning("No matching event found")
return None
page = await self._fetcher.fetch(uri)
|
Command line interface for the ``qpass`` program.
def main():
"""Command line interface for the ``qpass`` program."""
# Initialize logging to the terminal.
coloredlogs.install()
# Prepare for command line argument parsing.
action = show_matching_entry
program_opts = dict(exclude_list=[])
show_opts = dict(filters=[], use_clipboard=is_clipboard_supported())
verbosity = 0
# Parse the command line arguments.
try:
options, arguments = getopt.gnu_getopt(
sys.argv[1:],
"elnp:f:x:vqh",
["edit", "list", "no-clipboard", "password-store=", "filter=", "exclude=", "verbose", "quiet", "help"],
)
for option, value in options:
if option in ("-e", "--edit"):
action = edit_matching_entry
elif option in ("-l", "--list"):
action = list_matching_entries
elif option in ("-n", "--no-clipboard"):
show_opts["use_clipboard"] = False
elif option in ("-p", "--password-store"):
stores = program_opts.setdefault("stores", [])
stores.append(PasswordStore(directory=value))
elif option in ("-f", "--filter"):
show_opts["filters"].append(value)
elif option in ("-x", "--exclude"):
program_opts["exclude_list"].append(value)
elif option in ("-v", "--verbose"):
coloredlogs.increase_verbosity()
verbosity += 1
elif option in ("-q", "--quiet"):
coloredlogs.decrease_verbosity()
verbosity -= 1
elif option in ("-h", "--help"):
usage(__doc__)
return
else:
raise Exception("Unhandled option! (programming error)")
if not (arguments or action == list_matching_entries):
usage(__doc__)
return
except Exception as e:
warning("Error: %s", e)
sys.exit(1)
# Execute the requested action.
try:
show_opts["quiet"] = verbosity < 0
kw = show_opts if action == show_matching_entry else {}
action(QuickPass(**program_opts), arguments, **kw)
except PasswordStoreError as e:
# Known issues don't get a traceback.
logger.error("%s", e)
sys.exit(1)
except KeyboardInterrupt:
# If the user interrupted an interactive prompt they most likely did so
# intentionally, so there's no point in generating more output here.
sys.exit(1)
|
Edit the matching entry.
def edit_matching_entry(program, arguments):
"""Edit the matching entry."""
entry = program.select_entry(*arguments)
entry.context.execute("pass", "edit", entry.name)
|
List the entries matching the given keywords/patterns.
def list_matching_entries(program, arguments):
"""List the entries matching the given keywords/patterns."""
output("\n".join(entry.name for entry in program.smart_search(*arguments)))
|
Show the matching entry on the terminal (and copy the password to the clipboard).
def show_matching_entry(program, arguments, use_clipboard=True, quiet=False, filters=()):
"""Show the matching entry on the terminal (and copy the password to the clipboard)."""
entry = program.select_entry(*arguments)
if not quiet:
formatted_entry = entry.format_text(include_password=not use_clipboard, filters=filters)
if formatted_entry and not formatted_entry.isspace():
output(formatted_entry)
if use_clipboard:
entry.copy_password()
|
Parses a set of Payflow Pro response parameter name and value pairs into
a list of PayflowProObjects, and returns a tuple containing the object
list and a dictionary containing any unconsumed data.
The first item in the object list will always be the Response object, and
the RecurringPayments object (if any) will be last.
The presence of any unconsumed data in the resulting dictionary probably
indicates an error or oversight in the PayflowProObject definitions.
def parse_parameters(payflowpro_response_data):
"""
Parses a set of Payflow Pro response parameter name and value pairs into
a list of PayflowProObjects, and returns a tuple containing the object
list and a dictionary containing any unconsumed data.
The first item in the object list will always be the Response object, and
the RecurringPayments object (if any) will be last.
The presence of any unconsumed data in the resulting dictionary probably
indicates an error or oversight in the PayflowProObject definitions.
"""
def build_class(klass, unconsumed_data):
known_att_names_set = set(klass.base_fields.keys())
available_atts_set = known_att_names_set.intersection(unconsumed_data)
if available_atts_set:
available_atts = dict()
for name in available_atts_set:
available_atts[name] = unconsumed_data[name]
del unconsumed_data[name]
return klass(**available_atts)
return None
unconsumed_data = payflowpro_response_data.copy()
# Parse the response data first
response = build_class(Response, unconsumed_data)
result_objects = [response]
# Parse the remaining data
for klass in object.__class__.__subclasses__(PayflowProObject):
obj = build_class(klass, unconsumed_data)
if obj:
result_objects.append(obj)
# Special handling of RecurringPayments
p_count = 1
payments = []
while ("p_result%d" % p_count) in unconsumed_data:
payments.append(RecurringPayment(
p_result = unconsumed_data.pop("p_result%d" % p_count, None),
p_pnref = unconsumed_data.pop("p_pnref%d" % p_count, None),
p_transtate = unconsumed_data.pop("p_transtate%d" % p_count, None),
p_tender = unconsumed_data.pop("p_tender%d" % p_count, None),
p_transtime = unconsumed_data.pop("p_transtime%d" % p_count, None),
p_amt = unconsumed_data.pop("p_amt%d" % p_count, None)))
p_count += 1
if payments:
result_objects.append(RecurringPayments(payments=payments))
return (result_objects, unconsumed_data,)
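A usage sketch with hypothetical response data; the real parameter names come from the Payflow Pro gateway documentation:
# Assumed field names, for illustration only:
data = {
    'result': '0', 'pnref': 'VXYZ01234567', 'respmsg': 'Approved',
    'p_result1': '0', 'p_pnref1': 'VWXY98765432',
}
objects, unconsumed = parse_parameters(data)
# objects[0] is the Response instance; RecurringPayments, built from the
# numbered p_* parameters, comes last. Anything left in `unconsumed`
# points at a missing PayflowProObject definition.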
|
Convert 'items' stored in 'canvas' to SVG 'document'.
If 'items' is None, then all items are converted.
tounicode is a function that gets text and returns
its unicode representation. It should be used when
national characters are used on the canvas.
Return list of XML elements
def convert(document, canvas, items=None, tounicode=None):
"""
Convert 'items' stored in 'canvas' to SVG 'document'.
    If 'items' is None, then all items are converted.
    tounicode is a function that gets text and returns
    its unicode representation. It should be used when
    national characters are used on the canvas.
Return list of XML elements
"""
tk = canvas.tk
global segment
if items is None: # default: all items
items = canvas.find_all()
supported_item_types = \
set(["line", "oval", "polygon", "rectangle", "text", "arc"])
    if tounicode is None:
        try:
            unicode  # noqa -- this name only exists on Python 2
            # python2: decode byte strings to unicode
            tounicode = lambda text: unicode(str(text), "utf-8")
        except NameError:
            # python3: str is already unicode
            tounicode = lambda text: text
elements = []
for item in items:
# skip unsupported items
itemtype = canvas.type(item)
if itemtype not in supported_item_types:
emit_warning("Items of type '%s' are not supported." % itemtype)
continue
# get item coords
coords = canvas.coords(item)
# get item options;
# options is a dict: opt. name -> opt. actual value
tmp = canvas.itemconfigure(item)
options = dict((v0, v4) for v0, v1, v2, v3, v4 in tmp.values())
# get state of item
state = options['state']
if 'current' in options['tags']:
options['state'] = ACTIVE
elif options['state'] == '':
options['state'] = 'normal'
else:
            # leave state unchanged
assert options['state'] in ['normal', DISABLED, 'hidden']
# skip hidden items
if options['state'] == 'hidden': continue
def get(name, default=""):
if state == ACTIVE and options.get(state + name):
return options.get(state + name)
if state == DISABLED and options.get(state + name):
return options.get(state + name)
if options.get(name):
return options.get(name)
else:
return default
if itemtype == 'line':
options['outline'] = ''
options['activeoutline'] = ''
options['disabledoutline'] = ''
elif itemtype == 'arc' and options['style'] == ARC:
options['fill'] = ''
options['activefill'] = ''
options['disabledfill'] = ''
style = {}
style["stroke"] = HTMLcolor(canvas, get("outline"))
if get("fill"):
style["fill"] = HTMLcolor(canvas, get("fill"))
else:
style["fill"] = "none"
width = float(options['width'])
if state == ACTIVE:
width = max(float(options['activewidth']), width)
elif state == DISABLED:
try:
disabledwidth = options['disabledwidth']
except KeyError:
# Text item might not have 'disabledwidth' option. This raises
# the exception in course of processing of such item.
# Default value is 0. Hence, it shall not affect width.
pass
else:
if float(disabledwidth) > 0:
width = disabledwidth
if width != 1.0:
style['stroke-width'] = width
if width:
dash = canvas.itemcget(item, 'dash')
if state == DISABLED and canvas.itemcget(item, 'disableddash'):
dash = canvas.itemcget(item, 'disableddash')
elif state == ACTIVE and canvas.itemcget(item, 'activedash'):
dash = canvas.itemcget(item, 'activedash')
if dash != '':
try:
dash = tuple(map(int, dash.split()))
except ValueError:
# int can't parse literal, dash defined with -.,_
linewidth = float(get('width'))
dash = parse_dash(dash, linewidth)
style['stroke-dasharray'] = ",".join(map(str, dash))
style['stroke-dashoffset'] = options['dashoffset']
if itemtype == 'line':
# in this case, outline is set with fill property
style["fill"], style["stroke"] = "none", style["fill"]
style['stroke-linecap'] = cap_style[options['capstyle']]
if options['smooth'] in ['1', 'bezier', 'true']:
element = smoothline(document, coords)
elif options['smooth'] == 'raw':
element = cubic_bezier(document, coords)
elif options['smooth'] == '0':
if len(coords) == 4:
# segment
element = segment(document, coords)
else:
# polyline
element = polyline(document, coords)
style['fill'] = "none"
style['stroke-linejoin'] = join_style[options['joinstyle']]
else:
emit_warning("Unknown smooth type: %s. Falling back to smooth=0" % options['smooth'])
                element = polyline(document, coords)
style['stroke-linejoin'] = join_style[options['joinstyle']]
elements.append(element)
if options['arrow'] in [FIRST, BOTH]:
arrow = arrow_head(document, coords[2], coords[3], coords[0], coords[1], options['arrowshape'])
arrow.setAttribute('fill', style['stroke'])
elements.append(arrow)
if options['arrow'] in [LAST, BOTH]:
arrow = arrow_head(document, coords[-4], coords[-3], coords[-2], coords[-1], options['arrowshape'])
arrow.setAttribute('fill', style['stroke'])
elements.append(arrow)
elif itemtype == 'polygon':
if options['smooth'] in ['1', 'bezier', 'true']:
element = smoothpolygon(document, coords)
elif options['smooth'] == '0':
element = polygon(document, coords)
else:
emit_warning("Unknown smooth type: %s. Falling back to smooth=0" % options['smooth'])
element = polygon(document, coords)
elements.append(element)
style['fill-rule'] = 'evenodd'
style['stroke-linejoin'] = join_style[options['joinstyle']]
elif itemtype == 'oval':
element = oval(document, coords)
elements.append(element)
elif itemtype == 'rectangle':
element = rectangle(document, coords)
elements.append(element)
elif itemtype == 'arc':
element = arc(document, coords, options['start'], options['extent'], options['style'])
if options['style'] == ARC:
style['fill'] = "none"
elements.append(element)
elif itemtype == 'text':
style['stroke'] = '' # no stroke
# setup geometry
xmin, ymin, xmax, ymax = canvas.bbox(item)
x = coords[0]
# set y at 'dominant-baseline'
y = ymin + font_metrics(tk, options['font'], 'ascent')
element = setattribs(
document.createElement('text'),
x = x, y = y
)
elements.append(element)
element.appendChild(document.createTextNode(
tounicode(canvas.itemcget(item, 'text'))
))
# 2. Setup style
actual = font_actual(tk, options['font'])
style['fill'] = HTMLcolor(canvas, get('fill'))
style["text-anchor"] = text_anchor[options["anchor"]]
style['font-family'] = actual['family']
# size
size = float(actual['size'])
if size > 0: # size in points
style['font-size'] = "%spt" % size
else: # size in pixels
style['font-size'] = "%s" % (-size)
style['font-style'] = font_style[actual['slant']]
style['font-weight'] = font_weight[actual['weight']]
# overstrike/underline
if actual['overstrike'] and actual['underline']:
style['text-decoration'] = 'underline line-through'
elif actual['overstrike']:
style['text-decoration'] = 'line-through'
elif actual['underline']:
style['text-decoration'] = 'underline'
for attr, value in style.items():
if value != '': # create only nonempty attributes
element.setAttribute(attr, str(value))
return elements
|
Create default SVG document
def SVGdocument():
"Create default SVG document"
import xml.dom.minidom
implementation = xml.dom.minidom.getDOMImplementation()
doctype = implementation.createDocumentType(
"svg", "-//W3C//DTD SVG 1.1//EN",
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"
)
document= implementation.createDocument(None, "svg", doctype)
document.documentElement.setAttribute(
'xmlns', 'http://www.w3.org/2000/svg'
)
return document
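Quick check of the generated skeleton:
doc = SVGdocument()
print(doc.toprettyxml(indent="  "))  # empty <svg> root with xmlns set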
|
polyline with 2 vertices using <line> tag
def segment_to_line(document, coords):
"polyline with 2 vertices using <line> tag"
return setattribs(
document.createElement('line'),
x1 = coords[0],
y1 = coords[1],
x2 = coords[2],
y2 = coords[3],
)
|
polyline with more than 2 vertices
def polyline(document, coords):
"polyline with more then 2 vertices"
points = []
for i in range(0, len(coords), 2):
points.append("%s,%s" % (coords[i], coords[i+1]))
return setattribs(
document.createElement('polyline'),
points = ' '.join(points),
)
|
smoothed polyline
def smoothline(document, coords):
"smoothed polyline"
element = document.createElement('path')
path = []
points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)]
def pt(points):
x0, y0 = points[0]
x1, y1 = points[1]
p0 = (2*x0-x1, 2*y0-y1)
x0, y0 = points[-1]
x1, y1 = points[-2]
pn = (2*x0-x1, 2*y0-y1)
p = [p0] + points[1:-1] + [pn]
for i in range(1, len(points)-1):
a = p[i-1]
b = p[i]
c = p[i+1]
yield lerp(a, b, 0.5), b, lerp(b, c, 0.5)
for i, (A, B, C) in enumerate(pt(points)):
if i == 0:
path.append("M%s,%s Q%s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1]))
else:
path.append("T%s,%s" % (C[0], C[1]))
element.setAttribute('d', ' '.join(path))
return element
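smoothline, smoothpolygon and arrow_head all rely on a lerp helper that is not shown in this section; a sketch consistent with how it is called (linear interpolation between 2-D points):
def lerp(p0, p1, t):
    "point on the segment p0-p1 at parameter t (t=0.5 gives the midpoint)"
    (x0, y0), (x1, y1) = p0, p1
    return (x0 + (x1 - x0) * t, y0 + (y1 - y0) * t)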
|
cubic bezier polyline
def cubic_bezier(document, coords):
"cubic bezier polyline"
element = document.createElement('path')
points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)]
path = ["M%s %s" %points[0]]
    for n in range(1, len(points), 3):  # xrange is Python 2 only
A, B, C = points[n:n+3]
path.append("C%s,%s %s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1]))
element.setAttribute('d', ' '.join(path))
return element
|
smoothed filled polygon
def smoothpolygon(document, coords):
"smoothed filled polygon"
element = document.createElement('path')
path = []
points = [(coords[i], coords[i+1]) for i in range(0, len(coords), 2)]
def pt(points):
p = points
n = len(points)
for i in range(0, len(points)):
a = p[(i-1) % n]
b = p[i]
c = p[(i+1) % n]
yield lerp(a, b, 0.5), b, lerp(b, c, 0.5)
for i, (A, B, C) in enumerate(pt(points)):
if i == 0:
path.append("M%s,%s Q%s,%s %s,%s" % (A[0], A[1], B[0], B[1], C[0], C[1]))
else:
path.append("T%s,%s" % (C[0], C[1]))
path.append("z")
element.setAttribute('d', ' '.join(path))
return element
|
circle/ellipse
def oval(document, coords):
"circle/ellipse"
x1, y1, x2, y2 = coords
# circle
if x2-x1 == y2-y1:
return setattribs(document.createElement('circle'),
cx = (x1+x2)/2,
cy = (y1+y2)/2,
r = abs(x2-x1)/2,
)
# ellipse
else:
return setattribs(document.createElement('ellipse'),
cx = (x1+x2)/2,
cy = (y1+y2)/2,
rx = abs(x2-x1)/2,
ry = abs(y2-y1)/2,
)
|
arc, pieslice (filled), arc with chord (filled)
def arc(document, bounding_rect, start, extent, style):
"arc, pieslice (filled), arc with chord (filled)"
(x1, y1, x2, y2) = bounding_rect
import math
cx = (x1 + x2)/2.0
cy = (y1 + y2)/2.0
rx = (x2 - x1)/2.0
ry = (y2 - y1)/2.0
start = math.radians(float(start))
extent = math.radians(float(extent))
# from SVG spec:
# http://www.w3.org/TR/SVG/implnote.html#ArcImplementationNotes
x1 = rx * math.cos(start) + cx
y1 = -ry * math.sin(start) + cy # XXX: ry is negated here
x2 = rx * math.cos(start + extent) + cx
y2 = -ry * math.sin(start + extent) + cy # XXX: ry is negated here
if abs(extent) > math.pi:
fa = 1
else:
fa = 0
if extent > 0.0:
fs = 0
else:
fs = 1
path = []
# common: arc
path.append('M%s,%s' % (x1, y1))
path.append('A%s,%s 0 %d %d %s,%s' % (rx, ry, fa, fs, x2, y2))
if style == ARC:
pass
elif style == CHORD:
path.append('z')
else: # default: pieslice
path.append('L%s,%s' % (cx, cy))
path.append('z')
return setattribs(document.createElement('path'), d = ''.join(path))
|
returns Tk color in form '#rrggbb' or '#rgb'
def HTMLcolor(canvas, color):
"returns Tk color in form '#rrggbb' or '#rgb'"
if color:
        # winfo_rgb returns r, g, b in the range [0, 65535]
r, g, b = ["%02x" % (c // 256) for c in canvas.winfo_rgb(color)]
if (r[0] == r[1]) and (g[0] == g[1]) and (b[0] == b[1]):
# shorter form #rgb
return "#" + r[0] + g[0] + b[0]
else:
return "#" + r + g + b
else:
return color
|
make arrow head at (x1,y1), arrowshape is tuple (d1, d2, d3)
def arrow_head(document, x0, y0, x1, y1, arrowshape):
"make arrow head at (x1,y1), arrowshape is tuple (d1, d2, d3)"
import math
dx = x1 - x0
dy = y1 - y0
poly = document.createElement('polygon')
d = math.sqrt(dx*dx + dy*dy)
if d == 0.0: # XXX: equal, no "close enough"
return poly
try:
d1, d2, d3 = list(map(float, arrowshape))
except ValueError:
d1, d2, d3 = map(float, arrowshape.split())
P0 = (x0, y0)
P1 = (x1, y1)
xa, ya = lerp(P1, P0, d1/d)
xb, yb = lerp(P1, P0, d2/d)
t = d3/d
xc, yc = dx*t, dy*t
points = [
x1, y1,
xb - yc, yb + xc,
xa, ya,
xb + yc, yb - xc,
]
poly.setAttribute('points', ' '.join(map(str, points)))
return poly
|
actual font parameters
def font_actual(tkapp, font):
"actual font parameters"
tmp = tkapp.call('font', 'actual', font)
return dict(
(tmp[i][1:], tmp[i+1]) for i in range(0, len(tmp), 2)
)
|
parse dash pattern specified with string
def parse_dash(string, width):
"parse dash pattern specified with string"
# DashConvert from {tk-sources}/generic/tkCanvUtil.c
w = max(1, int(width + 0.5))
n = len(string)
result = []
for i, c in enumerate(string):
if c == " " and len(result):
result[-1] += w + 1
elif c == "_":
result.append(8*w)
result.append(4*w)
elif c == "-":
result.append(6*w)
result.append(4*w)
elif c == ",":
result.append(4*w)
result.append(4*w)
elif c == ".":
result.append(2*w)
result.append(4*w)
return result
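Example conversions for width 1 (so w = 1):
print(parse_dash("-.", 1))  # [6, 4, 2, 4] -- dash 6w, gap 4w, dash 2w, gap 4w
print(parse_dash(". ", 1))  # [2, 6] -- the space folds an extra w+1 into the gap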
|
Return altitude for given pressure.
This function evaluates a polynomial at log10(pressure) values.
Parameters
----------
pressure : array-like
pressure values [hPa].
p_coef : array-like
coefficients of the polynomial (default values are for the US
Standard Atmosphere).
Returns
-------
altitude : array-like
        altitude values [km] (same shape as the pressure input array).
See Also
--------
prof_pressure : Returns pressure for
given altitude.
prof_temperature : Returns air temperature for
given altitude.
Notes
-----
Default coefficient values represent a 5th degree polynomial which had
been fitted to USSA data from 0-100 km. Accuracy is on the order of 1% for
0-100 km and 0.5% below 30 km. This function, with default values, may thus
produce bad results with pressure less than about 3e-4 hPa.
Examples
--------
>>> prof_altitude([1000, 800, 600])
array([ 0.1065092 , 1.95627858, 4.2060627 ])
def prof_altitude(pressure, p_coef=(-0.028389, -0.0493698, 0.485718, 0.278656,
-17.5703, 48.0926)):
"""
Return altitude for given pressure.
This function evaluates a polynomial at log10(pressure) values.
Parameters
----------
pressure : array-like
pressure values [hPa].
p_coef : array-like
coefficients of the polynomial (default values are for the US
Standard Atmosphere).
Returns
-------
altitude : array-like
        altitude values [km] (same shape as the pressure input array).
See Also
--------
prof_pressure : Returns pressure for
given altitude.
prof_temperature : Returns air temperature for
given altitude.
Notes
-----
Default coefficient values represent a 5th degree polynomial which had
been fitted to USSA data from 0-100 km. Accuracy is on the order of 1% for
0-100 km and 0.5% below 30 km. This function, with default values, may thus
produce bad results with pressure less than about 3e-4 hPa.
Examples
--------
>>> prof_altitude([1000, 800, 600])
array([ 0.1065092 , 1.95627858, 4.2060627 ])
"""
pressure = np.asarray(pressure)
altitude = np.polyval(p_coef, np.log10(pressure.flatten()))
return altitude.reshape(pressure.shape)
|
Return pressure for given altitude.
This function evaluates a polynomial at altitude values.
Parameters
----------
altitude : array-like
altitude values [km].
z_coef : array-like
coefficients of the polynomial (default values are for the US
Standard Atmosphere).
Returns
-------
pressure : array-like
        pressure values [hPa] (same shape as the altitude input array).
See Also
--------
prof_altitude : Returns altitude for
given pressure.
prof_temperature : Returns air temperature for
given altitude.
Notes
-----
Default coefficient values represent a 5th degree polynomial which had
been fitted to USSA data from 0-100 km. Accuracy is on the order of 1% for
0-100 km and 0.5% below 30 km. This function, with default values, may thus
produce bad results with altitude > 100 km.
Examples
--------
>>> prof_pressure([0, 10, 20])
array([ 998.96437334, 264.658697 , 55.28114631])
def prof_pressure(altitude, z_coef=(1.94170e-9, -5.14580e-7, 4.57018e-5,
-1.55620e-3, -4.61994e-2, 2.99955)):
"""
Return pressure for given altitude.
    This function evaluates a polynomial at altitude values.
Parameters
----------
altitude : array-like
altitude values [km].
z_coef : array-like
coefficients of the polynomial (default values are for the US
Standard Atmosphere).
Returns
-------
pressure : array-like
        pressure values [hPa] (same shape as the altitude input array).
See Also
--------
prof_altitude : Returns altitude for
given pressure.
prof_temperature : Returns air temperature for
given altitude.
Notes
-----
Default coefficient values represent a 5th degree polynomial which had
    been fitted to USSA data from 0-100 km. Accuracy is on the order of 1% for
0-100 km and 0.5% below 30 km. This function, with default values, may thus
produce bad results with altitude > 100 km.
Examples
--------
>>> prof_pressure([0, 10, 20])
array([ 998.96437334, 264.658697 , 55.28114631])
"""
altitude = np.asarray(altitude)
pressure = np.power(10, np.polyval(z_coef, altitude.flatten()))
return pressure.reshape(altitude.shape)
|
Iterate over model references for `model_name`
and return a list of parent model names (including
`model_name` itself, ordered from parent to child).
def _find_references(model_name, references=None):
"""
    Iterate over model references for `model_name`
    and return a list of parent model names (including
    `model_name` itself, ordered from parent to child).
"""
references = references or []
references.append(model_name)
ref = MODELS[model_name].get('reference')
if ref is not None:
_find_references(ref, references)
    parent_models = list(reversed(references))
return parent_models
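A sketch with a hypothetical MODELS registry; real entries carry full grid specifications:
# Assumed registry shape, for illustration only:
MODELS = {
    'GEOS5': {'reference': None, 'Nlayers': 72},
    'GEOS5_47L': {'reference': 'GEOS5', 'Nlayers': 47},
}
print(_find_references('GEOS5_47L'))  # -> ['GEOS5', 'GEOS5_47L']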
|
Get the grid specifications for a given model.
Parameters
----------
model_name : string
Name of the model. Supports multiple formats
(e.g., 'GEOS5', 'GEOS-5' or 'GEOS_5').
Returns
-------
specifications : dict
Grid specifications as a dictionary.
Raises
------
ValueError
If the model is not supported (see `models`) or if the given
`model_name` corresponds to several entries in the list of
supported models.
def _get_model_info(model_name):
"""
Get the grid specifications for a given model.
Parameters
----------
model_name : string
Name of the model. Supports multiple formats
(e.g., 'GEOS5', 'GEOS-5' or 'GEOS_5').
Returns
-------
specifications : dict
Grid specifications as a dictionary.
Raises
------
ValueError
If the model is not supported (see `models`) or if the given
`model_name` corresponds to several entries in the list of
supported models.
"""
# trying to get as much as possible a valid model name from the given
# `model_name`, using regular expressions.
split_name = re.split(r'[\-_\s]', model_name.strip().upper())
sep_chars = ('', ' ', '-', '_')
gen_seps = itertools.combinations_with_replacement(
sep_chars, len(split_name) - 1
)
    test_names = ("".join(itertools.chain(*zip(split_name, s + ('',))))
                  for s in gen_seps)
    match_names = [name for name in test_names
                   if name in _get_supported_models()]
    if not match_names:
raise ValueError("Model '{0}' is not supported".format(model_name))
elif len(match_names) > 1:
raise ValueError("Multiple matched models for given model name '{0}'"
.format(model_name))
valid_model_name = match_names[0]
parent_models = _find_references(valid_model_name)
model_spec = dict()
for m in parent_models:
model_spec.update(MODELS[m])
        model_spec.pop('reference', None)
model_spec['model_family'] = parent_models[0]
model_spec['model_name'] = valid_model_name
return model_spec
|
Extract the list of files from a tar or zip archive.
Args:
filename: name of the archive
Returns:
Sorted list of files in the archive, excluding './'
Raises:
ValueError: when the file is neither a zip nor a tar archive
FileNotFoundError: when the provided file does not exist (for Python 3)
IOError: when the provided file does not exist (for Python 2)
def _get_archive_filelist(filename):
# type: (str) -> List[str]
"""Extract the list of files from a tar or zip archive.
Args:
filename: name of the archive
Returns:
Sorted list of files in the archive, excluding './'
Raises:
ValueError: when the file is neither a zip nor a tar archive
FileNotFoundError: when the provided file does not exist (for Python 3)
IOError: when the provided file does not exist (for Python 2)
"""
names = [] # type: List[str]
if tarfile.is_tarfile(filename):
with tarfile.open(filename) as tar_file:
names = sorted(tar_file.getnames())
elif zipfile.is_zipfile(filename):
with zipfile.ZipFile(filename) as zip_file:
names = sorted(zip_file.namelist())
else:
raise ValueError("Can not get filenames from '{!s}'. "
"Not a tar or zip file".format(filename))
if "./" in names:
names.remove("./")
return names
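Usage sketch (assumes a file named 'setup.py' exists in the working directory):
import tarfile

with tarfile.open("demo.tar.gz", "w:gz") as tar:
    tar.add("setup.py")
print(_get_archive_filelist("demo.tar.gz"))  # -> ['setup.py']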
|
Checks if the newly created object is a book and only has an ISBN.
If so, tries to fetch the book data off the internet.
:param uuid: uuid of book to augment
:param event: originating event (carries the requesting client)
def _augment_book(self, uuid, event):
"""
Checks if the newly created object is a book and only has an ISBN.
If so, tries to fetch the book data off the internet.
:param uuid: uuid of book to augment
    :param event: originating event (carries the requesting client)
"""
try:
if not isbnmeta:
self.log(
"No isbntools found! Install it to get full "
"functionality!",
lvl=warn)
return
new_book = objectmodels['book'].find_one({'uuid': uuid})
try:
if len(new_book.isbn) != 0:
self.log('Got a lookup candidate: ', new_book._fields)
try:
meta = isbnmeta(
new_book.isbn,
service=self.config.isbnservice
)
mapping = libraryfieldmapping[
self.config.isbnservice
]
new_meta = {}
for key in meta.keys():
if key in mapping:
if isinstance(mapping[key], tuple):
name, conv = mapping[key]
try:
new_meta[name] = conv(meta[key])
except ValueError:
self.log(
'Bad value from lookup:',
name, conv, key
)
else:
new_meta[mapping[key]] = meta[key]
new_book.update(new_meta)
new_book.save()
self._notify_result(event, new_book)
self.log("Book successfully augmented from ",
self.config.isbnservice)
except Exception as e:
self.log("Error during meta lookup: ", e, type(e),
new_book.isbn, lvl=error, exc=True)
error_response = {
'component': 'hfos.alert.manager',
'action': 'notify',
'data': {
'type': 'error',
                        'message': 'Could not look up metadata, sorry: ' + str(e)
}
}
self.log(event, event.client, pretty=True)
self.fireEvent(send(event.client.uuid, error_response))
except Exception as e:
self.log("Error during book update.", e, type(e),
exc=True, lvl=error)
except Exception as e:
self.log("Book creation notification error: ", uuid, e, type(e),
lvl=error, exc=True)
|
Lists serial port names
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of the serial ports available on the system
Courtesy: Thomas ( http://stackoverflow.com/questions/12090503
/listing-available-com-ports-with-python )
def serial_ports():
""" Lists serial port names
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of the serial ports available on the system
Courtesy: Thomas ( http://stackoverflow.com/questions/12090503
/listing-available-com-ports-with-python )
"""
if sys.platform.startswith('win'):
ports = ['COM%s' % (i + 1) for i in range(256)]
elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
# this excludes your current terminal "/dev/tty"
ports = glob.glob('/dev/tty[A-Za-z]*')
elif sys.platform.startswith('darwin'):
ports = glob.glob('/dev/tty.*')
else:
raise EnvironmentError('Unsupported platform')
result = []
for port in ports:
try:
s = serial.Serial(port)
s.close()
result.append(port)
except (OSError, serial.SerialException):
pass
return result
|
Initiates communication with the remote controlled device.
:param args:
def opened(self, *args):
"""Initiates communication with the remote controlled device.
:param args:
"""
self._serial_open = True
self.log("Opened: ", args, lvl=debug)
self._send_command(b'l,1') # Saying hello, shortly
self.log("Turning off engine, pump and neutralizing rudder")
self._send_command(b'v')
self._handle_servo(self._machine_channel, 0)
self._handle_servo(self._rudder_channel, 127)
self._set_digital_pin(self._pump_channel, 0)
# self._send_command(b'h')
self._send_command(b'l,0')
self._send_command(b'm,HFOS Control')
|
Sets a new machine speed.
:param event:
def on_machinerequest(self, event):
"""
Sets a new machine speed.
:param event:
"""
self.log("Updating new machine power: ", event.controlvalue)
self._handle_servo(self._machine_channel, event.controlvalue)
|
Sets a new rudder angle.
:param event:
def on_rudderrequest(self, event):
"""
Sets a new rudder angle.
:param event:
"""
self.log("Updating new rudder angle: ", event.controlvalue)
self._handle_servo(self._rudder_channel, event.controlvalue)
|
Activates or deactivates a connected pump.
:param event:
def on_pumprequest(self, event):
"""
Activates or deactivates a connected pump.
:param event:
"""
self.log("Updating pump status: ", event.controlvalue)
self._set_digital_pin(self._pump_channel, event.controlvalue)
|
Provisions a list of items according to their schema
:param items: A list of provisionable items.
:param database_name: Name of the warmongo object model to provision into
:param overwrite: Causes existing items to be overwritten
:param clear: Clears the collection first (Danger!)
:param skip_user_check: Skips checking whether a system user already exists (for user provisioning)
:return:
def provisionList(items, database_name, overwrite=False, clear=False, skip_user_check=False):
"""Provisions a list of items according to their schema
:param items: A list of provisionable items.
    :param database_name: Name of the warmongo object model to provision into
:param overwrite: Causes existing items to be overwritten
:param clear: Clears the collection first (Danger!)
    :param skip_user_check: Skips checking whether a system user already exists (for user provisioning)
:return:
"""
log('Provisioning', items, database_name, lvl=debug)
system_user = None
def get_system_user():
"""Retrieves the node local system user"""
user = objectmodels['user'].find_one({'name': 'System'})
try:
log('System user uuid: ', user.uuid, lvl=verbose)
return user.uuid
except AttributeError as e:
log('No system user found:', e, lvl=warn)
log('Please install the user provision to setup a system user or check your database configuration',
lvl=error)
return False
# TODO: Do not check this on specific objects but on the model (i.e. once)
def needs_owner(obj):
"""Determines whether a basic object has an ownership field"""
        for privilege in obj._fields.get('perms', {}):
if 'owner' in obj._fields['perms'][privilege]:
return True
return False
import pymongo
from hfos.database import objectmodels, dbhost, dbport, dbname
database_object = objectmodels[database_name]
log(dbhost, dbname)
# TODO: Fix this to make use of the dbhost
client = pymongo.MongoClient(dbhost, dbport)
db = client[dbname]
if not skip_user_check:
system_user = get_system_user()
if not system_user:
return
else:
# TODO: Evaluate what to do instead of using a hardcoded UUID
# This is usually only here for provisioning the system user
# One way to avoid this, is to create (instead of provision)
# this one upon system installation.
system_user = '0ba87daa-d315-462e-9f2e-6091d768fd36'
col_name = database_object.collection_name()
if clear is True:
log("Clearing collection for", col_name, lvl=warn)
db.drop_collection(col_name)
counter = 0
for no, item in enumerate(items):
new_object = None
item_uuid = item['uuid']
log("Validating object (%i/%i):" % (no + 1, len(items)), item_uuid, lvl=debug)
if database_object.count({'uuid': item_uuid}) > 0:
log('Object already present', lvl=warn)
if overwrite is False:
log("Not updating item", item, lvl=warn)
else:
log("Overwriting item: ", item_uuid, lvl=warn)
new_object = database_object.find_one({'uuid': item_uuid})
new_object._fields.update(item)
else:
new_object = database_object(item)
if new_object is not None:
try:
if needs_owner(new_object):
if not hasattr(new_object, 'owner'):
log('Adding system owner to object.', lvl=verbose)
new_object.owner = system_user
except Exception as e:
log('Error during ownership test:', e, type(e),
exc=True, lvl=error)
try:
new_object.validate()
new_object.save()
counter += 1
except ValidationError as e:
raise ValidationError(
"Could not provision object: " + str(item_uuid), e)
log("Provisioned %i out of %i items successfully." % (counter, len(items)))
|
Create a default field
def DefaultExtension(schema_obj, form_obj, schemata=None):
"""Create a default field"""
if schemata is None:
schemata = ['systemconfig', 'profile', 'client']
DefaultExtends = {
'schema': {
"properties/modules": [
schema_obj
]
},
'form': {
'modules': {
'items/': form_obj
}
}
}
output = {}
for schema in schemata:
output[schema] = DefaultExtends
return output
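Example with hypothetical schema/form fragments, restricted to a single schema:
schema_obj = {'name': 'mymodule', 'type': 'object'}
form_obj = {'key': 'modules.mymodule'}
print(DefaultExtension(schema_obj, form_obj, schemata=['profile']))
# -> {'profile': {'schema': {'properties/modules': [schema_obj]},
#                 'form': {'modules': {'items/': form_obj}}}}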
|
Copies a whole directory tree
def copytree(root_src_dir, root_dst_dir, hardlink=True):
"""Copies a whole directory tree"""
for src_dir, dirs, files in os.walk(root_src_dir):
dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
for file_ in files:
src_file = os.path.join(src_dir, file_)
dst_file = os.path.join(dst_dir, file_)
try:
if os.path.exists(dst_file):
if hardlink:
hfoslog('Removing frontend link:', dst_file,
emitter='BUILDER', lvl=verbose)
os.remove(dst_file)
else:
hfoslog('Overwriting frontend file:', dst_file,
emitter='BUILDER', lvl=verbose)
hfoslog('Hardlinking ', src_file, dst_dir, emitter='BUILDER',
lvl=verbose)
if hardlink:
os.link(src_file, dst_file)
else:
copy(src_file, dst_dir)
except PermissionError as e:
hfoslog(
" No permission to remove/create target %s for "
"frontend:" % ('link' if hardlink else 'copy'),
dst_dir, e, emitter='BUILDER', lvl=error)
except Exception as e:
hfoslog("Error during", 'link' if hardlink else 'copy',
"creation:", type(e), e, emitter='BUILDER',
lvl=error)
hfoslog('Done linking', root_dst_dir, emitter='BUILDER',
lvl=verbose)
|
Builds and installs the frontend
def install_frontend(instance='default', forcereload=False, forcerebuild=False,
forcecopy=True, install=True, development=False, build_type='dist'):
"""Builds and installs the frontend"""
hfoslog("Updating frontend components", emitter='BUILDER')
components = {}
loadable_components = {}
# TODO: Fix this up, it is probably not a sane way to get at the real root
if development:
frontendroot = os.path.abspath(os.path.dirname(os.path.realpath(
__file__)) + "../../../frontend")
else:
frontendroot = '/opt/hfos/frontend'
frontendtarget = os.path.join('/var/lib/hfos', instance, 'frontend')
if install:
cmdline = ["npm", "install"]
hfoslog("Running", cmdline, lvl=verbose,
emitter='BUILDER')
npminstall = Popen(cmdline, cwd=frontendroot)
out, err = npminstall.communicate()
npminstall.wait()
hfoslog("Frontend dependency installing done: ", out,
err, lvl=debug, emitter='BUILDER')
if True: # try:
from pkg_resources import iter_entry_points
entry_point_tuple = (
iter_entry_points(group='hfos.base', name=None),
iter_entry_points(group='hfos.sails', name=None),
iter_entry_points(group='hfos.components', name=None)
)
for iterator in entry_point_tuple:
for entry_point in iterator:
try:
name = entry_point.name
location = entry_point.dist.location
loaded = entry_point.load()
hfoslog("Entry point: ", entry_point,
name,
entry_point.resolve().__module__, lvl=debug,
emitter='BUILDER')
component_name = entry_point.resolve().__module__.split('.')[1]
hfoslog("Loaded: ", loaded, lvl=verbose, emitter='BUILDER')
comp = {
'location': location,
'version': str(entry_point.dist.parsed_version),
'description': loaded.__doc__
}
frontend = os.path.join(location, 'frontend')
hfoslog("Checking component frontend parts: ",
frontend, lvl=verbose, emitter='BUILDER')
if os.path.isdir(
frontend) and frontend != frontendroot:
comp['frontend'] = frontend
else:
hfoslog("Component without frontend "
"directory:", comp, lvl=debug,
emitter='BUILDER')
components[component_name] = comp
loadable_components[component_name] = loaded
hfoslog("Loaded component:", comp, lvl=verbose,
emitter='BUILDER')
except Exception as e:
hfoslog("Could not inspect entrypoint: ", e,
type(e), entry_point, iterator, lvl=error,
exc=True, emitter='BUILDER')
# except Exception as e:
# hfoslog("Error: ", e, type(e), lvl=error, exc=True, emitter='BUILDER')
# return
hfoslog('Components after lookup:', sorted(list(components.keys())), emitter='BUILDER')
def _update_frontends(install=True):
hfoslog("Checking unique frontend locations: ",
loadable_components, lvl=debug, emitter='BUILDER')
importlines = []
modules = []
for name, component in components.items():
if 'frontend' in component:
origin = component['frontend']
target = os.path.join(frontendroot, 'src', 'components',
name)
target = os.path.normpath(target)
if install:
reqfile = os.path.join(origin, 'requirements.txt')
if os.path.exists(reqfile):
# TODO: Speed this up by collecting deps first then doing one single install call
hfoslog("Installing package dependencies", lvl=debug,
emitter='BUILDER')
with open(reqfile, 'r') as f:
cmdline = ["npm", "install"]
for line in f.readlines():
cmdline.append(line.replace("\n", ""))
hfoslog("Running", cmdline, lvl=verbose,
emitter='BUILDER')
npminstall = Popen(cmdline, cwd=frontendroot)
out, err = npminstall.communicate()
npminstall.wait()
hfoslog("Frontend installing done: ", out,
err, lvl=debug, emitter='BUILDER')
# if target in ('/', '/boot', '/usr', '/home', '/root',
# '/var'):
# hfoslog("Unsafe frontend deletion target path, "
# "NOT proceeding! ", target, lvl=critical,
# emitter='BUILDER')
hfoslog("Copying:", origin, target, lvl=debug,
emitter='BUILDER')
copytree(origin, target)
for modulefilename in glob(target + '/*.module.js'):
modulename = os.path.basename(modulefilename).split(
".module.js")[0]
line = u"import {s} from './components/{p}/{" \
u"s}.module';\nmodules.push({s});\n".format(
s=modulename, p=name)
if modulename not in modules:
                        importlines.append(line)
modules.append(modulename)
else:
hfoslog("Module without frontend:", name, component,
lvl=debug, emitter='BUILDER')
with open(os.path.join(frontendroot, 'src', 'main.tpl.js'),
"r") as f:
main = "".join(f.readlines())
parts = main.split("/* COMPONENT SECTION */")
if len(parts) != 3:
hfoslog("Frontend loader seems damaged! Please check!",
lvl=critical, emitter='BUILDER')
return
try:
with open(os.path.join(frontendroot, 'src', 'main.js'),
"w") as f:
f.write(parts[0])
f.write("/* COMPONENT SECTION:BEGIN */\n")
for line in importlines:
f.write(line)
f.write("/* COMPONENT SECTION:END */\n")
f.write(parts[2])
except Exception as e:
hfoslog("Error during frontend package info writing. Check "
"permissions! ", e, lvl=error, emitter='BUILDER')
def _rebuild_frontend():
hfoslog("Starting frontend build.", lvl=warn, emitter='BUILDER')
npmbuild = Popen(["npm", "run", build_type], cwd=frontendroot)
out, err = npmbuild.communicate()
try:
npmbuild.wait()
except Exception as e:
hfoslog("Error during frontend build", e, type(e),
exc=True, lvl=error, emitter='BUILDER')
return
hfoslog("Frontend build done: ", out, err, lvl=debug, emitter='BUILDER')
copytree(os.path.join(frontendroot, build_type),
frontendtarget, hardlink=False)
copytree(os.path.join(frontendroot, 'assets'),
os.path.join(frontendtarget, 'assets'),
hardlink=False)
hfoslog("Frontend deployed", emitter='BUILDER')
hfoslog("Checking component frontend bits in ", frontendroot,
lvl=verbose, emitter='BUILDER')
_update_frontends(install=install)
if forcerebuild:
_rebuild_frontend()
hfoslog("Done: Install Frontend", emitter='BUILDER')
|
[GROUP] Configuration management operations
def config(ctx):
"""[GROUP] Configuration management operations"""
from hfos import database
database.initialize(ctx.obj['dbhost'], ctx.obj['dbname'])
from hfos.schemata.component import ComponentConfigSchemaTemplate
ctx.obj['col'] = model_factory(ComponentConfigSchemaTemplate)
|
Delete an existing component configuration. This will trigger
the creation of its default configuration upon next restart.
def delete(ctx, componentname):
"""Delete an existing component configuration. This will trigger
the creation of its default configuration upon next restart."""
col = ctx.obj['col']
if col.count({'name': componentname}) > 1:
log('More than one component configuration of this name! Try '
'one of the uuids as argument. Get a list with "config '
'list"')
return
log('Deleting component configuration', componentname,
emitter='MANAGE')
configuration = col.find_one({'name': componentname})
if configuration is None:
configuration = col.find_one({'uuid': componentname})
if configuration is None:
log('Component configuration not found:', componentname,
emitter='MANAGE')
return
configuration.delete()
log('Done')
|
Show the stored, active configuration of a component.
def show(ctx, component):
"""Show the stored, active configuration of a component."""
col = ctx.obj['col']
if col.count({'name': component}) > 1:
log('More than one component configuration of this name! Try '
'one of the uuids as argument. Get a list with "config '
'list"')
return
if component is None:
configurations = col.find()
for configuration in configurations:
log("%-15s : %s" % (configuration.name,
configuration.uuid),
emitter='MANAGE')
else:
configuration = col.find_one({'name': component})
if configuration is None:
configuration = col.find_one({'uuid': component})
if configuration is None:
log('No component with that name or uuid found.')
return
print(json.dumps(configuration.serializablefields(), indent=4))
|
>>> separate_string("test <2>")
(['test ', ''], ['2'])
def separate_string(string):
"""
>>> separate_string("test <2>")
(['test ', ''], ['2'])
"""
string_list = regex.split(r'<(?![!=])', regex.sub(r'>', '<', string))
return string_list[::2], string_list[1::2]
|
>>> overlapping(0, 5, 6, 7)
False
>>> overlapping(1, 2, 0, 4)
True
>>> overlapping(5,6,0,5)
False
def overlapping(start1, end1, start2, end2):
"""
>>> overlapping(0, 5, 6, 7)
False
>>> overlapping(1, 2, 0, 4)
True
>>> overlapping(5,6,0,5)
False
"""
return not ((start1 <= start2 and start1 <= end2 and end1 <= end2 and end1 <= start2) or
(start1 >= start2 and start1 >= end2 and end1 >= end2 and end1 >= start2))
|
>>> remove_lower_overlapping([], [('a', 0, 5)])
[('a', 0, 5)]
>>> remove_lower_overlapping([('z', 0, 4)], [('a', 0, 5)])
[('a', 0, 5)]
>>> remove_lower_overlapping([('z', 5, 6)], [('a', 0, 5)])
[('z', 5, 6), ('a', 0, 5)]
def remove_lower_overlapping(current, higher):
"""
>>> remove_lower_overlapping([], [('a', 0, 5)])
[('a', 0, 5)]
>>> remove_lower_overlapping([('z', 0, 4)], [('a', 0, 5)])
[('a', 0, 5)]
>>> remove_lower_overlapping([('z', 5, 6)], [('a', 0, 5)])
[('z', 5, 6), ('a', 0, 5)]
"""
for (match, h_start, h_end) in higher:
overlaps = list(overlapping_at(h_start, h_end, current))
# Delete from the highest index down so earlier indices stay valid
for overlap in sorted(overlaps, reverse=True):
del current[overlap]
if len(overlaps) > 0:
# Keeps order in place
current.insert(overlaps[0], (match, h_start, h_end))
else:
current.append((match, h_start, h_end))
return current
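overlapping_at is referenced above but not shown in this excerpt; a plausible
sketch, assuming it yields the indices of entries in current whose spans
overlap the given range:
def overlapping_at(start, end, current):
    """Yield indices of current entries that overlap [start, end]"""
    for index, (_, c_start, c_end) in enumerate(current):
        if overlapping(start, end, c_start, c_end):
            yield index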
|
Handler for client-side debug requests
def debugrequest(self, event):
"""Handler for client-side debug requests"""
try:
self.log("Event: ", event.__dict__, lvl=critical)
if event.data == "storejson":
self.log("Storing received object to /tmp", lvl=critical)
fp = open('/tmp/hfosdebugger_' + str(
event.user.useruuid) + "_" + str(uuid4()), "w")
json.dump(event.data, fp, indent=True)
fp.close()
if event.data == "memdebug":
self.log("Memory hogs:", lvl=critical)
objgraph.show_most_common_types(limit=20)
if event.data == "growth":
self.log("Memory growth since last call:", lvl=critical)
objgraph.show_growth()
if event.data == "graph":
self._drawgraph()
if event.data == "exception":
class TestException(BaseException):
"""Generic exception to test exception monitoring"""
pass
raise TestException
if event.data == "heap":
self.log("Heap log:", self.heapy.heap(), lvl=critical)
if event.data == "buildfrontend":
self.log("Sending frontend build command")
self.fireEvent(frontendbuildrequest(force=True), "setup")
if event.data == "logtail":
self.fireEvent(logtailrequest(event.user, None, None,
event.client), "logger")
if event.data == "trigger_anchorwatch":
from hfos.anchor.anchorwatcher import cli_trigger_anchorwatch
self.fireEvent(cli_trigger_anchorwatch())
except Exception as e:
self.log("Exception during debug handling:", e, type(e),
lvl=critical)
|
read Event (on channel ``stdin``)
This is the event handler for ``read`` events specifically from the
``stdin`` channel. This is triggered each time stdin has data that
it has read.
def stdin_read(self, data):
"""read Event (on channel ``stdin``)
This is the event handler for ``read`` events specifically from the
``stdin`` channel. This is triggered each time stdin has data that
it has read.
"""
data = data.strip().decode("utf-8")
self.log("Incoming:", data, lvl=verbose)
if len(data) == 0:
self.log('Use /help to get a list of enabled cli hooks')
return
if data[0] == "/":
cmd = data[1:]
args = []
if ' ' in cmd:
cmd, args = cmd.split(' ', maxsplit=1)
args = args.split(' ')
if cmd in self.hooks:
self.log('Firing hooked event:', cmd, args, lvl=debug)
self.fireEvent(self.hooks[cmd](*args))
# TODO: Move these out, so we get a simple logic here
elif cmd == 'frontend':
self.log("Sending %s frontend rebuild event" %
("(forced)" if 'force' in args else ''))
self.fireEvent(
frontendbuildrequest(force='force' in args,
install='install' in args),
"setup")
elif cmd == 'backend':
self.log("Sending backend reload event")
self.fireEvent(componentupdaterequest(force=False), "setup")
else:
self.log('Unknown Command:', cmd, '. Use /help to get a list of enabled '
'cli hooks')
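For illustration, the command parsing above splits a slash-prefixed line into
a command and its arguments; a standalone rendition of that step (input
string is an example):
data = "/frontend force install"
cmd = data[1:]                    # "frontend force install"
cmd, args = cmd.split(' ', maxsplit=1)
args = args.split(' ')            # cmd == "frontend", args == ["force", "install"]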
|
Registers a new command line interface event hook as command
def register_event(self, event):
"""Registers a new command line interface event hook as command"""
self.log('Registering event hook:', event.cmd, event.thing,
pretty=True, lvl=verbose)
self.hooks[event.cmd] = event.thing
|
Generate a list of all registered authorized and anonymous events
def populate_user_events():
"""Generate a list of all registered authorized and anonymous events"""
global AuthorizedEvents
global AnonymousEvents
def inheritors(klass):
"""Find inheritors of a specified object class"""
subclasses = {}
subclasses_set = set()
work = [klass]
while work:
parent = work.pop()
for child in parent.__subclasses__():
if child not in subclasses_set:
# pprint(child.__dict__)
name = child.__module__ + "." + child.__name__
if name.startswith('hfos'):
subclasses_set.add(child)
event = {
'event': child,
'name': name,
'doc': child.__doc__,
'args': []
}
if child.__module__ in subclasses:
subclasses[child.__module__][
child.__name__] = event
else:
subclasses[child.__module__] = {
child.__name__: event
}
work.append(child)
return subclasses
# TODO: Change event system again, to catch authorized (i.e. "user") as
# well as normal events, so they can be processed by Automat
# NormalEvents = inheritors(Event)
AuthorizedEvents = inheritors(authorizedevent)
AnonymousEvents = inheritors(anonymousevent)
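A toy illustration of the subclass traversal used by inheritors above (the
class names here are made up and not part of hfos):
class Base: pass
class Child(Base): pass
class GrandChild(Child): pass
work = [Base]
found = []
while work:
    parent = work.pop()
    for child in parent.__subclasses__():
        found.append(child.__name__)
        work.append(child)
# found == ['Child', 'GrandChild']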
|
[GROUP] Database management operations
def db(ctx):
"""[GROUP] Database management operations"""
from hfos import database
database.initialize(ctx.obj['dbhost'], ctx.obj['dbname'])
ctx.obj['db'] = database
|
Clears an entire database collection irrevocably. Use with caution!
def clear(ctx, schema):
"""Clears an entire database collection irrevocably. Use with caution!"""
response = _ask('Are you sure you want to delete the collection "%s"' % (
schema), default='N', data_type='bool')
if response is True:
host, port = ctx.obj['dbhost'].split(':')
client = pymongo.MongoClient(host=host, port=int(port))
database = client[ctx.obj['dbname']]
log("Clearing collection for", schema, lvl=warn,
emitter='MANAGE')
result = database.drop_collection(schema)
if not result['ok']:
log("Could not drop collection:", lvl=error)
log(result, pretty=True, lvl=error)
else:
log("Done")
|
Provision a basic system configuration
def provision_system_config(items, database_name, overwrite=False, clear=False, skip_user_check=False):
"""Provision a basic system configuration"""
from hfos.provisions.base import provisionList
from hfos.database import objectmodels
default_system_config_count = objectmodels['systemconfig'].count({
'name': 'Default System Configuration'})
if default_system_config_count == 0 or (clear or overwrite):
provisionList([SystemConfiguration], 'systemconfig', overwrite, clear, skip_user_check)
hfoslog('Provisioning: System: Done.', emitter='PROVISIONS')
else:
hfoslog('Default system configuration already present.', lvl=warn,
emitter='PROVISIONS')
|
Calendar Importer for iCal (ics) files
def ICALImporter(ctx, filename, all, owner, calendar, create_calendar, clear_calendar, dry, execfilter):
"""Calendar Importer for iCal (ics) files
"""
log('iCal importer running')
objectmodels = ctx.obj['db'].objectmodels
if objectmodels['user'].count({'name': owner}) > 0:
owner_object = objectmodels['user'].find_one({'name': owner})
elif objectmodels['user'].count({'uuid': owner}) > 0:
owner_object = objectmodels['user'].find_one({'uuid': owner})
else:
log('User unknown. Specify either uuid or name.', lvl=warn)
return
log('Found user')
if objectmodels['calendar'].count({'name': calendar}) > 0:
calendar = objectmodels['calendar'].find_one({'name': calendar})
elif objectmodels['calendar'].count({'uuid': calendar}) > 0:
calendar = objectmodels['calendar'].find_one({'uuid': calendar})
elif create_calendar:
calendar = objectmodels['calendar']({
'uuid': std_uuid(),
'name': calendar
})
else:
log('Calendar unknown and no --create-calendar specified. Specify either uuid or name of an existing calendar.',
lvl=warn)
return
log('Found calendar')
if clear_calendar is True:
log('Clearing calendar events')
for item in objectmodels['event'].find({'calendar': calendar.uuid}):
item.delete()
with open(filename, 'rb') as file_object:
caldata = Calendar.from_ical(file_object.read())
keys = {
'class': 'str',
'created': 'dt',
'description': 'str',
'dtstart': 'dt',
'dtend': 'dt',
'timestamp': 'dt',
'modified': 'dt',
'location': 'str',
'status': 'str',
'summary': 'str',
'uid': 'str'
}
mapping = {
'description': 'summary',
'summary': 'name'
}
imports = []
def ical_import_filter(original, logfacility):
log('Passthrough filter')
return original
if execfilter is not None:
import os
textFilePath = os.path.abspath(os.path.join(os.path.curdir, execfilter))
textFileFolder = os.path.dirname(textFilePath)
from importlib.machinery import SourceFileLoader
filter_module = SourceFileLoader("importfilter", textFilePath).load_module()
ical_import_filter = filter_module.ical_import_filter
for event in caldata.walk():
if event.name == 'VEVENT':
log(event, lvl=verbose, pretty=True)
initializer = {
'uuid': std_uuid(),
'calendar': calendar.uuid,
}
for item in keys:
thing = event.get(item, None)
if thing is None:
thing = 'NO-' + item
else:
if keys[item] == 'str':
thing = str(thing)
else:
thing = parser.parse(str(thing.dt))
thing = thing.isoformat()
if item in mapping:
item_assignment = mapping[item]
else:
item_assignment = item
initializer[item_assignment] = thing
new_event = objectmodels['event'](initializer)
new_event = ical_import_filter(new_event, log)
imports.append(new_event)
log(new_event, lvl=debug)
for ev in imports:
log(ev.summary)
if not dry:
log('Bulk creating events')
objectmodels['event'].bulk_create(imports)
calendar.save()
else:
log('Dry run - nothing stored.', lvl=warn)
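For illustration, a VEVENT carrying summary "Crew meeting" and dtstart
2018-05-01 10:00 would pass through the key/mapping loop above and produce an
initializer roughly like this (values hypothetical):
initializer = {
    'uuid': std_uuid(),                 # generated per event
    'calendar': calendar.uuid,
    'name': 'Crew meeting',             # 'summary' is mapped to 'name'
    'dtstart': '2018-05-01T10:00:00',   # parsed and isoformat()ed
    'location': 'NO-location'           # absent keys get a 'NO-' default
}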
|
Create migration data for a specified schema
def make_migrations(schema=None):
"""Create migration data for a specified schema"""
entrypoints = {}
old = {}
def apply_migrations(migrations, new_model):
"""Apply migration data to compile an up to date model"""
def get_path(raw_path):
"""Get local path of schema definition"""
print("RAW PATH:", raw_path, type(raw_path))
path = []
for item in raw_path.split("["):
print(item)
item = item.rstrip("]")
item = item.replace('"', '')
item = item.replace("'", '')
try:
item = int(item)
except ValueError:
pass
path.append(item)
path.remove('root')
print("PATH:", path)
return path
def apply_entry(changetype, change, result):
"""Upgrade with a single migration"""
def apply_removes(removes, result):
"""Delete removed fields"""
for remove in removes:
path = get_path(remove)
amount = dpath.util.delete(result, path)
assert amount == 1
return result
def apply_additions(additions, result):
"""Add newly added fields"""
for addition in additions:
path = get_path(addition)
entry = additions[addition]
hfoslog('Adding:', entry, 'at', path)
dpath.util.new(result, path, entry)
return result
if changetype == 'type_changes':
hfoslog('Creating new object')
result = change['root']['new_value']
return result
if changetype == 'dictionary_item_added':
hfoslog('Adding items')
result = apply_additions(change, result)
elif changetype == 'dictionary_item_removed':
hfoslog('Removing items')
result = apply_removes(change, result)
elif changetype == 'values_changed':
hfoslog("Changing items' types")
for item in change:
path = get_path(item)
hfoslog('Changing', path, 'from',
change[item]['old_value'], ' to',
change[item]['new_value'])
assert dpath.util.get(result, path) == change[item][
'old_value']
amount = dpath.util.set(result, path, change[item][
'new_value'])
assert amount == 1
return result
def get_renames(migrations):
"""Check migrations for renamed fields"""
hfoslog('Checking for rename operations:')
pprint(migrations)
for entry in migrations:
added = entry.get('dictionary_item_added', None)
removed = entry.get('dictionary_item_removed', None)
renames = []
if added and removed:
for addition in added:
path = get_path(addition)
for removal in removed:
removed_path = get_path(removal)
if path[:-1] == removed_path[:-1]:
hfoslog('Possible rename detected:', removal, '->',
addition)
renames.append((removed_path, path))
return renames
result = {}
for no, migration in enumerate(migrations):
hfoslog('Migrating', no)
hfoslog('Migration:', migration, lvl=debug)
renamed = get_renames(migrations)
for entry in migration:
result = apply_entry(entry, migration[entry], result)
pprint(result)
return result
def write_migration(schema, counter, path, previous, current):
"""Write out complete migration data"""
filename = "%s_%04i.json" % (schema, counter)
migration = DeepDiff(previous, current, verbose_level=2).json
if migration == "{}":
hfoslog('Nothing changed - no new migration data.', lvl=warn)
return
print('Writing migration: ', os.path.join(path, filename))
pprint(migration)
with open(os.path.join(path, filename), 'w') as f:
f.write(migration)
for schema_entrypoint in iter_entry_points(group='hfos.schemata',
name=None):
try:
hfoslog("Schemata found: ", schema_entrypoint.name, lvl=debug,
emitter='DB')
if schema is not None and schema_entrypoint.name != schema:
continue
entrypoints[schema_entrypoint.name] = schema_entrypoint
pprint(schema_entrypoint.dist.location)
schema_top = schema_entrypoint.dist.location
schema_migrations = schema_entrypoint.module_name.replace(
'schemata', 'migrations').replace('.', '/')
path = os.path.join(schema_top, schema_migrations)
new_model = schema_entrypoint.load()['schema']
migrations = []
try:
for file in sorted(os.listdir(path)):
if not file.endswith('.json'):
continue
fullpath = os.path.join(path, file)
hfoslog('Importing migration', fullpath)
with open(fullpath, 'r') as f:
migration = DeepDiff.from_json(f.read())
migrations.append(migration)
hfoslog('Successfully imported')
if len(migrations) == 0:
raise ImportError
pprint(migrations)
model = apply_migrations(migrations, new_model)
write_migration(schema, len(migrations) + 1, path, model,
new_model)
except ImportError as e:
hfoslog('No previous migrations for', schema, e,
type(e), exc=True)
if len(migrations) == 0:
write_migration(schema, 1, path, None, new_model)
except (ImportError, DistributionNotFound) as e:
hfoslog("Problematic schema: ", e, type(e),
schema_entrypoint.name, exc=True, lvl=warn,
emitter='SCHEMATA')
hfoslog("Found schemata: ", sorted(entrypoints.keys()), lvl=debug,
emitter='SCHEMATA')
pprint(entrypoints)
def make_single_migration(old, new):
pass
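For reference, the get_path helper above turns a DeepDiff-style path string
into a list usable with dpath; worked by hand from the code:
get_path("root['contacts'][0]['name']")   # -> ['contacts', 0, 'name']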
|
Provides the newly authenticated user with a backlog and general
channel status information
def userlogin(self, event):
"""Provides the newly authenticated user with a backlog and general
channel status information"""
try:
user_uuid = event.useruuid
user = objectmodels['user'].find_one({'uuid': user_uuid})
if user_uuid not in self.lastlogs:
self.log('Setting up lastlog for a new user.', lvl=debug)
lastlog = objectmodels['chatlastlog']({
'owner': user_uuid,
'uuid': std_uuid(),
'channels': {}
})
lastlog.save()
self.lastlogs[user_uuid] = lastlog
self.users[user_uuid] = user
self.user_attention[user_uuid] = None
self._send_status(user_uuid, event.clientuuid)
except Exception as e:
self.log('Error during chat setup of user:', e, type(e), exc=True)
|
Chat event handler for incoming channel join requests
:param event: join-event carrying the channel uuid to join
def join(self, event):
"""Chat event handler for incoming channel join requests
:param event: join-event carrying the channel uuid to join
"""
try:
channel_uuid = event.data
user_uuid = event.user.uuid
if channel_uuid in self.chat_channels:
self.log('User joins a known channel', lvl=debug)
if user_uuid in self.chat_channels[channel_uuid].users:
self.log('User already joined', lvl=warn)
else:
self.chat_channels[channel_uuid].users.append(user_uuid)
self.chat_channels[channel_uuid].save()
packet = {
'component': 'hfos.chat.host',
'action': 'join',
'data': channel_uuid
}
self.fireEvent(send(event.client.uuid, packet))
else:
self.log('Request to join unavailable channel', lvl=warn)
except Exception as e:
self.log('Join error:', e, type(e), exc=True, lvl=error)
|
Chat event handler for incoming events
:param event: say-event with incoming chat message
def say(self, event):
"""Chat event handler for incoming events
:param event: say-event with incoming chat message
"""
try:
userid = event.user.uuid
recipient = self._get_recipient(event)
content = self._get_content(event)
message = objectmodels['chatmessage']({
'timestamp': time(),
'recipient': recipient,
'sender': userid,
'content': content,
'uuid': std_uuid()
})
message.save()
chat_packet = {
'component': 'hfos.chat.host',
'action': 'say',
'data': message.serializablefields()
}
if recipient in self.chat_channels:
for useruuid in self.users:
if useruuid in self.chat_channels[recipient].users:
self.log('User in channel', lvl=debug)
self.update_lastlog(useruuid, recipient)
self.log('Sending message', lvl=debug)
self.fireEvent(send(useruuid, chat_packet,
sendtype='user'))
except Exception as e:
self.log("Error: '%s' %s" % (e, type(e)), exc=True, lvl=error)
|
Builds and installs the complete HFOS documentation.
def install_docs(instance, clear_target):
"""Builds and installs the complete HFOS documentation."""
_check_root()
def make_docs():
"""Trigger a Sphinx make command to build the documentation."""
log("Generating HTML documentation")
try:
build = Popen(
[
'make',
'html'
],
cwd='docs/'
)
build.wait()
except Exception as e:
log("Problem during documentation building: ", e, type(e),
exc=True, lvl=error)
return False
return True
make_docs()
# If these need changes, make sure they are watertight and don't remove
# wanted stuff!
target = os.path.join('/var/lib/hfos', instance, 'frontend/docs')
source = 'docs/build/html'
log("Updating documentation directory:", target)
if not os.path.exists(os.path.join(os.path.curdir, source)):
log(
"Documentation not existing yet. Run python setup.py "
"build_sphinx first.", lvl=error)
return
if os.path.exists(target):
log("Path already exists: " + target)
if clear_target:
log("Cleaning up " + target, lvl=warn)
shutil.rmtree(target)
log("Copying docs to " + target)
copy_tree(source, target)
log("Done: Install Docs")
|
Install variable data to /var/[lib,cache]/hfos
def var(ctx, clear_target, clear_all):
"""Install variable data to /var/[lib,cache]/hfos"""
install_var(str(ctx.obj['instance']), clear_target, clear_all)
|
Install required folders in /var
def install_var(instance, clear_target, clear_all):
"""Install required folders in /var"""
_check_root()
log("Checking frontend library and cache directories",
emitter='MANAGE')
uid = pwd.getpwnam("hfos").pw_uid
gid = grp.getgrnam("hfos").gr_gid
join = os.path.join
# If these need changes, make sure they are watertight and don't remove
# wanted stuff!
target_paths = (
'/var/www/challenges', # For LetsEncrypt acme certificate challenges
join('/var/lib/hfos', instance),
join('/var/local/hfos', instance),
join('/var/local/hfos', instance, 'backup'),
join('/var/cache/hfos', instance),
join('/var/cache/hfos', instance, 'tilecache'),
join('/var/cache/hfos', instance, 'rastertiles'),
join('/var/cache/hfos', instance, 'rastercache')
)
logfile = "/var/log/hfos-" + instance + ".log"
for item in target_paths:
if os.path.exists(item):
log("Path already exists: " + item)
if clear_all or (clear_target and 'cache' in item):
log("Cleaning up: " + item, lvl=warn)
shutil.rmtree(item)
if not os.path.exists(item):
log("Creating path: " + item)
os.mkdir(item)
os.chown(item, uid, gid)
# Touch logfile to make sure it exists
open(logfile, "a").close()
os.chown(logfile, uid, gid)
log("Done: Install Var")
|
Install default provisioning data
def provisions(ctx, provision, clear_existing, overwrite, list_provisions):
"""Install default provisioning data"""
install_provisions(ctx, provision, clear_existing, overwrite, list_provisions)
|
Install default provisioning data
def install_provisions(ctx, provision, clear_provisions=False, overwrite=False, list_provisions=False):
"""Install default provisioning data"""
log("Installing HFOS default provisions")
# from hfos.logger import verbosity, events
# verbosity['console'] = verbosity['global'] = events
from hfos import database
database.initialize(ctx.obj['dbhost'], ctx.obj['dbname'])
from hfos.provisions import build_provision_store
provision_store = build_provision_store()
def sort_dependencies(items):
"""Topologically sort the dependency tree"""
g = networkx.DiGraph()
log('Sorting dependencies')
for key, item in items:
log('key: ', key, 'item:', item, pretty=True, lvl=debug)
dependencies = item.get('dependencies', [])
if isinstance(dependencies, str):
dependencies = [dependencies]
if key not in g:
g.add_node(key)
for link in dependencies:
g.add_edge(key, link)
if not networkx.is_directed_acyclic_graph(g):
log('Cycles in provisioning dependency graph detected!', lvl=error)
log('Involved provisions:', list(networkx.simple_cycles(g)), lvl=error)
topology = list(networkx.algorithms.topological_sort(g))
topology.reverse()
log(topology, pretty=True)
return topology
if list_provisions:
sort_dependencies(provision_store.items())
exit()
def provision_item(item):
"""Provision a single provisioning element"""
method = item.get('method', provisionList)
model = item.get('model')
data = item.get('data')
method(data, model, overwrite=overwrite, clear=clear_provisions)
if provision is not None:
if provision in provision_store:
log("Provisioning ", provision, pretty=True)
provision_item(provision_store[provision])
else:
log("Unknown provision: ", provision, "\nValid provisions are",
list(provision_store.keys()),
lvl=error,
emitter='MANAGE')
else:
for name in sort_dependencies(provision_store.items()):
log("Provisioning", name, pretty=True)
provision_item(provision_store[name])
log("Done: Install Provisions")
|
Install the plugin modules
def install_modules(wip):
"""Install the plugin modules"""
def install_module(hfos_module):
"""Install a single module via setuptools"""
try:
setup = Popen(
[
sys.executable,
'setup.py',
'develop'
],
cwd='modules/' + hfos_module + "/"
)
setup.wait()
except Exception as e:
log("Problem during module installation: ", hfos_module, e,
type(e), exc=True, lvl=error)
return False
return True
# TODO: Sort module dependencies via topological sort or let pip do this in future.
# # To get the module dependencies:
# packages = {}
# for provision_entrypoint in iter_entry_points(group='hfos.provisions',
# name=None):
# log("Found packages: ", provision_entrypoint.dist.project_name, lvl=warn)
#
# _package_name = provision_entrypoint.dist.project_name
# _package = pkg_resources.working_set.by_key[_package_name]
#
# print([str(r) for r in _package.requires()]) # retrieve deps from setup.py
modules_production = [
# TODO: Poor man's dependency management, as long as the modules are
# installed from local sources and they're not available on pypi,
# which would handle real dependency management for us:
'navdata',
# Now all the rest:
'alert',
'automat',
'busrepeater',
'calendar',
'countables',
'dash',
# 'dev',
'enrol',
'mail',
'maps',
'nmea',
'nodestate',
'project',
'webguides',
'wiki'
]
modules_wip = [
'calc',
'camera',
'chat',
'comms',
'contacts',
'crew',
'equipment',
'filemanager',
'garden',
'heroic',
'ldap',
'library',
'logbook',
'protocols',
'polls',
'mesh',
'robot',
'switchboard',
'shareables',
]
installables = modules_production
if wip:
installables.extend(modules_wip)
success = []
failed = []
for installable in installables:
log('Installing module ', installable)
if install_module(installable):
success.append(installable)
else:
failed.append(installable)
log('Installed modules: ', success)
if len(failed) > 0:
log('Failed modules: ', failed)
log('Done: Install Modules')
|
Install systemd service configuration
def service(ctx):
"""Install systemd service configuration"""
install_service(ctx.obj['instance'], ctx.obj['dbhost'], ctx.obj['dbname'], ctx.obj['port'])
|
Install systemd service configuration
def install_service(instance, dbhost, dbname, port):
"""Install systemd service configuration"""
_check_root()
log("Installing systemd service")
launcher = os.path.realpath(__file__).replace('manage', 'launcher')
executable = sys.executable + " " + launcher
executable += " --instance " + instance
executable += " --dbname " + dbname + " --dbhost " + dbhost
executable += " --port " + port
executable += " --dolog --logfile /var/log/hfos-" + instance + ".log"
executable += " --logfileverbosity 30 -q"
definitions = {
'instance': instance,
'executable': executable
}
service_name = 'hfos-' + instance + '.service'
write_template_file(os.path.join('dev/templates', service_template),
os.path.join('/etc/systemd/system/', service_name),
definitions)
Popen([
'systemctl',
'enable',
service_name
])
log('Launching service')
Popen([
'systemctl',
'start',
service_name
])
log("Done: Install Service")
|
Install nginx configuration
def nginx(ctx, hostname):
"""Install nginx configuration"""
install_nginx(ctx.obj['instance'], ctx.obj['dbhost'], ctx.obj['dbname'], ctx.obj['port'], hostname)
|
Install nginx configuration
def install_nginx(instance, dbhost, dbname, port, hostname=None):
"""Install nginx configuration"""
_check_root()
log("Installing nginx configuration")
if hostname is None:
try:
configuration = _get_system_configuration(dbhost, dbname)
hostname = configuration.hostname
except Exception as e:
log('Exception:', e, type(e), exc=True, lvl=error)
log("""Could not determine public fully qualified hostname!
Check systemconfig (see db view and db modify commands) or specify
manually with --hostname host.domain.tld
Using 'localhost' for now""", lvl=warn)
hostname = 'localhost'
definitions = {
'instance': instance,
'server_public_name': hostname,
'ssl_certificate': cert_file,
'ssl_key': key_file,
'host_url': 'http://127.0.0.1:%i/' % port
}
if distribution == 'DEBIAN':
configuration_file = '/etc/nginx/sites-available/hfos.%s.conf' % instance
configuration_link = '/etc/nginx/sites-enabled/hfos.%s.conf' % instance
elif distribution == 'ARCH':
configuration_file = '/etc/nginx/nginx.conf'
configuration_link = None
else:
log('Unsure how to proceed, you may need to specify your '
'distribution', lvl=error)
return
log('Writing nginx HFOS site definition')
write_template_file(os.path.join('dev/templates', nginx_configuration),
configuration_file,
definitions)
if configuration_link is not None:
log('Enabling nginx HFOS site (symlink)')
if not os.path.exists(configuration_link):
os.symlink(configuration_file, configuration_link)
log('Restarting nginx service')
Popen([
'systemctl',
'restart',
'nginx.service'
])
log("Done: Install nginx configuration")
|
Install a local SSL certificate
def install_cert(selfsigned):
"""Install a local SSL certificate"""
_check_root()
if selfsigned:
log('Generating self signed (insecure) certificate/key '
'combination')
try:
os.mkdir('/etc/ssl/certs/hfos')
except FileExistsError:
pass
except PermissionError:
log("Need root (e.g. via sudo) to generate ssl certificate")
sys.exit(1)
def create_self_signed_cert():
"""Create a simple self signed SSL certificate"""
# create a key pair
k = crypto.PKey()
k.generate_key(crypto.TYPE_RSA, 1024)
if os.path.exists(cert_file):
try:
certificate = open(cert_file, "rb").read()
old_cert = crypto.load_certificate(crypto.FILETYPE_PEM,
certificate)
serial = old_cert.get_serial_number() + 1
except (crypto.Error, OSError) as e:
log('Could not read old certificate to increment '
'serial:', type(e), e, exc=True, lvl=warn)
serial = 1
else:
serial = 1
# create a self-signed certificate
certificate = crypto.X509()
certificate.get_subject().C = "DE"
certificate.get_subject().ST = "Berlin"
certificate.get_subject().L = "Berlin"
# noinspection PyPep8
certificate.get_subject().O = "Hackerfleet"
certificate.get_subject().OU = "Hackerfleet"
certificate.get_subject().CN = gethostname()
certificate.set_serial_number(serial)
certificate.gmtime_adj_notBefore(0)
certificate.gmtime_adj_notAfter(10 * 365 * 24 * 60 * 60)
certificate.set_issuer(certificate.get_subject())
certificate.set_pubkey(k)
certificate.sign(k, b'sha512')
open(key_file, "wt").write(str(
crypto.dump_privatekey(crypto.FILETYPE_PEM, k),
encoding="ASCII"))
open(cert_file, "wt").write(str(
crypto.dump_certificate(crypto.FILETYPE_PEM, certificate),
encoding="ASCII"))
open(combined_file, "wt").write(str(
crypto.dump_certificate(crypto.FILETYPE_PEM, certificate),
encoding="ASCII") + str(
crypto.dump_privatekey(crypto.FILETYPE_PEM, k),
encoding="ASCII"))
create_self_signed_cert()
log('Done: Install Cert')
else:
# TODO
log('Not implemented yet. You can build your own certificate and '
'store it in /etc/ssl/certs/hfos/server-cert.pem - it should '
'be a certificate with key, as this is used server side and '
'there is no way to enter a separate key.', lvl=error)
|
Build and install frontend
def frontend(ctx, dev, rebuild, no_install, build_type):
"""Build and install frontend"""
install_frontend(instance=ctx.obj['instance'],
forcerebuild=rebuild,
development=dev,
install=not no_install,
build_type=build_type)
|
Default-Install everything installable
\b
This includes
* System user (hfos.hfos)
* Self signed certificate
* Variable data locations (/var/lib/hfos and /var/cache/hfos)
* All the official modules in this repository
* Default module provisioning data
* Documentation
* systemd service descriptor
It does NOT build and install the HTML5 frontend.
def install_all(ctx, clear_all):
"""Default-Install everything installable
\b
This includes
* System user (hfos.hfos)
* Self signed certificate
* Variable data locations (/var/lib/hfos and /var/cache/hfos)
* All the official modules in this repository
* Default module provisioning data
* Documentation
* systemd service descriptor
It does NOT build and install the HTML5 frontend."""
_check_root()
instance = ctx.obj['instance']
dbhost = ctx.obj['dbhost']
dbname = ctx.obj['dbname']
port = ctx.obj['port']
install_system_user()
install_cert(selfsigned=True)
install_var(instance, clear_target=clear_all, clear_all=clear_all)
install_modules(wip=False)
install_provisions(ctx, provision=None, clear_provisions=clear_all)
install_docs(instance, clear_target=clear_all)
install_service(instance, dbhost, dbname, port)
install_nginx(instance, dbhost, dbname, port)
log('Done')
|
Uninstall data and resource locations
def uninstall():
"""Uninstall data and resource locations"""
_check_root()
response = _ask("This will delete all data of your HFOS installations! Type"
"YES to continue:", default="N", show_hint=False)
if response == 'YES':
shutil.rmtree('/var/lib/hfos')
shutil.rmtree('/var/cache/hfos')
|
Update a HFOS node
def update(ctx, no_restart, no_rebuild):
"""Update a HFOS node"""
# 0. (NOT YET! MAKE A BACKUP OF EVERYTHING)
# 1. update repository
# 2. update frontend repository
# 3. (Not yet: update venv)
# 4. rebuild frontend
# 5. restart service
instance = ctx.obj['instance']
log('Pulling github updates')
run_process('.', ['git', 'pull', 'origin', 'master'])
run_process('./frontend', ['git', 'pull', 'origin', 'master'])
if not no_rebuild:
log('Rebuilding frontend')
install_frontend(instance, forcerebuild=True, install=False, development=True)
if not no_restart:
log('Restarting service')
if instance != 'hfos':
instance = 'hfos-' + instance
run_process('.', ['sudo', 'systemctl', 'restart', instance])
log('Done')
|
Handles incoming raw sensor data
:param event: Raw sentences incoming data
def raw_data(self, event):
"""Handles incoming raw sensor data
:param event: Raw sentences incoming data
"""
self.log('Received raw data from bus', lvl=events)
if not parse:
return
nmea_time = event.data[0]
try:
parsed_data = parse(event.data[1])
except Exception as e:
self.log('Unparseable sentence:', event.data[1], e, type(e),
exc=True, lvl=warn)
self.unparsable += 1  # count unparsable sentences
return
bus = event.bus
sensor_data_package = self._handle(parsed_data)
self.log("Sensor data:", sensor_data_package, lvl=verbose)
if sensor_data_package:
# pprint(sensor_data_package)
self.fireEvent(sensordata(sensor_data_package, nmea_time, bus),
"navdata")
|
DANGER!
*This command is a maintenance tool and clears the complete database.*
def clear_all():
"""DANGER!
*This command is a maintenance tool and clears the complete database.*
"""
sure = input("Are you sure to drop the complete database content? (Type "
"in upppercase YES)")
if not (sure == 'YES'):
db_log('Not deleting the database.')
sys.exit(5)
client = pymongo.MongoClient(host=dbhost, port=dbport)
db = client[dbname]
for col in db.collection_names(include_system_collections=False):
db_log("Dropping collection ", col, lvl=warn)
db.drop_collection(col)
|
Generate factories to construct objects from schemata
def _build_model_factories(store):
"""Generate factories to construct objects from schemata"""
result = {}
for schemaname in store:
schema = None
try:
schema = store[schemaname]['schema']
except KeyError:
schemata_log("No schema found for ", schemaname, lvl=critical, exc=True)
try:
result[schemaname] = warmongo.model_factory(schema)
except Exception as e:
schemata_log("Could not create factory for schema ", schemaname, schema, lvl=critical, exc=True)
return result
|
Generate database collections with indices from the schemastore
def _build_collections(store):
"""Generate database collections with indices from the schemastore"""
result = {}
client = pymongo.MongoClient(host=dbhost, port=dbport)
db = client[dbname]
for schemaname in store:
schema = None
indices = None
try:
schema = store[schemaname]['schema']
indices = store[schemaname].get('indices', None)
except KeyError:
db_log("No schema found for ", schemaname, lvl=critical)
try:
result[schemaname] = db[schemaname]
except Exception:
db_log("Could not get collection for schema ", schemaname, schema, lvl=critical, exc=True)
if indices is not None:
col = db[schemaname]
db_log('Adding indices to', schemaname, lvl=debug)
i = 0
keys = list(indices.keys())
while i < len(indices):
index_name = keys[i]
index = indices[index_name]
index_type = index.get('type', None)
index_unique = index.get('unique', False)
index_sparse = index.get('sparse', True)
index_reindex = index.get('reindex', False)
if index_type in (None, 'text'):
index_type = pymongo.TEXT
elif index_type == '2dsphere':
index_type = pymongo.GEOSPHERE
def do_index():
col.ensure_index([(index_name, index_type)],
unique=index_unique,
sparse=index_sparse)
db_log('Enabling index of type', index_type, 'on', index_name, lvl=debug)
try:
do_index()
i += 1
except pymongo.errors.OperationFailure:
db_log(col.list_indexes().__dict__, pretty=True, lvl=verbose)
if not index_reindex:
db_log('Index was not created!', lvl=warn)
i += 1
else:
try:
col.drop_index(index_name)
do_index()
i += 1
except pymongo.errors.OperationFailure as e:
db_log('Index recreation problem:', exc=True, lvl=error)
col.drop_indexes()
i = 0
# for index in col.list_indexes():
# db_log("Index: ", index)
return result
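For illustration, an 'indices' entry in the schemastore that the loop above
consumes could look like this (field names follow the .get calls above,
index names hypothetical):
indices = {
    'name': {'type': 'text', 'unique': True, 'sparse': True},
    'geometry': {'type': '2dsphere'}
}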
|
Initializes the database connectivity, schemata and finally object models
def initialize(address='127.0.0.1:27017', database_name='hfos', instance_name="default", reload=False):
"""Initializes the database connectivity, schemata and finally object models"""
global schemastore
global l10n_schemastore
global objectmodels
global collections
global dbhost
global dbport
global dbname
global instance
global initialized
if initialized and not reload:
hfoslog('Already initialized and not reloading.', lvl=warn, emitter="DB", frame_ref=2)
return
dbhost = address.split(':')[0]
dbport = int(address.split(":")[1]) if ":" in address else 27017
dbname = database_name
db_log("Using database:", dbname, '@', dbhost, ':', dbport)
try:
client = pymongo.MongoClient(host=dbhost, port=dbport)
db = client[dbname]
db_log("Database: ", db.command('buildinfo'), lvl=debug)
except Exception as e:
db_log("No database available! Check if you have mongodb > 3.0 "
"installed and running as well as listening on port 27017 "
"of localhost. (Error: %s) -> EXIT" % e, lvl=critical)
sys.exit(5)
warmongo.connect(database_name)
schemastore = _build_schemastore_new()
l10n_schemastore = _build_l10n_schemastore(schemastore)
objectmodels = _build_model_factories(schemastore)
collections = _build_collections(schemastore)
instance = instance_name
initialized = True
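Typical usage, matching the host:port parsing above (values are examples):
from hfos import database
database.initialize('localhost:27017', 'hfos')
model = database.objectmodels['systemconfig']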
|
Profiles object model handling with a very simple benchmarking test
def profile(schemaname='sensordata', profiletype='pjs'):
"""Profiles object model handling with a very simple benchmarking test"""
db_log("Profiling ", schemaname)
schema = schemastore[schemaname]['schema']
db_log("Schema: ", schema, lvl=debug)
testclass = None
if profiletype == 'warmongo':
db_log("Running Warmongo benchmark")
testclass = warmongo.model_factory(schema)
elif profiletype == 'pjs':
db_log("Running PJS benchmark")
try:
import python_jsonschema_objects as pjs
except ImportError:
db_log("PJS benchmark selected but not available. Install "
"python_jsonschema_objects (PJS)")
return
db_log()
builder = pjs.ObjectBuilder(schema)
ns = builder.build_classes()
pprint(ns)
testclass = ns[schemaname]
db_log("ns: ", ns, lvl=warn)
if testclass is not None:
db_log("Instantiating elements...")
for i in range(100):
testclass()
else:
db_log("No Profiletype available!")
db_log("Profiling done")
|
Exports all collections to (JSON-) files.
def backup(schema, uuid, export_filter, export_format, filename, pretty, export_all, omit):
"""Exports all collections to (JSON-) files."""
export_format = export_format.upper()
if pretty:
indent = 4
else:
indent = 0
f = None
if filename:
try:
f = open(filename, 'w')
except (IOError, PermissionError) as e:
backup_log('Could not open output file for writing:', exc=True, lvl=error)
return
def output(what, convert=False):
"""Output the backup in a specified format."""
if convert:
if export_format == 'JSON':
data = json.dumps(what, indent=indent)
else:
data = ""
else:
data = what
if not filename:
print(data)
else:
f.write(data)
if schema is None:
if export_all is False:
backup_log('No schema given.', lvl=warn)
return
else:
schemata = objectmodels.keys()
else:
schemata = [schema]
all_items = {}
for schema_item in schemata:
model = objectmodels[schema_item]
if uuid:
obj = model.find({'uuid': uuid})
elif export_filter:
obj = model.find(literal_eval(export_filter))
else:
obj = model.find()
items = []
for item in obj:
fields = item.serializablefields()
for field in omit:
try:
fields.pop(field)
except KeyError:
pass
items.append(fields)
all_items[schema_item] = items
# if pretty is True:
# output('\n// Objectmodel: ' + schema_item + '\n\n')
# output(schema_item + ' = [\n')
output(all_items, convert=True)
if f is not None:
f.flush()
f.close()
|
Checks node local collection storage sizes
def _check_collections(self):
"""Checks node local collection storage sizes"""
self.collection_sizes = {}
self.collection_total = 0
for col in self.db.collection_names(include_system_collections=False):
self.collection_sizes[col] = self.db.command('collstats', col).get(
'storageSize', 0)
self.collection_total += self.collection_sizes[col]
sorted_x = sorted(self.collection_sizes.items(),
key=operator.itemgetter(1))
for item in sorted_x:
self.log("Collection size (%s): %.2f MB" % (
item[0], item[1] / 1024.0 / 1024),
lvl=verbose)
self.log("Total collection sizes: %.2f MB" % (self.collection_total /
1024.0 / 1024))
|
Checks used filesystem storage sizes
def _check_free_space(self):
"""Checks used filesystem storage sizes"""
def get_folder_size(path):
"""Aggregates used size of a specified path, recursively"""
total_size = 0
for item in walk(path):
for file in item[2]:
try:
total_size = total_size + getsize(join(item[0], file))
except (OSError, PermissionError) as e:
self.log("error with file: " + join(item[0], file), e)
return total_size
for name, checkpoint in self.config.locations.items():
try:
stats = statvfs(checkpoint['location'])
except (OSError, PermissionError) as e:
self.log('Location unavailable:', name, e, type(e),
lvl=error, exc=True)
continue
free_space = stats.f_frsize * stats.f_bavail
used_space = get_folder_size(
checkpoint['location']
) / 1024.0 / 1024
self.log('Location %s uses %.2f MB' % (name, used_space))
if free_space < checkpoint['minimum']:
self.log('Short of free space on %s: %.2f MB left' % (
name, free_space / 1024.0 / 1024),
lvl=warn)
|
Worker task to send out an email, which blocks the process unless it is threaded
def send_mail_worker(config, mail, event):
"""Worker task to send out an email, which blocks the process unless it is threaded"""
log = ""
try:
if config.mail_ssl:
server = SMTP_SSL(config.mail_server, port=config.mail_server_port, timeout=30)
else:
server = SMTP(config.mail_server, port=config.mail_server_port, timeout=30)
if config.mail_tls:
log += 'Starting TLS\n'
server.starttls()
if config.mail_username != '':
log += 'Logging in with ' + str(config.mail_username) + "\n"
server.login(config.mail_username, config.mail_password)
else:
log += 'No username, trying anonymous access\n'
log += 'Sending Mail\n'
response_send = server.send_message(mail)
server.quit()
except timeout as e:
log += 'Could not send email to enrollee, mailserver timeout: ' + str(e) + "\n"
return False, log, event
log += 'Server response:' + str(response_send)
return True, log, event
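Since the worker blocks on SMTP I/O, callers would typically run it in a
thread; a hedged sketch (mail being an email.message.Message, config carrying
the mail_* attributes used above):
from threading import Thread
# The (success, log, event) return value is dropped here; real callers
# would need a queue or callback to collect it.
worker = Thread(target=send_mail_worker, args=(config, mail, event))
worker.daemon = True
worker.start()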
|
Reload the current configuration and set up everything depending on it
def reload_configuration(self, event):
"""Reload the current configuration and set up everything depending on it"""
super(EnrolManager, self).reload_configuration(event)
self.log('Reloaded configuration.')
self._setup()
|
An admin user requests a change to an enrolment
def change(self, event):
"""An admin user requests a change to an enrolment"""
uuid = event.data['uuid']
status = event.data['status']
if status not in ['Open', 'Pending', 'Accepted', 'Denied', 'Resend']:
self.log('Erroneous status for enrollment requested!', lvl=warn)
return
self.log('Changing status of an enrollment', uuid, 'to', status)
enrollment = objectmodels['enrollment'].find_one({'uuid': uuid})
if enrollment is not None:
self.log('Enrollment found', lvl=debug)
else:
return
if status == 'Resend':
enrollment.timestamp = std_now()
enrollment.save()
self._send_invitation(enrollment, event)
reply = {True: 'Resent'}
else:
enrollment.status = status
enrollment.save()
reply = {True: enrollment.serializablefields()}
if status == 'Accepted' and enrollment.method == 'Enrolled':
self._create_user(enrollment.name, enrollment.password, enrollment.email, 'Invited', event.client.uuid)
self._send_acceptance(enrollment, None, event)
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'change',
'data': reply
}
self.log('packet:', packet, lvl=verbose)
self.fireEvent(send(event.client.uuid, packet))
self.log('Enrollment changed', lvl=debug)
|
An enrolled user wants to change their password
def changepassword(self, event):
"""An enrolled user wants to change their password"""
old = event.data['old']
new = event.data['new']
uuid = event.user.uuid
# TODO: Write email to notify user of password change
user = objectmodels['user'].find_one({'uuid': uuid})
if std_hash(old, self.salt) == user.passhash:
user.passhash = std_hash(new, self.salt)
user.save()
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'changepassword',
'data': True
}
self.fireEvent(send(event.client.uuid, packet))
self.log('Successfully changed password for user', uuid)
else:
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'changepassword',
'data': False
}
self.fireEvent(send(event.client.uuid, packet))
self.log('User tried to change password without supplying old one', lvl=warn)
|
A new user has been invited to enrol by an admin user
def invite(self, event):
"""A new user has been invited to enrol by an admin user"""
self.log('Inviting new user to enrol')
name = event.data['name']
email = event.data['email']
method = event.data['method']
self._invite(name, method, email, event.client.uuid, event)
|
A user tries to self-enrol with the enrolment form
def enrol(self, event):
"""A user tries to self-enrol with the enrolment form"""
if self.config.allow_registration is False:
self.log('Someone tried to register although enrolment is closed.')
return
self.log('Client trying to register a new account:', event, pretty=True)
# self.log(event.data, pretty=True)
uuid = event.client.uuid
if uuid in self.captchas and event.data.get('captcha', None) == self.captchas[uuid]['text']:
self.log('Captcha solved!')
else:
self.log('Captcha failed!')
self._fail(event, _('You did not solve the captcha correctly.', event))
self._generate_captcha(event)
return
mail = event.data.get('mail', None)
if mail is None:
self._fail(event, _('You have to supply all required fields.', event))
return
elif not validate_email(mail):
self._fail(event, _('The supplied email address seems invalid', event))
return
if objectmodels['user'].count({'mail': mail}) > 0:
self._fail(event, _('Your mail address cannot be used.', event))
return
password = event.data.get('password', None)
if password is None or len(password) < 5:
self._fail(event, _('Your password is not long enough.', event))
return
username = event.data.get('username', None)
if username is None or len(username) < 1:
self._fail(event, _('Your username is not long enough.', event))
return
elif (objectmodels['user'].count({'name': username}) > 0) or \
(objectmodels['enrollment'].count({'name': username}) > 0):
self._fail(event, _('The username you supplied is not available.', event))
return
self.log('Provided data is good to enrol.')
if self.config.no_verify:
self._create_user(username, password, mail, 'Enrolled', uuid)
else:
self._invite(username, 'Enrolled', mail, uuid, event, password)
|
A challenge/response for an enrolment has been accepted
def accept(self, event):
"""A challenge/response for an enrolment has been accepted"""
self.log('Invitation accepted:', event.__dict__, lvl=debug)
try:
uuid = event.data
enrollment = objectmodels['enrollment'].find_one({
'uuid': uuid
})
if enrollment is not None:
self.log('Enrollment found', lvl=debug)
if enrollment.status == 'Open':
self.log('Enrollment is still open', lvl=debug)
if enrollment.method == 'Invited' and self.config.auto_accept_invited:
enrollment.status = 'Accepted'
data = 'You should have received an email with your new password ' \
'and can now log in to the system and start to use it. <br/>' \
'Please change your password immediately after logging in'
password = std_human_uid().replace(" ", '')
self._create_user(enrollment.name, password, enrollment.email, enrollment.method, uuid)
self._send_acceptance(enrollment, password, event)
elif enrollment.method == 'Enrolled' and self.config.auto_accept_enrolled:
enrollment.status = 'Accepted'
data = 'Your account is now activated.'
self._create_user(enrollment.name, enrollment.password, enrollment.email, enrollment.method,
uuid)
# TODO: Evaluate if sending an acceptance mail makes sense
# self._send_acceptance(enrollment, "", event)
else:
enrollment.status = 'Pending'
data = 'Someone has to confirm your enrollment ' \
'first. Thank you for your patience.'
# TODO: Alert admin users
enrollment.save()
# Reaffirm acceptance to end user, when clicking on the link multiple times
elif enrollment.status == 'Accepted':
data = 'You can now log in to the system and start to use it.'
elif enrollment.status == 'Pending':
data = 'Someone has to confirm your enrollment ' \
'first. Thank you for your patience.'
else:
self.log('Enrollment has been closed already!', lvl=warn)
self._fail(event)
return
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'accept',
'data': {True: data}
}
self.fireEvent(send(event.client.uuid, packet))
else:
self.log('No enrollment available.', lvl=warn)
self._fail(event)
except Exception as e:
self.log('Error during invitation accept handling:', e, type(e),
lvl=warn, exc=True)
|
An anonymous client wants to know if we're open for enrollment
def status(self, event):
"""An anonymous client wants to know if we're open for enrollment"""
self.log('Registration status requested')
response = {
'component': 'hfos.enrol.enrolmanager',
'action': 'status',
'data': self.config.allow_registration
}
self.fire(send(event.client.uuid, response))
|
An anonymous client requests a password reset
def request_reset(self, event):
"""An anonymous client requests a password reset"""
self.log('Password reset request received:', event.__dict__, lvl=hilight)
user_object = objectmodels['user']
email = event.data.get('email', None)
email_user = None
if email is not None and user_object.count({'mail': email}) > 0:
email_user = user_object.find_one({'mail': email})
if email_user is None:
self._fail(event, msg="Mail address unknown")
return
|
Delayed transmission of a requested captcha
def captcha_transmit(self, captcha, uuid):
"""Delayed transmission of a requested captcha"""
self.log('Transmitting captcha')
response = {
'component': 'hfos.enrol.enrolmanager',
'action': 'captcha',
'data': b64encode(captcha['image'].getvalue()).decode('utf-8')
}
self.fire(send(uuid, response))
|
Actually invite a given user
def _invite(self, name, method, email, uuid, event, password=""):
"""Actually invite a given user"""
props = {
'uuid': std_uuid(),
'status': 'Open',
'name': name,
'method': method,
'email': email,
'password': password,
'timestamp': std_now()
}
enrollment = objectmodels['enrollment'](props)
enrollment.save()
self.log('Enrollment stored', lvl=debug)
self._send_invitation(enrollment, event)
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'invite',
'data': [True, email]
}
self.fireEvent(send(uuid, packet))
|
Create a new user and all initial data
def _create_user(self, username, password, mail, method, uuid):
"""Create a new user and all initial data"""
try:
if method == 'Invited':
config_role = self.config.group_accept_invited
else:
config_role = self.config.group_accept_enrolled
roles = []
if ',' in config_role:
for item in config_role.split(','):
roles.append(item.lstrip().rstrip())
else:
roles = [config_role]
newuser = objectmodels['user']({
'name': username,
'passhash': std_hash(password, self.salt),
'mail': mail,
'uuid': std_uuid(),
'roles': roles,
'created': std_now()
})
if method == 'Invited':
newuser.needs_password_change = True
newuser.save()
except Exception as e:
self.log("Problem creating new user: ", type(e), e,
lvl=error)
return
try:
newprofile = objectmodels['profile']({
'uuid': std_uuid(),
'owner': newuser.uuid
})
self.log("New profile uuid: ", newprofile.uuid,
lvl=verbose)
newprofile.save()
packet = {
'component': 'hfos.enrol.enrolmanager',
'action': 'enrol',
'data': [True, mail]
}
self.fireEvent(send(uuid, packet))
# TODO: Notify crew-admins
except Exception as e:
self.log("Problem creating new profile: ", type(e),
e, lvl=error)
|