text
stringlengths 81
112k
|
|---|
Send an invitation mail to an open enrolment
def _send_invitation(self, enrollment, event):
    """Mail an invitation for an open enrollment to its user.

    Renders the configured invitation subject and body and hands them to
    the generic mail sender.
    """
    self.log('Sending enrollment status mail to user')
    self._send_mail(
        self.config.invitation_subject,
        self.config.invitation_mail,
        enrollment,
        event
    )
|
Send an acceptance mail to an open enrolment
def _send_acceptance(self, enrollment, password, event):
    """Send an acceptance mail to an open enrolment.

    If a new password was generated for the account, a hint containing it
    is appended to the configured acceptance mail text.
    """
    self.log('Sending acceptance status mail to user')
    # Bug fix: the original used `password is not ""`, which tests object
    # identity rather than equality. Whether it worked depended on string
    # interning, and it raises a SyntaxWarning on Python 3.8+.
    if password != "":
        password_hint = '\n\nPS: Your new password is ' + password + ' - please change it after your first login!'
        acceptance_text = self.config.acceptance_mail + password_hint
    else:
        acceptance_text = self.config.acceptance_mail
    self._send_mail(self.config.acceptance_subject, acceptance_text, enrollment, event)
|
Connect to mail server and send actual email
def _send_mail(self, subject, template, enrollment, event):
    """Connect to mail server and send actual email

    Renders subject and body templates with an enrollment/node context,
    assembles a MIME mail and either dispatches it asynchronously via a
    worker task or, when sending is disabled in the configuration, logs
    the rendered mail for debugging.
    """
    # Template context shared by body and subject rendering
    context = {
        'name': enrollment.name,
        'invitation_url': self.invitation_url,
        'node_name': self.node_name,
        'node_url': self.node_url,
        'uuid': enrollment.uuid
    }
    mail = render(template, context)
    self.log('Mail:', mail, lvl=verbose)
    mime_mail = MIMEText(mail)
    mime_mail['Subject'] = render(subject, context)
    # From header gets its own minimal context (only the node's hostname)
    mime_mail['From'] = render(self.config.mail_from, {'hostname': self.hostname})
    mime_mail['To'] = enrollment.email
    self.log('MimeMail:', mime_mail, lvl=verbose)
    if self.config.mail_send is True:
        self.log('Sending mail to', enrollment.email)
        # Delivery happens off the main loop in a worker task
        self.fireEvent(task(send_mail_worker, self.config, mime_mail, event), "enrolworkers")
    else:
        self.log('Not sending mail, here it is for debugging info:', mail, pretty=True)
|
Chat event handler for incoming events
:param event: say-event with incoming chat message
def say(self, event):
    """Chat event handler for incoming events

    :param event: say-event with incoming chat message
    """
    try:
        # NOTE(review): userid and recipient are assigned but never used in
        # this handler - presumably kept for upcoming reply functionality;
        # confirm before removing.
        userid = event.user.uuid
        recipient = self._get_recipient(event)
        content = self._get_content(event)
        # React only when this component's configured name is mentioned
        if self.config.name in content:
            self.log('I think, someone mentioned me:', content)
    except Exception as e:
        # Broad catch: a malformed chat event must not crash the component
        self.log("Error: '%s' %s" % (e, type(e)), exc=True, lvl=error)
|
Register event hook on reception of add_auth_hook-event
def add_auth_hook(self, event):
    """Register event hook on reception of add_auth_hook-event.

    Stores the supplied event under the authenticator's name so it can be
    dispatched later.
    """
    authenticator = event.authenticator_name
    self.log('Adding authentication hook for', authenticator)
    self.auth_hooks[authenticator] = event.event
|
Sends a failure message to the requesting client
def _fail(self, event, message='Invalid credentials'):
    """Sends a failure message to the requesting client.

    The client's IP is remembered so repeated attempts can be throttled;
    the notification itself is delivered after a 3 second delay.
    """
    ip = event.sock.getpeername()[0]
    self.failing_clients[ip] = event
    notification = {
        'component': 'auth',
        'action': 'fail',
        'data': message
    }
    delayed = Event.create('notify_fail', event.clientuuid, notification, ip)
    Timer(3, delayed).register(self)
|
Send login notification to client
def _login(self, event, user_account, user_profile, client_config):
    """Send login notification to client.

    Updates the account's last-login timestamp, persists it and fires the
    authentication event. The in-memory password hash is blanked after
    saving so it is never shipped to the client.
    """
    user_account.lastlogin = std_now()
    user_account.save()
    user_account.passhash = ""
    payload = (user_account, user_profile, client_config)
    self.fireEvent(
        authentication(
            user_account.name,
            payload,
            event.clientuuid,
            user_account.uuid,
            event.sock
        ),
        "auth"
    )
|
Handles authentication requests from clients
:param event: AuthenticationRequest with user's credentials
def authenticationrequest(self, event):
    """Handles authentication requests from clients

    :param event: AuthenticationRequest with user's credentials
    """
    # Throttle clients that recently failed a login attempt
    if event.sock.getpeername()[0] in self.failing_clients:
        self.log('Client failed a login and has to wait', lvl=debug)
        return
    handler = self._handle_autologin if event.auto else self._handle_login
    handler(event)
|
Automatic logins for client configurations that allow it
def _handle_autologin(self, event):
    """Automatic logins for client configurations that allow it

    Looks up the requested client configuration; only if it exists, has
    autologin enabled and belongs to an active account is the login
    performed without password verification.
    """
    self.log("Verifying automatic login request")
    # TODO: Check for a common secret
    # noinspection PyBroadException
    try:
        client_config = objectmodels['client'].find_one({
            'uuid': event.requestedclientuuid
        })
    except Exception:
        # Any lookup failure is treated as "no such configuration"
        client_config = None
    # Reject unless the stored configuration explicitly allows autologin
    if client_config is None or client_config.autologin is False:
        self.log("Autologin failed:", event.requestedclientuuid,
                 lvl=error)
        self._fail(event)
        return
    try:
        # The configuration's owner becomes the logged-in account
        user_account = objectmodels['user'].find_one({
            'uuid': client_config.owner
        })
        if user_account is None:
            raise AuthenticationError
        self.log("Autologin for", user_account.name, lvl=debug)
    except Exception as e:
        self.log("No user object due to error: ", e, type(e),
                 lvl=error)
        self._fail(event)
        return
    if user_account.active is False:
        self.log("Account deactivated.")
        self._fail(event, 'Account deactivated.')
        return
    user_profile = self._get_profile(user_account)
    self._login(event, user_account, user_profile, client_config)
    self.log("Autologin successful!", lvl=warn)
|
Manual password based login
def _handle_login(self, event):
    """Manual password based login

    Validates credential lengths, resolves the account, verifies the
    salted password hash, resolves (or creates) a client configuration
    and finally fires the login event.
    """
    # TODO: Refactor to simplify
    self.log("Auth request for ", event.username, 'client:',
             event.clientuuid)
    # TODO: Define the requirements for secure passwords etc.
    # They're also required in the Enrol module..!
    if (len(event.username) < 1) or (len(event.password) < 5):
        self.log("Illegal username or password received, login cancelled", lvl=warn)
        self._fail(event, 'Password or username too short')
        return
    client_config = None
    try:
        user_account = objectmodels['user'].find_one({
            'name': event.username
        })
        # self.log("Account: %s" % user_account._fields, lvl=debug)
        if user_account is None:
            raise AuthenticationError
    except Exception as e:
        self.log("No userobject due to error: ", e, type(e),
                 lvl=error)
        self._fail(event)
        return
    self.log("User found.", lvl=debug)
    if user_account.active is False:
        self.log("Account deactivated.")
        self._fail(event, 'Account deactivated.')
        return
    # Compare salted hash of the supplied password with the stored one
    if not std_hash(event.password, self.salt) == user_account.passhash:
        self.log("Password was wrong!", lvl=warn)
        self._fail(event)
        return
    self.log("Passhash matches, checking client and profile.",
             lvl=debug)
    requested_client_uuid = event.requestedclientuuid
    if requested_client_uuid is not None:
        client_config = objectmodels['client'].find_one({
            'uuid': requested_client_uuid
        })
        if client_config:
            self.log("Checking client configuration permissions",
                     lvl=debug)
            # TODO: Shareable client configurations?
            # Only the owner may use a stored client configuration
            if client_config.owner != user_account.uuid:
                client_config = None
                self.log("Unauthorized client configuration "
                         "requested",
                         lvl=warn)
        else:
            self.log("Unknown client configuration requested: ",
                     requested_client_uuid, event.__dict__,
                     lvl=warn)
    if not client_config:
        self.log("Creating new default client configuration")
        # Either no configuration was found or not requested
        # -> Create a new client configuration
        uuid = event.clientuuid if event.clientuuid is not None else str(uuid4())
        client_config = objectmodels['client']({'uuid': uuid})
        client_config.name = std_human_uid(kind='place')
        client_config.description = "New client configuration from " + user_account.name
        client_config.owner = user_account.uuid
        # TODO: Get client configuration storage done right, this one is too simple
        client_config.save()
    user_profile = self._get_profile(user_account)
    self._login(event, user_account, user_profile, client_config)
    self.log("Done with Login request", lvl=debug)
|
Retrieves a user's profile
def _get_profile(self, user_account):
    """Retrieves a user's profile, creating a default one when none exists."""
    user_profile = None
    try:
        # TODO: Load active profile, not just any
        user_profile = objectmodels['profile'].find_one(
            {'owner': str(user_account.uuid)})
        self.log("Profile: ", user_profile,
                 user_account.uuid, lvl=debug)
    except Exception as e:
        self.log("No profile due to error: ", e, type(e),
                 lvl=error)
    if not user_profile:
        # No stored profile - create and persist a minimal default one
        defaults = {
            'uuid': std_uuid(),
            'owner': user_account.uuid,
            'userdata': {
                'notes': 'Default profile of ' + user_account.name
            }
        }
        user_profile = objectmodels['profile'](defaults)
        user_profile.save()
    return user_profile
|
Called when a sensor sends a new raw data to this serial connector.
The data is sanitized and sent to the registered protocol
listeners as time/raw/bus sentence tuple.
def _parse(self, bus, data):
"""
Called when a sensor sends a new raw data to this serial connector.
The data is sanitized and sent to the registered protocol
listeners as time/raw/bus sentence tuple.
"""
sen_time = time.time()
try:
# Split up multiple sentences
if isinstance(data, bytes):
data = data.decode('ascii')
dirtysentences = data.split("\n")
sentences = [(sen_time, x) for x in dirtysentences if x]
def unique(it):
s = set()
for el in it:
if el not in s:
s.add(el)
yield el
else:
# TODO: Make sure, this is not identical but new data
self.log("Duplicate sentence received: ", el,
lvl=debug)
sentences = list(unique(sentences))
return sentences
except Exception as e:
self.log("Error during data unpacking: ", e, type(e), lvl=error,
exc=True)
|
Handles incoming raw sensor data
:param data: raw incoming data
def serial_packet(self, event):
    """Handles incoming raw sensor data

    :param data: raw incoming data
    """
    self.log('Incoming serial packet:', event.__dict__, lvl=verbose)
    # While scanning, raw packets are ignored entirely
    if self.scanning:
        return
    # self.log("Incoming data: ", '%.50s ...' % event.data, lvl=debug)
    sanitized_data = self._parse(event.bus, event.data)
    self.log('Sanitized data:', sanitized_data, lvl=verbose)
    # _parse returns None when unpacking failed
    if sanitized_data is not None:
        self._broadcast(event.bus, sanitized_data)
|
Entry point for the script
def main():
    """Entry point for the script.

    Parses the command line, reads lat/lon pairs from the input file,
    converts them between the requested coordinate systems via apexpy and
    writes the result to the output file.
    """
    description = 'Converts between geodetic, modified apex, quasi-dipole and MLT'
    parser = argparse.ArgumentParser(description=description, prog='apexpy')
    systems = ['geo', 'apex', 'qd', 'mlt']
    parser.add_argument('source', metavar='SOURCE', choices=systems,
                        help='Convert from {geo, apex, qd, mlt}')
    parser.add_argument('dest', metavar='DEST', choices=systems,
                        help='Convert to {geo, apex, qd, mlt}')
    date_help = ('YYYY[MM[DD[HHMMSS]]] date/time for IGRF coefficients, '
                 'time part required for MLT calculations')
    parser.add_argument('date', metavar='DATE', help=date_help)
    parser.add_argument('--height', dest='height', default=0, metavar='HEIGHT',
                        type=float, help='height for conversion')
    parser.add_argument('--refh', dest='refh', metavar='REFH', type=float,
                        default=0,
                        help='reference height for modified apex coordinates')
    parser.add_argument('-i', '--input', dest='file_in', metavar='FILE_IN',
                        type=argparse.FileType('r'), default=STDIN,
                        help='input file (stdin if none specified)')
    parser.add_argument('-o', '--output', dest='file_out', metavar='FILE_OUT',
                        type=argparse.FileType('wb'), default=STDOUT,
                        help='output file (stdout if none specified)')
    args = parser.parse_args()

    coords = np.loadtxt(args.file_in, ndmin=2)
    date_len = len(args.date)
    # MLT conversions need an exact timestamp, not just a date
    if 'mlt' in (args.source, args.dest) and date_len < 14:
        raise ValueError('full date/time YYYYMMDDHHMMSS required for MLT calculations')
    # Partial times (more than a date, less than a full timestamp) are invalid
    if 9 <= date_len <= 13:
        raise ValueError('full date/time must be given as YYYYMMDDHHMMSS, not ' +
                         'YYYYMMDDHHMMSS'[:date_len])
    moment = dt.datetime.strptime(args.date, '%Y%m%d%H%M%S'[:date_len - 2])
    apex_obj = apexpy.Apex(date=moment, refh=args.refh)
    lats, lons = apex_obj.convert(coords[:, 0], coords[:, 1], args.source,
                                  args.dest, args.height, datetime=moment)
    np.savetxt(args.file_out, np.column_stack((lats, lons)), fmt='%.8f')
|
Executes an external process via subprocess.Popen
def run_process(cwd, args):
    """Executes an external process via subprocess.Popen.

    Returns the command's captured output; when the command fails, the
    failure is logged and the failed command's output is returned instead.
    """
    try:
        return check_output(args, cwd=cwd, stderr=STDOUT)
    except CalledProcessError as e:
        log('Uh oh, the teapot broke again! Error:', e, type(e), lvl=verbose, pretty=True)
        log(e.cmd, e.returncode, e.output, lvl=verbose)
        return e.output
|
Securely and interactively ask for a password
def _ask_password():
    """Securely and interactively ask for a password.

    Prompts twice and repeats until both entries match; the matching
    password is returned.
    """
    # Start with two different values so the loop runs at least once
    password = "Foo"
    confirmation = ""
    while password != confirmation:
        password = getpass.getpass()
        confirmation = getpass.getpass(prompt="Repeat:")
        if password != confirmation:
            print("\nPasswords do not match!")
    return password
|
Obtain user credentials by arguments or asking the user
def _get_credentials(username=None, password=None, dbhost=None):
    """Obtain user credentials by arguments or asking the user.

    Looks up the database salt from the active system configuration,
    interactively asks for any credential that was not supplied and
    returns a ``(username, sha512 hexdigest of salted password)`` tuple.

    Exits the process with code 3 when no salted system configuration
    exists.
    """
    # Database salt
    system_config = dbhost.objectmodels['systemconfig'].find_one({
        'active': True
    })
    try:
        salt = system_config.salt.encode('ascii')
    except (KeyError, AttributeError):
        log('No systemconfig or it is without a salt! '
            'Reinstall the system provisioning with'
            'hfos_manage.py install provisions -p system')
        sys.exit(3)
    # Cleanup: the original carried no-op `else: x = x` branches here
    if username is None:
        username = _ask("Please enter username: ")
    if password is None:
        password = _ask_password()
    try:
        # Hashing needs bytes; str passwords are encoded first
        password = password.encode('utf-8')
    except UnicodeDecodeError:
        # Already byte-encoded input is used as-is
        pass
    passhash = hashlib.sha512(password)
    passhash.update(salt)
    return username, passhash.hexdigest()
|
Interactively ask the user for data
def _ask(question, default=None, data_type='str', show_hint=False):
    """Interactively ask the user for data.

    :param question: prompt text (without a trailing question mark)
    :param default: value returned when the user enters nothing
    :param data_type: one of 'bool', 'str', 'unicode' or 'int'
    :param show_hint: include default and type information in the prompt
    :return: the entered (and converted) value, or the default
    """
    data = default
    if data_type == 'bool':
        data = None
        default_string = "Y" if default else "N"
        while data not in ('Y', 'J', 'N', '1', '0'):
            data = input("%s? [%s]: " % (question, default_string)).upper()
            if data == '':
                return default
        return data in ('Y', 'J', '1')
    elif data_type in ('str', 'unicode'):
        if show_hint:
            msg = "%s? [%s] (%s): " % (question, default, data_type)
        else:
            msg = question
        data = input(msg)
        if len(data) == 0:
            data = default
    elif data_type == 'int':
        if show_hint:
            msg = "%s? [%s] (%s): " % (question, default, data_type)
        else:
            msg = question
        data = input(msg)
        if len(data) == 0:
            # Bug fix: int(None) raised a TypeError when no default was
            # supplied and the user just pressed enter
            data = default if default is None else int(default)
        else:
            data = int(data)
    return data
|
Makes sure the latitude is inside [-90, 90], clipping close values
(tolerance 1e-4).
Parameters
==========
lat : array_like
latitude
name : str, optional
parameter name to use in the exception message
Returns
=======
lat : ndarray or float
Same as input where values just outside the range have been
clipped to [-90, 90]
Raises
======
ValueError
if any values are too far outside the range [-90, 90]
def checklat(lat, name='lat'):
    """Makes sure the latitude is inside [-90, 90], clipping close values
    (tolerance 1e-4).

    Parameters
    ==========
    lat : array_like
        latitude
    name : str, optional
        parameter name to use in the exception message

    Returns
    =======
    lat : ndarray or float
        Same as input where values just outside the range have been
        clipped to [-90, 90]

    Raises
    ======
    ValueError
        if any values are too far outside the range [-90, 90]
    """
    # Fast path: everything already within range - return input untouched
    if np.all(np.float64(lat) >= -90) and np.all(np.float64(lat) <= 90):
        return lat
    if np.isscalar(lat):
        # Clip scalar values that are within tolerance of a boundary
        if lat > 90 and np.isclose(lat, 90, rtol=0, atol=1e-4):
            return 90
        if lat < -90 and np.isclose(lat, -90, rtol=0, atol=1e-4):
            return -90
    else:
        lat = np.float64(lat)  # make sure we have an array, not list
        lat[(lat > 90) & (np.isclose(lat, 90, rtol=0, atol=1e-4))] = 90
        lat[(lat < -90) & (np.isclose(lat, -90, rtol=0, atol=1e-4))] = -90
        if np.all(lat >= -90) and np.all(lat <= 90):
            return lat
    # we haven't returned yet, so raise exception
    raise ValueError(name + ' must be in [-90, 90]')
|
Computes sinIm from modified apex latitude.
Parameters
==========
alat : array_like
Modified apex latitude
Returns
=======
sinIm : ndarray or float
def getsinIm(alat):
    """Computes sinIm from modified apex latitude.

    Parameters
    ==========
    alat : array_like
        Modified apex latitude

    Returns
    =======
    sinIm : ndarray or float
    """
    alat_rad = np.radians(np.float64(alat))
    return 2 * np.sin(alat_rad) / np.sqrt(4 - 3 * np.cos(alat_rad) ** 2)
|
Computes cosIm from modified apex latitude.
Parameters
==========
alat : array_like
Modified apex latitude
Returns
=======
cosIm : ndarray or float
def getcosIm(alat):
    """Computes cosIm from modified apex latitude.

    Parameters
    ==========
    alat : array_like
        Modified apex latitude

    Returns
    =======
    cosIm : ndarray or float
    """
    alat_rad = np.radians(np.float64(alat))
    return np.cos(alat_rad) / np.sqrt(4 - 3 * np.cos(alat_rad) ** 2)
|
Converts :class:`datetime.date` or :class:`datetime.datetime` to decimal
year.
Parameters
==========
date : :class:`datetime.date` or :class:`datetime.datetime`
Returns
=======
year : float
Decimal year
Notes
=====
The algorithm is taken from http://stackoverflow.com/a/6451892/2978652
def toYearFraction(date):
    """Converts :class:`datetime.date` or :class:`datetime.datetime` to decimal
    year.

    Parameters
    ==========
    date : :class:`datetime.date` or :class:`datetime.datetime`

    Returns
    =======
    year : float
        Decimal year

    Notes
    =====
    The algorithm is taken from http://stackoverflow.com/a/6451892/2978652
    """
    def epoch_seconds(d):
        """returns seconds since epoch"""
        return time.mktime(d.timetuple())

    year_start = dt.datetime(year=date.year, month=1, day=1)
    next_year_start = dt.datetime(year=date.year + 1, month=1, day=1)
    elapsed = epoch_seconds(date) - epoch_seconds(year_start)
    duration = epoch_seconds(next_year_start) - epoch_seconds(year_start)
    return date.year + elapsed / duration
|
Converts geocentric latitude to geodetic latitude using WGS84.
Parameters
==========
gclat : array_like
Geocentric latitude
Returns
=======
gdlat : ndarray or float
Geodetic latitude
def gc2gdlat(gclat):
    """Converts geocentric latitude to geodetic latitude using WGS84.

    Parameters
    ==========
    gclat : array_like
        Geocentric latitude

    Returns
    =======
    gdlat : ndarray or float
        Geodetic latitude
    """
    # WGS84 first eccentricity squared
    WGS84_e2 = 0.006694379990141317
    # tan(gdlat) = tan(gclat) / (1 - e^2)
    return np.rad2deg(-np.arctan(np.tan(np.deg2rad(gclat)) / (WGS84_e2 - 1)))
|
Finds subsolar geocentric latitude and longitude.
Parameters
==========
datetime : :class:`datetime.datetime`
Returns
=======
sbsllat : float
Latitude of subsolar point
sbsllon : float
Longitude of subsolar point
Notes
=====
Based on formulas in Astronomical Almanac for the year 1996, p. C24.
(U.S. Government Printing Office, 1994). Usable for years 1601-2100,
inclusive. According to the Almanac, results are good to at least 0.01
degree latitude and 0.025 degrees longitude between years 1950 and 2050.
Accuracy for other years has not been tested. Every day is assumed to have
exactly 86400 seconds; thus leap seconds that sometimes occur on December
31 are ignored (their effect is below the accuracy threshold of the
algorithm).
After Fortran code by A. D. Richmond, NCAR. Translated from IDL
by K. Laundal.
def subsol(datetime):
    """Finds subsolar geocentric latitude and longitude.

    Parameters
    ==========
    datetime : :class:`datetime.datetime`

    Returns
    =======
    sbsllat : float
        Latitude of subsolar point
    sbsllon : float
        Longitude of subsolar point

    Notes
    =====
    Based on formulas in Astronomical Almanac for the year 1996, p. C24.
    (U.S. Government Printing Office, 1994). Usable for years 1601-2100,
    inclusive. According to the Almanac, results are good to at least 0.01
    degree latitude and 0.025 degrees longitude between years 1950 and 2050.
    Accuracy for other years has not been tested. Every day is assumed to have
    exactly 86400 seconds; thus leap seconds that sometimes occur on December
    31 are ignored (their effect is below the accuracy threshold of the
    algorithm).

    After Fortran code by A. D. Richmond, NCAR. Translated from IDL
    by K. Laundal.
    """
    # convert to year, day of year and seconds since midnight
    year = datetime.year
    doy = datetime.timetuple().tm_yday
    ut = datetime.hour * 3600 + datetime.minute * 60 + datetime.second

    if not 1601 <= year <= 2100:
        raise ValueError('Year must be in [1601, 2100]')

    yr = year - 2000
    # Leap days since the 1601 reference, relative to year 2000
    nleap = int(np.floor((year - 1601.0) / 4.0)) - 99
    if year <= 1900:
        # Julian-to-Gregorian century correction
        ncent = 3 - int(np.floor((year - 1601.0) / 100.0))
        nleap = nleap + ncent

    l0 = -79.549 + (-0.238699 * (yr - 4.0 * nleap) + 3.08514e-2 * nleap)
    g0 = -2.472 + (-0.2558905 * (yr - 4.0 * nleap) - 3.79617e-2 * nleap)
    # Days (including fraction) since 12 UT on January 1 of IYR:
    df = (ut / 86400.0 - 1.5) + doy
    # Mean longitude of Sun:
    lmean = l0 + 0.9856474 * df
    # Mean anomaly in radians:
    grad = np.radians(g0 + 0.9856003 * df)
    # Ecliptic longitude:
    lmrad = np.radians(lmean + 1.915 * np.sin(grad)
                       + 0.020 * np.sin(2.0 * grad))
    sinlm = np.sin(lmrad)
    # Obliquity of ecliptic in radians:
    epsrad = np.radians(23.439 - 4e-7 * (df + 365 * yr + nleap))
    # Right ascension:
    alpha = np.degrees(np.arctan2(np.cos(epsrad) * sinlm, np.cos(lmrad)))
    # Declination, which is also the subsolar latitude:
    sslat = np.degrees(np.arcsin(np.sin(epsrad) * sinlm))
    # Equation of time (degrees), wrapped to (-180, 180]:
    etdeg = lmean - alpha
    etdeg = etdeg - 360.0 * round(etdeg / 360.0)
    # Subsolar longitude; Earth rotates one degree every 240 s.
    sslon = 180.0 - (ut / 240.0 + etdeg)
    sslon = sslon - 360.0 * round(sslon / 360.0)
    return sslat, sslon
|
Make the authentication headers needed to use the Appveyor API.
def make_auth_headers():
    """Make the authentication headers needed to use the Appveyor API."""
    token_path = ".appveyor.token"
    if not os.path.exists(token_path):
        raise RuntimeError(
            "Please create a file named `.appveyor.token` in the current directory. "
            "You can get the token from https://ci.appveyor.com/api-token"
        )
    with open(token_path) as f:
        token = f.read().strip()
    return {'Authorization': 'Bearer {}'.format(token)}
|
Get the details of the latest Appveyor build.
def get_project_build(account_project):
    """Get the details of the latest Appveyor build."""
    url = make_url("/projects/{account_project}", account_project=account_project)
    # Returns the decoded JSON body of the API response
    return requests.get(url, headers=make_auth_headers()).json()
|
Download a file from `url` to `filename`.
def download_url(url, filename, headers):
    """Download a file from `url` to `filename`.

    Nothing is written when the response status is not 200.
    """
    ensure_dirs(filename)
    response = requests.get(url, headers=headers, stream=True)
    if response.status_code != 200:
        return
    with open(filename, 'wb') as f:
        # Stream in 16 KiB chunks to keep memory flat for large artifacts
        for chunk in response.iter_content(16 * 1024):
            f.write(chunk)
|
Display a list of registered schemata
def cli_schemata_list(self, *args):
    """Display a list of registered schemata.

    With '-c' or '-config', configuration schemata are listed as well.
    """
    languages = ",".join(sorted(l10n_schemastore.keys()))
    self.log('Registered schemata languages:', languages)
    self.log('Registered Schemata:', ",".join(sorted(schemastore.keys())))
    if '-c' in args or '-config' in args:
        self.log('Registered Configuration Schemata:',
                 ",".join(sorted(configschemastore.keys())), pretty=True)
|
Display a schemata's form definition
def cli_form(self, *args):
    """Display a schemata's form definition.

    '*' as first argument dumps the forms of all registered schemata.
    """
    if args[0] != '*':
        self.log(schemastore[args[0]]['form'], pretty=True)
        return
    for schema in schemastore:
        self.log(schema, ':', schemastore[schema]['form'], pretty=True)
|
Display a single schema definition
def cli_schema(self, *args):
    """Display a single schema definition.

    Arguments: schema name or '*' for all; an optional second argument
    'props' or 'perms' restricts the output; '-c'/'-config' switches to
    the configuration schema store.
    """
    key = None
    if len(args) > 1:
        key = args[1]
    args = list(args)
    if '-config' in args or '-c' in args:
        store = configschemastore
        # Bug fix: the original removed both flags inside one try block, so
        # when '-c' was absent the ValueError skipped the '-config' removal
        # and the flag leaked into the positional arguments.
        for flag in ('-c', '-config'):
            if flag in args:
                args.remove(flag)
    else:
        store = schemastore

    def output(schema):
        """Log one schema, optionally restricted to props or perms."""
        self.log("%s :" % schema)
        if key == 'props':
            self.log(store[schema]['schema']['properties'], pretty=True)
        elif key == 'perms':
            try:
                self.log(store[schema]['schema']['roles_create'], pretty=True)
            except KeyError:
                self.log('Schema', schema, 'has no role for creation', lvl=warn)
            try:
                self.log(store[schema]['schema']['properties']['perms']['properties'], pretty=True)
            except KeyError:
                self.log('Schema', schema, 'has no permissions', lvl=warn)
        else:
            self.log(store[schema]['schema'], pretty=True)

    if '*' in args:
        for schema in store:
            output(schema)
    else:
        output(args[0])
|
List all available form definitions
def cli_forms(self, *args):
    """List all available form definitions."""
    have_form = []
    lack_form = []
    for name, item in schemastore.items():
        if 'form' in item and len(item['form']) > 0:
            have_form.append(name)
        else:
            lack_form.append(name)
    self.log('Schemata with form:', have_form)
    self.log('Missing forms:', lack_form)
|
Show default permissions for all schemata
def cli_default_perms(self, *args):
    """Show default permissions for all schemata."""
    for name, item in schemastore.items():
        # self.log(item, pretty=True)
        # Schemata explicitly flagged permission-free are just reported
        if item['schema'].get('no_perms', False):
            self.log('Schema without permissions:', name)
            continue
        try:
            perms = item['schema']['properties']['perms']['properties']
            if perms == {}:
                self.log('Schema:', item, pretty=True)
            self.log(
                'Schema:', name,
                'read', perms['read']['default'],
                'write', perms['write']['default'],
                'list', perms['list']['default'],
                'create', item['schema']['roles_create']
            )
        except KeyError as e:
            self.log('Fishy schema found:', name, e, lvl=error)
            self.log(item, pretty=True)
|
Sets up the application after startup.
def ready(self):
    """Sets up the application after startup."""
    # Currently only reports how many data/component schemata are registered
    self.log('Got', len(schemastore), 'data and',
             len(configschemastore), 'component schemata.', lvl=debug)
|
Return all known schemata to the requesting client
def all(self, event):
    """Return all known schemata to the requesting client.

    The schemata are served in the client's configured language.
    """
    self.log("Schemarequest for all schemata from",
             event.user, lvl=debug)
    payload = {
        'component': 'hfos.events.schemamanager',
        'action': 'all',
        'data': l10n_schemastore[event.client.language]
    }
    self.fireEvent(send(event.client.uuid, payload))
|
Return a single schema
def get(self, event):
    """Return a single schema in the client's language."""
    self.log("Schemarequest for", event.data, "from",
             event.user, lvl=debug)
    if event.data not in schemastore:
        self.log("Unavailable schema requested!", lvl=warn)
        return
    response = {
        'component': 'hfos.events.schemamanager',
        'action': 'get',
        'data': l10n_schemastore[event.client.language][event.data]
    }
    self.fireEvent(send(event.client.uuid, response))
|
Return all configurable components' schemata
def configuration(self, event):
    """Return all configurable components' schemata"""
    try:
        self.log("Schemarequest for all configuration schemata from",
                 event.user.account.name, lvl=debug)
        response = {
            'component': 'hfos.events.schemamanager',
            'action': 'configuration',
            'data': configschemastore
        }
        self.fireEvent(send(event.client.uuid, response))
    except Exception as e:
        # NOTE(review): broad catch-and-log hides failures from the
        # requesting client - consider sending an error response instead
        self.log("ERROR:", e)
|
Generates a regular expression controlled UUID field
def uuid_object(title="Reference", description="Select an object", default=None, display=True):
    """Generates a regular expression controlled UUID field."""
    field = {
        'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-'
                   '[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$',
        'type': 'string',
        'title': title,
        'description': description,
    }
    if not display:
        # Hide the field in generated forms
        field['x-schema-form'] = {'condition': "false"}
    if default is not None:
        field['default'] = default
    return field
|
Generates a basic object with RBAC properties
def base_object(name,
                no_perms=False,
                has_owner=True,
                hide_owner=True,
                has_uuid=True,
                roles_write=None,
                roles_read=None,
                roles_list=None,
                roles_create=None,
                all_roles=None):
    """Generates a basic object with RBAC properties."""
    schema = {
        'id': '#' + name,
        'type': 'object',
        'name': name,
        'properties': {}
    }

    if not no_perms:
        if all_roles:
            # A shared role gets full access alongside admin
            roles_create = ['admin', all_roles]
            roles_write = ['admin', all_roles]
            roles_read = ['admin', all_roles]
            roles_list = ['admin', all_roles]
        else:
            roles_write = ['admin'] if roles_write is None else roles_write
            roles_read = ['admin'] if roles_read is None else roles_read
            roles_list = ['admin'] if roles_list is None else roles_list
            roles_create = ['admin'] if roles_create is None else roles_create
            # Normalize single role names to lists
            if isinstance(roles_create, str):
                roles_create = [roles_create]
            if isinstance(roles_write, str):
                roles_write = [roles_write]
            if isinstance(roles_read, str):
                roles_read = [roles_read]
            if isinstance(roles_list, str):
                roles_list = [roles_list]
        if has_owner:
            roles_write.append('owner')
            roles_read.append('owner')
            roles_list.append('owner')
        schema['roles_create'] = roles_create
        schema['properties'].update({
            'perms': {
                'id': '#perms',
                'type': 'object',
                'name': 'perms',
                'properties': {
                    'write': {
                        'type': 'array',
                        'default': roles_write,
                        'items': {
                            'type': 'string',
                        }
                    },
                    'read': {
                        'type': 'array',
                        'default': roles_read,
                        'items': {
                            'type': 'string',
                        }
                    },
                    'list': {
                        'type': 'array',
                        'default': roles_list,
                        'items': {
                            'type': 'string',
                        }
                    }
                },
                'default': {},
                'x-schema-form': {
                    'condition': "false"
                }
            },
            'name': {
                'type': 'string',
                'description': 'Name of ' + name
            }
        })
    if has_owner:
        # TODO: Schema should allow specification of non-local owners as
        # well as special accounts like admin or even system perhaps
        schema['properties'].update({
            'owner': uuid_object(title='Unique Owner ID', display=hide_owner)
        })
    else:
        # TODO: Using this causes all sorts of (obvious) problems with the
        # object manager
        schema['no_perms'] = True
    if has_uuid:
        schema['properties'].update({
            'uuid': uuid_object(title='Unique ' + name + ' ID', display=False)
        })
        schema['required'] = ["uuid"]
    return schema
|
Lists serial port names
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of the serial ports available on the system
Courtesy: Thomas ( http://stackoverflow.com/questions/12090503
/listing-available-com-ports-with-python )
def serial_ports():
    """ Lists serial port names

    :raises EnvironmentError:
        On unsupported or unknown platforms
    :returns:
        A list of the serial ports available on the system

    Courtesy: Thomas ( http://stackoverflow.com/questions/12090503
    /listing-available-com-ports-with-python )
    """
    platform = sys.platform
    if platform.startswith('win'):
        candidates = ['COM%s' % (i + 1) for i in range(256)]
    elif platform.startswith(('linux', 'cygwin')):
        # this excludes your current terminal "/dev/tty"
        candidates = glob.glob('/dev/tty[A-Za-z]*')
    elif platform.startswith('darwin'):
        candidates = glob.glob('/dev/tty.*')
    else:
        raise EnvironmentError('Unsupported platform')

    available = []
    for port in candidates:
        try:
            # Probe by opening; ports in use or inaccessible are skipped
            serial.Serial(port).close()
            available.append(port)
        except (OSError, serial.SerialException) as e:
            hfoslog('Could not open serial port:', port, e, type(e),
                    exc=True, lvl=warn)
    return available
|
Generates a new reference frame from incoming sensordata
:param event: new sensordata to be merged into referenceframe
def sensordata(self, event):
    """
    Generates a new reference frame from incoming sensordata

    :param event: new sensordata to be merged into referenceframe
    """
    # Without registered data types there is nothing to merge into
    if len(self.datatypes) == 0:
        return
    data = event.data
    timestamp = event.timestamp
    # bus = event.bus
    # TODO: What about multiple busses? That is prepared, but how exactly
    # should they be handled?
    self.log("New incoming navdata:", data, lvl=verbose)
    for name, value in data.items():
        if name in self.datatypes:
            ref = self.datatypes[name]
            self.sensed[name] = ref
            # Only act on values that actually changed since the last update
            if ref.lastvalue != str(value):
                # self.log("Reference outdated:", ref._fields)
                item = {
                    'value': value,
                    'timestamp': timestamp,
                    'type': name
                }
                self.referenceframe[name] = value
                self.referenceages[name] = timestamp
                # self.log("Subscriptions:", self.subscriptions, ref.name)
                # Push the update to every subscribed client
                if ref.name in self.subscriptions:
                    packet = {
                        'component': 'hfos.navdata.sensors',
                        'action': 'update',
                        'data': item
                    }
                    self.log("Serving update: ", packet, lvl=verbose)
                    for uuid in self.subscriptions[ref.name]:
                        self.log("Serving to ", uuid, lvl=events)
                        self.fireEvent(send(uuid, packet),
                                       'hfosweb')
                # self.log("New item: ", item)
                sensordata = objectmodels['sensordata'](item)
                # self.log("Value entry:", sensordata._fields)
                # Persist the value only for references flagged for recording
                if ref.record:
                    self.log("Recording updated reference:",
                             sensordata._fields)
                    sensordata.save()
                ref.lastvalue = str(value)
                ref.timestamp = timestamp
        else:
            self.log("Unknown sensor data received!", data, lvl=warn)
|
Pushes the current :referenceframe: out to clients.
:return:
def navdatapush(self):
    """
    Pushes the current :referenceframe: out to clients.

    :return:
    """
    try:
        frame = {'data': self.referenceframe, 'ages': self.referenceages}
        self.fireEvent(referenceframe(frame), "navdata")
        self.intervalcount += 1
        # Every passiveinterval ticks, broadcast the frame to all users too
        if self.intervalcount == self.passiveinterval and len(self.referenceframe) > 0:
            self.fireEvent(broadcast('users', {
                'component': 'hfos.navdata.sensors',
                'action': 'update',
                'data': {
                    'data': self.referenceframe,
                    'ages': self.referenceages
                }
            }), "hfosweb")
            self.intervalcount = 0
        # self.log("Reference frame successfully pushed.",
        # lvl=verbose)
    except Exception as e:
        self.log("Could not push referenceframe: ", e, type(e),
                 lvl=critical)
|
Export stored objects
Warning! This functionality is work in progress and you may destroy live data by using it!
Be very careful when using the export/import functionality!
def db_export(schema, uuid, object_filter, export_format, filename, pretty, all_schemata, omit):
    """Export stored objects to a file.

    Thin CLI wrapper delegating all work to ``internal_backup``.

    Warning! This functionality is work in progress and you may destroy
    live data by using it! Be very careful when using the export/import
    functionality!
    """
    internal_backup(
        schema, uuid, object_filter, export_format,
        filename, pretty, all_schemata, omit
    )
|
Import objects from file
Warning! This functionality is work in progress and you may destroy live data by using it!
Be very careful when using the export/import functionality!
def db_import(ctx, schema, uuid, object_filter, import_format, filename, all_schemata, dry):
    """Import objects from file.

    Reads a JSON export, instantiates objects per schema (optionally only
    the one matching *uuid*) and saves them unless *dry* is set.

    Warning! This functionality is work in progress and you may destroy
    live data by using it! Be very careful when using the export/import
    functionality!

    :param ctx: click context carrying dbhost/dbname
    :param schema: single schema to import, or None with --all-schemata
    :param uuid: restrict the import to the object with this uuid
    :param object_filter: currently unused, kept for CLI compatibility
    :param import_format: format name (upper-cased, currently informational)
    :param filename: path of the JSON export file
    :param all_schemata: import every schema found in the file
    :param dry: only report what would be imported
    """
    import_format = import_format.upper()

    with open(filename, 'r') as f:
        json_data = f.read()

    data = json.loads(json_data)  # , parse_float=True, parse_int=True)

    if schema is None:
        if all_schemata is False:
            log('No schema given. Read the help', lvl=warn)
            return
        else:
            schemata = data.keys()
    else:
        schemata = [schema]

    from hfos import database
    database.initialize(ctx.obj['dbhost'], ctx.obj['dbname'])

    all_items = {}
    total = 0

    for schema_item in schemata:
        model = database.objectmodels[schema_item]

        objects = data[schema_item]
        # Fix: initialize unconditionally -- previously 'items' was unbound
        # (UnboundLocalError) when a uuid was given but matched nothing.
        items = []
        if uuid:
            for item in objects:
                if item['uuid'] == uuid:
                    items = [model(item)]
        else:
            for item in objects:
                thing = model(item)
                items.append(thing)

        schema_total = len(items)
        total += schema_total

        if dry:
            log('Would import', schema_total, 'items of', schema_item)
        all_items[schema_item] = items

    if dry:
        log('Would import', total, 'objects.')
    else:
        log('Importing', total, 'objects.')
        for schema_name, item_list in all_items.items():
            log('Importing', len(item_list), 'objects of type', schema_name)
            for item in item_list:
                # Restore the native ObjectId before saving
                item._fields['_id'] = bson.objectid.ObjectId(item._fields['_id'])
                item.save()
|
Checks if the newly created object is a wikipage..
If so, rerenders the automatic index.
:param event: objectchange or objectcreation event
def _page_update(self, event):
    """Rerender the automatic wiki index when a wikipage was created or
    changed.

    :param event: objectchange or objectcreation event
    """
    try:
        if event.schema == 'wikipage':
            self._update_index()
    except Exception as err:
        self.log("Page creation notification error: ", event, err,
                 type(err), lvl=error)
|
Attempt to drop privileges and change user to 'hfos' user/group
def drop_privileges(uid_name='hfos', gid_name='hfos'):
    """Attempt to drop root privileges and switch to the given user/group.

    Does nothing (besides a warning) when not running as root.

    :param uid_name: name of the target user
    :param gid_name: name of the target group
    """
    if os.getuid() != 0:
        hfoslog("Not root, cannot drop privileges", lvl=warn, emitter='CORE')
        return

    try:
        # Resolve the numeric ids from the names
        target_uid = pwd.getpwnam(uid_name).pw_uid
        target_gid = grp.getgrnam(gid_name).gr_gid

        # Supplementary groups first, then gid, then uid -- the uid must
        # come last or we lose the right to change the gid afterwards.
        os.setgroups([])
        os.setgid(target_gid)
        os.setuid(target_uid)

        hfoslog('Privileges dropped', emitter='CORE')
    except Exception as err:
        hfoslog('Could not drop privileges:', err, type(err), exc=True,
                lvl=error, emitter='CORE')
|
Preliminary HFOS application Launcher
def construct_graph(args):
    """Preliminary HFOS application Launcher.

    Builds the circuits component graph around a Core instance, optionally
    attaching the circuits Debugger, rendering the graph to an image and
    opening the frontend in a web browser.

    :param args: dict of launcher options (debug, drawgraph, opengui,
                 host, port, ...)
    :return: the assembled (but not yet running) Core application
    """
    app = Core(args)
    setup_root(app)
    if args['debug']:
        from circuits import Debugger
        hfoslog("Starting circuits debugger", lvl=warn, emitter='GRAPH')
        dbg = Debugger().register(app)
        # TODO: Make these configurable from modules, navdata is _very_ noisy
        # but should not be listed _here_
        dbg.IgnoreEvents.extend([
            "read", "_read", "write", "_write",
            "stream_success", "stream_complete",
            "serial_packet", "raw_data", "stream",
            "navdatapush", "referenceframe",
            "updateposition", "updatesubscriptions",
            "generatevesseldata", "generatenavdata", "sensordata",
            "reset_flood_offenders", "reset_flood_counters",  # Flood counters
            "task_success", "task_done",  # Thread completion
            "keepalive"  # IRC Gateway
        ])
    hfoslog("Beginning graph assembly.", emitter='GRAPH')
    if args['drawgraph']:
        from circuits.tools import graph
        graph(app)
    if args['opengui']:
        import webbrowser
        # TODO: Fix up that url:
        webbrowser.open("http://%s:%i/" % (args['host'], args['port']))
    hfoslog("Graph assembly done.", emitter='GRAPH')
    return app
|
Bootstrap basics, assemble graph and hand over control to the Core
component
def launch(run=True, **args):
    """Bootstrap basics, assemble graph and hand over control to the Core
    component.

    :param run: start the event loop after assembly (also gated by --norun)
    :param args: launcher options (log levels, instance, db settings, ...)
    :return: the assembled server application
    """
    # Console verbosity: fully silenced (100) when --quiet is given
    verbosity['console'] = args['log'] if not args['quiet'] else 100
    verbosity['global'] = min(args['log'], args['logfileverbosity'])
    # File verbosity: silenced unless file logging is enabled
    verbosity['file'] = args['logfileverbosity'] if args['dolog'] else 100
    set_logfile(args['logfilepath'], args['instance'])
    if args['livelog'] is True:
        from hfos import logger
        logger.live = True
    hfoslog("Running with Python", sys.version.replace("\n", ""),
            sys.platform, lvl=debug, emitter='CORE')
    hfoslog("Interpreter executable:", sys.executable, emitter='CORE')
    if args['cert'] is not None:
        hfoslog("Warning! Using SSL without nginx is currently not broken!",
                lvl=critical, emitter='CORE')
    hfoslog("Initializing database access", emitter='CORE', lvl=debug)
    initialize(args['dbhost'], args['dbname'], args['instance'])
    server = construct_graph(args)
    if run and not args['norun']:
        server.run()
    return server
|
All components have initialized, set up the component
configuration schema-store, run the local server and drop privileges
def ready(self, source):
    """All components have initialized, set up the component
    configuration schema-store, run the local server and drop privileges.

    :param source: component that emitted the ready event
    """
    from hfos.database import configschemastore
    configschemastore[self.name] = self.configschema
    self._start_server()
    # Keep root privileges only when explicitly running insecure
    if not self.insecure:
        self._drop_privileges()
    # Register the built-in CLI commands on the debug console
    self.fireEvent(cli_register_event('components', cli_components))
    self.fireEvent(cli_register_event('drop_privileges', cli_drop_privileges))
    self.fireEvent(cli_register_event('reload_db', cli_reload_db))
    self.fireEvent(cli_register_event('reload', cli_reload))
    self.fireEvent(cli_register_event('quit', cli_quit))
    self.fireEvent(cli_register_event('info', cli_info))
|
Event hook to trigger a new frontend build
def trigger_frontend_build(self, event):
    """Event hook to trigger a new frontend build.

    :param event: carries ``force`` (rebuild) and ``install`` flags
    """
    from hfos.database import instance

    install_frontend(
        instance=instance,
        forcerebuild=event.force,
        install=event.install,
        development=self.development
    )
|
Experimental call to reload the component tree
def cli_reload(self, event):
    """Experimental call to reload the component tree."""
    self.log('Reloading all components.')
    self.update_components(forcereload=True)
    initialize()

    # Show the freshly assembled component graph afterwards
    from hfos.debugger import cli_compgraph
    self.fireEvent(cli_compgraph())
|
Provides information about the running instance
def cli_info(self, event):
    """Log information about the running instance."""
    details = [
        'Instance:', self.instance,
        'Dev:', self.development,
        'Host:', self.host,
        'Port:', self.port,
        'Insecure:', self.insecure,
        'Frontend:', self.frontendtarget
    ]
    self.log(*details)
|
Run the node local server
def _start_server(self, *args):
    """Run the node local server.

    Starts a circuits Server on the configured host/port, with SSL when a
    certificate is configured.
    """
    self.log("Starting server", args)
    secure = self.certificate is not None
    if secure:
        self.log("Running SSL server with cert:", self.certificate)
    else:
        self.log("Running insecure server without SSL. Do not use without SSL proxy in production!", lvl=warn)
    try:
        self.server = Server(
            (self.host, self.port),
            secure=secure,
            certfile=self.certificate  # ,
            # inherit=True
        ).register(self)
    except PermissionError:
        # Ports below 1024 require elevated privileges on most systems
        self.log('Could not open (privileged?) port, check '
                 'permissions!', lvl=critical)
|
Check all known entry points for components. If necessary,
manage configuration updates
def update_components(self, forcereload=False, forcerebuild=False,
                      forcecopy=True, install=False):
    """Check all known entry points for components. If necessary,
    manage configuration updates.

    :param forcereload: restart all components after updating
    :param forcerebuild: currently unused in this method
    :param forcecopy: treat the component config as changed even without a diff
    :param install: currently unused in this method
    """
    # TODO: See if we can pull out major parts of the component handling.
    # They are also used in the manage tool to instantiate the
    # component frontend bits.
    self.log("Updating components")
    components = {}
    if True: # try:
        from pkg_resources import iter_entry_points
        # Scan all three entry point groups for installed components
        entry_point_tuple = (
            iter_entry_points(group='hfos.base', name=None),
            iter_entry_points(group='hfos.sails', name=None),
            iter_entry_points(group='hfos.components', name=None)
        )
        for iterator in entry_point_tuple:
            for entry_point in iterator:
                try:
                    name = entry_point.name
                    location = entry_point.dist.location
                    loaded = entry_point.load()
                    self.log("Entry point: ", entry_point,
                             name,
                             entry_point.resolve(), lvl=verbose)
                    self.log("Loaded: ", loaded, lvl=verbose)
                    comp = {
                        'package': entry_point.dist.project_name,
                        'location': location,
                        'version': str(entry_point.dist.parsed_version),
                        'description': loaded.__doc__
                    }
                    components[name] = comp
                    self.loadable_components[name] = loaded
                    self.log("Loaded component:", comp, lvl=verbose)
                except Exception as e:
                    # One broken package must not stop the whole scan
                    self.log("Could not inspect entrypoint: ", e,
                             type(e), entry_point, iterator, lvl=error,
                             exc=True)
    self.log("Checking component frontend bits in ", self.frontendroot,
             lvl=verbose)
    diff = set(components) ^ set(self.config.components)
    # NOTE(review): parses as "diff or (forcecopy and frontendenabled)" --
    # confirm whether "(diff or forcecopy) and frontendenabled" was intended.
    if diff or forcecopy and self.config.frontendenabled:
        self.log("Old component configuration differs:", diff, lvl=debug)
        self.log(self.config.components, components, lvl=verbose)
        self.config.components = components
    else:
        self.log("No component configuration change. Proceeding.")
    if forcereload:
        self.log("Restarting all components.", lvl=warn)
        self._instantiate_components(clear=True)
|
Check if it is enabled and start the frontend http & websocket
def _start_frontend(self, restart=False):
    """Check if it is enabled and start the frontend http & websocket.

    :param restart: force a (re)start even if already running
    """
    self.log(self.config, self.config.frontendenabled, lvl=verbose)
    # NOTE(review): parses as "(enabled and not running) or restart" --
    # confirm restart is meant to bypass the frontendenabled flag.
    if self.config.frontendenabled and not self.frontendrunning or restart:
        self.log("Restarting webfrontend services on",
                 self.frontendtarget)

        # Serve static frontend files and the websocket endpoint
        self.static = Static("/",
                             docroot=self.frontendtarget).register(
            self)
        self.websocket = WebSocketsDispatcher("/websocket").register(self)
        self.frontendrunning = True
|
Inspect all loadable components and run them
def _instantiate_components(self, clear=True):
    """Inspect all loadable components and run them.

    :param clear: when True, kill all currently running components first
    """
    if clear:
        from circuits.tools import kill
        from circuits import Component

        # Removed unused imports (objgraph, copy.deepcopy) that were only
        # referenced from dead, commented-out debugging code -- objgraph is
        # a third-party package and its import could fail at runtime.
        for comp in self.runningcomponents.values():
            self.log(comp, type(comp), isinstance(comp, Component), pretty=True)
            kill(comp)
        self.runningcomponents = {}

    self.log('Not running blacklisted components: ',
             self.component_blacklist,
             lvl=debug)

    running = set(self.loadable_components.keys()).difference(
        self.component_blacklist)
    self.log('Starting components: ', sorted(running))
    for name, componentdata in self.loadable_components.items():
        if name in self.component_blacklist:
            continue
        self.log("Running component: ", name, lvl=verbose)
        try:
            if name in self.runningcomponents:
                # Never start a second instance of the same component
                self.log("Component already running: ", name,
                         lvl=warn)
            else:
                runningcomponent = componentdata()
                runningcomponent.register(self)
                self.runningcomponents[name] = runningcomponent
        except Exception as e:
            # A faulty component must not prevent the others from starting
            self.log("Could not register component: ", name, e,
                     type(e), lvl=error, exc=True)
|
Sets up the application after startup.
def started(self, component):
    """Sets up the application after startup.

    :param component: the component that emitted the started event
    """
    self.log("Running.")
    self.log("Started event origin: ", component, lvl=verbose)
    populate_user_events()
    from hfos.events.system import AuthorizedEvents
    self.log(len(AuthorizedEvents), "authorized event sources:",
             list(AuthorizedEvents.keys()), lvl=debug)
    # Start all components, then the web frontend, then announce readiness
    self._instantiate_components()
    self._start_frontend()
    self.fire(ready(), "hfosweb")
|
[GROUP] User management operations
def user(ctx, username, password):
    """[GROUP] User management operations

    Stores the supplied credentials on the click context so that the
    subcommands of this group can access them.
    """
    ctx.obj.update(username=username, password=password)
|
Internal method to create a normal user
def _create_user(ctx):
    """Internal method to create a normal user.

    :param ctx: click context carrying 'username', 'password' and 'db'
    :return: an unsaved user object with uuid, name and passhash set
    :raises KeyError: if a user with that name already exists
    """
    username, passhash = _get_credentials(ctx.obj['username'],
                                          ctx.obj['password'],
                                          ctx.obj['db'])

    if ctx.obj['db'].objectmodels['user'].count({'name': username}) > 0:
        # Give the exception a message instead of raising a bare KeyError()
        raise KeyError('User already exists: %s' % username)

    new_user = ctx.obj['db'].objectmodels['user']({
        'uuid': str(uuid4()),
        'created': std_now()
    })
    new_user.name = username
    new_user.passhash = passhash

    return new_user
|
Creates a new local user
def create_user(ctx):
    """Create and persist a new local user account."""
    try:
        _create_user(ctx).save()
        log("Done")
    except KeyError:
        log('User already exists', lvl=warn)
|
Creates a new local user and assigns admin role
def create_admin(ctx):
    """Create a new local user and grant it the admin role."""
    try:
        admin_account = _create_user(ctx)
        admin_account.roles.append('admin')
        admin_account.save()
        log("Done")
    except KeyError:
        log('User already exists', lvl=warn)
|
Delete a local user
def delete_user(ctx, yes):
    """Delete a local user account.

    :param yes: skip the interactive confirmation when True
    """
    username = ctx.obj['username']
    if username is None:
        username = _ask("Please enter username:")

    del_user = ctx.obj['db'].objectmodels['user'].find_one({'name': username})

    if yes or _ask('Confirm deletion', default=False, data_type='bool'):
        try:
            del_user.delete()
            log("Done")
        except AttributeError:
            # find_one returned None: no account with that name
            log('User not found', lvl=warn)
    else:
        log("Cancelled")
|
Change password of an existing user
def change_password(ctx):
    """Set a new password hash for an existing local user."""
    username, passhash = _get_credentials(ctx.obj['username'],
                                          ctx.obj['password'],
                                          ctx.obj['db'])

    account = ctx.obj['db'].objectmodels['user'].find_one({'name': username})
    if account is None:
        log('No such user', lvl=warn)
        return

    account.passhash = passhash
    account.save()
    log("Done")
|
List all locally known users
def list_users(ctx, search, uuid, active):
    """List all locally known users.

    :param search: only show users whose name contains this substring
    :param uuid: additionally print each user's uuid
    :param active: additionally print each user's active flag
    """
    user_model = ctx.obj['db'].objectmodels['user']
    for account in user_model.find():
        if search and search not in account.name:
            continue
        # Keep name/uuid/active on one line when more columns follow
        print(account.name, end=' ' if (active or uuid) else '\n')
        if uuid:
            print(account.uuid, end=' ' if active else '\n')
        if active:
            print(account.active)
    log("Done")
|
Disable an existing user
def disable(ctx):
    """Disable an existing user.

    :param ctx: click context carrying 'username' and 'db'
    """
    if ctx.obj['username'] is None:
        log('Specify the username with "iso db user --username ..."')
        return

    change_user = ctx.obj['db'].objectmodels['user'].find_one({
        'name': ctx.obj['username']
    })
    # Fix: guard against an unknown username instead of raising
    # AttributeError on None (consistent with change_password)
    if change_user is None:
        log('No such user', lvl=warn)
        return

    change_user.active = False
    change_user.save()
    log('Done')
|
Enable an existing user
def enable(ctx):
    """Enable an existing user.

    :param ctx: click context carrying 'username' and 'db'
    """
    if ctx.obj['username'] is None:
        log('Specify the username with "iso db user --username ..."')
        return

    change_user = ctx.obj['db'].objectmodels['user'].find_one({
        'name': ctx.obj['username']
    })
    # Fix: guard against an unknown username instead of raising
    # AttributeError on None (consistent with change_password)
    if change_user is None:
        log('No such user', lvl=warn)
        return

    change_user.active = True
    change_user.save()
    log('Done')
|
Grant a role to an existing user
def add_role(ctx, role):
    """Grant a role to an existing user.

    :param ctx: click context carrying 'username' and 'db'
    :param role: name of the role to grant
    """
    if role is None:
        log('Specify the role with --role')
        return
    if ctx.obj['username'] is None:
        log('Specify the username with --username')
        return

    change_user = ctx.obj['db'].objectmodels['user'].find_one({
        'name': ctx.obj['username']
    })
    # Fix: guard against an unknown username instead of raising
    # AttributeError on None (consistent with change_password)
    if change_user is None:
        log('No such user', lvl=warn)
        return

    if role not in change_user.roles:
        change_user.roles.append(role)
        change_user.save()
        log('Done')
    else:
        log('User already has that role!', lvl=warn)
|
>>> class mock_framework:
... def assertIn(self, item, list, msg="Failed asserting item is in list"):
... if item not in list: raise Exception(msg)
... def assertTrue(self, value, msg="Failed asserting true"):
... if not value: raise Exception(msg)
... def assertFalse(self, value, msg): self.assertTrue(not value, msg)
>>> check_expression(mock_framework(),
... {'class': {'group' :{'Matches': " 0 | 1", 'Non-Matches': "2 | 0 2", 'Expression': "[0-1]"}}})
def check_expression(testing_framework, expression_dict):
    """
    Validate regex definitions against their own example data.

    Every pipe-separated entry in 'Matches' must be fully consumed by the
    expression; every entry in 'Non-Matches' must not be.

    >>> class mock_framework:
    ...     def assertIn(self, item, list, msg="Failed asserting item is in list"):
    ...         if item not in list: raise Exception(msg)
    ...     def assertTrue(self, value, msg="Failed asserting true"):
    ...         if not value: raise Exception(msg)
    ...     def assertFalse(self, value, msg): self.assertTrue(not value, msg)
    >>> check_expression(mock_framework(),
    ...     {'class': {'group' :{'Matches': " 0 | 1", 'Non-Matches': "2 | 0 2", 'Expression': "[0-1]"}}})
    """
    expression_sub = get_expression_sub()
    for expression_type_name, expression_type in expression_dict.items():
        for name, expression_object in expression_type.items():
            if 'Matches' in expression_object.keys():
                for test in expression_object['Matches'].split('|'):
                    # Substitute and check to make sure that the entire string matches
                    result = expression_sub(expression_object['Expression'], '', test.strip()) == ''
                    testing_framework.assertTrue(result, match_error_msg.format(expression_type_name, name, test))
            if 'Non-Matches' in expression_object.keys():
                for test in expression_object['Non-Matches'].split('|'):
                    # An empty substitution result would mean a full match
                    result = expression_sub(expression_object['Expression'], '', test.strip()) == ''
                    testing_framework.assertFalse(result, non_match_error_msg.format(expression_type_name, name, test))
|
Assemble a list of future alerts
def _get_future_tasks(self):
    """Assemble the map of future alerts.

    Rebuilds ``self.alerts`` as a mapping of alert_time -> task for every
    task whose alert time lies in the future.
    """
    self.alerts = {}
    cursor = objectmodels['task'].find({'alert_time': {'$gt': std_now()}})
    self.alerts = {item.alert_time: item for item in cursor}
    self.log('Found', len(self.alerts), 'future tasks')
|
Periodical check to issue due alerts
def check_alerts(self):
    """Periodical check to issue due alerts.

    Sends an alert mail for every task whose alert time has passed and
    removes it from the pending alert map.
    """
    alerted = []
    for alert_time, task in self.alerts.items():
        task_time = dateutil.parser.parse(alert_time)
        if task_time < get_time():
            self.log('Alerting about task now:', task)
            # NOTE(review): assumes task.owner resolves to a user --
            # find_one returning None would raise here; confirm upstream.
            address = objectmodels['user'].find_one({'uuid': task.owner}).mail
            subject = "Task alert: %s" % task.name
            text = """Task alert is due:\n%s""" % task.notes
            self.fireEvent(send_mail(address, subject, text))
            alerted.append(task.alert_time)
    # Delete after the loop to avoid mutating the dict while iterating
    for item in alerted:
        del self.alerts[item]
|
Handles incoming raw sensor data and broadcasts it to specified
udp servers and connected tcp clients
:param data: NMEA raw sentences incoming data
def read(self, data):
    """Handles incoming raw sensor data and broadcasts it to specified
    udp servers and connected tcp clients.

    :param data: NMEA raw sentences incoming data
    """
    self.log('Received NMEA data:', data, lvl=debug)
    # Forward to every connected TCP endpoint, if the TCP server is up
    if self._tcp_socket is not None and \
            len(self._connected_tcp_endpoints) > 0:
        self.log('Publishing data on tcp server', lvl=debug)
        for endpoint in self._connected_tcp_endpoints:
            self.fireEvent(
                write(
                    endpoint,
                    bytes(data, 'ascii')),
                self.channel + '_tcp'
            )
    # Forward to every configured UDP endpoint ("host:port" strings)
    if self._udp_socket is not None and \
            len(self.config.udp_endpoints) > 0:
        self.log('Publishing data to udp endpoints', lvl=debug)
        for endpoint in self.config.udp_endpoints:
            host, port = endpoint.split(":")
            self.log('Transmitting to', endpoint, lvl=verbose)
            self.fireEvent(
                write(
                    (host, int(port)),
                    bytes(data, 'ascii')
                ),
                self.channel +
                '_udp'
            )
|
Generates a command map
def cmdmap(xdot):
    """Generates a command map.

    Walks the click command tree and either prints an indented text map or
    writes a graphviz dot file and displays it with xdot.

    :param xdot: when True, write iso.dot and open it with xdot
    """
    # TODO: Integrate the output into documentation
    from copy import copy
    def print_commands(command, map_output, groups=None, depth=0):
        # Recursively append one output line per (parent, command) pair
        if groups is None:
            groups = []
        if 'commands' in command.__dict__:
            if len(groups) > 0:
                if xdot:
                    line = "    %s -> %s [weight=1.0];\n" % (groups[-1], command.name)
                else:
                    line = "    " * (depth - 1) + "%s %s\n" % (groups[-1], command.name)
                map_output.append(line)
            for item in command.commands.values():
                subgroups = copy(groups)
                subgroups.append(command.name)
                print_commands(item, map_output, subgroups, depth + 1)
        else:
            if xdot:
                line = "    %s -> %s [weight=%1.1f];\n" % (groups[-1], command.name, len(groups))
            else:
                line = "    " * (len(groups) - 3 + depth) + "%s %s\n" % (groups[-1], command.name)
            map_output.append(line)
    output = []
    print_commands(cli, output)
    # The tool is called "isomer" -- rename the internal root node
    output = [line.replace("cli", "isomer") for line in output]
    if xdot:
        with open('iso.dot', 'w') as f:
            f.write('strict digraph {\n')
            f.writelines(sorted(output))
            f.write('}')
        run_process('.', ['xdot', 'iso.dot'])
    else:
        print("".join(output))
|
Render a given pystache template
with given content
def format_template(template, content):
    """Render a given pystache template with given content.

    :param template: mustache/pystache template string
    :param content: mapping of values substituted into the template
    :return: the rendered unicode string
    """
    import pystache

    # NOTE: rendering errors deliberately propagate to the caller; an
    # earlier try/except around this call was disabled.
    result = pystache.render(template, content, string_encoding='utf-8')
    return result
|
Render a given pystache template file with given content
def format_template_file(filename, content):
    """Render the pystache template stored in *filename* with *content*.

    :param filename: path of the template file
    :param content: mapping of values substituted into the template
    :return: the rendered string
    """
    with open(filename, 'r') as f:
        raw = f.read()
    # Python 2 leftover: ensure we hand a unicode template onward
    template = raw if type(raw) == str else raw.decode('utf-8')
    return format_template(template, content)
|
Write a new file from a given pystache template file and content
def write_template_file(source, target, content):
    """Write a new file from a given pystache template file and content.

    :param source: path of the template file
    :param target: path of the file to write
    :param content: mapping of values substituted into the template
    """
    print(target)  # kept: CLI progress feedback
    data = format_template_file(source, content)
    # Fix: write the rendered template in a single call -- the old loop
    # iterated the string character by character, issuing one f.write per
    # character (the per-"line" encode branch could never trigger on str).
    with open(target, 'w') as f:
        f.write(data)
|
Insert a new nginx service definition
def insert_nginx_service(definition):  # pragma: no cover
    """Insert a new nginx service definition.

    Splices *definition* into the hfos nginx site configuration between the
    SERVICE DEFINITIONS marker comments, unless it is already present.

    :param definition: nginx service/location block text to insert
    """
    config_file = '/etc/nginx/sites-available/hfos.conf'
    splitter = "### SERVICE DEFINITIONS ###"
    with open(config_file, 'r') as f:
        old_config = "".join(f.readlines())
    pprint(old_config)
    if definition in old_config:
        print("Service definition already inserted")
        return
    parts = old_config.split(splitter)
    print(len(parts))
    # Exactly two marker lines -> three parts; anything else means the
    # file was edited manually and automatic insertion is unsafe
    if len(parts) != 3:
        print("Nginx configuration seems to be changed and cannot be "
              "extended automatically anymore!")
        pprint(parts)
        return
    try:
        with open(config_file, "w") as f:
            f.write(parts[0])
            f.write(splitter + "\n")
            f.write(parts[1])
            for line in definition:
                f.write(line)
            f.write("\n    " + splitter)
            f.write(parts[2])
    except Exception as e:
        print("Error during Nginx configuration extension:", type(e), e)
|
[GROUP] Role based access control
def rbac(ctx, schema, object_filter, action, role, all_schemata):
    """[GROUP] Role based access control.

    Collects all objects matching the given schema(s)/filter and stores
    them together with action and role on the context for the subcommands.
    """
    database = ctx.obj['db']
    if schema is None:
        if all_schemata is False:
            log('No schema given. Read the RBAC group help', lvl=warn)
            sys.exit()
        else:
            schemata = database.objectmodels.keys()
    else:
        schemata = [schema]
    things = []
    # An omitted filter selects every object of the schema(s)
    if object_filter is None:
        parsed_filter = {}
    else:
        parsed_filter = json.loads(object_filter)
    for schema in schemata:
        for obj in database.objectmodels[schema].find(parsed_filter):
            things.append(obj)
    if len(things) == 0:
        log('No objects matched the criteria.', lvl=warn)
        sys.exit()
    ctx.obj['objects'] = things
    ctx.obj['action'] = action
    ctx.obj['role'] = role
|
Adds a role to an action on objects
def add_action_role(ctx):
    """Grant a role permission for an action on the selected objects."""
    objects = ctx.obj['objects']
    action = ctx.obj['action']
    role = ctx.obj['role']

    if action is None or role is None:
        log('You need to specify an action or role to the RBAC command group for this to work.', lvl=warn)
        return

    for obj in objects:
        permitted = obj.perms[action]
        if role not in permitted:
            permitted.append(role)
            obj.save()

    log("Done")
|
Deletes a role from an action on objects
def del_action_role(ctx):
    """Revoke a role's permission for an action on the selected objects."""
    objects = ctx.obj['objects']
    action = ctx.obj['action']
    role = ctx.obj['role']

    if action is None or role is None:
        log('You need to specify an action or role to the RBAC command group for this to work.', lvl=warn)
        return

    for obj in objects:
        permitted = obj.perms[action]
        if role in permitted:
            permitted.remove(role)
            obj.save()

    log("Done")
|
Changes the ownership of objects
def change_owner(ctx, owner, uuid):
    """Transfer ownership of the selected objects to another user.

    :param owner: user name, or user uuid when *uuid* is True
    :param uuid: interpret *owner* as a uuid instead of a name
    """
    objects = ctx.obj['objects']
    database = ctx.obj['db']

    lookup_key = 'uuid' if uuid is True else 'name'
    new_owner = database.objectmodels['user'].find_one({lookup_key: owner})

    if new_owner is None:
        log('User unknown.', lvl=error)
        return

    for item in objects:
        item.owner = new_owner.uuid
        item.save()

    log('Done')
|
Notify a user
def notify(self, event):
    """Notify a user.

    Currently only logs the incoming notification event for inspection.

    :param event: notification event
    """
    self.log('Got a notification event!')
    self.log(event, pretty=True)
    self.log(event.__dict__)
|
Handler to deal with a possibly disconnected remote controlling
client
:param event: ClientDisconnect Event
def clientdisconnect(self, event):
    """Release remote control if the controlling client disconnected.

    :param event: ClientDisconnect event carrying the client's uuid
    """
    try:
        was_controller = event.clientuuid == self.remote_controller
        if was_controller:
            self.log("Remote controller disconnected!", lvl=critical)
            self.remote_controller = None
    except Exception as exc:
        self.log("Strange thing while client disconnected", exc, type(exc))
|
Processes configuration list requests
:param event:
def getlist(self, event):
    """Processes configuration list requests.

    Responds with name/uuid/class/active of every stored component
    configuration, sorted by name.

    :param event: client request event
    """
    try:
        componentlist = model_factory(Schema).find({})
        data = []
        for comp in componentlist:
            try:
                data.append({
                    'name': comp.name,
                    'uuid': comp.uuid,
                    'class': comp.componentclass,
                    'active': comp.active
                })
            except AttributeError:
                # Skip malformed configurations instead of failing the list
                self.log('Bad component without component class encountered:', lvl=warn)
                self.log(comp.serializablefields(), pretty=True, lvl=warn)
        data = sorted(data, key=lambda x: x['name'])
        response = {
            'component': 'hfos.ui.configurator',
            'action': 'getlist',
            'data': data
        }
        self.fireEvent(send(event.client.uuid, response))
        return
    except Exception as e:
        self.log("List error: ", e, type(e), lvl=error, exc=True)
|
Store a given configuration
def put(self, event):
    """Store a given configuration.

    Updates the stored component configuration with the client-supplied
    data, asks the component to reload it and acknowledges success or
    failure to the requesting client.

    :param event: client request event carrying the new configuration data
    """
    self.log("Configuration put request ",
             event.user)
    try:
        component = model_factory(Schema).find_one({
            'uuid': event.data['uuid']
        })
        component.update(event.data)
        component.save()
        response = {
            'component': 'hfos.ui.configurator',
            'action': 'put',
            'data': True
        }
        self.log('Updated component configuration:',
                 component.name)
        self.fireEvent(reload_configuration(component.name))
    except (KeyError, ValueError, ValidationError, PermissionError) as e:
        response = {
            'component': 'hfos.ui.configurator',
            'action': 'put',
            'data': False
        }
        self.log('Storing component configuration failed: ',
                 type(e), e, exc=True, lvl=error)
    # Acknowledge the request either way
    self.fireEvent(send(event.client.uuid, response))
    return
|
Get a stored configuration
def get(self, event):
    """Get a stored configuration.

    :param event: client request event; event.data['uuid'] selects the
        component configuration to return
    """
    try:
        comp = event.data['uuid']
    except KeyError:
        comp = None
    if not comp:
        self.log('Invalid get request without schema or component',
                 lvl=error)
        return
    self.log("Config data get request for ", event.data, "from",
             event.user)
    component = model_factory(Schema).find_one({
        'uuid': comp
    })
    # NOTE(review): find_one may return None for an unknown uuid, which
    # would raise AttributeError below -- confirm handling upstream.
    response = {
        'component': 'hfos.ui.configurator',
        'action': 'get',
        'data': component.serializablefields()
    }
    self.fireEvent(send(event.client.uuid, response))
|
Records a single snapshot
def rec(self):
    """Record a single camera snapshot, logging any timer error."""
    try:
        self._snapshot()
    except Exception as err:
        self.log("Timer error: ", err, type(err), lvl=error)
|
Toggles the camera system recording state
def _toggle_filming(self):
    """Toggle the camera system recording state.

    Starts the snapshot timer when idle, stops it when recording.
    """
    if not self._filming:
        self.log("Starting operation")
        self._filming = True
        self.timer.start()
    else:
        self.log("Stopping operation")
        self._filming = False
        self.timer.stop()
|
A client has disconnected, update possible subscriptions accordingly.
:param event:
def client_disconnect(self, event):
    """Drop all subscriptions held by a client that disconnected.

    :param event: ClientDisconnect event carrying the client uuid
    """
    self.log("Removing disconnected client from subscriptions", lvl=debug)
    self._unsubscribe(event.clientuuid)
|
Get a specified object
def get(self, event):
    """Get a specified object.

    Looks the object up by filter or uuid, enforces read permission,
    strips hidden fields, optionally subscribes the client to updates and
    responds with the serialized object.

    :param event: client request event
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    object_filter = self._get_filter(event)

    if 'subscribe' in data:
        do_subscribe = data['subscribe'] is True
    else:
        do_subscribe = False

    try:
        uuid = str(data['uuid'])
    except (KeyError, TypeError):
        uuid = ""

    opts = schemastore[schema].get('options', {})
    hidden = opts.get('hidden', [])

    if object_filter == {}:
        if uuid == "":
            self.log('Object with no filter/uuid requested:', schema,
                     data,
                     lvl=warn)
            return
        object_filter = {'uuid': uuid}

    # Cleanup: removed a dead "storage_object = None" pre-assignment and
    # de-nested the always-true "if storage_object:" branch that followed
    # the early return below.
    storage_object = objectmodels[schema].find_one(object_filter)

    if not storage_object:
        self._cancel_by_error(event, uuid + '(' + str(object_filter) + ') of ' + schema +
                              ' unavailable')
        return

    self.log("Object found, checking permissions: ", data, lvl=verbose)

    if not self._check_permissions(user, 'read',
                                   storage_object):
        self._cancel_by_permission(schema, data, event)
        return

    # Strip schema-declared hidden fields before responding
    for field in hidden:
        storage_object._fields.pop(field, None)

    if do_subscribe and uuid != "":
        self._add_subscription(uuid, event)

    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'get',
        'data': {
            'schema': schema,
            'uuid': uuid,
            'object': storage_object.serializablefields()
        }
    }
    self._respond(None, result, event)
|
Search for an object
def search(self, event):
    """Search for objects and respond with a list of matches.

    Supports fulltext name search (case-insensitive regex), arbitrary
    filter dicts, field selection, pagination (skip/limit), sorting and
    optional per-object update subscriptions.

    :param event: client request event
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    if data.get('fulltext', False) is True:
        # Fix: MongoDB's $options value is the option letters themselves
        # ('i' for case-insensitive), not '$i'.
        object_filter = {
            'name': {
                '$regex': str(data['search']),
                '$options': 'i'
            }
        }
    else:
        if isinstance(data['search'], dict):
            object_filter = data['search']
        else:
            object_filter = {}

    if 'fields' in data:
        fields = data['fields']
    else:
        fields = []

    skip = data.get('skip', 0)
    limit = data.get('limit', 0)
    sort = data.get('sort', None)

    if 'subscribe' in data:
        self.log('Subscription:', data['subscribe'], lvl=verbose)
        do_subscribe = data['subscribe'] is True
    else:
        do_subscribe = False

    object_list = []

    size = objectmodels[schema].count(object_filter)

    # NOTE(review): this only warns when an explicit limit above WARNSIZE
    # is requested; an unlimited query (limit == 0) never warns -- confirm
    # whether that is intended.
    if size > WARNSIZE and (limit > 0 and limit > WARNSIZE):
        self.log("Getting a very long (", size, ") list of items for ", schema,
                 lvl=warn)

    opts = schemastore[schema].get('options', {})
    hidden = opts.get('hidden', [])

    self.log("object_filter: ", object_filter, ' Schema: ', schema,
             "Fields: ", fields,
             lvl=verbose)

    options = {}

    if skip > 0:
        options['skip'] = skip
    if limit > 0:
        options['limit'] = limit
    if sort is not None:
        options['sort'] = []
        for item in sort:
            key = item[0]
            direction = ASCENDING if item[1] == 'asc' else DESCENDING
            options['sort'].append([key, direction])

    cursor = objectmodels[schema].find(object_filter, **options)

    for item in cursor:
        if not self._check_permissions(user, 'list', item):
            continue
        self.log("Search found item: ", item, lvl=verbose)

        try:
            list_item = {'uuid': item.uuid}

            if fields in ('*', ['*']):
                # Full objects requested: serialize and strip hidden fields
                item_fields = item.serializablefields()
                for field in hidden:
                    item_fields.pop(field, None)
                object_list.append(item_fields)
            else:
                if 'name' in item._fields:
                    list_item['name'] = item.name

                for field in fields:
                    if field in item._fields and field not in hidden:
                        list_item[field] = item._fields[field]
                    else:
                        list_item[field] = None

                object_list.append(list_item)

            if do_subscribe:
                self._add_subscription(item.uuid, event)
        except Exception as e:
            self.log("Faulty object or field: ", e, type(e),
                     item._fields, fields, lvl=error, exc=True)

    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'search',
        'data': {
            'schema': schema,
            'list': object_list,
            'size': size
        }
    }

    self._respond(None, result, event)
|
Get a list of objects
def objectlist(self, event):
    """Get a list of objects

    Legacy handler: builds a flat list of objects matching the client's
    filter, honoring per-schema hidden fields and per-object 'list'
    permissions, then responds with a 'getlist' message.
    """
    self.log('LEGACY LIST FUNCTION CALLED!', lvl=warn)
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    object_filter = self._get_filter(event)
    self.log('Object list for', schema, 'requested from',
             user.account.name, lvl=debug)

    requested_fields = data.get('fields', [])
    matches = []

    schema_options = schemastore[schema].get('options', {})
    hidden_fields = schema_options.get('hidden', [])

    model = objectmodels[schema]
    if model.count(object_filter) > WARNSIZE:
        self.log("Getting a very long list of items for ", schema,
                 lvl=warn)

    try:
        for item in model.find(object_filter):
            try:
                if not self._check_permissions(user, 'list', item):
                    continue
                if requested_fields in ('*', ['*']):
                    # Full serialization, minus hidden fields
                    serialized = item.serializablefields()
                    for concealed in hidden_fields:
                        serialized.pop(concealed, None)
                    matches.append(serialized)
                else:
                    # Partial projection: uuid (+ name) plus requested fields
                    entry = {'uuid': item.uuid}
                    if 'name' in item._fields:
                        entry['name'] = item._fields['name']
                    for field_name in requested_fields:
                        if field_name in item._fields and \
                                field_name not in hidden_fields:
                            entry[field_name] = item._fields[field_name]
                        else:
                            entry[field_name] = None
                    matches.append(entry)
            except Exception as e:
                # A single broken object must not abort the whole listing
                self.log("Faulty object or field: ", e, type(e),
                         item._fields, requested_fields, lvl=error, exc=True)
    except ValidationError as e:
        self.log('Invalid object in database encountered!', e, exc=True,
                 lvl=warn)

    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'getlist',
        'data': {
            'schema': schema,
            'list': matches
        }
    }
    self._respond(None, result, event)
|
Change an existing object
def change(self, event):
    """Change a single field of an existing object

    Expects event data with 'uuid' and a 'change' dict containing
    'field' and 'value'. The modified object is validated before it is
    stored; on success a 'change' confirmation is sent to the client.
    Cancels with 'missing_args', 'not_found', a permission error or
    'invalid_object' on failure.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    try:
        uuid = data['uuid']
        change = data['change']
        field = change['field']
        new_data = change['value']
    except KeyError as e:
        self.log("Update request with missing arguments!", data, e,
                 lvl=critical)
        self._cancel_by_error(event, 'missing_args')
        return

    storage_object = None

    try:
        storage_object = objectmodels[schema].find_one({'uuid': uuid})
    except Exception as e:
        # Fix: include the exception in the log instead of discarding it
        self.log('Change for unknown object requested:', schema, data, e,
                 lvl=warn)

    if storage_object is None:
        self._cancel_by_error(event, 'not_found')
        return

    if not self._check_permissions(user, 'write', storage_object):
        self._cancel_by_permission(schema, data, event)
        return

    # NOTE(review): 'field' comes straight from the client, so protected
    # fields (e.g. 'uuid', 'owner', 'perms') could be overwritten here.
    # Consider restricting the set of changeable fields.
    self.log("Changing object:", storage_object._fields, lvl=debug)
    storage_object._fields[field] = new_data

    self.log("Storing object:", storage_object._fields, lvl=debug)
    try:
        storage_object.validate()
    except ValidationError:
        self.log("Validation of changed object failed!",
                 storage_object, lvl=warn)
        self._cancel_by_error(event, 'invalid_object')
        return

    storage_object.save()
    self.log("Object stored.")

    result = {
        'component': 'hfos.events.objectmanager',
        'action': 'change',
        'data': {
            'schema': schema,
            'uuid': uuid
        }
    }
    self._respond(None, result, event)
|
Put an object
def put(self, event):
    """Store a client-supplied object (create or update)

    If the client sends uuid 'create' (or an unknown uuid), a new object
    is created with a fresh uuid and the requesting user as owner;
    otherwise the existing object is updated in place. Permission checks
    guard both paths. The object is validated before saving; backend
    listeners are notified via objectcreation/objectchange events and
    subscribers are updated.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    try:
        clientobject = data['obj']
        uuid = clientobject['uuid']
    except KeyError as e:
        self.log("Put request with missing arguments!", e, data,
                 lvl=critical)
        return

    try:
        model = objectmodels[schema]
        created = False
        storage_object = None

        if uuid != 'create':
            storage_object = model.find_one({'uuid': uuid})

        if uuid == 'create' or model.count({'uuid': uuid}) == 0:
            # Creation path: assign fresh uuid and ownership
            if uuid == 'create':
                uuid = str(uuid4())
            created = True
            clientobject['uuid'] = uuid
            clientobject['owner'] = user.uuid
            storage_object = model(clientobject)
            if not self._check_create_permission(user, schema):
                self._cancel_by_permission(schema, data, event)
                return

        if storage_object is not None:
            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return
            self.log("Updating object:", storage_object._fields, lvl=debug)
            storage_object.update(clientobject)
        else:
            storage_object = model(clientobject)
            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return
            self.log("Storing object:", storage_object._fields, lvl=debug)

        try:
            storage_object.validate()
        except ValidationError:
            # Fix: previously the invalid object was saved anyway.
            # Cancel the request instead, consistent with change().
            self.log("Validation of new object failed!", clientobject,
                     lvl=warn)
            self._cancel_by_error(event, 'invalid_object')
            return

        storage_object.save()
        self.log("Object %s stored." % schema)

        # Notify backend listeners
        if created:
            notification = objectcreation(
                storage_object.uuid, schema, client
            )
        else:
            notification = objectchange(
                storage_object.uuid, schema, client
            )

        self._update_subscribers(schema, storage_object)

        result = {
            'component': 'hfos.events.objectmanager',
            'action': 'put',
            'data': {
                'schema': schema,
                'object': storage_object.serializablefields(),
                'uuid': storage_object.uuid,
            }
        }
        self._respond(notification, result, event)
    except Exception as e:
        self.log("Error during object storage:", e, type(e), data,
                 lvl=error, exc=True, pretty=True)
|
Delete an existing object
def delete(self, event):
    """Delete one or more existing objects

    Accepts a single uuid or a list of uuids. Each object is looked up,
    write-permission checked, deleted, and any subscribers are notified
    before their subscription entry is dropped. Responds once per
    deleted object.
    """
    try:
        data, schema, user, client = self._get_args(event)
    except AttributeError:
        return

    try:
        uuids = data['uuid']
        if not isinstance(uuids, list):
            uuids = [uuids]

        if schema not in objectmodels.keys():
            self.log("Unknown schema encountered: ", schema, lvl=warn)
            return

        for uuid in uuids:
            self.log("Looking for object to be deleted:", uuid, lvl=debug)
            storage_object = objectmodels[schema].find_one({'uuid': uuid})

            if not storage_object:
                # Fix: use 'not_found' for consistency with change()
                self._cancel_by_error(event, 'not_found')
                return
            self.log("Found object.", lvl=debug)

            if not self._check_permissions(user, 'write', storage_object):
                self._cancel_by_permission(schema, data, event)
                return

            storage_object.delete()
            self.log("Deleted. Preparing notification.", lvl=debug)
            notification = objectdeletion(uuid, schema, client)

            # Tell every subscriber about the deletion, then drop the
            # now-useless subscription entry.
            if uuid in self.subscriptions:
                deletion = {
                    'component': 'hfos.events.objectmanager',
                    'action': 'deletion',
                    'data': {
                        'schema': schema,
                        'uuid': uuid,
                    }
                }
                for recipient in self.subscriptions[uuid]:
                    self.fireEvent(send(recipient, deletion))
                del self.subscriptions[uuid]

            result = {
                'component': 'hfos.events.objectmanager',
                'action': 'delete',
                'data': {
                    'schema': schema,
                    'uuid': storage_object.uuid
                }
            }
            self._respond(notification, result, event)
    except Exception as e:
        # Fix: include the traceback like the other handlers do
        self.log("Error during delete request: ", e, type(e),
                 lvl=error, exc=True)
|
Subscribe to an object's future changes
def subscribe(self, event):
    """Subscribe to an object's future changes

    Accepts a single uuid or a list of uuids; uuids that cannot be
    subscribed (KeyError from the subscription store) are silently
    skipped. Responds with the list of successfully subscribed uuids.
    """
    requested = event.data
    if not isinstance(requested, list):
        requested = [requested]

    confirmed = []
    for object_uuid in requested:
        try:
            self._add_subscription(object_uuid, event)
        except KeyError:
            continue
        confirmed.append(object_uuid)

    self._respond(None, {
        'component': 'hfos.events.objectmanager',
        'action': 'subscribe',
        'data': {
            'uuid': confirmed, 'success': True
        }
    }, event)
|
Unsubscribe from an object's future changes
def unsubscribe(self, event):
    """Unsubscribe from an object's future changes

    Accepts a single uuid or a list of uuids. Removes this client from
    each object's subscriber map and deletes the map entry when it
    becomes empty. Responds with the list of unsubscribed uuids.
    """
    # TODO: Automatic Unsubscription
    requested = event.data
    if not isinstance(requested, list):
        requested = [requested]

    removed = []
    for object_uuid in requested:
        if object_uuid not in self.subscriptions:
            continue
        subscribers = self.subscriptions[object_uuid]
        # NOTE: raises KeyError if this client was never subscribed,
        # matching the previous behavior
        subscribers.pop(event.client.uuid)
        if not subscribers:
            del self.subscriptions[object_uuid]
        removed.append(object_uuid)

    self._respond(None, {
        'component': 'hfos.events.objectmanager',
        'action': 'unsubscribe',
        'data': {
            'uuid': removed, 'success': True
        }
    }, event)
|
OM event handler for to be stored and client shared objectmodels
:param event: OMRequest with uuid, schema and object data
def update_subscriptions(self, event):
    """OM event handler for to be stored and client shared objectmodels

    :param event: OMRequest with uuid, schema and object data
    """
    # Forward the changed object to all subscribers; any failure is
    # logged with a traceback instead of propagating into the event loop.
    try:
        self._update_subscribers(event.schema, event.data)
    except Exception as e:
        self.log("Error during subscription update: ", type(e), e,
                 exc=True)
|
Project Importer for Github Repository Issues
Argument REPOSITORY must be given as 'username/repository'
Owner and project have to be UUIDs
def GithubImporter(ctx, repository, all, owner, project, ignore_labels, no_tags, username, password):
    """Project Importer for Github Repository Issues

    Argument REPOSITORY must be given as 'username/repository'

    Owner and project have to be UUIDs
    """
    db = ctx.obj['db']

    # Resolve the target project, first by uuid, then by name
    if project is not None:
        project_obj = db.objectmodels['project'].find_one({'uuid': project})
        if project_obj is None:
            project_obj = db.objectmodels['project'].find_one({'name': project})
        if project_obj is None:
            log('Project not found.', lvl=error)
            return
        else:
            project_uuid = project_obj.uuid
    else:
        project_uuid = None

    # Cache of existing tags, keyed by lower-cased tag name
    tags = {}
    if not ignore_labels:
        for tag in db.objectmodels['tag'].find():
            tags[tag.name.lower()] = tag

    def write_issue(issue):
        """Stores a single github issue as task"""
        if 'pull_request' not in issue:
            issue_tags = []
            if not ignore_labels:
                for l in issue['labels']:
                    label_key = l['name'].lower()
                    if label_key not in tags:
                        initial = {
                            'uuid': std_uuid(),
                            'name': l['name']
                        }
                        new_tag = db.objectmodels['tag'](initial)
                        new_tag.save()
                        # Fix: cache under the lower-cased key so the next
                        # issue carrying the same label finds it instead of
                        # creating a duplicate tag object
                        tags[label_key] = new_tag
                        issue_tags.append(new_tag.uuid)
                    else:
                        issue_tags.append(tags[label_key].uuid)
            date = issue['created_at'].split('T')[0]
            initial = {
                'uuid': std_uuid(),
                'name': issue['title'],
                'notes': str(issue['state']) + "\n\n" + issue['html_url'],
                'created': date,
                'project': project_uuid
            }
            if len(issue_tags) > 0:
                initial['tags'] = issue_tags
            task = db.objectmodels['task'](initial)
            task.save()
        else:
            log('Pull request issue:', issue, lvl=debug)

    def write_issues(r):
        """Parses JSON response and stores all issues."""
        if r.status_code != 200:
            raise Exception(r.status_code)
        for issue in r.json():
            write_issue(issue)

    def get_issues(name, state, auth):
        """Requests issues from GitHub API"""
        url = 'https://api.github.com/repos/{}/issues?state={}'.format(name, state)
        r = requests.get(url, auth=auth)
        write_issues(r)
        # Multiple requests are required if response is paged; GitHub
        # announces further pages via the 'link' response header
        if 'link' in r.headers:
            pages = {rel[6:-1]: url[url.index('<') + 1:-1] for url, rel in
                     (link.split(';') for link in
                      r.headers['link'].split(','))}
            while 'last' in pages and 'next' in pages:
                pages = {rel[6:-1]: url[url.index('<') + 1:-1] for url, rel in
                         (link.split(';') for link in
                          r.headers['link'].split(','))}
                r = requests.get(pages['next'], auth=auth)
                write_issues(r)
                if pages['next'] == pages['last']:
                    break

    # 'all' (CLI flag shadowing the builtin) switches between importing
    # every issue and only the open ones
    if all:
        state = 'all'
    else:
        state = 'open'

    auth = (username, password)
    get_issues(repository, state, auth)
|
Compile a list of all available language translations
def all_languages():
    """Compile a list of all available language translations"""
    found = []

    for entry in os.listdir(localedir):
        # Reduce e.g. 'de_DE.UTF-8@euro' to its bare language code
        code = entry.split('_')[0].split('.')[0].split('@')[0]
        looks_like_code = 2 <= len(code) <= 3 and \
            all(ch.islower() for ch in code)
        if looks_like_code and code != 'all':
            found.append(entry)

    found.sort()
    # English is the untranslated default and always available
    found.append('en')
    l10n_log('Registered languages:', found, lvl=verbose)
    return found
|
Get a descriptive title for all languages
def language_token_to_name(languages):
    """Get a descriptive title for all languages"""
    lookup_path = os.path.join(localedir, 'languages.json')
    with open(lookup_path, 'r') as handle:
        known_names = json.load(handle)

    titles = {}
    for token in languages:
        token = token.lower()
        if token in known_names:
            titles[token] = known_names[token]
        else:
            # Unknown token: warn and fall back to the token itself
            l10n_log('Language token lookup not found:', token, lvl=warn)
            titles[token] = token

    return titles
|
Debugging function to print all message language variants
def print_messages(domain, msg):
    """Debugging function to print all message language variants"""
    translator = Domain(domain)
    for language in all_languages():
        print(language, ':', translator.get(language, msg))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.