text
stringlengths 81
112k
|
|---|
Return magnitude of the vector.
def magnitude(self):
    """Return magnitude (Euclidean length) of the vector."""
    squares = [component ** 2 for component in self.to_list()]
    return math.sqrt(reduce(lambda acc, nxt: acc + nxt, squares))
|
Return a Vector instance as the vector sum of two vectors.
def sum(self, vector):
    """Return a Vector instance as the vector sum of two vectors.

    :param vector: the vector to add to this one.
    """
    # enumerate() supplies the component index; the original iterated
    # `for i, x in self.to_list()` directly, which raises TypeError.
    return self.from_list(
        [x + vector.vector[i] for i, x in enumerate(self.to_list())]
    )
|
Return the dot product of two vectors.
If theta is given then the dot product is computed as
v1*v2 = |v1||v2|cos(theta). Argument theta
is measured in degrees.
def dot(self, vector, theta=None):
    """Return the dot product of two vectors.

    If theta is given then the dot product is computed as
    v1*v2 = |v1||v2|cos(theta). Argument theta is measured in degrees.
    """
    if theta is not None:
        # theta is documented as degrees: convert to radians before cos().
        # (The original applied math.degrees to the *result* of cos, which
        # does not match the documented formula.)
        return (self.magnitude() * vector.magnitude() *
                math.cos(math.radians(theta)))
    # Component-wise products summed. enumerate() fixes the original's
    # broken unpacking, and the stray double call `self.to_list()()`.
    return sum(x * vector.vector[i] for i, x in enumerate(self.to_list()))
|
Return a Vector instance as the cross product of two vectors
def cross(self, vector):
    """Return a Vector instance as the cross product of two vectors"""
    cx = self.y * vector.z - self.z * vector.y
    cy = self.z * vector.x - self.x * vector.z
    cz = self.x * vector.y - self.y * vector.x
    return Vector(cx, cy, cz)
|
Return a Vector instance of the unit vector
def unit(self):
    """Return a Vector instance of the unit vector.

    :return: this vector scaled to magnitude 1.
    """
    # Compute the magnitude once instead of three times.
    mag = self.magnitude()
    return Vector(self.x / mag, self.y / mag, self.z / mag)
|
Return the angle between two vectors in degrees.
def angle(self, vector):
    """Return the angle between two vectors in degrees."""
    cos_theta = self.dot(vector) / (self.magnitude() * vector.magnitude())
    return math.degrees(math.acos(cos_theta))
|
Return True if vectors are non-parallel.
Non-parallel vectors are vectors which are neither parallel
nor perpendicular to each other.
def non_parallel(self, vector):
    """Return True if vectors are non-parallel.
    Non-parallel vectors are vectors which are neither parallel
    nor perpendicular to each other.
    """
    # Keep the strict `is True` tests of the original implementation.
    parallel = self.is_parallel(vector) is True
    perpendicular = self.is_perpendicular(vector) is True
    if parallel or perpendicular:
        return False
    return True
|
Returns the rotated vector. Assumes angle is in radians
def rotate(self, angle, axis=(0, 0, 1)):
    """Returns the rotated vector. Assumes angle is in radians"""
    if not all(isinstance(a, int) for a in axis):
        raise ValueError
    cos_a = math.cos(angle)
    sin_a = math.sin(angle)
    x, y, z = self.x, self.y, self.z
    # NOTE: each axis rotation below reads the ORIGINAL components
    # (self.x/y/z), not the partially-rotated ones — preserved as-is.
    if axis[2]:  # Z axis rotation
        x = self.x * cos_a - self.y * sin_a
        y = self.x * sin_a + self.y * cos_a
    if axis[1]:  # Y axis rotation
        x = self.x * cos_a + self.z * sin_a
        z = -self.x * sin_a + self.z * cos_a
    if axis[0]:  # X axis rotation
        y = self.y * cos_a - self.z * sin_a
        z = self.y * sin_a + self.z * cos_a
    return Vector(x, y, z)
|
Return a Vector instance from two given points.
def from_points(cls, point1, point2):
    """Return a Vector instance from two given points."""
    if not (isinstance(point1, Point) and isinstance(point2, Point)):
        raise TypeError
    # Displacement from point2 to point1 (as defined by Point.substract).
    displacement = point1.substract(point2)
    return cls(displacement.x, displacement.y, displacement.z)
|
Returns a Vector instance from spherical coordinates
def spherical(cls, mag, theta, phi=0):
    '''Returns a Vector instance from spherical coordinates'''
    sin_phi = math.sin(phi)
    return cls(
        mag * sin_phi * math.cos(theta),  # X
        mag * sin_phi * math.sin(theta),  # Y
        mag * math.cos(phi),              # Z
    )
|
Returns a Vector instance from cylindrical coordinates
def cylindrical(cls, mag, theta, z=0):
    '''Returns a Vector instance from cylindrical coordinates'''
    return cls(
        mag * math.cos(theta),  # X
        mag * math.sin(theta),  # Y
        z,                      # Z
    )
|
Modulus function which returns numerator if modulus is zero
def amod(a, b):
    '''Modulus function which returns the divisor `b` if the modulus is zero,
    so the result is always in 1..b (the "adjusted mod" of calendrical code).
    '''
    modded = int(a % b)
    # `is 0` relied on CPython's small-int interning and is a
    # SyntaxWarning on Python >= 3.8; use equality.
    return b if modded == 0 else modded
|
Determine the Julian date for the next or previous weekday
def search_weekday(weekday, jd, direction, offset):
    '''Determine the Julian date for the next or previous weekday'''
    target_jd = jd + direction * offset
    return weekday_before(weekday, target_jd)
|
Return (year, month, day) tuple that represents nth weekday of month in year.
If n==0, returns last weekday of month. Weekdays: Monday=0
def nth_day_of_month(n, weekday, month, year):
    """
    Return (year, month, day) tuple that represents nth weekday of month in year.
    If n==0, returns last weekday of month. Weekdays: Monday=0
    """
    if not (0 <= n <= 5):
        raise IndexError("Nth day of month must be 0-5. Received: {}".format(n))
    if not (0 <= weekday <= 6):
        raise IndexError("Weekday must be 0-6")
    firstday, daysinmonth = calendar.monthrange(year, month)
    # Day-of-month of the first occurrence of `weekday` in this month.
    first_weekday_of_kind = 1 + (weekday - firstday) % 7
    if n == 0:
        # Last weekday of kind: a 5th occurrence exists when day
        # first+28 still falls inside the month. The original tested
        # `+ 28 < daysinmonth`, which missed months whose LAST day is
        # the 5th occurrence (e.g. Wed 31 Mar 2021); `<=` is correct,
        # and it subsumes the redundant `in [1, 2, 3]` check.
        if first_weekday_of_kind + 28 <= daysinmonth:
            n = 5
        else:
            n = 4
    day = first_weekday_of_kind + ((n - 1) * 7)
    if day > daysinmonth:
        raise IndexError("No {}th day of month {}".format(n, month))
    return (year, month, day)
|
Returns a list of filenames based on the type of IRAF input.
Handles lists, wild-card characters, and at-files. For special
at-files, use the atfile keyword to process them.
This function is recursive, so IRAF lists can also contain at-files
and wild-card characters, e.g. `a.fits`, `@file.lst`, `*flt.fits`.
def irafglob(inlist, atfile=None):
    """ Returns a list of filenames based on the type of IRAF input.
    Handles lists, wild-card characters, and at-files. For special
    at-files, use the atfile keyword to process them.
    This function is recursive, so IRAF lists can also contain at-files
    and wild-card characters, e.g. `a.fits`, `@file.lst`, `*flt.fits`.
    """
    # Sanity check
    if inlist is None or len(inlist) == 0:
        return []
    # Determine which form of input was provided:
    if isinstance(inlist, list):
        # python list
        flist = []
        for f in inlist:
            flist += irafglob(f)
    elif ',' in inlist:
        # comma-separated string list
        flist = []
        for f in inlist.split(','):
            flist += irafglob(f.strip())
    elif inlist[0] == '@':
        # file list: use a context manager so the at-file handle is
        # closed deterministically (the original leaked it).
        flist = []
        with open(inlist[1:], 'r') as fh:
            for f in fh:
                f = f.rstrip()
                # hook for application specific atfiles.
                if atfile:
                    f = atfile(f)
                flist += irafglob(f)
    else:
        # shell globbing
        if osfn:
            inlist = osfn(inlist)
        flist = glob.glob(inlist)
    return flist
|
Return binary format of packet.
The returned string is the binary format of the packet with
stuffing and framing applied. It is ready to be sent to
the GPS.
def pack(self):
    """Return binary format of packet.
    The returned string is the binary format of the packet with
    stuffing and framing applied. It is ready to be sent to
    the GPS.
    """
    # Candidate structs for the packet ID alone.
    try:
        candidates = get_structs_for_fields([self.fields[0]])
    except TypeError:
        # self.fields[0] is not a valid argument to `chr()`.
        raise PackError(self)
    if candidates == []:
        # Retry with packet ID + subcode.
        try:
            candidates = get_structs_for_fields([self.fields[0], self.fields[1]])
        except (IndexError, TypeError):
            # IndexError: no self.fields[1];
            # TypeError: self.fields[1] is a wrong argument to `chr()`.
            raise PackError(self)
    # First struct that accepts the fields wins.
    for candidate in candidates:
        try:
            return candidate.pack(*self.fields)
        except struct.error:
            continue
    # None of the candidate structs matched.
    raise PackError(self)
|
Instantiate `Packet` from binary string.
:param rawpacket: TSIP pkt in binary format.
:type rawpacket: String.
`rawpacket` must already have framing (DLE...DLE/ETX) removed and
byte stuffing reversed.
def unpack(cls, rawpacket):
    """Instantiate `Packet` from binary string.
    :param rawpacket: TSIP pkt in binary format.
    :type rawpacket: String.
    `rawpacket` must already have framing (DLE...DLE/ETX) removed and
    byte stuffing reversed.
    """
    structs_ = get_structs_for_rawpacket(rawpacket)
    for struct_ in structs_:
        try:
            return cls(*struct_.unpack(rawpacket))
        except struct.error:
            # Wrong struct for this payload; try the next candidate.
            # (The original re-raised here, which made both the "try
            # next one" loop and the fallback below unreachable.)
            pass
    # Packet ID 0xff is a pseudo-packet representing
    # packets unknown to `python-TSIP` in their raw format.
    return cls(0xff, rawpacket)
|
Handle standard PRIMARY clipboard access. Note that offset and length
are passed as strings. This differs from CLIPBOARD.
def ch_handler(offset=0, length=-1, **kw):
    """ Handle standard PRIMARY clipboard access. Note that offset and length
    are passed as strings. This differs from CLIPBOARD. """
    global _lastSel
    start = int(offset)
    count = int(length)
    if count < 0:
        # negative length means "everything"
        count = len(_lastSel)
    return _lastSel[start:start + count]
|
Put the given string into the given clipboard.
def put(text, cbname):
    """ Put the given string into the given clipboard. """
    global _lastSel
    _checkTkInit()
    if cbname == 'CLIPBOARD':
        _theRoot.clipboard_clear()
        if text:
            # for clipboard_append, kwds can be -displayof, -format, or -type
            _theRoot.clipboard_append(text)
        return
    if cbname == 'PRIMARY':
        _lastSel = text
        _theRoot.selection_handle(ch_handler, selection='PRIMARY')
        # we need to claim/own it so that ch_handler is used
        _theRoot.selection_own(selection='PRIMARY')
        # could add command arg for a func to be called when we lose ownership
        return
    raise RuntimeError("Unexpected clipboard name: "+str(cbname))
|
Get the contents of the given clipboard.
def get(cbname):
    """ Get the contents of the given clipboard.

    :param cbname: 'PRIMARY' or 'CLIPBOARD'.
    :return: the clipboard text, or None if the selection is empty/unavailable.
    """
    _checkTkInit()
    if cbname in ('PRIMARY', 'CLIPBOARD'):
        try:
            return _theRoot.selection_get(selection=cbname)
        except Exception:
            # selection_get raises TclError when the selection is empty;
            # deliberately best-effort, return None. (The original bare
            # `except:` also swallowed KeyboardInterrupt/SystemExit.)
            return None
    raise RuntimeError("Unexpected clipboard name: "+str(cbname))
|
Creates a measurement deviceCfg from the input configuration.
:param: deviceCfg: the deviceCfg cfg.
:param: handlers: the loaded handlers.
:return: the constructed deviceCfg.
def createDevice(self, deviceCfg):
    """
    Creates a measurement device from the input configuration.
    :param deviceCfg: the device cfg.
    :return: the constructed device.
    :raises ValueError: for unsupported device/io/provider types.
    """
    ioCfg = deviceCfg['io']
    deviceType = deviceCfg['type']  # renamed: don't shadow builtin `type`
    if deviceType != 'mpu6050':
        raise ValueError(str(deviceType) + " is not a supported device")
    fs = deviceCfg.get('fs')
    name = deviceCfg.get('name')
    if ioCfg['type'] == 'mock':
        provider = ioCfg.get('provider')
        # Fail fast with a readable error; the original raised TypeError
        # (None + str) when provider was missing, and referenced
        # dataProvider before assignment for unsupported providers.
        if provider != 'white noise':
            raise ValueError(str(provider) + " is not a supported mock io data provider")
        dataProvider = WhiteNoiseProvider()
        self.logger.warning("Loading mock data provider for mpu6050")
        io = mock_io(dataProvider=dataProvider.provide)
    elif ioCfg['type'] == 'smbus':
        busId = ioCfg['busId']
        self.logger.warning("Loading smbus %d", busId)
        io = smbus_io(busId)
    else:
        raise ValueError(ioCfg['type'] + " is not a supported io provider")
    # str(name) so the log line cannot TypeError when name is omitted.
    self.logger.warning("Loading mpu6050 " + str(name) + "/" + str(fs))
    return mpu6050(io, name=name, fs=fs) if name is not None else mpu6050(io, fs=fs)
|
Loads the recordingDevices specified in the configuration.
:param: handlers the loaded handlers.
:return: the constructed recordingDevices in a dict keyed by name.
def _loadRecordingDevices(self):
    """
    Loads the recordingDevices specified in the configuration.
    :return: the constructed recordingDevices in a dict keyed by name.
    """
    devices = (self.createDevice(cfg) for cfg in self.config['accelerometers'])
    return {device.name: device for device in devices}
|
Creates a data handler from the input configuration.
:param handler: the handler cfg.
:return: the constructed handler.
def createHandler(self, handler):
    """
    Creates a data handler from the input configuration.
    :param handler: the handler cfg.
    :return: the constructed handler.
    """
    target = handler['target']
    handlerType = handler['type']
    if handlerType == 'log':
        self.logger.warning("Initialising csvlogger to log data to " + target)
        return CSVLogger('recorder', handler['name'], target)
    if handlerType == 'post':
        self.logger.warning("Initialising http logger to log data to " + target)
        return HttpPoster(handler['name'], target)
    # NOTE(review): unknown handler types fall through and return None
    # implicitly, as in the original — confirm that is intended.
|
creates a dictionary of named handler instances
:return: the dictionary
def _loadHandlers(self):
    """
    creates a dictionary of named handler instances
    :return: the dictionary
    """
    handlers = [self.createHandler(cfg) for cfg in self.config['handlers']]
    return {handler.name: handler for handler in handlers}
|
stores a chunk of new file, this is a nop if the file already exists.
:param filename: the filename.
:param chunkIdx: the chunk idx.
:param totalChunks: the no of chunks expected.
:return: the no of bytes written and 200 or 400 if nothing was written.
def put(self, filename, chunkIdx, totalChunks):
    """
    stores a chunk of new file, this is a nop if the file already exists.
    :param filename: the filename.
    :param chunkIdx: the chunk idx.
    :param totalChunks: the no of chunks expected.
    :return: the no of bytes written and 200 or 400 if nothing was written.
    """
    logger.info('handling chunk ' + chunkIdx + ' of ' + totalChunks + ' for ' + filename)
    import flask
    written = self._uploadController.writeChunk(flask.request.stream, filename, int(chunkIdx))
    if written > 0:
        return str(written), 200
    return str(written), 400
|
Deletes the named file.
:param name: the name.
:return: 200 if it was deleted, 404 if it doesn't exist or 500 for anything else.
def delete(self, name):
    """
    Deletes the named file.
    :param name: the name.
    :return: 200 if it was deleted, 404 if it doesn't exist or 500 for anything else.
    """
    try:
        result = self._uploadController.delete(name)
    except Exception as e:
        return str(e), 500
    status = 200 if result is not None else 404
    return None, status
|
Stores a new target.
:param name: the name.
:param start: start time.
:param end: end time.
:return:
def put(self, name, start, end):
    """
    Stores a new target.
    :param name: the name.
    :param start: start time.
    :param end: end time.
    :return: 200 on success, 500 on store failure, 404 if the entry is unknown.
    """
    entry = self._uploadController.getEntry(name)
    if entry is None:
        return None, 404
    stored = self._targetController.storeFromWav(entry, start, end)
    return None, 200 if stored else 500
|
:param name:
:param start:
:param end:
:param resolution:
:param window:
:return: an analysed file.
def get(self, name, start, end, resolution, window):
    """
    Analyses the named upload over the given range.
    :param name: the upload name.
    :param start: start time, or the literal 'start'.
    :param end: end time, or the literal 'end'.
    :param resolution: segment length multiplier.
    :param window: window name, optionally followed by a numeric parameter.
    :return: an analysed file.
    """
    logger.info(
        'Analysing ' + name + ' from ' + start + ' to ' + end + ' at ' + resolution + 'x resolution using ' + window + ' window')
    signal = self._uploadController.loadSignal(name,
                                               start=start if start != 'start' else None,
                                               end=end if end != 'end' else None)
    if signal is None:
        return None, 404
    # "name param" window strings become a (name, float) tuple.
    windowSpec = tuple(filter(None, window.split(' ')))
    if len(windowSpec) == 2:
        windowSpec = (windowSpec[0], float(windowSpec[1]))
    import time
    multiplier = int(resolution)
    data = {
        'spectrum': self._jsonify(
            signal.spectrum(ref=SPECLAB_REFERENCE, segmentLengthMultiplier=multiplier, window=windowSpec)
        ),
        'peakSpectrum': self._jsonify(
            signal.peakSpectrum(ref=SPECLAB_REFERENCE, segmentLengthMultiplier=multiplier, window=windowSpec)
        ),
        'analysedAt': int(time.time() * 1000)
    }
    return data, 200
|
Completes the specified upload.
:param filename: the filename.
:param totalChunks: the no of chunks.
:param status: the status of the upload.
:return: 200.
def put(self, filename, totalChunks, status):
    """
    Completes the specified upload.
    :param filename: the filename.
    :param totalChunks: the no of chunks.
    :param status: the status of the upload.
    :return: 200.
    """
    logger.info('Completing ' + filename + ' - ' + status)
    chunkCount = int(totalChunks)
    self._uploadController.finalise(filename, chunkCount, status)
    return None, 200
|
Patches the metadata associated with the new measurement, if this impacts the measurement length then a new
measurement is created otherwise it just updates it in place.
:param measurementId:
:return:
def patch(self, measurementId):
    """
    Patches the metadata associated with the new measurement, if this impacts the
    measurement length then a new measurement is created otherwise it just updates
    it in place.
    :param measurementId: the measurement to patch.
    :return: 200/404/400.
    """
    data = request.get_json()
    if data is None:
        logger.error('Invalid data payload received ' + measurementId)
        return None, 400
    logger.debug('Received payload for ' + measurementId + ' - ' + str(data))
    if self._measurementController.editMeasurement(measurementId, data):
        return None, 200
    logger.warning('Unable to edit payload ' + measurementId)
    return None, 404
|
Initiates a new measurement. Accepts a json payload with the following attributes;
* duration: in seconds
* startTime OR delay: a date in YMD_HMS format or a delay in seconds
* description: some free text information about the measurement
:return:
def put(self, measurementId):
    """
    Initiates a new measurement. Accepts a json payload with the following attributes;
    * duration: in seconds
    * startTime OR delay: a date in YMD_HMS format or a delay in seconds
    * description: some free text information about the measurement
    :return: the scheduling message and 200/400.
    """
    payload = request.get_json()
    try:
        start = self._calculateStartTime(payload)
    except ValueError:
        return 'invalid date format in request', 400
    if start is None:
        # should never happen but just in case
        return 'no start time', 400
    duration = payload['duration'] if 'duration' in payload else 10
    scheduled, message = self._measurementController.schedule(
        measurementId, duration, start, description=payload.get('description'))
    return message, 200 if scheduled else 400
|
Calculates an absolute start time from the json payload. This is either the given absolute start time (+2s) or
the time in delay seconds time. If the resulting date is in the past then now is returned instead.
:param json: the payload from the UI
:return: the absolute start time.
def _calculateStartTime(self, json):
    """
    Calculates an absolute start time from the json payload. This is either the given
    absolute start time (+2s) or the time in delay seconds time. If the resulting date
    is in the past then now is returned instead.
    :param json: the payload from the UI
    :return: the absolute start time.
    """
    start = json.get('startTime')
    delay = json.get('delay')
    if start is not None:
        target = datetime.datetime.strptime(start, DATETIME_FORMAT)
        if target > datetime.datetime.utcnow():
            return target
        # requested time already passed; fall back to "now + 2s"
        fallback = self._getAbsoluteTime(datetime.datetime.utcnow(), 2)
        logger.warning('Date requested is in the past (' + start + '), defaulting to ' +
                       fallback.strftime(DATETIME_FORMAT))
        return fallback
    if delay is not None:
        return self._getAbsoluteTime(datetime.datetime.utcnow(), delay)
    # neither startTime nor delay supplied: start in 2 seconds
    return self._getAbsoluteTime(datetime.datetime.utcnow(), 2)
|
Adds the delay in seconds to the start time.
:param start:
:param delay:
:return: a datetime for the specified point in time.
def _getAbsoluteTime(self, start, delay):
    """
    Adds the delay in seconds to the start time.
    :param start: the base datetime.
    :param delay: seconds to add.
    :return: a datetime for the specified point in time.
    """
    return start + datetime.timedelta(seconds=delay)
|
Deletes the named measurement.
:return: 200 if something was deleted, 404 if the measurement doesn't exist, 500 in any other case.
def delete(self, measurementId):
    """
    Deletes the named measurement.
    :return: 200 if something was deleted, 404 if the measurement doesn't exist,
             500 in any other case.
    """
    message, count, deleted = self._measurementController.delete(measurementId)
    if count == 0:
        return message, 404
    if deleted is None:
        return message, 500
    return deleted, 200
|
Initialises the measurement session from the given device.
:param measurementId:
:param deviceId:
:return:
def put(self, measurementId, deviceId):
    """
    Initialises the measurement session from the given device.
    :param measurementId: the measurement name.
    :param deviceId: the device name.
    :return: 200 on success, 404 otherwise.
    """
    logger.info('Starting measurement ' + measurementId + ' for ' + deviceId)
    started = self._measurementController.startMeasurement(measurementId, deviceId)
    if started:
        logger.info('Started measurement ' + measurementId + ' for ' + deviceId)
        return None, 200
    logger.warning('Failed to start measurement ' + measurementId + ' for ' + deviceId)
    return None, 404
|
Store a bunch of data for this measurement session.
:param measurementId:
:param deviceId:
:return:
def put(self, measurementId, deviceId):
    """
    Store a bunch of data for this measurement session.
    :param measurementId: the measurement name.
    :param deviceId: the device name.
    :return: 200/404/400.
    """
    data = request.get_json()
    if data is None:
        logger.error('Invalid data payload received ' + measurementId + '/' + deviceId)
        return None, 400
    parsedData = json.loads(data)
    logger.debug('Received payload ' + measurementId + '/' + deviceId + ': ' +
                 str(len(parsedData)) + ' records')
    if self._measurementController.recordData(measurementId, deviceId, parsedData):
        return None, 200
    logger.warning('Unable to record payload ' + measurementId + '/' + deviceId)
    return None, 404
|
Fails the measurement for this device.
:param measurementId: the measurement name.
:param deviceId: the device name.
:return: 200 if the failure was recorded, 404 otherwise.
def put(self, measurementId, deviceId):
    """
    Fails the measurement for this device.
    :param measurementId: the measurement name.
    :param deviceId: the device name.
    :return: 200 if the failure was recorded, 404 otherwise.
    """
    payload = request.get_json()
    if payload is not None:
        failureReason = json.loads(payload).get('failureReason')
    else:
        failureReason = None
    logger.warning('Failing measurement ' + measurementId + ' for ' + deviceId + ' because ' + str(failureReason))
    if self._measurementController.failMeasurement(measurementId, deviceId, failureReason=failureReason):
        logger.warning('Failed measurement ' + measurementId + ' for ' + deviceId)
        return None, 200
    logger.error('Unable to fail measurement ' + measurementId + ' for ' + deviceId)
    return None, 404
|
Puts a new device into the device store
:param deviceId:
:return:
def put(self, deviceId):
    """
    Puts a new device into the device store.
    :param deviceId: the device name.
    :return: 200.
    """
    payload = request.get_json()
    logger.debug("Received /devices/" + deviceId + " - " + str(payload))
    self._deviceController.accept(deviceId, payload)
    return None, 200
|
Print a list of strings centered in columns. Determine the number
of columns and lines on the fly. Return the result, ready to print.
in_strings is a list/tuple/iterable of strings
min_pad is number of spaces to appear on each side of a single string (so
you will see twice this many spaces between 2 strings)
def printColsAuto(in_strings, term_width=80, min_pad=1):
    """ Print a list of strings centered in columns. Determine the number
    of columns and lines on the fly. Return the result, ready to print.
    in_strings is a list/tuple/iterable of strings
    min_pad is number of spaces to appear on each side of a single string (so
    you will see twice this many spaces between 2 strings)
    """
    # sanity check
    assert in_strings and len(in_strings)>0, 'Unexpected: '+repr(in_strings)
    # widest input item plus padding on both sides
    cell_width = len(max(in_strings, key=len)) + 2 * min_pad
    # use the full line width, but never more columns than items
    n_cols = min(term_width // cell_width, len(in_strings))
    if n_cols < 2:
        # single column (or items too wide): one item per line regardless
        return '\n'.join(s.center(term_width) for s in in_strings)
    # normal case - 2 or more columns; col_width exceeds every item width
    col_width = term_width // n_cols
    pieces = []
    for idx, s in enumerate(in_strings):
        pieces.append(s.center(col_width))
        if (idx + 1) % n_cols == 0:
            pieces.append('\n')
    return ''.join(pieces).rstrip()
|
Print elements of list in cols columns
def printCols(strlist, cols=5, width=80):
    """Print elements of list in cols columns"""
    # Fill column-major: item i lands in row i % nlines, column i // nlines.
    nlines = (len(strlist) + cols - 1) // cols
    rows = [""] * nlines
    for idx, item in enumerate(strlist):
        col, row = divmod(idx, nlines)
        # pad out to this column's start position (fall back to a single
        # space when the previous entry overflowed its column)
        pad = col * width // cols - len(rows[row])
        if pad > 0:
            rows[row] += pad * " " + item
        else:
            rows[row] += " " + item
    for row_text in rows:
        print(row_text)
|
Strip single or double quotes off string; remove embedded quote pairs
def stripQuotes(value):
    """Strip single or double quotes off string; remove embedded quote pairs"""
    for quote, doubled_pattern in (('"', _re_doubleq2), ("'", _re_singleq2)):
        if value[:1] == quote:
            value = value[1:]
            if value[-1:] == quote:
                value = value[:-1]
            # collapse doubled quotes (e.g. "" -> ") inside the body
            value = re.sub(doubled_pattern, quote, value)
            break
    return value
|
Take a string as input (e.g. a line in a csv text file), and break
it into tokens separated by commas while ignoring commas embedded inside
quoted sections. This is exactly what the 'csv' module is meant for, so
we *should* be using it, save that it has two bugs (described next) which
limit our use of it. When these bugs are fixed, this function should be
forsaken in favor of direct use of the csv module (or similar).
The basic use case is to split a function signature string, so for:
afunc(arg1='str1', arg2='str, with, embedded, commas', arg3=7)
we want a 3 element sequence:
["arg1='str1'", "arg2='str, with, embedded, commas'", "arg3=7"]
but:
>>> import csv
>>> y = "arg1='str1', arg2='str, with, embedded, commas', arg3=7"
>>> rdr = csv.reader( (y,), dialect='excel', quotechar="'", skipinitialspace=True)
>>> l = rdr.next(); print(len(l), str(l)) # doctest: +SKIP
6 ["arg1='str1'", "arg2='str", 'with', 'embedded', "commas'", "arg3=7"]
which we can see is not correct - we wanted 3 tokens. This occurs in
Python 2.5.2 and 2.6. It seems to be due to the text at the start of each
token ("arg1=") i.e. because the quote isn't for the whole token. If we
were to remove the names of the args and the equal signs, it works:
>>> x = "'str1', 'str, with, embedded, commas', 7"
>>> rdr = csv.reader( (x,), dialect='excel', quotechar="'", skipinitialspace=True)
>>> l = rdr.next(); print(len(l), str(l)) # doctest: +SKIP
3 ['str1', 'str, with, embedded, commas', '7']
But even this usage is delicate - when we turn off skipinitialspace, it
fails:
>>> x = "'str1', 'str, with, embedded, commas', 7"
>>> rdr = csv.reader( (x,), dialect='excel', quotechar="'")
>>> l = rdr.next(); print(len(l), str(l)) # doctest: +SKIP
6 ['str1', " 'str", ' with', ' embedded', " commas'", ' 7']
So, for now, we'll roll our own.
def csvSplit(line, delim=',', allowEol=True):
    """ Split `line` on `delim` into a list of tokens, ignoring delimiters
    that appear inside quoted sections.
    This is csv-module territory, but the csv reader mishandles tokens that
    are only partially quoted (e.g. "arg1='str1'" in a function-signature
    string) and is overly sensitive to skipinitialspace, so for now we roll
    our own.
    """
    # Walk the line delimiter to delimiter; _getCharsUntil scans past any
    # single/double/triple-quoted section to its matching end quote.
    # Escaped quotes are not specially handled.
    tokens = []
    dlen = len(delim)
    more = line is not None and len(line) > 0
    while more:
        tok = _getCharsUntil(line, delim, True, allowEol=allowEol)
        if len(tok) > 0:
            # A token normally includes its trailing delimiter; the final
            # token on the line does not.
            if tok.endswith(delim):
                tokens.append(tok[:-dlen])
            else:
                tokens.append(tok)
                more = False
            line = line[len(tok):]
        else:
            # Empty final token (the line ended on a delimiter).
            tokens.append('')
            more = False
    return tokens
|
Same thing as glob.glob, but recursively checks subdirs.
def rglob(root, pattern):
    """ Same thing as glob.glob, but recursively checks subdirs. """
    # Thanks to Alex Martelli for basics on Stack Overflow
    matches = []
    if pattern is not None and root is not None:
        for base, _dirs, files in os.walk(root):
            matches += [os.path.join(base, f)
                        for f in fnmatch.filter(files, pattern)]
    return matches
|
Set a file named fname to be writable (or not) by user, with the
option to ignore errors. There is nothing ground-breaking here, but I
was annoyed with having to repeate this little bit of code.
def setWritePrivs(fname, makeWritable, ignoreErrors=False):
    """ Set a file named fname to be writable (or not) by user, with the
    option to ignore errors. There is nothing ground-breaking here, but I
    was annoyed with having to repeate this little bit of code. """
    mode = os.stat(fname).st_mode
    if makeWritable:
        newMode = mode | stat.S_IWUSR
    else:
        newMode = mode & ~stat.S_IWUSR
    try:
        os.chmod(fname, newMode)
    except OSError:
        # just try, don't whine - unless asked to
        if not ignoreErrors:
            raise
|
Remove escapes from in front of quotes (which IRAF seems to
just stick in for fun sometimes.) Remove \-newline too.
If quoted is true, removes all blanks following \-newline
(which is a nasty thing IRAF does for continuations inside
quoted strings.)
XXX Should we remove \\ too?
def removeEscapes(value, quoted=0):
    """Remove escapes from in front of quotes (which IRAF seems to
    just stick in for fun sometimes.) Remove \\-newline too.
    If quoted is true, removes all blanks following \\-newline
    (which is a nasty thing IRAF does for continuations inside
    quoted strings.)
    XXX Should we remove \\\\ too?
    """
    # strip the backslash from each \" and then each \'
    for escaped in (r'\"', r"\'"):
        pos = value.find(escaped)
        while pos >= 0:
            value = value[:pos] + value[pos + 1:]
            pos = value.find(escaped, pos + 1)
    # delete backslash-newlines
    pos = value.find("\\\n")
    while pos >= 0:
        end = pos + 2
        if quoted:
            # ignore blanks and tabs following \-newline in quoted strings
            for ch in value[pos + 2:]:
                if ch not in ' \t':
                    break
                end += 1
        value = value[:pos] + value[end:]
        pos = value.find("\\\n", pos + 1)
    return value
|
Convert CL parameter or variable name to Python-acceptable name
Translate embedded dollar signs to 'DOLLAR'
Add 'PY' prefix to components that are Python reserved words
Add 'PY' prefix to components start with a number
If dot != 0, also replaces '.' with 'DOT'
def translateName(s, dot=0):
    """Convert CL parameter or variable name to Python-acceptable name
    Translate embedded dollar signs to 'DOLLAR'
    Add 'PY' prefix to components that are Python reserved words
    Add 'PY' prefix to components start with a number
    If dot != 0, also replaces '.' with 'DOT'
    """
    s = s.replace('$', 'DOLLAR')
    parts = []
    for part in s.split('.'):
        needs_prefix = (part == "" or part[0] in string.digits or
                        keyword.iskeyword(part))
        parts.append('PY' + part if needs_prefix else part)
    sep = 'DOT' if dot else '.'
    return sep.join(parts)
|
In case the _default_root value is required, you may
safely call this ahead of time to ensure that it has been
initialized. If it has already been, this is a no-op.
def init_tk_default_root(withdraw=True):
    """ In case the _default_root value is required, you may
    safely call this ahead of time to ensure that it has been
    initialized. If it has already been, this is a no-op.
    """
    if not capable.OF_GRAPHICS:
        raise RuntimeError("Cannot run this command without graphics")
    # creating a Tk() instance populates tkinter._default_root as a side effect
    if not TKNTR._default_root:  # TKNTR imported above
        TKNTR.Tk()
    root = TKNTR._default_root
    if withdraw and root:
        root.withdraw()
    return root
|
Read a line from file while running Tk mainloop.
If the file is not line-buffered then the Tk mainloop will stop
running after one character is typed. The function will still work
but Tk widgets will stop updating. This should work OK for stdin and
other line-buffered filehandles. If file is omitted, reads from
sys.stdin.
The file must have a readline method. If it does not have a fileno
method (which can happen e.g. for the status line input on the
graphics window) then the readline method is simply called directly.
def tkreadline(file=None):
    """Read a line from file while running Tk mainloop.

    If the file is not line-buffered then the Tk mainloop will stop
    running after one character is typed. The function will still work
    but Tk widgets will stop updating. This should work OK for stdin and
    other line-buffered filehandles. If file is omitted, reads from
    sys.stdin.

    The file must have a readline method. If it does not have a fileno
    method (which can happen e.g. for the status line input on the
    graphics window) then the readline method is simply called directly.
    """
    if file is None:
        file = sys.stdin
    if not hasattr(file, "readline"):
        raise TypeError("file must be a filehandle with a readline method")
    # Call tkread now...
    # BUT, if we get in here for something not GUI-related (e.g. terminal-
    # focused code in a sometimes-GUI app) then skip tkread and simply call
    # readline on the input eg. stdin. Otherwise we'd fail in _TkRead().read()
    try:
        fd = file.fileno()
    except Exception:
        fd = None
    # BUGFIX: compare against None explicitly -- sys.stdin's fileno() is 0,
    # which is falsy, so the original `if (fd and ...)` silently skipped the
    # Tk mainloop for stdin, the main documented use case.
    if fd is not None and capable.OF_GRAPHICS:
        tkread(fd, 0)
        # if EOF was encountered on a tty, avoid reading again because
        # it actually requests more data
        if not select.select([fd], [], [], 0)[0]:
            return ''
    return file.readline()
|
Given a URL, try to pop it up in a browser on most platforms.
brow_bin is only used on OS's where there is no "open" or "start" cmd.
def launchBrowser(url, brow_bin='mozilla', subj=None):
    """ Given a URL, try to pop it up in a browser on most platforms.
    brow_bin is only used on OS's where there is no "open" or "start" cmd.
    """
    if not subj:
        subj = url
    # Tries to use webbrowser module on most OSes, unless a system command
    # is needed. (E.g. win, linux, sun, etc)
    # BUGFIX: the original tested `not in ('os2warp, iphone')`, which is a
    # tuple holding the single string 'os2warp, iphone', so the webbrowser
    # branch was taken unconditionally on every platform.
    if sys.platform not in ('os2warp', 'iphone'):
        import webbrowser
        if not webbrowser.open(url):
            print("Error opening URL: "+url)
        else:
            print('Help on "'+subj+'" is now being displayed in a web browser')
        return
    # Go ahead and fork a subprocess to call the correct binary
    pid = os.fork()
    if pid == 0:  # child
        if sys.platform == 'darwin':
            if 0 != os.system('open "'+url+'"'):  # does not seem to keep '#.*'
                print("Error opening URL: "+url)
            os._exit(0)
        # The following retries if "-remote" doesnt work, opening a new browser
        # cmd = brow_bin+" -remote 'openURL("+url+")' '"+url+"' 1> /dev/null 2>&1"
        # if 0 != os.system(cmd)
        # print "Running "+brow_bin+" for HTML help..."
        # os.execvp(brow_bin,[brow_bin,url])
        # os._exit(0)
    else:  # parent
        print('Help on "'+subj+'" is now being displayed in a browser')
|
Read nbytes characters from file while running Tk mainloop
def read(self, file, nbytes):
    """Read nbytes characters from file while running Tk mainloop.

    :param file: an integer file descriptor, or a filehandle/socket with a
        fileno() method.
    :param nbytes: maximum number of characters to read.
    :return: the data read, as a str (bytes are decoded as ascii with
        replacement on Python 3); may be shorter than nbytes on EOF.
    :raises RuntimeError: if graphics are unavailable.
    :raises TypeError: if file is neither an int nor fileno()-capable.
    """
    if not capable.OF_GRAPHICS:
        raise RuntimeError("Cannot run this command without graphics")
    if isinstance(file, int):
        fd = file
    else:
        # Otherwise, assume we have Python file object
        try:
            fd = file.fileno()
        except:
            raise TypeError("file must be an integer or a filehandle/socket")
    init_tk_default_root() # harmless if already done
    self.widget = TKNTR._default_root
    if not self.widget:
        # no Tk widgets yet, so no need for mainloop
        # (shouldnt happen now with init_tk_default_root)
        # plain blocking read loop, accumulating chunks until done or EOF
        s = []
        while nbytes>0:
            snew = os.read(fd, nbytes) # returns bytes in PY3K
            if snew:
                if PY3K: snew = snew.decode('ascii','replace')
                s.append(snew)
                nbytes -= len(snew)
            else:
                # EOF -- just return what we have so far
                break
        return "".join(s)
    else:
        # data is accumulated into self.value by the _read callback, driven
        # by the Tk event loop; mainloop exits when _read() calls quit()
        self.nbytes = nbytes
        self.value = []
        self.widget.tk.createfilehandler(fd,
                                         TKNTR.READABLE | TKNTR.EXCEPTION,
                                         self._read)
        try:
            self.widget.mainloop()
        finally:
            # always unregister the handler, even if mainloop raises
            self.widget.tk.deletefilehandler(fd)
        return "".join(self.value)
|
Read waiting data and terminate Tk mainloop if done
def _read(self, fd, mask):
    """Read waiting data and terminate Tk mainloop if done.

    Tk filehandler callback registered by read(): appends whatever data is
    available to self.value, decrements self.nbytes, and stops the Tk
    mainloop once the requested byte count is reached or EOF is seen.
    """
    try:
        # if EOF was encountered on a tty, avoid reading again because
        # it actually requests more data
        if select.select([fd],[],[],0)[0]:
            snew = os.read(fd, self.nbytes) # returns bytes in PY3K
            if PY3K: snew = snew.decode('ascii','replace')
            self.value.append(snew)
            self.nbytes -= len(snew)
        else:
            # nothing readable -> treat as EOF below (len(snew) == 0)
            snew = ''
        if (self.nbytes <= 0 or len(snew) == 0) and self.widget:
            # stop the mainloop
            self.widget.quit()
    except OSError:
        raise IOError("Error reading from %s" % (fd,))
|
reads a delimited file and converts into a Signal
:param filename: string
:param timeColumnIdx: 0 indexed column number
:param dataColumnIdx: 0 indexed column number
:param delimiter: char
:return a Signal instance
def loadSignalFromDelimitedFile(filename, timeColumnIdx=0, dataColumnIdx=1, delimiter=',', skipHeader=0) -> Signal:
    """ reads a delimited file and converts into a Signal
    :param filename: string
    :param timeColumnIdx: 0 indexed column number
    :param dataColumnIdx: 0 indexed column number
    :param delimiter: char
    :param skipHeader: number of header rows to skip.
    :return a Signal instance
    :raises ValueError: if the requested columns do not exist in the file.
    """
    data = np.genfromtxt(filename, delimiter=delimiter, skip_header=skipHeader)
    # genfromtxt yields a 1-D array for a single-column file; normalise to 2-D
    # so the shape/indexing below behaves uniformly (originally IndexError'd)
    if data.ndim == 1:
        data = data.reshape(-1, 1)
    columnCount = data.shape[1]
    # BUGFIX: the original concatenated ints onto strings here, so these
    # error paths raised TypeError instead of the intended ValueError
    if columnCount < timeColumnIdx + 1:
        raise ValueError(
            '{0} has only {1} columns, time values can\'t be at column {2}'.format(filename, columnCount,
                                                                                   timeColumnIdx))
    if columnCount < dataColumnIdx + 1:
        raise ValueError(
            '{0} has only {1} columns, data values can\'t be at column {2}'.format(filename, columnCount,
                                                                                   dataColumnIdx))
    t = data[:, [timeColumnIdx]]
    samples = data[:, [dataColumnIdx]]
    # calculate fs as the mean interval between the time samples
    fs = int(round(1 / (np.diff(t, n=1, axis=0).mean()), 0))
    return Signal(samples.ravel(), fs)
|
reads a wav file into a Signal and scales the input so that the samples are expressed in real world values
(as defined by the calibration signal).
:param inputSignalFile: a path to the input signal file
:param calibrationSignalFile: a path the calibration signal file
:param calibrationRealWorldValue: the real world value represented by the calibration signal
:param bitDepth: the bit depth of the input signal, used to rescale the value to a range of +1 to -1
:returns: a Signal
def loadSignalFromWav(inputSignalFile, calibrationRealWorldValue=None, calibrationSignalFile=None, start=None,
                      end=None) -> Signal:
    """ reads a wav file into a Signal and scales the input so that the samples are expressed in real world values
    (as defined by the calibration signal).
    :param inputSignalFile: a path to the input signal file
    :param calibrationRealWorldValue: the real world value represented by the calibration signal
    :param calibrationSignalFile: a path to the calibration signal file
    :param start: the time to start reading from in HH:mm:ss.SSS format.
    :param end: the time to stop reading at in HH:mm:ss.SSS format.
    :returns: a Signal
    """
    measured = readWav(inputSignalFile, start=start, end=end)
    if calibrationSignalFile is None:
        return measured
    # scale so the calibration signal's peak maps onto its known real-world value
    calibration = readWav(calibrationSignalFile)
    scalingFactor = calibrationRealWorldValue / np.max(calibration.samples)
    return Signal(measured.samples * scalingFactor, measured.fs)
|
reads a wav file into a Signal.
:param inputSignalFile: a path to the input signal file
:param selectedChannel: the channel to read.
:param start: the time to start reading from in HH:mm:ss.SSS format.
:param end: the time to end reading from in HH:mm:ss.SSS format.
:returns: Signal.
def readWav(inputSignalFile, selectedChannel=1, start=None, end=None) -> Signal:
    """ reads a wav file into a Signal.
    :param inputSignalFile: a path to the input signal file
    :param selectedChannel: the 1-based channel to read (for multichannel files).
    :param start: the time to start reading from in HH:mm:ss.SSS format.
    :param end: the time to end reading from in HH:mm:ss.SSS format.
    :returns: Signal.
    """
    def asFrames(time, fs):
        # convert an HH:mm:ss.SSS timestamp to a frame index at sample rate fs
        hours, minutes, seconds = (time.split(":"))[-3:]
        hours = int(hours)
        minutes = int(minutes)
        seconds = float(seconds)
        millis = int((3600000 * hours) + (60000 * minutes) + (1000 * seconds))
        return int(millis * (fs / 1000))
    import soundfile as sf
    if start is not None or end is not None:
        info = sf.info(inputSignalFile)
        startFrame = 0 if start is None else asFrames(start, info.samplerate)
        endFrame = None if end is None else asFrames(end, info.samplerate)
        ys, frameRate = sf.read(inputSignalFile, start=startFrame, stop=endFrame)
    else:
        ys, frameRate = sf.read(inputSignalFile)
    # BUGFIX: soundfile returns a (frames, channels) 2-D array for multichannel
    # files; the original sliced ys[::selectedChannel], which kept every Nth
    # *frame* (i.e. decimated the audio) instead of selecting a channel.
    if ys.ndim > 1:
        samples = ys[:, selectedChannel - 1]
    else:
        samples = ys
    return Signal(samples, frameRate)
|
A factory method for loading a tri axis measurement from a single file.
:param filename: the file to load from.
:param timeColumnIdx: the column containing time data.
:param xIdx: the column containing x axis data.
:param yIdx: the column containing y axis data.
:param zIdx: the column containing z axis data.
:param delimiter: the delimiter.
:param skipHeader: how many rows of headers to skip.
:return: the measurement
def loadTriAxisSignalFromFile(filename, timeColumnIdx=0, xIdx=1, yIdx=2, zIdx=3, delimiter=',',
                              skipHeader=0) -> TriAxisSignal:
    """
    A factory method for loading a tri axis measurement from a single file.
    :param filename: the file to load from.
    :param timeColumnIdx: the column containing time data.
    :param xIdx: the column containing x axis data.
    :param yIdx: the column containing y axis data.
    :param zIdx: the column containing z axis data.
    :param delimiter: the delimiter.
    :param skipHeader: how many rows of headers to skip.
    :return: the measurement
    """
    # all three axes share the same file, time column and parsing options
    common = dict(timeColumnIdx=timeColumnIdx, delimiter=delimiter, skipHeader=skipHeader)
    axes = {name: loadSignalFromDelimitedFile(filename, dataColumnIdx=idx, **common)
            for name, idx in (('x', xIdx), ('y', yIdx), ('z', zIdx))}
    return TriAxisSignal(**axes)
|
analyses the source and returns a PSD, segment is set to get ~1Hz frequency resolution
:param ref: the reference value for dB purposes.
:param segmentLengthMultiplier: allow for increased resolution.
:param mode: cq or none.
:return:
f : ndarray
Array of sample frequencies.
Pxx : ndarray
Power spectral density.
def psd(self, ref=None, segmentLengthMultiplier=1, mode=None, **kwargs):
    """
    analyses the source and returns a PSD, segment is set to get ~1Hz frequency resolution
    :param ref: the reference value for dB purposes; if set, the PSD is converted to dB.
    :param segmentLengthMultiplier: allow for increased resolution.
    :param mode: cq or none.
    :param kwargs: passed through to scipy.signal.welch (non-cq mode only).
    :return:
        f : ndarray
            Array of sample frequencies.
        Pxx : ndarray
            Power spectral density.
    """
    def analysisFunc(x, nperseg, **kwargs):
        # NOTE(review): x is unused here (welch always reads self.samples);
        # presumably it exists to match the callback signature _cq expects,
        # as in spectrum/peakSpectrum -- TODO confirm
        f, Pxx_den = signal.welch(self.samples, self.fs, nperseg=nperseg, detrend=False, **kwargs)
        if ref is not None:
            Pxx_den = librosa.power_to_db(Pxx_den, ref)
        return f, Pxx_den
    if mode == 'cq':
        return self._cq(analysisFunc, segmentLengthMultiplier)
    else:
        return analysisFunc(0, self.getSegmentLength() * segmentLengthMultiplier, **kwargs)
|
analyses the source to generate the linear spectrum.
:param ref: the reference value for dB purposes.
:param segmentLengthMultiplier: allow for increased resolution.
:param mode: cq or none.
:return:
f : ndarray
Array of sample frequencies.
Pxx : ndarray
linear spectrum.
def spectrum(self, ref=None, segmentLengthMultiplier=1, mode=None, **kwargs):
    """
    analyses the source to generate the linear spectrum.
    :param ref: the reference value for dB purposes; if set, output is converted to dB.
    :param segmentLengthMultiplier: allow for increased resolution.
    :param mode: cq or none.
    :param kwargs: passed through to scipy.signal.welch (non-cq mode only).
    :return:
        f : ndarray
            Array of sample frequencies.
        Pxx : ndarray
            linear spectrum.
    """
    def analysisFunc(x, nperseg, **kwargs):
        f, Pxx_spec = signal.welch(self.samples, self.fs, nperseg=nperseg, scaling='spectrum', detrend=False,
                                   **kwargs)
        # welch returns power; square root converts to a linear amplitude spectrum
        Pxx_spec = np.sqrt(Pxx_spec)
        # it seems a 3dB adjustment is required to account for the change in nperseg
        if x > 0:
            Pxx_spec = Pxx_spec / (10 ** ((3 * x) / 20))
        if ref is not None:
            Pxx_spec = librosa.amplitude_to_db(Pxx_spec, ref)
        return f, Pxx_spec
    if mode == 'cq':
        return self._cq(analysisFunc, segmentLengthMultiplier)
    else:
        return analysisFunc(0, self.getSegmentLength() * segmentLengthMultiplier, **kwargs)
|
analyses the source to generate the max values per bin per segment
:param ref: the reference value for dB purposes.
:param segmentLengthMultiplier: allow for increased resolution.
:param mode: cq or none.
:return:
f : ndarray
Array of sample frequencies.
Pxx : ndarray
linear spectrum max values.
def peakSpectrum(self, ref=None, segmentLengthMultiplier=1, mode=None, window='hann'):
    """
    analyses the source to generate the max values per bin per segment
    :param ref: the reference value for dB purposes; if set, output is converted to dB.
    :param segmentLengthMultiplier: allow for increased resolution.
    :param mode: cq or none.
    :param window: the window function passed to scipy.signal.spectrogram.
    :return:
        f : ndarray
            Array of sample frequencies.
        Pxx : ndarray
            linear spectrum max values.
    """
    def analysisFunc(x, nperseg):
        freqs, _, Pxy = signal.spectrogram(self.samples,
                                           self.fs,
                                           window=window,
                                           nperseg=int(nperseg),
                                           noverlap=int(nperseg // 2),
                                           detrend=False,
                                           scaling='spectrum')
        # per-bin maximum over all time slices, converted from power to amplitude
        Pxy_max = np.sqrt(Pxy.max(axis=-1).real)
        # 3dB-per-step adjustment for the changed nperseg (cq mode only; x > 0)
        if x > 0:
            Pxy_max = Pxy_max / (10 ** ((3 * x) / 20))
        if ref is not None:
            Pxy_max = librosa.amplitude_to_db(Pxy_max, ref=ref)
        return freqs, Pxy_max
    if mode == 'cq':
        return self._cq(analysisFunc, segmentLengthMultiplier)
    else:
        return analysisFunc(0, self.getSegmentLength() * segmentLengthMultiplier)
|
analyses the source to generate a spectrogram
:param ref: the reference value for dB purposes.
:param segmentLengthMultiplier: allow for increased resolution.
:return:
t : ndarray
Array of time slices.
f : ndarray
Array of sample frequencies.
Pxx : ndarray
linear spectrum values.
def spectrogram(self, ref=None, segmentLengthMultiplier=1, window='hann'):
    """
    analyses the source to generate a spectrogram
    :param ref: the reference value for dB purposes; if set, output is converted to dB.
    :param segmentLengthMultiplier: allow for increased resolution.
    :param window: the window function passed to scipy.signal.spectrogram.
    :return:
        f : ndarray
            Array of sample frequencies.
        t : ndarray
            Array of time slices.
        Sxx : ndarray
            linear spectrum values.
    """
    # scipy.signal.spectrogram returns (f, t, Sxx). The original unpacked
    # the same values into misleadingly swapped names (t, f) and documented
    # the return order wrongly; the returned tuple values are unchanged.
    f, t, Sxx = signal.spectrogram(self.samples,
                                   self.fs,
                                   window=window,
                                   nperseg=self.getSegmentLength() * segmentLengthMultiplier,
                                   detrend=False,
                                   scaling='spectrum')
    # convert power to linear amplitude
    Sxx = np.sqrt(Sxx)
    if ref is not None:
        Sxx = librosa.amplitude_to_db(Sxx, ref)
    return f, t, Sxx
|
Creates a copy of the signal with the low pass applied, args specified are passed through to _butter.
:return:
def lowPass(self, *args):
    """
    Creates a copy of the signal with the low pass applied; args specified are passed through to _butter.
    :return: a new low-passed Signal at the same sample rate.
    """
    filtered = self._butter(self.samples, 'low', *args)
    return Signal(filtered, fs=self.fs)
|
Creates a copy of the signal with the high pass applied, args specified are passed through to _butter.
:return:
def highPass(self, *args):
    """
    Creates a copy of the signal with the high pass applied; args specified are passed through to _butter.
    :return: a new high-passed Signal at the same sample rate.
    """
    filtered = self._butter(self.samples, 'high', *args)
    return Signal(filtered, fs=self.fs)
|
Applies a digital butterworth filter via filtfilt at the specified f3 and order. Default values are set to
correspond to apparently sensible filters that distinguish between vibration and tilt from an accelerometer.
:param data: the data to filter.
:param btype: high or low.
:param f3: the f3 of the filter.
:param order: the filter order.
:return: the filtered signal.
def _butter(self, data, btype, f3=2, order=2):
"""
Applies a digital butterworth filter via filtfilt at the specified f3 and order. Default values are set to
correspond to apparently sensible filters that distinguish between vibration and tilt from an accelerometer.
:param data: the data to filter.
:param btype: high or low.
:param f3: the f3 of the filter.
:param order: the filter order.
:return: the filtered signal.
"""
b, a = signal.butter(order, f3 / (0.5 * self.fs), btype=btype)
y = signal.filtfilt(b, a, data)
return y
|
gets the named analysis on the given axis and caches the result (or reads from the cache if data is available
already)
:param axis: the named axis.
:param analysis: the analysis name.
:return: the analysis tuple.
def _getAnalysis(self, axis, analysis, ref=None):
    """
    gets the named analysis on the given axis and caches the result (or reads from the cache if data is available
    already)
    :param axis: the named axis ('x', 'y', 'z' or 'sum').
    :param analysis: the analysis name (looked up as a method on the axis data, e.g. 'psd').
    :param ref: optional reference value for dB conversion.
    :return: the analysis tuple, or None if the axis is unknown or the analysis cannot be summed.
    """
    # results are cached per ref value; str(ref) keys the no-ref case as 'None'
    cache = self.cache.get(str(ref))
    if cache is None:
        cache = {'x': {}, 'y': {}, 'z': {}, 'sum': {}}
        self.cache[str(ref)] = cache
    if axis in cache:
        data = self.cache['raw'].get(axis, None)
        cachedAxis = cache.get(axis)
        if cachedAxis.get(analysis) is None:
            if axis == 'sum':
                if self._canSum(analysis):
                    # per-axis results are fetched without ref so any dB
                    # conversion is applied once, to the summed result only
                    fx, Pxx = self._getAnalysis('x', analysis)
                    fy, Pxy = self._getAnalysis('y', analysis)
                    fz, Pxz = self._getAnalysis('z', analysis)
                    # calculate the sum of the squares with an additional weighting for x and y
                    Psum = (((Pxx * 2.2) ** 2) + ((Pxy * 2.4) ** 2) + (Pxz ** 2)) ** 0.5
                    if ref is not None:
                        Psum = librosa.amplitude_to_db(Psum, ref)
                    cachedAxis[analysis] = (fx, Psum)
                else:
                    # this analysis cannot be meaningfully summed across axes
                    return None
            else:
                # high-pass the raw axis data first (per _butter: separates
                # vibration from tilt) then run the named analysis on it
                cachedAxis[analysis] = getattr(data.highPass(), analysis)(ref=ref)
        return cachedAxis[analysis]
    else:
        return None
|
Checks if a given date is a legal positivist date
def legal_date(year, month, day):
    '''Checks if a given date is a legal positivist date.

    Returns True for a valid date; raises ValueError otherwise.
    '''
    def bad():
        raise ValueError("Invalid Positivist date: ({}, {}, {})".format(year, month, day))
    # explicit checks instead of assert: asserts are stripped under `python -O`,
    # which would have silently disabled all validation
    if year < 1 or not 0 < month <= 14 or not 0 < day <= 28:
        bad()
    if month == 14:
        # month 14 holds the complementary days: 2 in a leap year, 1 otherwise
        maxday = 2 if isleap(year + YEAR_EPOCH - 1) else 1
        if day > maxday:
            bad()
    return True
|
Convert a Positivist date to Julian day count.
def to_jd(year, month, day):
    '''Convert a Positivist date to Julian day count.'''
    legal_date(year, month, day)
    gyear = year + YEAR_EPOCH - 1
    # Gregorian leap-day corrections for the preceding whole years
    leap_days = floor((gyear - 1) / 4) - floor((gyear - 1) / 100) + floor((gyear - 1) / 400)
    # whole years + leap corrections + 28-day months + day of month
    return gregorian.EPOCH - 1 + 365 * (gyear - 1) + leap_days + 28 * (month - 1) + day
|
Convert a Julian day count to Positivist date.
def from_jd(jd):
    '''Convert a Julian day count to Positivist date.

    Walks down the Gregorian intercalation hierarchy (400-year, 100-year,
    4-year cycles) to locate the year, then splits the day-of-year into
    28-day months.
    '''
    # explicit check instead of assert: asserts are stripped under `python -O`
    # (`not jd >= EPOCH` also rejects NaN, matching the original assert)
    if not jd >= EPOCH:
        raise ValueError('Invalid Julian day')
    depoch = floor(jd - 0.5) + 0.5 - gregorian.EPOCH
    quadricent = floor(depoch / gregorian.INTERCALATION_CYCLE_DAYS)
    dqc = depoch % gregorian.INTERCALATION_CYCLE_DAYS
    cent = floor(dqc / gregorian.LEAP_SUPPRESSION_DAYS)
    dcent = dqc % gregorian.LEAP_SUPPRESSION_DAYS
    quad = floor(dcent / gregorian.LEAP_CYCLE_DAYS)
    dquad = dcent % gregorian.LEAP_CYCLE_DAYS
    yindex = floor(dquad / gregorian.YEAR_DAYS)
    year = (
        quadricent * gregorian.INTERCALATION_CYCLE_YEARS +
        cent * gregorian.LEAP_SUPPRESSION_YEARS +
        quad * gregorian.LEAP_CYCLE_YEARS + yindex
    )
    if yindex == 4:
        # the 366th day of a leap year overflows into yindex 4; pin it to the
        # last day (yearday 365) of the preceding year index
        yearday = 365
        year = year - 1
    else:
        yearday = (
            depoch -
            quadricent * gregorian.INTERCALATION_CYCLE_DAYS -
            cent * gregorian.LEAP_SUPPRESSION_DAYS -
            quad * gregorian.LEAP_CYCLE_DAYS -
            yindex * gregorian.YEAR_DAYS
        )
    month = floor(yearday / 28)
    return (year - YEAR_EPOCH + 2, month + 1, int(yearday - (month * 28)) + 1)
|
Give the name of the month and day for a given date.
Returns:
tuple month_name, day_name
def dayname(year, month, day):
    '''
    Give the name of the month and day for a given date.
    Returns:
        tuple month_name, day_name
    '''
    legal_date(year, month, day)
    # day-of-year indexes into the (leap-aware) day-name table
    yearday = (month - 1) * 28 + day
    names = data.day_names_leap if isleap(year + YEAR_EPOCH - 1) else data.day_names
    return MONTHS[month - 1], names[yearday - 1]
|
:return: true if the lastUpdateTime is more than maxAge seconds ago.
def hasExpired(self):
    """
    :return: true if the lastUpdateTime is more than maxAge seconds ago.
    """
    age = datetime.datetime.utcnow() - self.lastUpdateTime
    return age.total_seconds() > self.maxAgeSeconds
|
Adds the named device to the store.
:param deviceId:
:param device:
:return:
def accept(self, deviceId, device):
    """
    Adds the named device to the store.
    :param deviceId: unique id of the reporting device.
    :param device: the payload the device sent; stored on the Device and forwarded
        to the target state controller.
    :return: None
    """
    storedDevice = self.devices.get(deviceId)
    if storedDevice is None:
        logger.info('Initialising device ' + deviceId)
        storedDevice = Device(self.maxAgeSeconds)
        storedDevice.deviceId = deviceId
        # this uses an async handler to decouple the recorder put (of the data) from the analyser handling that data
        # thus the recorder will become free as soon as it has handed off the data. This means delivery is only
        # guaranteed as long as the analyser stays up but this is not a system that sits on top of a bulletproof
        # message bus so unlucky :P
        storedDevice.dataHandler = AsyncHandler('analyser', CSVLogger('analyser', deviceId, self.dataDir))
    else:
        logger.debug('Pinged by device ' + deviceId)
    # every ping refreshes the payload and the last-seen time used by hasExpired()
    storedDevice.payload = device
    storedDevice.lastUpdateTime = datetime.datetime.utcnow()
    # TODO if device has FAILED, do something?
    self.devices.update({deviceId: storedDevice})
    self.targetStateController.updateDeviceState(storedDevice.payload)
|
The devices in the given state, or all devices if the arg is none.
:param status: the state to match against.
:return: the devices
def getDevices(self, status=None):
    """
    The devices in the given state, or all devices if the arg is None.
    :param status: the state to match against.
    :return: the devices
    """
    devices = self.devices.values()
    if status is None:
        return list(devices)
    return [device for device in devices if device.payload.get('status') == status]
|
gets the named device.
:param id: the id.
:return: the device
def getDevice(self, id):
    """
    gets the named device.
    :param id: the id.
    :return: the device, or None if no device has that id.
    """
    for device in self.devices.values():
        if device.deviceId == id:
            return device
    return None
|
A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a
while.
def _evictStaleDevices(self):
    """
    A housekeeping function which runs in a worker thread and which evicts devices that haven't sent an update for a
    while.

    Loops until self.running is cleared, checking roughly once per second.
    """
    while self.running:
        # snapshot the expired ids first so the dict isn't mutated mid-iteration
        expiredDeviceIds = [key for key, value in self.devices.items() if value.hasExpired()]
        for key in expiredDeviceIds:
            logger.warning("Device timeout, removing " + key)
            del self.devices[key]
        time.sleep(1)
        # TODO send reset after a device fails
    logger.warning("DeviceCaretaker is now shutdown")
|
Schedules the requested measurement session with all INITIALISED devices.
:param measurementId:
:param duration:
:param start:
:return: a dict of device vs status.
def scheduleMeasurement(self, measurementId, duration, start):
    """
    Schedules the requested measurement session with all INITIALISED devices.
    :param measurementId: name/id of the measurement session (used in the device URL).
    :param duration: measurement duration, passed through to the device.
    :param start: a datetime; sent to the device formatted with DATETIME_FORMAT.
    :return: a dict of device vs status (HTTP status code, or 500 when the PUT itself failed).
    """
    # TODO subtract 1s from start and format
    results = {}
    for device in self.getDevices(RecordingDeviceStatus.INITIALISED.name):
        logger.info('Sending measurement ' + measurementId + ' to ' + device.payload['serviceURL'])
        try:
            resp = self.httpclient.put(device.payload['serviceURL'] + '/measurements/' + measurementId,
                                       json={'duration': duration, 'at': start.strftime(DATETIME_FORMAT)})
            logger.info('Response for ' + measurementId + ' from ' + device.payload['serviceURL'] + ' is ' +
                        str(resp.status_code))
            results[device] = resp.status_code
        except Exception as e:
            # a failing device is recorded as a 500 so it doesn't abort the other devices
            logger.exception(e)
            results[device] = 500
    return results
|
Allows the UI to update parameters ensuring that all devices are kept in sync. Payload is json in TargetState
format.
:return:
def patch(self):
    """
    Allows the UI to update parameters ensuring that all devices are kept in sync. Payload is json in TargetState
    format.
    :return: (None, 200) -- always reports success; the update itself is delegated
        to the target state controller.
    """
    # TODO block until all devices have updated?
    # note: the local name `json` shadows any module-level json import here
    json = request.get_json()
    logger.info("Updating target state with " + str(json))
    self._targetStateController.updateTargetState(json)
    return None, 200
|
Parse a comma-separated list of values, or a filename (starting with @)
containing a list value on each line.
def list_parse(name_list):
    """Parse a comma-separated list of values, or a filename (starting with @)
    containing a list value on each line.

    Returns None (after logging a warning) if the @-file is missing or unreadable.
    """
    if name_list and name_list[0] == '@':
        value = name_list[1:]
        if not os.path.exists(value):
            log.warning('The file %s does not exist' % value)
            return
        try:
            # context manager guarantees the handle is closed (the original
            # leaked the open file object)
            with open(value, 'r') as fh:
                return [v.strip() for v in fh.readlines()]
        except IOError as e:
            log.warning('reading %s failed: %s; ignoring this file' %
                        (value, e))
    else:
        return [v.strip() for v in name_list.split(',')]
|
Create the minimum match dictionary of keys
def _mmInit(self):
"""Create the minimum match dictionary of keys"""
# cache references to speed up loop a bit
mmkeys = {}
mmkeysGet = mmkeys.setdefault
minkeylength = self.minkeylength
for key in self.data.keys():
# add abbreviations as short as minkeylength
# always add at least one entry (even for key="")
lenkey = len(key)
start = min(minkeylength,lenkey)
for i in range(start,lenkey+1):
mmkeysGet(key[0:i],[]).append(key)
self.mmkeys = mmkeys
|
Hook to resolve ambiguities in selected keys
def resolve(self, key, keylist):
    """Hook to resolve ambiguities in selected keys"""
    # subclasses may override; default behaviour is to refuse the lookup
    message = "Ambiguous key {!r}, could be any of {}".format(key, sorted(keylist))
    raise AmbiguousKeyError(message)
|
Add a new key/item pair to the dictionary. Resets an existing
key value only if this is an exact match to a known key.
def add(self, key, item):
    """Add a new key/item pair to the dictionary. Resets an existing
    key value only if this is an exact match to a known key."""
    if self.mmkeys is not None and key not in self.data:
        # register every abbreviation from minkeylength up to the full key;
        # always add at least one entry (even for key == "")
        register = self.mmkeys.setdefault
        for n in range(min(self.minkeylength, len(key)), len(key) + 1):
            register(key[:n], []).append(key)
    self.data[key] = item
|
Raises exception if key is ambiguous
def get(self, key, failobj=None, exact=0):
    """Raises exception if key is ambiguous"""
    # expand the abbreviation unless an exact lookup was requested
    lookup = key if exact else self.getfullkey(key, new=1)
    return self.data.get(lookup, failobj)
|
Raises an exception if key is ambiguous
def _has(self, key, exact=0):
"""Raises an exception if key is ambiguous"""
if not exact:
key = self.getfullkey(key,new=1)
return key in self.data
|
Returns a list of all the matching values for key,
containing a single entry for unambiguous matches and
multiple entries for ambiguous matches.
def getall(self, key, failobj=None):
    """Returns a list of all the matching values for key,
    containing a single entry for unambiguous matches and
    multiple entries for ambiguous matches."""
    if self.mmkeys is None:
        self._mmInit()
    fullkeys = self.mmkeys.get(key)
    if not fullkeys:
        return failobj
    return [self.data.get(fullkey) for fullkey in fullkeys]
|
Returns a list of the full key names (not the items)
for all the matching values for key. The list will
contain a single entry for unambiguous matches and
multiple entries for ambiguous matches.
def getallkeys(self, key, failobj=None):
    """Returns a list of the full key names (not the items)
    for all the matching values for key. The list will
    contain a single entry for unambiguous matches and
    multiple entries for ambiguous matches."""
    mm = self.mmkeys
    if mm is None:
        self._mmInit()
        mm = self.mmkeys
    return mm.get(key, failobj)
|
Returns failobj if key is not found or is ambiguous
def get(self, key, failobj=None, exact=0):
    """Returns failobj if key is not found or is ambiguous"""
    if exact:
        return self.data.get(key, failobj)
    # an unknown or ambiguous abbreviation raises KeyError -> failobj
    try:
        fullkey = self.getfullkey(key)
    except KeyError:
        return failobj
    return self.data.get(fullkey, failobj)
|
Returns false if key is not found or is ambiguous
def _has(self, key, exact=0):
"""Returns false if key is not found or is ambiguous"""
if not exact:
try:
key = self.getfullkey(key)
return 1
except KeyError:
return 0
else:
return key in self.data
|
parameter factory function
fields is a list of the comma-separated fields (as in the .par file).
Each entry is a string or None (indicating that field was omitted.)
Set the strict parameter to a non-zero value to do stricter parsing
(to find errors in the input)
def parFactory(fields, strict=0):
    """parameter factory function

    fields is a list of the comma-separated fields (as in the .par file).
    Each entry is a string or None (indicating that field was omitted.)
    Set the strict parameter to a non-zero value to do stricter parsing
    (to find errors in the input)"""
    if len(fields) < 3 or None in fields[0:3]:
        raise SyntaxError("At least 3 fields must be given")
    # dispatch on the declared parameter type (second field);
    # `ptype` avoids shadowing the builtin `type`
    ptype = fields[1]
    if ptype in _string_types:
        return IrafParS(fields, strict)
    if ptype == 'R':
        return StrictParR(fields, 1)
    if ptype in _real_types:
        return IrafParR(fields, strict)
    if ptype == "I":
        return StrictParI(fields, 1)
    if ptype == "i":
        return IrafParI(fields, strict)
    if ptype == "b":
        return IrafParB(fields, strict)
    if ptype == "ar":
        return IrafParAR(fields, strict)
    if ptype == "ai":
        return IrafParAI(fields, strict)
    if ptype == "as":
        return IrafParAS(fields, strict)
    if ptype == "ab":
        return IrafParAB(fields, strict)
    if ptype[:1] == "a":
        raise SyntaxError("Cannot handle arrays of type %s" % ptype)
    raise SyntaxError("Cannot handle parameter type %s" % ptype)
|
Set cmdline flag
def setCmdline(self, value=1):
    """Set cmdline flag"""
    # set through dictionary to avoid extra calls to __setattr__
    flags = self.flags | _cmdlineFlag if value else self.flags & ~_cmdlineFlag
    self.__dict__['flags'] = flags
|
Set changed flag
def setChanged(self, value=1):
    """Set changed flag"""
    # set through dictionary to avoid another call to __setattr__
    flags = self.flags | _changedFlag if value else self.flags & ~_changedFlag
    self.__dict__['flags'] = flags
|
Return true if this parameter is learned
Hidden parameters are not learned; automatic parameters inherit
behavior from package/cl; other parameters are learned.
If mode is set, it determines how automatic parameters behave.
If not set, cl.mode parameter determines behavior.
def isLearned(self, mode=None):
    """Return true if this parameter is learned

    Hidden parameters are not learned; automatic parameters inherit
    behavior from package/cl; other parameters are learned.
    If mode is set, it determines how automatic parameters behave.
    If not set, cl.mode parameter determines behavior.
    """
    if "l" in self.mode:
        return 1
    if "h" in self.mode:
        return 0
    if "a" in self.mode:
        effective = mode if mode is not None else 'ql'  # that is, iraf.cl.mode
        # hidden (and not explicitly learned) => not learned
        if "h" in effective and "l" not in effective:
            return 0
    return 1
|
Interactively prompt for parameter value
def getWithPrompt(self):
    """Interactively prompt for parameter value.

    Builds a prompt from the parameter's prompt text (first line only),
    appends the choice list or min:max range and the current value as the
    default, then reads from stdin (via irafutils.tkreadline so Tk widgets
    keep updating) until an acceptable value is entered.

    :raises EOFError: on EOF when no acceptable value has been entered.
    """
    if self.prompt:
        pstring = self.prompt.split("\n")[0].strip()
    else:
        pstring = self.name
    if self.choice:
        # show the allowed choices, e.g. "(yes|no)"
        schoice = list(map(self.toString, self.choice))
        pstring = pstring + " (" + "|".join(schoice) + ")"
    elif self.min not in [None, INDEF] or \
         self.max not in [None, INDEF]:
        # show the allowed range, e.g. "(0:100)"
        pstring = pstring + " ("
        if self.min not in [None, INDEF]:
            pstring = pstring + self.toString(self.min)
        pstring = pstring + ":"
        if self.max not in [None, INDEF]:
            pstring = pstring + self.toString(self.max)
        pstring = pstring + ")"
    # add current value as default
    if self.value is not None:
        pstring = pstring + " (" + self.toString(self.value,quoted=1) + ")"
    pstring = pstring + ": "
    # don't redirect stdin/out unless redirected filehandles are also ttys
    # or unless originals are NOT ttys
    stdout = sys.__stdout__
    try:
        if sys.stdout.isatty() or not stdout.isatty():
            stdout = sys.stdout
    except AttributeError:
        pass
    stdin = sys.__stdin__
    try:
        if sys.stdin.isatty() or not stdin.isatty():
            stdin = sys.stdin
    except AttributeError:
        pass
    # print prompt, suppressing both newline and following space
    stdout.write(pstring)
    stdout.flush()
    ovalue = irafutils.tkreadline(stdin)
    value = ovalue.strip()
    # loop until we get an acceptable value
    while (1):
        try:
            # null input usually means use current value as default
            # check it anyway since it might not be acceptable
            if value == "": value = self._nullPrompt()
            self.set(value)
            # None (no value) is not acceptable value after prompt
            if self.value is not None: return
            # if not EOF, keep looping
            if ovalue == "":
                stdout.flush()
                raise EOFError("EOF on parameter prompt")
            print("Error: specify a value for the parameter")
        except ValueError as e:
            # set() rejected the value; report and re-prompt
            print(str(e))
        stdout.write(pstring)
        stdout.flush()
        ovalue = irafutils.tkreadline(stdin)
        value = ovalue.strip()
|
Return value of this parameter as a string (or in native format
if native is non-zero.)
def get(self, field=None, index=None, lpar=0, prompt=1, native=0, mode="h"):
"""Return value of this parameter as a string (or in native format
if native is non-zero.)"""
if field and field != "p_value":
# note p_value comes back to this routine, so shortcut that case
return self._getField(field,native=native,prompt=prompt)
# may prompt for value if prompt flag is set
if prompt: self._optionalPrompt(mode)
if index is not None:
raise SyntaxError("Parameter "+self.name+" is not an array")
if native:
rv = self.value
else:
rv = self.toString(self.value)
return rv
|
Set value of this parameter from a string or other value.
Field is optional parameter field (p_prompt, p_minimum, etc.)
Index is optional array index (zero-based). Set check=0 to
assign the value without checking to see if it is within
the min-max range or in the choice list.
def set(self, value, field=None, index=None, check=1):
"""Set value of this parameter from a string or other value.
Field is optional parameter field (p_prompt, p_minimum, etc.)
Index is optional array index (zero-based). Set check=0 to
assign the value without checking to see if it is within
the min-max range or in the choice list."""
if index is not None:
raise SyntaxError("Parameter "+self.name+" is not an array")
if field:
self._setField(value,field,check=check)
else:
if check:
self.value = self.checkValue(value)
else:
self.value = self._coerceValue(value)
self.setChanged()
|
Check and convert a parameter value.
Raises an exception if the value is not permitted for this
parameter. Otherwise returns the value (converted to the
right type.)
def checkValue(self,value,strict=0):
"""Check and convert a parameter value.
Raises an exception if the value is not permitted for this
parameter. Otherwise returns the value (converted to the
right type.)
"""
v = self._coerceValue(value,strict)
return self.checkOneValue(v,strict)
|
Checks a single value to see if it is in range or choice list
Allows indirection strings starting with ")". Assumes
v has already been converted to right value by
_coerceOneValue. Returns value if OK, or raises
ValueError if not OK.
def checkOneValue(self,v,strict=0):
"""Checks a single value to see if it is in range or choice list
Allows indirection strings starting with ")". Assumes
v has already been converted to right value by
_coerceOneValue. Returns value if OK, or raises
ValueError if not OK.
"""
if v in [None, INDEF] or (isinstance(v,str) and v[:1] == ")"):
return v
elif v == "":
# most parameters treat null string as omitted value
return None
elif self.choice is not None and v not in self.choiceDict:
schoice = list(map(self.toString, self.choice))
schoice = "|".join(schoice)
raise ValueError("Parameter %s: "
"value %s is not in choice list (%s)" %
(self.name, str(v), schoice))
elif (self.min not in [None, INDEF] and v<self.min):
raise ValueError("Parameter %s: "
"value `%s' is less than minimum `%s'" %
(self.name, str(v), str(self.min)))
elif (self.max not in [None, INDEF] and v>self.max):
raise ValueError("Parameter %s: "
"value `%s' is greater than maximum `%s'" %
(self.name, str(v), str(self.max)))
return v
|
Return dpar-style executable assignment for parameter
Default is to write CL version of code; if cl parameter is
false, writes Python executable code instead.
def dpar(self, cl=1):
"""Return dpar-style executable assignment for parameter
Default is to write CL version of code; if cl parameter is
false, writes Python executable code instead.
"""
sval = self.toString(self.value, quoted=1)
if not cl:
if sval == "": sval = "None"
s = "%s = %s" % (self.name, sval)
return s
|
Return pretty list description of parameter
    def pretty(self,verbose=0):
        """Return pretty list description of parameter

        The one-line form is "name = value  prompt", with the name shown in
        parentheses for hidden-mode parameters.  With verbose set, a second
        section listing the choice values or the min/max range is appended,
        wrapped at roughly 80 columns.
        """
        # split prompt lines and add blanks in later lines to align them
        plines = self.prompt.split('\n')
        for i in range(len(plines)-1): plines[i+1] = 32*' ' + plines[i+1]
        plines = '\n'.join(plines)
        # name column is at most 12 characters wide
        namelen = min(len(self.name), 12)
        pvalue = self.get(prompt=0,lpar=1)
        # these parameter types always display their value quoted
        alwaysquoted = ['s', 'f', '*gcur', '*imcur', '*ukey', 'pset']
        if self.type in alwaysquoted and self.value is not None: pvalue = '"' + pvalue + '"'
        if self.mode == "h":
            # hidden parameters are wrapped in parentheses
            s = "%13s = %-15s %s" % ("("+self.name[:namelen],
                        pvalue+")", plines)
        else:
            s = "%13s = %-15s %s" % (self.name[:namelen],
                        pvalue, plines)
        if not verbose: return s
        # verbose: append the constraint description on continuation lines
        if self.choice is not None:
            # "|a|b|..." choice list, wrapped when a line exceeds 80 columns
            s = s + "\n" + 32*" " + "|"
            nline = 33
            for i in range(len(self.choice)):
                sch = str(self.choice[i]) + "|"
                s = s + sch
                nline = nline + len(sch) + 1
                if nline > 80:
                    # start a new continuation line aligned with the first
                    s = s + "\n" + 32*" " + "|"
                    nline = 33
        elif self.min not in [None, INDEF] or self.max not in [None, INDEF]:
            # "min <= name <= max" with whichever bounds are defined
            s = s + "\n" + 32*" "
            if self.min not in [None, INDEF]:
                s = s + str(self.min) + " <= "
            s = s + self.name
            if self.max not in [None, INDEF]:
                s = s + " <= " + str(self.max)
        return s
|
Return .par format string for this parameter
If dolist is set, returns fields as a list of strings. Default
is to return a single string appropriate for writing to a file.
def save(self, dolist=0):
"""Return .par format string for this parameter
If dolist is set, returns fields as a list of strings. Default
is to return a single string appropriate for writing to a file.
"""
quoted = not dolist
fields = 7*[""]
fields[0] = self.name
fields[1] = self.type
fields[2] = self.mode
fields[3] = self.toString(self.value,quoted=quoted)
if self.choice is not None:
schoice = list(map(self.toString, self.choice))
schoice.insert(0,'')
schoice.append('')
fields[4] = repr('|'.join(schoice))
elif self.min not in [None,INDEF]:
fields[4] = self.toString(self.min,quoted=quoted)
if self.max not in [None,INDEF]:
fields[5] = self.toString(self.max,quoted=quoted)
if self.prompt:
if quoted:
sprompt = repr(self.prompt)
else:
sprompt = self.prompt
# prompt can have embedded newlines (which are printed)
sprompt = sprompt.replace(r'\012', '\n')
sprompt = sprompt.replace(r'\n', '\n')
fields[6] = sprompt
# delete trailing null parameters
for i in [6,5,4]:
if fields[i] != "": break
del fields[i]
if dolist:
return fields
else:
return ','.join(fields)
|
Set choice parameter from string s
def _setChoice(self,s,strict=0):
"""Set choice parameter from string s"""
clist = _getChoice(s,strict)
self.choice = list(map(self._coerceValue, clist))
self._setChoiceDict()
|
Create dictionary for choice list
def _setChoiceDict(self):
"""Create dictionary for choice list"""
# value is name of choice parameter (same as key)
self.choiceDict = {}
for c in self.choice: self.choiceDict[c] = c
|
Interactively prompt for parameter if necessary
Prompt for value if
(1) mode is hidden but value is undefined or bad, or
(2) mode is query and value was not set on command line
Never prompt for "u" mode parameters, which are local variables.
def _optionalPrompt(self, mode):
"""Interactively prompt for parameter if necessary
Prompt for value if
(1) mode is hidden but value is undefined or bad, or
(2) mode is query and value was not set on command line
Never prompt for "u" mode parameters, which are local variables.
"""
if (self.mode == "h") or (self.mode == "a" and mode == "h"):
# hidden parameter
if not self.isLegal():
self.getWithPrompt()
elif self.mode == "u":
# "u" is a special mode used for local variables in CL scripts
# They should never prompt under any circumstances
if not self.isLegal():
raise ValueError(
"Attempt to access undefined local variable `%s'" %
self.name)
else:
# query parameter
if self.isCmdline()==0:
self.getWithPrompt()
|
Get p_filename field for this parameter
Same as get for non-list params
def _getPFilename(self,native,prompt):
"""Get p_filename field for this parameter
Same as get for non-list params
"""
return self.get(native=native,prompt=prompt)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.