text
stringlengths
81
112k
def permission_to_04_acls(permissions):
    """
    Legacy acl format kept for bw. compatibility

    :param permissions:
    :return: list of (principal, permission-name) tuples
    """
    result = []
    for perm in permissions:
        if perm.type == "group":
            # group principals are prefixed to distinguish them from user ids
            result.append(("group:%s" % perm.group.id, perm.perm_name))
        elif perm.type == "user":
            result.append((perm.user.id, perm.perm_name))
    return result
def permission_to_pyramid_acls(permissions):
    """
    Returns a list of permissions in a format understood by pyramid

    :param permissions:
    :return: list of (Allow, principal, permission-name) tuples
    """
    pyramid_acls = []
    for perm in permissions:
        if perm.type == "group":
            pyramid_acls.append(
                (Allow, "group:%s" % perm.group.id, perm.perm_name))
        elif perm.type == "user":
            pyramid_acls.append((Allow, perm.user.id, perm.perm_name))
    return pyramid_acls
def ChunkBy(self, f=None):
    """
    [ { 'self': [1, 1, 3, 3, 1, 1], 'f': lambda x: x%2, 'assert': lambda ret: ret == [[1, 1], [3, 3], [1, 1]] } ]
    """
    # no key function: chunk on the raw element values
    if f is None:
        return _chunk(self)
    # destructuring lambdas are rewritten before being used as the key
    key = destruct_func(f) if is_to_destruct(f) else f
    return _chunk(self, key)
def GroupBy(self: Iterable, f=None):
    """
    [ { 'self': [1, 2, 3], 'f': lambda x: x%2, 'assert': lambda ret: ret[0] == [2] and ret[1] == [1, 3] } ]
    """
    key = f
    # destructuring lambdas are rewritten before being used as the key
    if key and is_to_destruct(key):
        key = destruct_func(key)
    return _group_by(self, key)
def Take(self: Iterable, n):
    """
    [ { 'self': [1, 2, 3], 'n': 2, 'assert': lambda ret: list(ret) == [1, 2] } ]
    """
    # lazily yield at most the first n elements
    taken = 0
    for element in self:
        if taken == n:
            break
        yield element
        taken += 1
def TakeIf(self: Iterable, f):
    """
    [ { 'self': [1, 2, 3], 'f': lambda e: e%2, 'assert': lambda ret: list(ret) == [1, 3] } ]
    """
    # resolve destructuring lambdas eagerly, then filter lazily
    predicate = destruct_func(f) if is_to_destruct(f) else f
    return (item for item in self if predicate(item))
def TakeWhile(self: Iterable, f):
    """
    [ { 'self': [1, 2, 3, 4, 5], 'f': lambda x: x < 4, 'assert': lambda ret: list(ret) == [1, 2, 3] } ]
    """
    if is_to_destruct(f):
        f = destruct_func(f)
    # yield until the predicate first fails, then stop for good
    for item in self:
        if f(item):
            yield item
        else:
            break
def Drop(self: Iterable, n):
    """
    [ { 'self': [1, 2, 3, 4, 5], 'n': 3, 'assert': lambda ret: list(ret) == [1, 2] } ]
    """
    # materialize so the total length is known, then drop the last n
    items = tuple(self)
    keep = len(items) - n
    # NOTE(review): when n >= len(items) every element is yielded rather
    # than none -- preserved as-is; confirm this is the intended contract
    if keep <= 0:
        yield from items
    else:
        yield from items[:keep]
def Skip(self: Iterable, n):
    """
    [ { 'self': [1, 2, 3, 4, 5], 'n': 3, 'assert': lambda ret: list(ret) == [4, 5] } ]
    """
    # Consume exactly n leading elements and hand back the rest of the
    # iterator. The previous implementation broke out of an enumerate()
    # loop when i == n, which had already pulled element n+1 from the
    # iterator (Skip([1..5], 3) yielded [5] instead of [4, 5]) and also
    # skipped one element for n == 0.
    it = iter(self)
    skipped = 0
    while skipped < n:
        try:
            next(it)
        except StopIteration:
            # fewer than n elements: nothing left to yield
            break
        skipped += 1
    return it
def Shift(self, n):
    """
    [ { 'self': [1, 2, 3, 4, 5], 'n': 3, 'assert': lambda ret: list(ret) == [4, 5, 1, 2, 3] } ]
    """
    # Take() consumes the first n items from ``self``; the remainder is
    # then yielded before the saved head is replayed at the end.
    # NOTE(review): this rotation is only correct when ``self`` is a
    # one-shot iterator -- for a list, ``yield from self`` would re-yield
    # the head items as well. Presumably callers always pass iterators;
    # confirm against the flow framework's calling convention.
    headn = tuple(Take(self, n))
    yield from self
    yield from headn
def Concat(self: Iterable, *others):
    """
    [ { 'self': [1, 2, 3], ':args': [[4, 5, 6], [7, 8, 9]], 'assert': lambda ret: list(ret) == [1, 2, 3, 4, 5, 6, 7, 8, 9] } ]
    """
    # unwrap any Flow-wrapped iterables before chaining them on
    unboxed = [unbox_if_flow(other) for other in others]
    return concat_generator(self, *unboxed)
def _called_thru_default_qs(self, node):
    """Check whether an attribute is accessed through the default
    queryset manager, e.g. ``MyClass.objects.filter(some='value')``."""
    tail = node.last_child()
    if not tail:
        return False
    # the default queryset manager is exposed as the 'objects' attribute
    if getattr(tail, 'attrname', None) != 'objects':
        return False
    owner = tail.last_child()
    # accept the access only when the owner is (inferred to be) a Document
    return any(
        node_is_subclass(inferred, *DOCUMENT_BASES)
        for inferred in owner.inferred()
    )
def all(cls, klass, db_session=None):
    """
    returns all objects of specific type - will work correctly with
    sqlalchemy inheritance models, you should normally use models
    base_query() instead of this function its for bw. compat purposes

    :param klass:
    :param db_session:
    :return:
    """
    # NOTE: shadows the builtin ``all`` -- kept for backward compatibility
    session = get_db_session(db_session)
    return session.query(klass)
def base_query(cls, db_session=None):
    """
    returns base query for specific service

    :param db_session:
    :return: query
    """
    session = get_db_session(db_session)
    return session.query(cls.model)
def on(event, *args, **kwargs):
    """
    Event method wrapper for bot mixins. When a bot is constructed,
    its metaclass inspects all members of all base classes, and
    looks for methods marked with an event attribute which is
    assigned via this wrapper. It then stores all the methods in a
    dict that maps event names to lists of these methods, which are
    each called when the event occurs.
    """
    def wrapper(func):
        # Fold positional args into kwargs keyed by their position.
        # The previous code iterated ``args`` directly (``for i, arg in
        # args``) which raises TypeError for any non-pair argument;
        # enumerate() is required to produce the (index, value) pairs.
        for i, arg in enumerate(args):
            kwargs[i] = arg
        func.event = Event(event, kwargs)
        return func
    return wrapper
def get_db_session(session=None, obj=None):
    """
    utility function that attempts to return sqlalchemy session that could
    have been created/passed in one of few ways:

    * It first tries to read session attached to instance
      if object argument was passed

    * then it tries to return  session passed as argument

    * finally tries to read pylons-like threadlocal called DBSession

    * if this fails exception is thrown

    :param session:
    :param obj:
    :return:
    """
    from ziggurat_foundations import models
    # a session bound to a living instance wins
    if obj:
        return sa.orm.session.object_session(obj)
    # an explicitly supplied session comes next
    if session:
        return session
    # finally fall back to the pylons-style threadlocal
    if models.DBSession:
        return models.DBSession
    raise ZigguratSessionException("No Session found")
def get_dict(self, exclude_keys=None, include_keys=None):
    """
    return dictionary of keys and values corresponding to this model's
    data - if include_keys is null the function will return all keys

    :param exclude_keys: (optional) is a list of columns from model that
        should not be returned by this function
    :param include_keys: (optional) is a list of columns from model that
        should be returned by this function
    :return:
    """
    excluded = exclude_keys or []
    included = include_keys or []
    data = {}
    for key in self._get_keys():
        if key in excluded:
            continue
        # an explicit include list restricts output to exactly those keys
        if include_keys and key not in included:
            continue
        data[key] = getattr(self, key)
    return data
def get_appstruct(self):
    """
    return list of tuples keys and values corresponding to this model's
    data
    """
    return [(key, getattr(self, key)) for key in self._get_keys()]
def populate_obj(self, appstruct, exclude_keys=None, include_keys=None):
    """
    updates instance properties *for column names that exist*
    for this model and are keys present in passed dictionary

    :param appstruct: (dictionary)
    :param exclude_keys: (optional) is a list of columns from model that
        should not be updated by this function
    :param include_keys: (optional) is a list of columns from model that
        should be updated by this function
    :return:
    """
    excluded = exclude_keys or []
    included = include_keys or []
    for key in self._get_keys():
        if key not in appstruct or key in excluded:
            continue
        # an explicit include list restricts updates to exactly those keys
        if include_keys and key not in included:
            continue
        setattr(self, key, appstruct[key])
def populate_obj_from_obj(self, instance, exclude_keys=None, include_keys=None):
    """
    updates instance properties *for column names that exist*
    for this model and are properties present in passed dictionary

    :param instance:
    :param exclude_keys: (optional) is a list of columns from model that
        should not be updated by this function
    :param include_keys: (optional) is a list of columns from model that
        should be updated by this function
    :return:
    """
    excluded = exclude_keys or []
    included = include_keys or []
    for key in self._get_keys():
        if not hasattr(instance, key) or key in excluded:
            continue
        # an explicit include list restricts updates to exactly those keys
        if include_keys and key not in included:
            continue
        setattr(self, key, getattr(instance, key))
def delete(self, db_session=None):
    """
    Deletes the object via session, this will permanently delete the
    object from storage on commit

    :param db_session:
    :return:
    """
    session = get_db_session(db_session, self)
    session.delete(self)
def power_down(self):
    """
    turn off the HX711

    :return: always True
    :rtype bool
    """
    # a LOW-to-HIGH transition on PD_SCK that is held HIGH puts the
    # HX711 into power-down mode
    GPIO.output(self._pd_sck, False)
    GPIO.output(self._pd_sck, True)
    # hold for 10 ms before returning so the chip settles in power-down
    time.sleep(0.01)
    return True
def power_up(self):
    """
    power up the HX711

    :return: always True
    :rtype bool
    """
    # driving PD_SCK LOW wakes the chip from power-down mode
    GPIO.output(self._pd_sck, False)
    # give the chip time to settle before the next reading
    time.sleep(0.01)
    return True
def reset(self):
    """
    reset the HX711 and prepare it for the next reading

    :return: True on success
    :rtype bool
    :raises GenericHX711Exception: when the post-reset test read fails
    """
    # power-cycle the chip, then confirm it responds with a test read
    logging.debug("power down")
    self.power_down()
    logging.debug("power up")
    self.power_up()
    logging.debug("read some raw data")
    if self.get_raw_data(6) is False:
        raise GenericHX711Exception("failed to reset HX711")
    return True
check if "times" is within the borders defined in the class :param times: "times" to check :type times: int def _validate_measure_count(self, times): """ check if "times" is within the borders defined in the class :param times: "times" to check :type times: int """ if not self.min_measures <= times <= self.max_measures: raise ParameterValidationError( "{times} is not within the borders defined in the class".format( times=times ) )
validate a given value for gain_A :type gain_A: int :raises: ValueError def _validate_gain_A_value(self, gain_A): """ validate a given value for gain_A :type gain_A: int :raises: ValueError """ if gain_A not in self._valid_gains_for_channel_A: raise ParameterValidationError("{gain_A} is not a valid gain".format(gain_A=gain_A))
def _ready(self):
    """
    check whether there is some data ready to be read.

    :return True if there is some date
    :rtype bool
    """
    # if DOUT pin is low, data is ready for reading
    _is_ready = GPIO.input(self._dout) == 0
    logging.debug("check data ready for reading: {result}".format(
        result="YES" if _is_ready is True else "NO"
    ))
    return _is_ready
def _set_channel_gain(self, num):
    """
    Finish data transmission from HX711 by setting next required gain and
    channel

    Only called from the _read function.

    :param num: how often so do the set (1...3) -- number of extra clock
        pulses; 1 selects channel A gain 128, 2 channel B gain 32,
        3 channel A gain 64
    :type num: int
    :return True on success
    :rtype bool
    :raises AttributeError: when num is outside 1..3
    :raises GenericHX711Exception: when the recovery read fails
    """
    if not 1 <= num <= 3:
        raise AttributeError(
            """"num" has to be in the range of 1 to 3"""
        )
    for _ in range(num):
        logging.debug("_set_channel_gain called")
        start_counter = time.perf_counter()  # start timer now.
        # one extra clock pulse per iteration selects gain/channel
        GPIO.output(self._pd_sck, True)  # set high
        GPIO.output(self._pd_sck, False)  # set low
        end_counter = time.perf_counter()  # stop timer
        time_elapsed = float(end_counter - start_counter)
        # check if HX711 did not turn off...
        # if pd_sck pin is HIGH for 60 µs and more the HX 711 enters power down mode.
        if time_elapsed >= 0.00006:
            logging.warning(
                'setting gain and channel took more than 60µs. '
                'Time elapsed: {:0.8f}'.format(time_elapsed)
            )
            # hx711 has turned off. First few readings are inaccurate.
            # Despite this reading was ok and data can be used.
            result = self.get_raw_data(times=6)  # set for the next reading.
            if result is False:
                raise GenericHX711Exception("channel was not set properly")
    return True
def _read(self, max_tries=40):
    """
    - read the bit stream from HX711 and convert to an int value.
    - validates the acquired data

    :param max_tries: how often to try to get data
    :type max_tries: int
    :return raw data, or False when the read failed or data is invalid
    :rtype: int
    """
    # start by setting the pd_sck to false
    GPIO.output(self._pd_sck, False)
    # init the counter
    ready_counter = 0
    # loop until HX711 is ready
    # halt when maximum number of tires is reached
    while self._ready() is False:
        time.sleep(0.01)  # sleep for 10 ms before next try
        ready_counter += 1  # increment counter
        # check loop count
        # and stop when defined maximum is reached
        if ready_counter >= max_tries:
            logging.debug('self._read() not ready after 40 trials\n')
            return False

    data_in = 0  # 2's complement data from hx 711
    # read first 24 bits of data
    for i in range(24):
        # start timer
        start_counter = time.perf_counter()
        # request next bit from HX711 via one clock pulse
        GPIO.output(self._pd_sck, True)
        GPIO.output(self._pd_sck, False)
        # stop timer
        end_counter = time.perf_counter()
        time_elapsed = float(end_counter - start_counter)
        # check if the hx 711 did not turn off:
        # if pd_sck pin is HIGH for 60 us and more than the HX 711 enters
        # power down mode.
        if time_elapsed >= 0.00006:
            logging.debug('Reading data took longer than 60µs. Time elapsed: {:0.8f}'.format(time_elapsed))
            return False
        # Shift the bits as they come to data_in variable.
        # Left shift by one bit then bitwise OR with the new bit.
        data_in = (data_in << 1) | GPIO.input(self._dout)

    # send the extra pulses that select channel/gain for the NEXT reading
    if self.channel == 'A' and self.channel_a_gain == 128:
        self._set_channel_gain(num=1)  # send one bit
    elif self.channel == 'A' and self.channel_a_gain == 64:
        self._set_channel_gain(num=3)  # send three bits
    else:
        self._set_channel_gain(num=2)  # send two bits

    logging.debug('Binary value as it has come: ' + str(bin(data_in)))

    # check if data is valid
    # 0x800000 is the lowest
    # 0x7fffff is the highest possible value from HX711
    if data_in == 0x7fffff or data_in == 0x800000:
        logging.debug('Invalid data detected: ' + str(data_in))
        return False

    # calculate int from 2's complement
    signed_data = 0
    # 0b1000 0000 0000 0000 0000 0000 check if the sign bit is 1.
    # Negative number.
    if (data_in & 0x800000):
        signed_data = -((data_in ^ 0xffffff) + 1)  # convert from 2's complement to int
    else:  # else do not do anything the value is positive number
        signed_data = data_in

    logging.debug('Converted 2\'s complement value: ' + str(signed_data))
    return signed_data
def get_raw_data(self, times=5):
    """
    collect ``times`` valid raw readings from the chip

    :param times: how many valid measures to collect
    :type times: int
    :return: the list of raw readings
    :rtype: list
    """
    self._validate_measure_count(times)
    samples = []
    while len(samples) < times:
        value = self._read()
        # _read() signals failure with False; -1 readings are discarded too
        if value not in [False, -1]:
            samples.append(value)
    return samples
def from_resource_deeper(
    self, resource_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
):
    """
    Returns the ordered subtree rooted at ``resource_id``
    (currently only implemented in postgresql)

    :param resource_id:
    :param limit_depth:
    :param db_session:
    :return:
    """
    service = self.service
    return service.from_resource_deeper(
        *args,
        resource_id=resource_id,
        limit_depth=limit_depth,
        db_session=db_session,
        **kwargs
    )
def delete_branch(self, resource_id=None, db_session=None, *args, **kwargs):
    """
    Deletes the whole branch, children included, starting at ``resource_id``

    :param resource_id:
    :param db_session:
    :return:
    """
    service = self.service
    return service.delete_branch(
        *args, resource_id=resource_id, db_session=db_session, **kwargs
    )
def from_parent_deeper(
    self, parent_id=None, limit_depth=1000000, db_session=None, *args, **kwargs
):
    """
    Returns the ordered subtree below ``parent_id``
    (currently only implemented in postgresql)

    :param parent_id:
    :param limit_depth:
    :param db_session:
    :return:
    """
    service = self.service
    return service.from_parent_deeper(
        *args,
        parent_id=parent_id,
        limit_depth=limit_depth,
        db_session=db_session,
        **kwargs
    )
def build_subtree_strut(self, result, *args, **kwargs):
    """
    Returns a dictionary in form of
    {node:Resource, children:{node_id: Resource}}

    :param result:
    :return:
    """
    service = self.service
    return service.build_subtree_strut(*args, result=result, **kwargs)
def path_upper(self, object_id, limit_depth=1000000, db_session=None, *args, **kwargs):
    """
    Returns the path from ``object_id`` up to the root node
    (currently only for postgresql)

    :param object_id:
    :param limit_depth:
    :param db_session:
    :return:
    """
    service = self.service
    return service.path_upper(
        *args,
        object_id=object_id,
        limit_depth=limit_depth,
        db_session=db_session,
        **kwargs
    )
def move_to_position(
    self, resource_id, to_position, new_parent_id=noop, db_session=None, *args, **kwargs
):
    """
    Moves node to new location in the tree

    :param resource_id: resource to move
    :param to_position: new position
    :param new_parent_id: new parent id (``noop`` sentinel keeps the parent)
    :param db_session:
    :return:
    """
    service = self.service
    return service.move_to_position(
        *args,
        resource_id=resource_id,
        to_position=to_position,
        new_parent_id=new_parent_id,
        db_session=db_session,
        **kwargs
    )
def shift_ordering_down(self, parent_id, position, db_session=None, *args, **kwargs):
    """
    Shifts ordering to "close gaps" after node deletion or being moved
    to another branch, begins the shift from given position

    :param parent_id:
    :param position:
    :param db_session:
    :return:
    """
    service = self.service
    return service.shift_ordering_down(
        *args, parent_id=parent_id, position=position, db_session=db_session, **kwargs
    )
def shift_ordering_up(self, parent_id, position, db_session=None, *args, **kwargs):
    """
    Shifts ordering to "open a gap" for node insertion,
    begins the shift from given position

    :param parent_id:
    :param position:
    :param db_session:
    :return:
    """
    service = self.service
    return service.shift_ordering_up(
        *args, parent_id=parent_id, position=position, db_session=db_session, **kwargs
    )
def set_position(self, resource_id, to_position, db_session=None, *args, **kwargs):
    """
    Sets node position for new node in the tree

    :param resource_id: resource to move
    :param to_position: new position
    :param db_session:
    :return:
    """
    service = self.service
    return service.set_position(
        *args,
        resource_id=resource_id,
        to_position=to_position,
        db_session=db_session,
        **kwargs
    )
def check_node_parent(self, resource_id, new_parent_id, db_session=None, *args, **kwargs):
    """
    Checks if parent destination is valid for node

    :param resource_id:
    :param new_parent_id:
    :param db_session:
    :return:
    """
    service = self.service
    return service.check_node_parent(
        *args,
        resource_id=resource_id,
        new_parent_id=new_parent_id,
        db_session=db_session,
        **kwargs
    )
def count_children(self, resource_id, db_session=None, *args, **kwargs):
    """
    Counts children of resource node

    :param resource_id:
    :param db_session:
    :return:
    """
    service = self.service
    return service.count_children(
        *args, resource_id=resource_id, db_session=db_session, **kwargs
    )
def check_node_position(
    self, parent_id, position, on_same_branch, db_session=None, *args, **kwargs
):
    """
    Checks if node position for given parent is valid, raises exception if
    this is not the case

    :param parent_id:
    :param position:
    :param on_same_branch: indicates that we are checking same branch
    :param db_session:
    :return:
    """
    service = self.service
    return service.check_node_position(
        *args,
        parent_id=parent_id,
        position=position,
        on_same_branch=on_same_branch,
        db_session=db_session,
        **kwargs
    )
def flatten_list(l: List[list]) -> list:
    """ takes a list of lists, l and returns a flat list """
    flat = []
    for inner in l:
        flat.extend(inner)
    return flat
def read_nem_file(file_path: str) -> NEMFile:
    """ Read in NEM file and return meter readings named tuple

    :param file_path: The NEM file to process
    :returns: The file that was created
    """
    _, extension = os.path.splitext(file_path)
    if extension.lower() == '.zip':
        with zipfile.ZipFile(file_path, 'r') as archive:
            # NOTE(review): only the first archive member is parsed --
            # the loop returns immediately; confirm multi-file zips are
            # not expected here
            for member in archive.namelist():
                with archive.open(member) as csv_text:
                    # Zip file is open in binary mode
                    # So decode then convert back to list
                    lines = csv_text.read().decode('utf-8').splitlines()
                    rows = csv.reader(lines, delimiter=',')
                    return parse_nem_rows(rows, file_name=member)
    with open(file_path) as nmi_file:
        return parse_nem_file(nmi_file)
def parse_nem_file(nem_file) -> NEMFile:
    """ Parse NEM file and return meter readings named tuple """
    rows = csv.reader(nem_file, delimiter=',')
    return parse_nem_rows(rows, file_name=nem_file)
def parse_nem_rows(nem_list: Iterable, file_name=None) -> NEMFile:
    """ Parse NEM row iterator and return meter readings named tuple

    Dispatches on the record indicator in column 0 of each row
    (100 header, 200/300/400/500 NEM12, 250/550 NEM13, 900 end of file).
    """
    header = HeaderRecord(None, None, None, None, file_name)
    readings = dict()  # readings nested by NMI then channel
    trans = dict()  # transactions nested by NMI then channel
    nmi_d = None  # current NMI details block that readings apply to

    for i, row in enumerate(nem_list):
        record_indicator = int(row[0])

        if i == 0 and record_indicator != 100:
            raise ValueError("NEM Files must start with a 100 row")

        if record_indicator == 100:
            header = parse_100_row(row, file_name)
            if header.version_header not in ['NEM12', 'NEM13']:
                raise ValueError("Invalid NEM version {}".format(
                    header.version_header))

        elif record_indicator == 900:
            # end-of-file marker: flatten the per-300-row nested lists
            # that were kept separate so 400 rows could adjust them
            for nmi in readings:
                for suffix in readings[nmi]:
                    readings[nmi][suffix] = flatten_list(readings[nmi][suffix])
            break  # End of file

        elif header.version_header == 'NEM12' and record_indicator == 200:
            # NMI data details: establishes which NMI/channel the
            # following 300/400/500 rows belong to
            try:
                nmi_details = parse_200_row(row)
            except ValueError:
                logging.error('Error passing 200 row:')
                logging.error(row)
                raise
            nmi_d = nmi_details

            if nmi_d.nmi not in readings:
                readings[nmi_d.nmi] = {}
            if nmi_d.nmi_suffix not in readings[nmi_d.nmi]:
                readings[nmi_d.nmi][nmi_d.nmi_suffix] = []
            if nmi_d.nmi not in trans:
                trans[nmi_d.nmi] = {}
            if nmi_d.nmi_suffix not in trans[nmi_d.nmi]:
                trans[nmi_d.nmi][nmi_d.nmi_suffix] = []

        elif header.version_header == 'NEM12' and record_indicator == 300:
            # interval data row: must contain a full day of intervals
            num_intervals = int(24 * 60 / nmi_d.interval_length)
            assert len(row) > num_intervals, "Incomplete 300 Row in {}".format(
                file_name)
            interval_record = parse_300_row(
                row, nmi_d.interval_length, nmi_d.uom)
            # don't flatten the list of interval readings at this stage,
            # as they may need to be adjusted by a 400 row
            readings[nmi_d.nmi][nmi_d.nmi_suffix].append(
                interval_record.interval_values)

        elif header.version_header == 'NEM12' and record_indicator == 400:
            # interval event row: adjusts the most recent 300 row
            event_record = parse_400_row(row)
            readings[nmi_d.nmi][nmi_d.nmi_suffix][-1] = update_reading_events(
                readings[nmi_d.nmi][nmi_d.nmi_suffix][-1], event_record)

        elif header.version_header == 'NEM12' and record_indicator == 500:
            b2b_details = parse_500_row(row)
            trans[nmi_d.nmi][nmi_d.nmi_suffix].append(b2b_details)

        elif header.version_header == 'NEM13' and record_indicator == 550:
            b2b_details = parse_550_row(row)
            trans[nmi_d.nmi][nmi_d.nmi_suffix].append(b2b_details)

        elif header.version_header == 'NEM13' and record_indicator == 250:
            # basic (manual-read) meter data: the 250 row carries both
            # the NMI details and the reading itself
            basic_data = parse_250_row(row)
            reading = calculate_manual_reading(basic_data)
            nmi_d = basic_data

            if basic_data.nmi not in readings:
                readings[nmi_d.nmi] = {}
            if nmi_d.nmi_suffix not in readings[nmi_d.nmi]:
                readings[nmi_d.nmi][nmi_d.nmi_suffix] = []
            if nmi_d.nmi not in trans:
                trans[nmi_d.nmi] = {}
            if nmi_d.nmi_suffix not in trans[nmi_d.nmi]:
                trans[nmi_d.nmi][nmi_d.nmi_suffix] = []

            # wrapped in a list to match the nested NEM12 structure
            readings[nmi_d.nmi][nmi_d.nmi_suffix].append([reading])

        else:
            logging.warning(
                "Record indicator %s not supported and was skipped",
                record_indicator)

    return NEMFile(header, readings, trans)
def calculate_manual_reading(basic_data: BasicMeterData) -> Reading:
    """ Calculate the interval between two manual readings """
    # the reading spans the window between the two register reads;
    # event code/description are unknown for manual reads
    return Reading(
        basic_data.previous_register_read_datetime,
        basic_data.current_register_read_datetime,
        basic_data.quantity,
        basic_data.uom,
        basic_data.current_quality_method,
        "",
        "",
        basic_data.previous_register_read,
        basic_data.current_register_read,
    )
def parse_100_row(row: list, file_name: str) -> HeaderRecord:
    """ Parse header record (100) """
    version_header = row[1]
    creation_date = parse_datetime(row[2])
    return HeaderRecord(version_header, creation_date, row[3], row[4], file_name)
def parse_200_row(row: list) -> NmiDetails:
    """ Parse NMI data details record (200) """
    interval_length = int(row[8])
    next_read_date = parse_datetime(row[9])
    return NmiDetails(row[1], row[2], row[3], row[4], row[5], row[6], row[7],
                      interval_length, next_read_date)
def parse_250_row(row: list) -> BasicMeterData:
    """ Parse basic meter data record (250) """
    return BasicMeterData(
        row[1], row[2], row[3], row[4], row[5], row[6], row[7],
        float(row[8]), parse_datetime(row[9]),
        row[10], row[11], row[12],
        float(row[13]), parse_datetime(row[14]),
        row[15], row[16], row[17],
        float(row[18]), row[19], row[20],
        parse_datetime(row[21]), parse_datetime(row[22]),
    )
def parse_300_row(row: list, interval: int, uom: str) -> IntervalRecord:
    """ Interval data record (300) """
    num_intervals = int(24 * 60 / interval)
    interval_date = parse_datetime(row[1])
    # interval values occupy columns 2 .. last_interval-1;
    # trailing columns carry quality/reason/update metadata
    last_interval = 2 + num_intervals
    quality_method = row[last_interval]
    interval_values = parse_interval_records(
        row[2:last_interval], interval_date, interval, uom, quality_method)
    return IntervalRecord(
        interval_date,
        interval_values,
        quality_method,
        row[last_interval + 1],
        row[last_interval + 2],
        parse_datetime(row[last_interval + 3]),
        parse_datetime(row[last_interval + 4]),
    )
def parse_interval_records(interval_record, interval_date, interval, uom,
                           quality_method) -> List[Reading]:
    """ Convert interval values into tuples with datetime """
    step = timedelta(minutes=interval)
    readings = []
    for i, val in enumerate(interval_record):
        start = interval_date + i * step
        readings.append(Reading(
            t_start=start,
            t_end=start + step,
            read_value=parse_reading(val),
            uom=uom,
            quality_method=quality_method,
            event_code="",  # event is unknown at time of reading
            event_desc="",  # event is unknown at time of reading
            read_start=None,
            read_end=None,  # No before and after readings for intervals
        ))
    return readings
def parse_reading(val: str) -> Optional[float]:
    """ Convert reading value to float (if possible) """
    try:
        return float(val)
    except ValueError:
        # non-numeric readings (e.g. blanks) become None rather than raising
        logging.warning('Reading of "%s" is not a number', val)
        return None
def parse_400_row(row: list) -> tuple:
    """ Interval event record (400) """
    start_interval = int(row[1])
    end_interval = int(row[2])
    return EventRecord(start_interval, end_interval, row[3], row[4], row[5])
def update_reading_events(readings, event_record):
    """ Updates readings from a 300 row to reflect any events found in a
    subsequent 400 row """
    # event intervals are 1-indexed
    first = event_record.start_interval - 1
    for i in range(first, event_record.end_interval):
        previous = readings[i]
        # rebuild the tuple, replacing only quality/event fields
        readings[i] = Reading(
            t_start=previous.t_start,
            t_end=previous.t_end,
            read_value=previous.read_value,
            uom=previous.uom,
            quality_method=event_record.quality_method,
            event_code=event_record.reason_code,
            event_desc=event_record.reason_description,
            read_start=previous.read_start,
            read_end=previous.read_end)
    return readings
def parse_datetime(record: str) -> Optional[datetime]:
    """Parse a NEM datetime string into a datetime object.

    Fix: the original tested ``record == ''`` before stripping, so a
    whitespace-only field fell through to ``format_strings[0]`` and
    raised KeyError. Strip once up front and treat blank as None.

    :param record: Date8, DateTime12 or DateTime14 string (may be blank)
    :return: parsed datetime, or None for blank/whitespace-only input
    :raises KeyError: if the (stripped) length matches no NEM format
    """
    # NEM defines Date8, DateTime12 and DateTime14
    format_strings = {8: '%Y%m%d', 12: '%Y%m%d%H%M', 14: '%Y%m%d%H%M%S'}
    trimmed = record.strip()
    if trimmed == '':
        return None  # blank (or whitespace-only) fields carry no datetime
    return datetime.strptime(trimmed, format_strings[len(trimmed)])
def parse_args(self):
    """
    Called from ``gnotty.server.run`` and parses any CLI args
    provided. Also handles loading settings from the Python module
    specified with the ``--conf-file`` arg. CLI args take precedence
    over any settings defined in the Python module defined by
    ``--conf-file``.
    """
    options, _ = parser.parse_args()
    file_settings = {}
    if options.CONF_FILE:
        # Python 2 builtin: executes the conf module, collecting its
        # module-level GNOTTY_* names into file_settings.
        execfile(options.CONF_FILE, {}, file_settings)
    for option in self.option_list:
        if option.dest:
            # Value for this option from the conf module, if any.
            file_value = file_settings.get("GNOTTY_%s" % option.dest, None)
            # optparse doesn't seem to provide a way to determine if
            # an option's value was provided as a CLI arg, or if the
            # default is being used, so we manually check sys.argv,
            # since provided CLI args should take precedence over
            # any settings defined in a conf module.
            flags = option._short_opts + option._long_opts
            in_argv = set(flags) & set(sys.argv)
            options_value = getattr(options, option.dest)
            if file_value and not in_argv:
                # Conf-module value wins only when the flag wasn't given
                # on the command line.
                self[option.dest] = file_value
            elif in_argv:
                # Explicit CLI arg always wins.
                self[option.dest] = options_value
            else:
                # Fall back to any existing value, then optparse default.
                self[option.dest] = self.get(option.dest, options_value)
    self.set_max_message_length()
    self["STATIC_URL"] = "/static/"
    # Translate the level name (e.g. "INFO") into the logging constant.
    self["LOG_LEVEL"] = getattr(logging, self["LOG_LEVEL"])
Provides a consistent color for a nickname. Uses first 6 chars of nickname's md5 hash, and then slightly darkens the rgb values for use on a light background. def color(nickname): """ Provides a consistent color for a nickname. Uses first 6 chars of nickname's md5 hash, and then slightly darkens the rgb values for use on a light background. """ _hex = md5(nickname).hexdigest()[:6] darken = lambda s: str(int(round(int(s, 16) * .7))) return "rgb(%s)" % ",".join([darken(_hex[i:i+2]) for i in range(6)[::2]])
def on_welcome(self, connection, event):
    """Join the configured channel once connected to the IRC server."""
    channel_key = settings.IRC_CHANNEL_KEY or ""
    connection.join(self.channel, key=channel_key)
def on_nicknameinuse(self, connection, event):
    """
    Append (or increment) a digit suffix on the nickname if it's
    already taken, then re-connect with the new nickname.
    """
    suffix = ""
    # Peel any existing trailing digits off the nickname.
    while self.nickname[-1].isdigit():
        suffix = self.nickname[-1] + suffix
        self.nickname = self.nickname[:-1]
    next_digit = int(suffix) + 1 if suffix else 1
    self.nickname += str(next_digit)
    self.connect(self.host, self.port, self.nickname)
def message_channel(self, message):
    """
    Shortcut for sending a message to the channel. irclib doesn't
    handle unicode, so we bypass its privmsg -> send_raw methods and
    write to its socket directly.
    """
    raw = "PRIVMSG %s :%s\r\n" % (self.channel, message)
    self.connection.socket.send(raw.encode("utf-8"))
def emit_message(self, message):
    """
    Send a message to the channel, echoing it back to the sender's
    WebSocket. Messages are dropped until we have joined the channel.
    """
    try:
        nickname_color = self.nicknames[self.nickname]
    except KeyError:
        # Only accept messages once we've joined.
        return
    message = message[:settings.MAX_MESSAGE_LENGTH]
    if message.startswith("/"):
        # IRC commands are passed straight through to the server.
        self.connection.send_raw(message.lstrip("/"))
    else:
        self.message_channel(message)
        self.namespace.emit("message", self.nickname, message, nickname_color)
def emit_nicknames(self):
    """
    Push the current nickname list (with colors) to the WebSocket.
    Called whenever the nicknames list changes.
    """
    payload = []
    for name in sorted(self.nicknames.keys()):
        payload.append({"nickname": name, "color": color(name)})
    self.namespace.emit("nicknames", payload)
def on_namreply(self, connection, event):
    """
    Initial nickname list received - strip op/voice prefixes and send
    the list to the WebSocket.
    """
    for raw_name in event.arguments()[-1].split():
        name = raw_name.lstrip("@+")
        self.nicknames[name] = color(name)
    self.emit_nicknames()
def on_join(self, connection, event):
    """
    Someone joined the channel - send the nicknames list to the
    WebSocket.
    """
    joined_nickname = self.get_nickname(event)
    joined_color = color(joined_nickname)
    self.nicknames[joined_nickname] = joined_color
    self.namespace.emit("join")
    self.namespace.emit("message", joined_nickname, "joins", joined_color)
    self.emit_nicknames()
def on_nick(self, connection, event):
    """
    Someone changed their nickname - update our color mapping and send
    the nicknames list to the WebSocket.
    """
    previous = self.get_nickname(event)
    previous_color = self.nicknames.pop(previous)
    current = event.target()
    announcement = "is now known as %s" % current
    self.namespace.emit("message", previous, announcement, previous_color)
    current_color = color(current)
    self.nicknames[current] = current_color
    self.emit_nicknames()
    # Track our own renames so later lookups use the new name.
    if self.nickname == previous:
        self.nickname = current
def on_quit(self, connection, event):
    """
    Someone left the channel - send the nicknames list to the
    WebSocket.
    """
    departed = self.get_nickname(event)
    # pop() both fetches the color and removes the entry.
    departed_color = self.nicknames.pop(departed)
    self.namespace.emit("message", departed, "leaves", departed_color)
    self.emit_nicknames()
def on_pubmsg(self, connection, event):
    """
    Messages received in the channel - forward them to the WebSocket.
    """
    # The sender is the same for every message in the event, so resolve
    # the nickname and its color once instead of per message.
    nickname = self.get_nickname(event)
    nickname_color = self.nicknames[nickname]
    for message in event.arguments():
        self.namespace.emit("message", nickname, message, nickname_color)
def get(cls, resource_id, db_session=None):
    """
    Fetch row using primary key - reuses an object already present in
    the session if available

    :param resource_id:
    :param db_session:
    :return:
    """
    session = get_db_session(db_session)
    return session.query(cls.model).get(resource_id)
def perms_for_user(cls, instance, user, db_session=None):
    """
    returns all permissions that given user has for this resource
    from groups and directly set ones too

    :param instance: resource to gather permissions for
    :param user: user whose group and direct permissions are collected
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    # permissions granted to any of the user's groups for this resource
    query = db_session.query(
        cls.models_proxy.GroupResourcePermission.group_id.label("owner_id"),
        cls.models_proxy.GroupResourcePermission.perm_name,
        sa.literal("group").label("type"),
    )
    query = query.filter(
        cls.models_proxy.GroupResourcePermission.group_id.in_(
            [gr.id for gr in user.groups]
        )
    )
    query = query.filter(
        cls.models_proxy.GroupResourcePermission.resource_id == instance.resource_id
    )
    # permissions granted directly to the user for this resource
    query2 = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id.label("owner_id"),
        cls.models_proxy.UserResourcePermission.perm_name,
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.user_id == user.id
    )
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.resource_id == instance.resource_id
    )
    # single result set of (owner_id, perm_name, type) rows
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in user.groups])
    perms = [
        PermissionTuple(
            user,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            instance,
            False,
            True,
        )
        for row in query
    ]
    # include all perms if user is the owner of this resource
    if instance.owner_user_id == user.id:
        perms.append(
            PermissionTuple(
                user, ALL_PERMISSIONS, "user", None, instance, True, True
            )
        )
    # likewise if one of the user's groups owns the resource
    groups_dict = dict([(g.id, g) for g in user.groups])
    if instance.owner_group_id in groups_dict:
        perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                groups_dict.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )

    return perms
def direct_perms_for_user(cls, instance, user, db_session=None):
    """
    returns permissions that given user has for this resource
    without ones inherited from groups that user belongs to

    :param instance: resource to gather permissions for
    :param user: user whose direct permissions are collected
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    query = db_session.query(
        cls.models_proxy.UserResourcePermission.user_id,
        cls.models_proxy.UserResourcePermission.perm_name,
    )
    query = query.filter(cls.models_proxy.UserResourcePermission.user_id == user.id)
    query = query.filter(
        cls.models_proxy.UserResourcePermission.resource_id == instance.resource_id
    )

    perms = [
        PermissionTuple(user, row.perm_name, "user", None, instance, False, True)
        for row in query
    ]

    # include all perms if user is the owner of this resource.
    # Fix: pass ``allowed=True`` explicitly (7 positional args) for
    # consistency with every other owner PermissionTuple in this module.
    if instance.owner_user_id == user.id:
        perms.append(
            PermissionTuple(user, ALL_PERMISSIONS, "user", None, instance, True, True)
        )
    return perms
def group_perms_for_user(cls, instance, user, db_session=None):
    """
    returns permissions that given user has for this resource
    that are inherited from groups

    :param instance:
    :param user:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session, instance)
    all_perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=[instance.resource_id],
        user_ids=[user.id],
        db_session=db_session,
    )
    # keep only group-sourced permissions
    group_perms = [perm for perm in all_perms if perm.type == "group"]
    # owning group implies every permission
    user_groups = {group.id: group for group in user.groups}
    if instance.owner_group_id in user_groups:
        group_perms.append(
            PermissionTuple(
                user,
                ALL_PERMISSIONS,
                "group",
                user_groups.get(instance.owner_group_id),
                instance,
                True,
                True,
            )
        )
    return group_perms
def users_for_perm(
    cls,
    instance,
    perm_name,
    user_ids=None,
    group_ids=None,
    limit_group_permissions=False,
    skip_group_perms=False,
    db_session=None,
):
    """
    return PermissionTuples for users AND groups that have given
    permission for the resource, perm_name is __any_permission__ then
    users with any permission will be listed

    :param instance:
    :param perm_name:
    :param user_ids: limits the permissions to specific user ids
    :param group_ids: limits the permissions to specific group ids
    :param limit_group_permissions: should be used if we do not want to have
    user objects returned for group permissions, this might cause performance
    issues for big groups
    :param skip_group_perms: do not attach group permissions to the resultset
    :param db_session:
    :return:
    """  # noqa
    db_session = get_db_session(db_session, instance)
    # explicit user/group permissions for this resource
    users_perms = resource_permissions_for_users(
        cls.models_proxy,
        [perm_name],
        [instance.resource_id],
        user_ids=user_ids,
        group_ids=group_ids,
        limit_group_permissions=limit_group_permissions,
        skip_group_perms=skip_group_perms,
        db_session=db_session,
    )
    # the owning user implicitly holds every permission
    if instance.owner_user_id:
        users_perms.append(
            PermissionTuple(
                instance.owner, ALL_PERMISSIONS, "user", None, instance, True, True
            )
        )
    # every member of the owning group implicitly holds every permission
    if instance.owner_group_id and not skip_group_perms:
        for user in instance.owner_group.users:
            users_perms.append(
                PermissionTuple(
                    user,
                    ALL_PERMISSIONS,
                    "group",
                    instance.owner_group,
                    instance,
                    True,
                    True,
                )
            )

    return users_perms
def by_resource_id(cls, resource_id, db_session=None):
    """
    fetch the resource by id

    :param resource_id:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    return (
        db_session.query(cls.model)
        .filter(cls.model.resource_id == int(resource_id))
        .first()
    )
def perm_by_group_and_perm_name(
    cls, resource_id, group_id, perm_name, db_session=None
):
    """
    fetch permissions by group and permission name

    :param resource_id:
    :param group_id:
    :param perm_name:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    GroupResourcePermission = cls.models_proxy.GroupResourcePermission
    query = db_session.query(GroupResourcePermission)
    query = query.filter(GroupResourcePermission.group_id == group_id)
    query = query.filter(GroupResourcePermission.perm_name == perm_name)
    query = query.filter(GroupResourcePermission.resource_id == resource_id)
    return query.first()
def groups_for_perm(
    cls,
    instance,
    perm_name,
    group_ids=None,
    limit_group_permissions=False,
    db_session=None,
):
    """
    return PermissionTuples for groups that have given
    permission for the resource, perm_name is __any_permission__ then
    users with any permission will be listed

    :param instance:
    :param perm_name:
    :param group_ids: limits the permissions to specific group ids
    :param limit_group_permissions: should be used if we do not want to have
    user objects returned for group permissions, this might cause performance
    issues for big groups
    :param db_session:
    :return:
    """  # noqa
    db_session = get_db_session(db_session, instance)
    group_perms = resource_permissions_for_users(
        cls.models_proxy,
        [perm_name],
        [instance.resource_id],
        group_ids=group_ids,
        limit_group_permissions=limit_group_permissions,
        skip_user_perms=True,
        db_session=db_session,
    )
    # members of the owning group implicitly hold every permission
    if instance.owner_group_id:
        group_perms.extend(
            PermissionTuple(
                member,
                ALL_PERMISSIONS,
                "group",
                instance.owner_group,
                instance,
                True,
                True,
            )
            for member in instance.owner_group.users
        )
    return group_perms
def lock_resource_for_update(cls, resource_id, db_session):
    """
    Selects resource for update - locking access for other transactions

    :param resource_id:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    query = (
        db_session.query(cls.model)
        .filter(cls.model.resource_id == resource_id)
        .with_for_update()
    )
    return query.first()
def get(cls, user_id, db_session=None):
    """
    Fetch row using primary key - reuses an object already present in
    the session if available

    :param user_id:
    :param db_session:
    :return:
    """
    session = get_db_session(db_session)
    return session.query(cls.model).get(user_id)
def permissions(cls, instance, db_session=None):
    """
    returns all non-resource permissions based on what groups user
    belongs and directly set ones for this user

    :param instance: user to gather permissions for
    :param db_session:
    :return: list of PermissionTuple
    """
    db_session = get_db_session(db_session, instance)
    # permissions inherited from the user's groups
    query = db_session.query(
        cls.models_proxy.GroupPermission.group_id.label("owner_id"),
        cls.models_proxy.GroupPermission.perm_name.label("perm_name"),
        sa.literal("group").label("type"),
    )
    query = query.filter(
        cls.models_proxy.GroupPermission.group_id
        == cls.models_proxy.UserGroup.group_id
    )
    query = query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    query = query.filter(cls.models_proxy.User.id == instance.id)

    # permissions set directly on the user
    query2 = db_session.query(
        cls.models_proxy.UserPermission.user_id.label("owner_id"),
        cls.models_proxy.UserPermission.perm_name.label("perm_name"),
        sa.literal("user").label("type"),
    )
    query2 = query2.filter(cls.models_proxy.UserPermission.user_id == instance.id)
    # single result set of (owner_id, perm_name, type) rows
    query = query.union(query2)
    groups_dict = dict([(g.id, g) for g in instance.groups])
    return [
        PermissionTuple(
            instance,
            row.perm_name,
            row.type,
            groups_dict.get(row.owner_id) if row.type == "group" else None,
            None,
            False,
            True,
        )
        for row in query
    ]
def resources_with_perms(
    cls, instance, perms, resource_ids=None, resource_types=None, db_session=None
):
    """
    returns all resources that user has perms for
    (note that at least one perm needs to be met)

    :param instance: user to check
    :param perms: iterable of permission names (any match qualifies)
    :param resource_ids: restricts the search to specific resources
    :param resource_types:
    :param db_session:
    :return: query of Resource rows ordered by resource_name
    """
    # owned entities have ALL permissions so we return those resources too
    # even without explict perms set
    # TODO: implement admin superrule perm - maybe return all apps
    db_session = get_db_session(db_session, instance)
    query = db_session.query(cls.models_proxy.Resource).distinct()
    group_ids = [gr.id for gr in instance.groups]
    # if user has some groups lets try to join based on their permissions
    if group_ids:
        join_conditions = (
            cls.models_proxy.GroupResourcePermission.group_id.in_(group_ids),
            cls.models_proxy.Resource.resource_id
            == cls.models_proxy.GroupResourcePermission.resource_id,
            cls.models_proxy.GroupResourcePermission.perm_name.in_(perms),
        )
        query = query.outerjoin(
            (cls.models_proxy.GroupResourcePermission, sa.and_(*join_conditions))
        )
        # ensure outerjoin permissions are correct -
        # dont add empty rows from join
        # conditions are - join ON possible group permissions
        # OR owning group/user
        query = query.filter(
            sa.or_(
                cls.models_proxy.Resource.owner_user_id == instance.id,
                cls.models_proxy.Resource.owner_group_id.in_(group_ids),
                cls.models_proxy.GroupResourcePermission.perm_name != None,
            )  # noqa
        )
    else:
        # filter just by username
        query = query.filter(cls.models_proxy.Resource.owner_user_id == instance.id)
    # lets try by custom user permissions for resource
    query2 = db_session.query(cls.models_proxy.Resource).distinct()
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.user_id == instance.id
    )
    query2 = query2.filter(
        cls.models_proxy.Resource.resource_id
        == cls.models_proxy.UserResourcePermission.resource_id
    )
    query2 = query2.filter(
        cls.models_proxy.UserResourcePermission.perm_name.in_(perms)
    )
    # optional narrowing applied to both branches of the union
    if resource_ids:
        query = query.filter(
            cls.models_proxy.Resource.resource_id.in_(resource_ids)
        )
        query2 = query2.filter(
            cls.models_proxy.Resource.resource_id.in_(resource_ids)
        )

    if resource_types:
        query = query.filter(
            cls.models_proxy.Resource.resource_type.in_(resource_types)
        )
        query2 = query2.filter(
            cls.models_proxy.Resource.resource_type.in_(resource_types)
        )
    query = query.union(query2)
    query = query.order_by(cls.models_proxy.Resource.resource_name)
    return query
def groups_with_resources(cls, instance):
    """
    Returns a list of groups users belongs to with eager loaded
    resources owned by those groups

    :param instance:
    :return:
    """
    eager_resources = sa.orm.eagerload(cls.models_proxy.Group.resources)
    return instance.groups_dynamic.options(eager_resources)
def resources_with_possible_perms(
    cls, instance, resource_ids=None, resource_types=None, db_session=None
):
    """
    returns list of permissions and resources for this user

    :param instance:
    :param resource_ids: restricts the search to specific resources
    :param resource_types: restricts the search to specific resource types
    :param db_session:
    :return:
    """
    perms = resource_permissions_for_users(
        cls.models_proxy,
        ANY_PERMISSION,
        resource_ids=resource_ids,
        resource_types=resource_types,
        user_ids=[instance.id],
        db_session=db_session,
    )
    # resources owned directly by the user carry every permission
    perms.extend(
        PermissionTuple(instance, ALL_PERMISSIONS, "user", None, resource, True, True)
        for resource in instance.resources
    )
    # resources owned by any of the user's groups carry every permission
    for group in cls.groups_with_resources(instance):
        perms.extend(
            PermissionTuple(
                instance, ALL_PERMISSIONS, "group", group, resource, True, True
            )
            for resource in group.resources
        )
    return perms
def gravatar_url(cls, instance, default="mm", **kwargs):
    """
    returns user gravatar url

    :param instance:
    :param default:
    :param kwargs:
    :return:
    """
    # gravatar identifies users by the md5 of their lowercased email
    email_hash = hashlib.md5(instance.email.encode("utf8").lower()).hexdigest()
    kwargs.setdefault("d", default)
    params = "&".join(
        six.moves.urllib.parse.urlencode({key: value})
        for key, value in kwargs.items()
    )
    return "https://secure.gravatar.com/avatar/{}?{}".format(email_hash, params)
def set_password(cls, instance, raw_password):
    """
    sets new password on a user using password manager

    :param instance:
    :param raw_password:
    :return:
    """
    # support API for both passlib 1.x and 2.x:
    # 2.x exposes ``hash``, 1.x only ``encrypt``
    hash_callable = getattr(
        instance.passwordmanager, "hash", instance.passwordmanager.encrypt
    )
    hashed = hash_callable(raw_password)
    instance.user_password = hashed.decode("utf8") if six.PY2 else hashed
    cls.regenerate_security_code(instance)
def check_password(cls, instance, raw_password, enable_hash_migration=True):
    """
    checks string with users password hash using password manager

    :param instance:
    :param raw_password:
    :param enable_hash_migration: if legacy hashes should be migrated
    :return:
    """
    verified, replacement_hash = instance.passwordmanager.verify_and_update(
        raw_password, instance.user_password
    )
    if replacement_hash and enable_hash_migration:
        # migrate the stored legacy hash to the current scheme
        if six.PY2:
            replacement_hash = replacement_hash.decode("utf8")
        instance.user_password = replacement_hash
    return verified
def by_id(cls, user_id, db_session=None):
    """
    fetch user by user id

    :param user_id:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    return (
        db_session.query(cls.model)
        .filter(cls.model.id == user_id)
        .options(sa.orm.eagerload("groups"))
        .first()
    )
def by_user_name_and_security_code(cls, user_name, security_code, db_session=None):
    """
    fetch user objects by user name and security code

    :param user_name:
    :param security_code:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # user names are matched case-insensitively
    normalized = (user_name or "").lower()
    query = db_session.query(cls.model)
    query = query.filter(sa.func.lower(cls.model.user_name) == normalized)
    query = query.filter(cls.model.security_code == security_code)
    return query.first()
def by_user_names(cls, user_names, db_session=None):
    """
    fetch user objects by user names

    :param user_names:
    :param db_session:
    :return:
    """
    # user names are matched case-insensitively
    normalized = [(name or "").lower() for name in user_names]
    db_session = get_db_session(db_session)
    return db_session.query(cls.model).filter(
        sa.func.lower(cls.model.user_name).in_(normalized)
    )
def user_names_like(cls, user_name, db_session=None):
    """
    fetch users with similar names using LIKE clause

    :param user_name:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # case-insensitive LIKE match on the user name
    pattern = (user_name or "").lower()
    return (
        db_session.query(cls.model)
        .filter(sa.func.lower(cls.model.user_name).like(pattern))
        .order_by(cls.model.user_name)
    )
def by_email(cls, email, db_session=None):
    """
    fetch user object by email

    :param email:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # emails are matched case-insensitively
    normalized = (email or "").lower()
    return (
        db_session.query(cls.model)
        .filter(sa.func.lower(cls.model.email) == normalized)
        .options(sa.orm.eagerload("groups"))
        .first()
    )
def users_for_perms(cls, perm_names, db_session=None):
    """
    return users that have one of given permissions

    :param perm_names:
    :param db_session:
    :return:
    """
    db_session = get_db_session(db_session)
    # users holding one of the permissions through group membership
    group_query = db_session.query(cls.model)
    group_query = group_query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserGroup.user_id
    )
    group_query = group_query.filter(
        cls.models_proxy.UserGroup.group_id
        == cls.models_proxy.GroupPermission.group_id
    )
    group_query = group_query.filter(
        cls.models_proxy.GroupPermission.perm_name.in_(perm_names)
    )
    # users holding one of the permissions directly
    direct_query = db_session.query(cls.model)
    direct_query = direct_query.filter(
        cls.models_proxy.User.id == cls.models_proxy.UserPermission.user_id
    )
    direct_query = direct_query.filter(
        cls.models_proxy.UserPermission.perm_name.in_(perm_names)
    )
    return group_query.union(direct_query).order_by(cls.model.id)
def handle_joined(self, connection, event):
    """
    Store join times for the nicknames already in the channel when we
    first join (op/voice prefixes stripped).
    """
    for raw_name in event.arguments()[-1].split():
        nickname = raw_name.lstrip("@+")
        self.joined[nickname] = datetime.now()
def handle_join(self, connection, event):
    """Store the join time for a nickname when it joins."""
    self.joined[self.get_nickname(event)] = datetime.now()
def handle_quit(self, connection, event):
    """
    Store the quit time for a nickname when it quits, and discard its
    join time.

    Fix: uses ``dict.pop`` with a default instead of ``del`` so a quit
    from a nickname whose join was never recorded (e.g. missed while
    reconnecting) no longer raises KeyError.
    """
    nickname = self.get_nickname(event)
    self.quit[nickname] = datetime.now()
    self.joined.pop(nickname, None)
def timesince(self, when):
    """
    Returns human friendly version of the timespan between now and
    the given datetime, e.g. "1 hour and 5 minutes".

    Fix: uses floor division (``//``). Under Python 3 the original
    true division produced float counts (e.g. "1.5 minutes") and broke
    the remainder bookkeeping for the smaller units.
    """
    # (unit name, unit length in seconds), largest first
    units = (
        ("year", 60 * 60 * 24 * 365),
        ("week", 60 * 60 * 24 * 7),
        ("day", 60 * 60 * 24),
        ("hour", 60 * 60),
        ("minute", 60),
        ("second", 1),
    )
    delta = datetime.now() - when
    total_seconds = delta.days * 60 * 60 * 24 + delta.seconds
    parts = []
    for name, seconds in units:
        value = total_seconds // seconds
        if value > 0:
            total_seconds %= seconds
            s = "" if value == 1 else "s"
            parts.append("%s %s%s" % (value, name, s))
    # "a, b, c" -> "a, b and c"
    return " and ".join(", ".join(parts).rsplit(", ", 1))
def version(self, event):
    """
    Shows version information.
    """
    bot_class = self.__class__
    bot_path = "%s.%s" % (bot_class.__module__, bot_class.__name__)
    return "%s [%s]" % (settings.GNOTTY_VERSION_STRING, bot_path)
def commands(self, event):
    """
    Lists all available commands.
    """
    names = sorted(self.commands_dict().keys())
    return "Available commands: %s" % " ".join(names)
def help(self, event, command_name=None):
    """
    Shows the help message for the bot. Takes an optional command name
    which when given, will show help for that command.
    """
    if command_name is None:
        # Generic usage message when no command was asked about.
        return ("Type !commands for a list of all commands. Type "
                "!help [command] to see help for a specific command.")
    try:
        command = self.commands_dict()[command_name]
    except KeyError:
        return "%s is not a command" % command_name
    argspec = getargspec(command)
    # Skip the implicit (self, event) parameters.
    args = argspec.args[2:]
    defaults = argspec.defaults or []
    # Defaults align with the *last* len(defaults) args, so walk both
    # lists backwards with negative indices to pair them up.
    for i in range(-1, -len(defaults) - 1, -1):
        args[i] = "%s [default: %s]" % (args[i], defaults[i])
    args = ", ".join(args)
    # Collapse the docstring onto one line for the chat message.
    help = getdoc(command).replace("\n", " ")
    return "help for %s: (args: %s) %s" % (command_name, args, help)
def uptime(self, event, nickname=None):
    """
    Shows the amount of time since the given nickname has been in the
    channel. If no nickname is given, I'll use my own.
    """
    if nickname and nickname != self.nickname:
        try:
            duration = self.timesince(self.joined[nickname])
        except KeyError:
            return "%s is not in the channel" % nickname
        if nickname == self.get_nickname(event):
            prefix = "you have"
        else:
            prefix = "%s has" % nickname
        return "%s been here for %s" % (prefix, duration)
    return "I've been here for %s" % self.timesince(self.joined[self.nickname])
def seen(self, event, nickname):
    """
    Shows the amount of time since the given nickname was last seen
    in the channel.
    """
    if nickname in self.joined:
        # Currently present - no "last seen" time applies.
        if nickname == self.get_nickname(event):
            prefix = "you are"
        else:
            prefix = "%s is" % nickname
        return "%s here right now" % prefix
    try:
        last_seen = self.timesince(self.quit[nickname])
    except KeyError:
        return "%s has never been seen" % nickname
    return "%s was last seen %s ago" % (nickname, last_seen)